diff --git a/go.mod b/go.mod index fc3fc6f1..2a8bf4e2 100644 --- a/go.mod +++ b/go.mod @@ -1,45 +1,47 @@ module github.com/DelineaXPM/dsv-k8s-sidecar -go 1.18 +go 1.21 + +toolchain go1.21.6 require ( - github.com/bitfield/script v0.21.4 + github.com/bitfield/script v0.22.0 github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1 github.com/ericchiang/k8s v1.2.0 github.com/golang/mock v1.6.0 - github.com/golang/protobuf v1.2.0 - github.com/gorilla/mux v1.8.0 - github.com/magefile/mage v1.14.0 - github.com/pterm/pterm v0.12.58 - github.com/sheldonhull/magetools v1.0.0 - github.com/sirupsen/logrus v1.2.0 - github.com/stretchr/testify v1.8.2 + github.com/golang/protobuf v1.5.3 + github.com/gorilla/mux v1.8.1 + github.com/magefile/mage v1.15.0 + github.com/pterm/pterm v0.12.74 + github.com/sheldonhull/magetools v1.0.1 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.8.4 google.golang.org/grpc v1.16.0 ) require ( - atomicgo.dev/cursor v0.1.1 // indirect + atomicgo.dev/schedule v0.1.0 // indirect + google.golang.org/protobuf v1.26.0 // indirect + mvdan.cc/sh/v3 v3.6.0 // indirect +) + +require ( + atomicgo.dev/cursor v0.2.0 // indirect atomicgo.dev/keyboard v0.2.9 // indirect - bitbucket.org/creachadair/shell v0.0.7 // indirect github.com/containerd/console v1.0.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect - github.com/dustin/go-humanize v1.0.0 // indirect - github.com/gookit/color v1.5.3 // indirect - github.com/itchyny/gojq v0.12.7 // indirect - github.com/itchyny/timefmt-go v0.1.3 // indirect - github.com/konsorten/go-windows-terminal-sequences v1.0.1 // indirect - github.com/kr/pretty v0.2.1 // indirect - github.com/lithammer/fuzzysearch v1.1.5 // indirect - github.com/logrusorgru/aurora v2.0.3+incompatible // indirect - github.com/mattn/go-runewidth v0.0.14 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/gookit/color v1.5.4 // indirect + github.com/itchyny/gojq v0.12.12 // indirect + github.com/itchyny/timefmt-go v0.1.5 // indirect + github.com/lithammer/fuzzysearch v1.1.8 // indirect + github.com/mattn/go-runewidth v0.0.15 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rivo/uniseg v0.4.4 // indirect github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect - github.com/ztrue/tracerr v0.3.0 // indirect - golang.org/x/crypto v0.17.0 // indirect - golang.org/x/mod v0.8.0 // indirect + github.com/ztrue/tracerr v0.4.0 // indirect + golang.org/x/mod v0.10.0 // indirect golang.org/x/net v0.17.0 // indirect; indirect // indirect - golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 // indirect golang.org/x/sys v0.15.0 // indirect golang.org/x/term v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect diff --git a/go.sum b/go.sum index a32ad8dd..af521ab7 100644 --- a/go.sum +++ b/go.sum @@ -1,10 +1,11 @@ atomicgo.dev/assert v0.0.2 h1:FiKeMiZSgRrZsPo9qn/7vmr7mCsh5SZyXY4YGYiYwrg= -atomicgo.dev/cursor v0.1.1 h1:0t9sxQomCTRh5ug+hAMCs59x/UmC9QL6Ci5uosINKD4= -atomicgo.dev/cursor v0.1.1/go.mod h1:Lr4ZJB3U7DfPPOkbH7/6TOtJ4vFGHlgj1nc+n900IpU= +atomicgo.dev/assert v0.0.2/go.mod h1:ut4NcI3QDdJtlmAxQULOmA13Gz6e2DWbSAS8RUOmNYQ= +atomicgo.dev/cursor v0.2.0 h1:H6XN5alUJ52FZZUkI7AlJbUc1aW38GWZalpYRPpoPOw= +atomicgo.dev/cursor v0.2.0/go.mod h1:Lr4ZJB3U7DfPPOkbH7/6TOtJ4vFGHlgj1nc+n900IpU= atomicgo.dev/keyboard v0.2.9 h1:tOsIid3nlPLZ3lwgG8KZMp/SFmr7P0ssEN5JUsm78K8= atomicgo.dev/keyboard v0.2.9/go.mod h1:BC4w9g00XkxH/f1HXhW2sXmJFOCWbKn9xrOunSFtExQ= -bitbucket.org/creachadair/shell v0.0.7 
h1:Z96pB6DkSb7F3Y3BBnJeOZH2gazyMTWlvecSD4vDqfk= -bitbucket.org/creachadair/shell v0.0.7/go.mod h1:oqtXSSvSYr4624lnnabXHaBsYW6RD80caLi2b3hJk0U= +atomicgo.dev/schedule v0.1.0 h1:nTthAbhZS5YZmgYbb2+DH8uQIZcTlIrd4eYr3UQxEjs= +atomicgo.dev/schedule v0.1.0/go.mod h1:xeUa3oAkiuHYh8bKiQBRojqAMq3PXXbJujjb0hw8pEU= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= github.com/MarvinJWendt/testza v0.1.0/go.mod h1:7AxNvlfeHP7Z/hDQ5JtE3OKYT3XFUeLCDE2DQninSqs= github.com/MarvinJWendt/testza v0.2.1/go.mod h1:God7bhG8n6uQxwdScay+gjm9/LnO4D3kkcZX4hv9Rp8= @@ -14,69 +15,76 @@ github.com/MarvinJWendt/testza v0.2.12/go.mod h1:JOIegYyV7rX+7VZ9r77L/eH6CfJHHzX github.com/MarvinJWendt/testza v0.3.0/go.mod h1:eFcL4I0idjtIx8P9C6KkAuLgATNKpX4/2oUqKc6bF2c= github.com/MarvinJWendt/testza v0.4.2/go.mod h1:mSdhXiKH8sg/gQehJ63bINcCKp7RtYewEjXsvsVUPbE= github.com/MarvinJWendt/testza v0.5.2 h1:53KDo64C1z/h/d/stCYCPY69bt/OSwjq5KpFNwi+zB4= +github.com/MarvinJWendt/testza v0.5.2/go.mod h1:xu53QFE5sCdjtMCKk8YMQ2MnymimEctc4n3EjyIYvEY= github.com/atomicgo/cursor v0.0.1/go.mod h1:cBON2QmmrysudxNBFthvMtN32r3jxVRIvzkUiF/RuIk= -github.com/bitfield/script v0.21.4 h1:XPMD/ti7pa9KW1aPMq7Hfh+mVznQdlqxkbiZSM2lnbE= -github.com/bitfield/script v0.21.4/go.mod h1:l3AZPVAtKQrL03bwh7nlNTUtgrgSWurpJSbtqspYrOA= +github.com/bitfield/script v0.22.0 h1:LA7QHuEsXMPD52YLtxWrlqCCy+9FOpzNYfsRHC5Gsrc= +github.com/bitfield/script v0.22.0/go.mod h1:ms4w+9B8f2/W0mbsgWDVTtl7K94bYuZc3AunnJC4Ebs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw= github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1 h1:CaO/zOnF8VvUfEbhRatPcwKVWamvbYd8tQGRWacE9kU= github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1/go.mod h1:+hnT3ywWDTAFrW5aE+u2Sa/wT555ZqwoCS+pk3p6ry4= -github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/ericchiang/k8s v1.2.0 h1:vxrMwEzY43oxu8aZyD/7b1s8tsBM+xoUoxjWECWFbPI= github.com/ericchiang/k8s v1.2.0/go.mod h1:/OmBgSq2cd9IANnsGHGlEz27nwMZV2YxlpXuQtU3Bz4= +github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.4/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.6.0 
h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= -github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o= -github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/renameio/v2 v2.0.0/go.mod h1:BtmJXm5YlszgC+TD4HOEEUFgkJP3nLxehU6hfe7jRt4= github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo= -github.com/gookit/color v1.5.3 h1:twfIhZs4QLCtimkP7MOxlF3A0U/5cDPseRT9M/+2SCE= -github.com/gookit/color v1.5.3/go.mod h1:NUzwzeehUfl7GIb36pqId+UGmRfQcU/WiiyTTeNjHtE= -github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= -github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/itchyny/gojq v0.12.7 h1:hYPTpeWfrJ1OT+2j6cvBScbhl0TkdwGM4bc66onUSOQ= -github.com/itchyny/gojq v0.12.7/go.mod h1:ZdvNHVlzPgUf8pgjnuDTmGfHA/21KoutQUJ3An/xNuw= -github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= -github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= +github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0= +github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w= +github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= +github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= +github.com/itchyny/gojq v0.12.12 h1:x+xGI9BXqKoJQZkr95ibpe3cdrTbY8D9lonrK433rcA= +github.com/itchyny/gojq v0.12.12/go.mod h1:j+3sVkjxwd7A7Z5jrbKibgOLn0ZfLWkV+Awxr/pyzJE= +github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE= +github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.10/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c= github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU= -github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/klauspost/cpuid/v2 v2.2.3/go.mod 
h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/lithammer/fuzzysearch v1.1.5 h1:Ag7aKU08wp0R9QCfF4GoGST9HbmAIeLP7xwMrOBEp1c= -github.com/lithammer/fuzzysearch v1.1.5/go.mod h1:1R1LRNk7yKid1BaQkmuLQaHruxcC4HmAH30Dh61Ih1Q= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8= -github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= -github.com/magefile/mage v1.14.0 h1:6QDX3g6z1YvJ4olPhT1wksUcSa/V0a1B+pJb73fBjyo= -github.com/magefile/mage v1.14.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= -github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lithammer/fuzzysearch v1.1.8 h1:/HIuJnjHuXS8bKaiTMeeDlW2/AyIWk2brx1V8LFgLN4= +github.com/lithammer/fuzzysearch v1.1.8/go.mod h1:IdqeyBClc3FFqSzYq/MXESsS4S0FsZ5ajtkr5xPLts4= +github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg= +github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= +github.com/matryer/is v1.4.1 h1:55ehd8zaGABKLXQUe2awZ99BD/PTc2ls+KV/dXphgEQ= +github.com/matryer/is v1.4.1/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU= github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= +github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pterm/pterm v0.12.27/go.mod h1:PhQ89w4i95rhgE+xedAoqous6K9X+r6aSOI2eFF7DZI= @@ -86,59 +94,60 @@ github.com/pterm/pterm v0.12.31/go.mod h1:32ZAWZVXD7ZfG0s8qqHXePte42kdz8ECtRyEej github.com/pterm/pterm v0.12.33/go.mod h1:x+h2uL+n7CP/rel9+bImHD5lF3nM9vJj80k9ybiiTTE= github.com/pterm/pterm v0.12.36/go.mod h1:NjiL09hFhT/vWjQHSj1athJpx6H8cjpHXNAK5bUw8T8= github.com/pterm/pterm v0.12.40/go.mod 
h1:ffwPLwlbXxP+rxT0GsgDTzS3y3rmpAO1NMjUkGTYf8s= -github.com/pterm/pterm v0.12.58 h1:MEImvkbvty8JvoJH64bJ+CvoCkcuRw2iBIJvRwAEgHI= -github.com/pterm/pterm v0.12.58/go.mod h1:Ro9CV954hiaxt3mcpDx4a8XF5EmRDlIIpPdlfCKF9fE= +github.com/pterm/pterm v0.12.74 h1:fPsds9KisCyJh4NyY6bv8QJt3FLHceb5DxI6W0An9cc= +github.com/pterm/pterm v0.12.74/go.mod h1:+M33aZWQVpmLmLbvjykyGZ4gAfeebznRo8JMbabaxQU= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= +github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= +github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/sheldonhull/magetools v1.0.0 h1:IkGv46ZShJvVMfzkgdFWW6VXigoKkzlVSN3Ir1OU8ZU= -github.com/sheldonhull/magetools v1.0.0/go.mod h1:2NgMhFb1nyBhCiP4lljJ54osHHChyxyi4FBO/dWGA28= -github.com/sirupsen/logrus v1.2.0 h1:juTguoYk5qI21pwyTXY3B3Y5cOTH3ZUyZCg1v/mihuo= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sheldonhull/magetools v1.0.1 h1:EzTRk0JUNl2MijssyrA2Cx+J/7SyjcTgXQmsMccDpaI= +github.com/sheldonhull/magetools v1.0.1/go.mod h1:K5W5pCBkaBlDqnheO2mSWZ8s9t8IOsKehxvj70g8kpg= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no= github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/ztrue/tracerr v0.3.0 h1:lDi6EgEYhPYPnKcjsYzmWw4EkFEoA/gfe+I9Y5f+h6Y= 
-github.com/ztrue/tracerr v0.3.0/go.mod h1:qEalzze4VN9O8tnhBXScfCrmoJo10o8TN5ciKjm6Mww= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/ztrue/tracerr v0.4.0 h1:vT5PFxwIGs7rCg9ZgJ/y0NmOpJkPCPFK8x0vVIYzd04= +github.com/ztrue/tracerr v0.4.0/go.mod h1:PaFfYlas0DfmXNpo7Eay4MFhZUONqvXM+T2HyGPpngk= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561 h1:MDc5xs78ZrZr3HMQugiXOAkSZtfTpbJLDr/lwfgO53E= +golang.org/x/exp v0.0.0-20220909182711-5c715a9e8561/go.mod h1:cyybsKvd6eL0RnXn6p/Grxp8F5bW7iYuBgsNCOHpMYE= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.8.0 h1:LUYupSeNrTNCGzR/hVBk2NHZO4hXcVaW1k4Qx7rjPx8= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.10.0 h1:lFO9qtOdlre5W1jxS3r/4szv2/6iXxScdzjoBMXNhYk= +golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -146,27 +155,37 @@ golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211013075003-97ac67df715c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -177,6 +196,9 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/grpc v1.16.0 h1:dz5IJGuC2BB7qXR5AyHNwAUBhZscK2xVez7mznh72sY= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -187,3 +209,6 @@ gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +mvdan.cc/editorconfig v0.2.0/go.mod h1:lvnnD3BNdBYkhq+B4uBuFFKatfp02eB6HixDvEz91C0= +mvdan.cc/sh/v3 v3.6.0 h1:gtva4EXJ0dFNvl5bHjcUEvws+KRcDslT8VKheTYkbGU= +mvdan.cc/sh/v3 v3.6.0/go.mod h1:U4mhtBLZ32iWhif5/lD+ygy1zrgaQhUu+XFy7C8+TTA= diff --git a/vendor/atomicgo.dev/cursor/.golangci.yml b/vendor/atomicgo.dev/cursor/.golangci.yml index d18a485d..796ca35c 100644 --- a/vendor/atomicgo.dev/cursor/.golangci.yml +++ b/vendor/atomicgo.dev/cursor/.golangci.yml @@ -17,55 +17,83 @@ linters-settings: - ptrToRefParam - paramTypeCombine - unnamedResult - misspell: - locale: US linters: disable-all: true enable: + # default linters - errcheck - gosimple - govet - ineffassign - staticcheck + - typecheck + - unused + # additional linters + - asasalint - asciicheck + - bidichk - bodyclose + - containedctx + - contextcheck + - decorder - dupl - durationcheck + - errchkjson + - errname - errorlint - exhaustive - - gci - - gocognit + - exhaustruct + - exportloopref + - forcetypeassert + - gocheckcompilerdirectives - gocritic - godot - godox - goerr113 - gofmt - - goimports - goprintffuncname - - misspell + - gosec + - gosmopolitan + - importas + - ireturn + - nakedret + - nestif - nilerr - - nlreturn - - noctx + - nilnil - prealloc - predeclared + - revive + - rowserrcheck + - tagalign + - tenv - thelper + - tparallel - unconvert - unparam + - usestdlibvars - wastedassign + - whitespace - wrapcheck + - wsl + - gocyclo + - misspell issues: - # Excluding configuration per-path, per-linter, per-text and per-source + include: + - EXC0012 + - EXC0014 exclude-rules: - path: _test\.go linters: + 
- gocyclo - errcheck - dupl + - gosec - gocritic - - wrapcheck - - goerr113 - # https://github.com/go-critic/go-critic/issues/926 - linters: - gocritic text: "unnecessaryDefer:" + - linters: + - gocritic + text: "preferDecodeRune:" service: - golangci-lint-version: 1.39.x # use the fixed version to not introduce new linters unexpectedly + golangci-lint-version: 1.53.x diff --git a/vendor/atomicgo.dev/cursor/README.md b/vendor/atomicgo.dev/cursor/README.md index c9d6d60d..0f799745 100644 --- a/vendor/atomicgo.dev/cursor/README.md +++ b/vendor/atomicgo.dev/cursor/README.md @@ -1,13 +1,14 @@

AtomicGo | cursor

[README header badges and links: the HTML markup is not reproduced here — roughly, these hunks refresh the Tests badge, add Downloads and Go report badges, and remove the Issues badge, the "Get The Module" link, and the decorative separator rules.]

@@ -61,214 +55,542 @@
-## Description + -Package cursor contains cross-platform methods to move the terminal cursor in -different directions. This package can be used to create interactive CLI tools -and games, live charts, algorithm visualizations and other updatable output of -any kind. + -Works niceley with https://github.com/atomicgo/keyboard +# cursor -Special thanks to github.com/k0kubun/go-ansi which this project is based on. +```go +import "atomicgo.dev/cursor" +``` +Package cursor contains cross\-platform methods to move the terminal cursor in different directions. This package can be used to create interactive CLI tools and games, live charts, algorithm visualizations and other updatable output of any kind. -## Usage +Works niceley with https://github.com/atomicgo/keyboard -#### func Bottom +Special thanks to github.com/k0kubun/go\-ansi which this project is based on. + +## Index + +- [func Bottom\(\)](<#Bottom>) +- [func Clear\(\)](<#Clear>) +- [func ClearLine\(\)](<#ClearLine>) +- [func ClearLinesDown\(n int\)](<#ClearLinesDown>) +- [func ClearLinesUp\(n int\)](<#ClearLinesUp>) +- [func Down\(n int\)](<#Down>) +- [func DownAndClear\(n int\)](<#DownAndClear>) +- [func Hide\(\)](<#Hide>) +- [func HorizontalAbsolute\(n int\)](<#HorizontalAbsolute>) +- [func Left\(n int\)](<#Left>) +- [func Move\(x, y int\)](<#Move>) +- [func Right\(n int\)](<#Right>) +- [func SetTarget\(w Writer\)](<#SetTarget>) +- [func Show\(\)](<#Show>) +- [func StartOfLine\(\)](<#StartOfLine>) +- [func StartOfLineDown\(n int\)](<#StartOfLineDown>) +- [func StartOfLineUp\(n int\)](<#StartOfLineUp>) +- [func TestCustomIOWriter\(t \*testing.T\)](<#TestCustomIOWriter>) +- [func Up\(n int\)](<#Up>) +- [func UpAndClear\(n int\)](<#UpAndClear>) +- [type Area](<#Area>) + - [func NewArea\(\) Area](<#NewArea>) + - [func \(area \*Area\) Bottom\(\)](<#Area.Bottom>) + - [func \(area \*Area\) Clear\(\)](<#Area.Clear>) + - [func \(area \*Area\) ClearLinesDown\(n int\)](<#Area.ClearLinesDown>) + - [func \(area \*Area\) ClearLinesUp\(n int\)](<#Area.ClearLinesUp>) + - [func \(area \*Area\) Down\(n int\)](<#Area.Down>) + - [func \(area \*Area\) DownAndClear\(n int\)](<#Area.DownAndClear>) + - [func \(area \*Area\) Move\(x, y int\)](<#Area.Move>) + - [func \(area \*Area\) StartOfLine\(\)](<#Area.StartOfLine>) + - [func \(area \*Area\) StartOfLineDown\(n int\)](<#Area.StartOfLineDown>) + - [func \(area \*Area\) StartOfLineUp\(n int\)](<#Area.StartOfLineUp>) + - [func \(area \*Area\) Top\(\)](<#Area.Top>) + - [func \(area \*Area\) Up\(n int\)](<#Area.Up>) + - [func \(area \*Area\) UpAndClear\(n int\)](<#Area.UpAndClear>) + - [func \(area \*Area\) Update\(content string\)](<#Area.Update>) + - [func \(area Area\) WithWriter\(writer Writer\) Area](<#Area.WithWriter>) +- [type Cursor](<#Cursor>) + - [func NewCursor\(\) \*Cursor](<#NewCursor>) + - [func \(c \*Cursor\) Clear\(\)](<#Cursor.Clear>) + - [func \(c \*Cursor\) ClearLine\(\)](<#Cursor.ClearLine>) + - [func \(c \*Cursor\) Down\(n int\)](<#Cursor.Down>) + - [func \(c \*Cursor\) Hide\(\)](<#Cursor.Hide>) + - [func \(c \*Cursor\) HorizontalAbsolute\(n int\)](<#Cursor.HorizontalAbsolute>) + - [func \(c \*Cursor\) Left\(n int\)](<#Cursor.Left>) + - [func \(c \*Cursor\) Right\(n int\)](<#Cursor.Right>) + - [func \(c \*Cursor\) Show\(\)](<#Cursor.Show>) + - [func \(c \*Cursor\) Up\(n int\)](<#Cursor.Up>) + - [func \(c \*Cursor\) WithWriter\(w Writer\) \*Cursor](<#Cursor.WithWriter>) +- [type Writer](<#Writer>) + + + +## func [Bottom]() ```go func Bottom() ``` -Bottom moves the cursor to the bottom of the 
terminal. This is done by -calculating how many lines were moved by Up and Down. -#### func ClearLine +Bottom moves the cursor to the bottom of the terminal. This is done by calculating how many lines were moved by Up and Down. + + +## func [Clear]() + +```go +func Clear() +``` + +Clear clears the current position and moves the cursor to the left. + + +## func [ClearLine]() ```go func ClearLine() ``` + ClearLine clears the current line and moves the cursor to it's start position. -#### func ClearLinesDown + +## func [ClearLinesDown]() ```go func ClearLinesDown(n int) ``` -ClearLinesDown clears n lines downwards from the current position and moves the -cursor. -#### func ClearLinesUp +ClearLinesDown clears n lines downwards from the current position and moves the cursor. + + +## func [ClearLinesUp]() ```go func ClearLinesUp(n int) ``` -ClearLinesUp clears n lines upwards from the current position and moves the -cursor. -#### func Down +ClearLinesUp clears n lines upwards from the current position and moves the cursor. + + +## func [Down]() ```go func Down(n int) ``` + Down moves the cursor n lines down relative to the current position. -#### func DownAndClear + +## func [DownAndClear]() ```go func DownAndClear(n int) ``` + DownAndClear moves the cursor down by n lines, then clears the line. -#### func Hide + +## func [Hide]() ```go func Hide() ``` -Hide the cursor. Don't forget to show the cursor at least at the end of your -application with Show. Otherwise the user might have a terminal with a -permanently hidden cursor, until he reopens the terminal. -#### func HorizontalAbsolute +Hide the cursor. Don't forget to show the cursor at least at the end of your application with Show. Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. + + +## func [HorizontalAbsolute]() ```go func HorizontalAbsolute(n int) ``` -HorizontalAbsolute moves the cursor to n horizontally. The position n is -absolute to the start of the line. -#### func Left +HorizontalAbsolute moves the cursor to n horizontally. The position n is absolute to the start of the line. + + +## func [Left]() ```go func Left(n int) ``` + Left moves the cursor n characters to the left relative to the current position. -#### func Move + +## func [Move]() ```go func Move(x, y int) ``` + Move moves the cursor relative by x and y. -#### func Right + +## func [Right]() ```go func Right(n int) ``` -Right moves the cursor n characters to the right relative to the current -position. -#### func SetTarget +Right moves the cursor n characters to the right relative to the current position. + + +## func [SetTarget]() ```go func SetTarget(w Writer) ``` -SetTarget allows for any arbitrary Writer to be used -#### func Show +SetTarget sets to output target of the default curser to the provided cursor.Writer \(wrapping io.Writer\). + + +## func [Show]() ```go func Show() ``` -Show the cursor if it was hidden previously. Don't forget to show the cursor at -least at the end of your application. Otherwise the user might have a terminal -with a permanently hidden cursor, until he reopens the terminal. -#### func StartOfLine +Show the cursor if it was hidden previously. Don't forget to show the cursor at least at the end of your application. Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. + + +## func [StartOfLine]() ```go func StartOfLine() ``` + StartOfLine moves the cursor to the start of the current line. 
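As a minimal sketch of how the package-level functions documented above combine (illustrative only; it assumes a plain ANSI terminal and uses no identifiers beyond those listed in the index):

```go
package main

import (
	"fmt"

	"atomicgo.dev/cursor"
)

func main() {
	cursor.Hide()       // hide the cursor while redrawing
	defer cursor.Show() // always restore it before exiting

	fmt.Println("downloading...")
	fmt.Println("please wait")

	// Jump two lines up, wipe the first status line and overwrite it in place.
	cursor.Up(2)
	cursor.ClearLine()
	fmt.Println("downloading... done")

	// Move back below the original output before the program exits.
	cursor.Down(1)
	cursor.StartOfLine()
}
```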
-#### func StartOfLineDown + +## func [StartOfLineDown]() ```go func StartOfLineDown(n int) ``` -StartOfLineDown moves the cursor down by n lines, then moves to cursor to the -start of the line. -#### func StartOfLineUp +StartOfLineDown moves the cursor down by n lines, then moves to cursor to the start of the line. + + +## func [StartOfLineUp]() ```go func StartOfLineUp(n int) ``` -StartOfLineUp moves the cursor up by n lines, then moves to cursor to the start -of the line. -#### func TestCustomIOWriter +StartOfLineUp moves the cursor up by n lines, then moves to cursor to the start of the line. + + +## func [TestCustomIOWriter]() ```go func TestCustomIOWriter(t *testing.T) ``` -#### func Up +TestCustomIOWriter tests the cursor functions with a custom Writer. + + +## func [Up]() ```go func Up(n int) ``` + Up moves the cursor n lines up relative to the current position. -#### func UpAndClear + +## func [UpAndClear]() ```go func UpAndClear(n int) ``` + UpAndClear moves the cursor up by n lines, then clears the line. -#### type Area + +## type [Area]() + +Area displays content which can be updated on the fly. You can use this to create live output, charts, dropdowns, etc. ```go type Area struct { + // contains filtered or unexported fields } ``` -Area displays content which can be updated on the fly. You can use this to -create live output, charts, dropdowns, etc. - -#### func NewArea + +### func [NewArea]() ```go func NewArea() Area ``` + NewArea returns a new Area. -#### func (*Area) Clear + +### func \(\*Area\) [Bottom]() + +```go +func (area *Area) Bottom() +``` + +Bottom moves the cursor to the bottom of the terminal. This is done by calculating how many lines were moved by Up and Down. + + +### func \(\*Area\) [Clear]() ```go func (area *Area) Clear() ``` + Clear clears the content of the Area. -#### func (*Area) Update + +### func \(\*Area\) [ClearLinesDown]() + +```go +func (area *Area) ClearLinesDown(n int) +``` + +ClearLinesDown clears n lines downwards from the current position and moves the cursor. + + +### func \(\*Area\) [ClearLinesUp]() + +```go +func (area *Area) ClearLinesUp(n int) +``` + +ClearLinesUp clears n lines upwards from the current position and moves the cursor. + + +### func \(\*Area\) [Down]() + +```go +func (area *Area) Down(n int) +``` + +Down moves the cursor of the area down one line. + + +### func \(\*Area\) [DownAndClear]() + +```go +func (area *Area) DownAndClear(n int) +``` + +DownAndClear moves the cursor down by n lines, then clears the line. + + +### func \(\*Area\) [Move]() + +```go +func (area *Area) Move(x, y int) +``` + +Move moves the cursor relative by x and y. + + +### func \(\*Area\) [StartOfLine]() + +```go +func (area *Area) StartOfLine() +``` + +StartOfLine moves the cursor to the start of the current line. + + +### func \(\*Area\) [StartOfLineDown]() + +```go +func (area *Area) StartOfLineDown(n int) +``` + +StartOfLineDown moves the cursor down by n lines, then moves to cursor to the start of the line. + + +### func \(\*Area\) [StartOfLineUp]() + +```go +func (area *Area) StartOfLineUp(n int) +``` + +StartOfLineUp moves the cursor up by n lines, then moves to cursor to the start of the line. + + +### func \(\*Area\) [Top]() + +```go +func (area *Area) Top() +``` + +Top moves the cursor to the top of the area. This is done by calculating how many lines were moved by Up and Down. + + +### func \(\*Area\) [Up]() + +```go +func (area *Area) Up(n int) +``` + +Up moves the cursor of the area up one line. 
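The Area type wraps the same primitives into a block that can be redrawn as a whole; a minimal sketch, assuming only NewArea and Update as documented here:

```go
package main

import (
	"fmt"
	"time"

	"atomicgo.dev/cursor"
)

func main() {
	area := cursor.NewArea()

	for step := 1; step <= 3; step++ {
		// Update overwrites the whole block in place and tracks its height.
		area.Update(fmt.Sprintf("step %d of 3\nworking...", step))
		time.Sleep(500 * time.Millisecond)
	}

	area.Update("all steps finished")
}
```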
+ + +### func \(\*Area\) [UpAndClear]() + +```go +func (area *Area) UpAndClear(n int) +``` + +UpAndClear moves the cursor up by n lines, then clears the line. + + +### func \(\*Area\) [Update]() ```go func (area *Area) Update(content string) ``` -Update overwrites the content of the Area. -#### type Writer +Update overwrites the content of the Area and adjusts its height based on content. + + +### func \(Area\) [WithWriter]() ```go -type Writer interface { - io.Writer - Fd() uintptr +func (area Area) WithWriter(writer Writer) Area +``` + +WithWriter sets the custom writer. + + +## type [Cursor]() + +Cursor displays content which can be updated on the fly. You can use this to create live output, charts, dropdowns, etc. + +```go +type Cursor struct { + // contains filtered or unexported fields } ``` + +### func [NewCursor]() + +```go +func NewCursor() *Cursor +``` + +NewCursor creates a new Cursor instance writing to os.Stdout. + + +### func \(\*Cursor\) [Clear]() + +```go +func (c *Cursor) Clear() +``` + +Clear clears the current position and moves the cursor to the left. + + +### func \(\*Cursor\) [ClearLine]() + +```go +func (c *Cursor) ClearLine() +``` + +ClearLine clears the current line and moves the cursor to it's start position. + + +### func \(\*Cursor\) [Down]() + +```go +func (c *Cursor) Down(n int) +``` + +Down moves the cursor n lines down relative to the current position. + + +### func \(\*Cursor\) [Hide]() + +```go +func (c *Cursor) Hide() +``` + +Hide the cursor. Don't forget to show the cursor at least at the end of your application with Show. Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. + + +### func \(\*Cursor\) [HorizontalAbsolute]() + +```go +func (c *Cursor) HorizontalAbsolute(n int) +``` + +HorizontalAbsolute moves the cursor to n horizontally. The position n is absolute to the start of the line. + + +### func \(\*Cursor\) [Left]() + +```go +func (c *Cursor) Left(n int) +``` + +Left moves the cursor n characters to the left relative to the current position. + + +### func \(\*Cursor\) [Right]() + +```go +func (c *Cursor) Right(n int) +``` + +Right moves the cursor n characters to the right relative to the current position. + + +### func \(\*Cursor\) [Show]() + +```go +func (c *Cursor) Show() +``` + +Show the cursor if it was hidden previously. Don't forget to show the cursor at least at the end of your application. Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. + + +### func \(\*Cursor\) [Up]() + +```go +func (c *Cursor) Up(n int) +``` + +Up moves the cursor n lines up relative to the current position. + + +### func \(\*Cursor\) [WithWriter]() + +```go +func (c *Cursor) WithWriter(w Writer) *Cursor +``` + +WithWriter allows for any arbitrary Writer to be used for cursor movement abstracted. + + +## type [Writer]() + Writer is an expanded io.Writer interface with a file descriptor. +```go +type Writer interface { + io.Writer + Fd() uintptr +} +``` + +Generated by [gomarkdoc]() + + + + --- > [AtomicGo.dev](https://atomicgo.dev)  ·  diff --git a/vendor/atomicgo.dev/cursor/area.go b/vendor/atomicgo.dev/cursor/area.go index e8cd72d5..5b26b5db 100644 --- a/vendor/atomicgo.dev/cursor/area.go +++ b/vendor/atomicgo.dev/cursor/area.go @@ -1,49 +1,164 @@ package cursor import ( - "fmt" - "runtime" + "os" "strings" ) // Area displays content which can be updated on the fly. // You can use this to create live output, charts, dropdowns, etc. 
type Area struct { - height int + height int + writer Writer + cursor *Cursor + cursorPosY int } // NewArea returns a new Area. func NewArea() Area { - return Area{} + return Area{ + height: 0, + writer: os.Stdout, + cursor: cursor, + cursorPosY: 0, + } +} + +// WithWriter sets the custom writer. +func (area Area) WithWriter(writer Writer) Area { + area.writer = writer + area.cursor = area.cursor.WithWriter(writer) + + return area } // Clear clears the content of the Area. func (area *Area) Clear() { - Bottom() + // Initialize writer if not done yet + if area.writer == nil { + area.writer = os.Stdout + } + if area.height > 0 { - ClearLinesUp(area.height) + area.Bottom() + area.ClearLinesUp(area.height) + area.StartOfLine() + } else { + area.StartOfLine() + area.cursor.ClearLine() } } -// Update overwrites the content of the Area. +// Update overwrites the content of the Area and adjusts its height based on content. func (area *Area) Update(content string) { area.Clear() - lines := strings.Split(content, "\n") - - fmt.Println(strings.Repeat("\n", len(lines)-1)) // This appends space if the terminal is at the bottom - Up(len(lines)) + area.writeArea(content) + area.cursorPosY = 0 + area.height = strings.Count(content, "\n") +} - if runtime.GOOS == "windows" { - for _, line := range lines { - fmt.Print(line) - StartOfLineDown(1) +// Up moves the cursor of the area up one line. +func (area *Area) Up(n int) { + if n > 0 { + if area.cursorPosY+n > area.height { + n = area.height - area.cursorPosY } - } else { - for _, line := range lines { - fmt.Println(line) + + area.cursor.Up(n) + area.cursorPosY += n + } +} + +// Down moves the cursor of the area down one line. +func (area *Area) Down(n int) { + if n > 0 { + if area.cursorPosY-n < 0 { + n = area.height - area.cursorPosY } + + area.cursor.Down(n) + area.cursorPosY -= n } - height = 0 +} + +// Bottom moves the cursor to the bottom of the terminal. +// This is done by calculating how many lines were moved by Up and Down. +func (area *Area) Bottom() { + if area.cursorPosY > 0 { + area.Down(area.cursorPosY) + area.cursorPosY = 0 + } +} + +// Top moves the cursor to the top of the area. +// This is done by calculating how many lines were moved by Up and Down. +func (area *Area) Top() { + if area.cursorPosY < area.height { + area.Up(area.height - area.cursorPosY) + area.cursorPosY = area.height + } +} - area.height = len(lines) +// StartOfLine moves the cursor to the start of the current line. +func (area *Area) StartOfLine() { + area.cursor.HorizontalAbsolute(0) +} + +// StartOfLineDown moves the cursor down by n lines, then moves to cursor to the start of the line. +func (area *Area) StartOfLineDown(n int) { + area.Down(n) + area.StartOfLine() +} + +// StartOfLineUp moves the cursor up by n lines, then moves to cursor to the start of the line. +func (area *Area) StartOfLineUp(n int) { + area.Up(n) + area.StartOfLine() +} + +// UpAndClear moves the cursor up by n lines, then clears the line. +func (area *Area) UpAndClear(n int) { + area.Up(n) + area.cursor.ClearLine() +} + +// DownAndClear moves the cursor down by n lines, then clears the line. +func (area *Area) DownAndClear(n int) { + area.Down(n) + area.cursor.ClearLine() +} + +// Move moves the cursor relative by x and y. 
+func (area *Area) Move(x, y int) { + if x > 0 { + area.cursor.Right(x) + } else if x < 0 { + area.cursor.Left(-x) + } + + if y > 0 { + area.Up(y) + } else if y < 0 { + area.Down(-y) + } +} + +// ClearLinesUp clears n lines upwards from the current position and moves the cursor. +func (area *Area) ClearLinesUp(n int) { + area.StartOfLine() + area.cursor.ClearLine() + + for i := 0; i < n; i++ { + area.UpAndClear(1) + } +} + +// ClearLinesDown clears n lines downwards from the current position and moves the cursor. +func (area *Area) ClearLinesDown(n int) { + area.StartOfLine() + area.cursor.ClearLine() + + for i := 0; i < n; i++ { + area.DownAndClear(1) + } } diff --git a/vendor/atomicgo.dev/cursor/area_other.go b/vendor/atomicgo.dev/cursor/area_other.go new file mode 100644 index 00000000..b92390de --- /dev/null +++ b/vendor/atomicgo.dev/cursor/area_other.go @@ -0,0 +1,13 @@ +//go:build !windows +// +build !windows + +package cursor + +import ( + "fmt" +) + +// Update overwrites the content of the Area and adjusts its height based on content. +func (area *Area) writeArea(content string) { + fmt.Fprint(area.writer, content) +} diff --git a/vendor/atomicgo.dev/cursor/area_windows.go b/vendor/atomicgo.dev/cursor/area_windows.go new file mode 100644 index 00000000..de7dd292 --- /dev/null +++ b/vendor/atomicgo.dev/cursor/area_windows.go @@ -0,0 +1,21 @@ +//go:build windows +// +build windows + +package cursor + +import ( + "fmt" +) + +// writeArea is a helper for platform dependant output. +// For Windows newlines '\n' in the content are replaced by '\r\n' +func (area *Area) writeArea(content string) { + last := ' ' + for _, r := range content { + if r == '\n' && last != '\r' { + fmt.Fprint(area.writer, "\r\n") + continue + } + fmt.Fprint(area.writer, string(r)) + } +} diff --git a/vendor/atomicgo.dev/cursor/cursor.go b/vendor/atomicgo.dev/cursor/cursor.go index fb4c010b..89a3efcf 100644 --- a/vendor/atomicgo.dev/cursor/cursor.go +++ b/vendor/atomicgo.dev/cursor/cursor.go @@ -1,69 +1,26 @@ -//go:build !windows -// +build !windows - package cursor import ( - "fmt" "os" ) -var target Writer = os.Stdout - -// SetTarget allows for any arbitrary io.Writer to be used -// for cursor movement (will not work on Windows). -func SetTarget(w Writer) { - target = w +// Cursor displays content which can be updated on the fly. +// You can use this to create live output, charts, dropdowns, etc. +type Cursor struct { + writer Writer } -// Up moves the cursor n lines up relative to the current position. -func Up(n int) { - fmt.Fprintf(target, "\x1b[%dA", n) - height += n +// NewCursor creates a new Cursor instance writing to os.Stdout. +func NewCursor() *Cursor { + return &Cursor{writer: os.Stdout} } -// Down moves the cursor n lines down relative to the current position. -func Down(n int) { - fmt.Fprintf(target, "\x1b[%dB", n) - if height-n <= 0 { - height = 0 - } else { - height -= n +// WithWriter allows for any arbitrary Writer to be used +// for cursor movement abstracted. +func (c *Cursor) WithWriter(w Writer) *Cursor { + if w != nil { + c.writer = w } -} - -// Right moves the cursor n characters to the right relative to the current position. -func Right(n int) { - fmt.Fprintf(target, "\x1b[%dC", n) -} - -// Left moves the cursor n characters to the left relative to the current position. -func Left(n int) { - fmt.Fprintf(target, "\x1b[%dD", n) -} - -// HorizontalAbsolute moves the cursor to n horizontally. -// The position n is absolute to the start of the line. 
-func HorizontalAbsolute(n int) { - n += 1 // Moves the line to the character after n - fmt.Fprintf(target, "\x1b[%dG", n) -} - -// Show the cursor if it was hidden previously. -// Don't forget to show the cursor at least at the end of your application. -// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. -func Show() { - fmt.Fprint(target, "\x1b[?25h") -} - -// Hide the cursor. -// Don't forget to show the cursor at least at the end of your application with Show. -// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. -func Hide() { - fmt.Fprintf(target, "\x1b[?25l") -} -// ClearLine clears the current line and moves the cursor to it's start position. -func ClearLine() { - fmt.Fprintf(target, "\x1b[2K") + return c } diff --git a/vendor/atomicgo.dev/cursor/cursor_other.go b/vendor/atomicgo.dev/cursor/cursor_other.go new file mode 100644 index 00000000..7c185578 --- /dev/null +++ b/vendor/atomicgo.dev/cursor/cursor_other.go @@ -0,0 +1,67 @@ +//go:build !windows +// +build !windows + +package cursor + +import ( + "fmt" +) + +// Up moves the cursor n lines up relative to the current position. +func (c *Cursor) Up(n int) { + if n > 0 { + fmt.Fprintf(c.writer, "\x1b[%dA", n) + } +} + +// Down moves the cursor n lines down relative to the current position. +func (c *Cursor) Down(n int) { + if n > 0 { + fmt.Fprintf(c.writer, "\x1b[%dB", n) + } +} + +// Right moves the cursor n characters to the right relative to the current position. +func (c *Cursor) Right(n int) { + if n > 0 { + fmt.Fprintf(c.writer, "\x1b[%dC", n) + } +} + +// Left moves the cursor n characters to the left relative to the current position. +func (c *Cursor) Left(n int) { + if n > 0 { + fmt.Fprintf(c.writer, "\x1b[%dD", n) + } +} + +// HorizontalAbsolute moves the cursor to n horizontally. +// The position n is absolute to the start of the line. +func (c *Cursor) HorizontalAbsolute(n int) { + n++ // Moves the line to the character after n + fmt.Fprintf(c.writer, "\x1b[%dG", n) +} + +// Show the cursor if it was hidden previously. +// Don't forget to show the cursor at least at the end of your application. +// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. +func (c *Cursor) Show() { + fmt.Fprint(c.writer, "\x1b[?25h") +} + +// Hide the cursor. +// Don't forget to show the cursor at least at the end of your application with Show. +// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. +func (c *Cursor) Hide() { + fmt.Fprintf(c.writer, "\x1b[?25l") +} + +// ClearLine clears the current line and moves the cursor to it's start position. +func (c *Cursor) ClearLine() { + fmt.Fprintf(c.writer, "\x1b[2K") +} + +// Clear clears the current position and moves the cursor to the left. +func (c *Cursor) Clear() { + fmt.Fprintf(c.writer, "\x1b[K") +} diff --git a/vendor/atomicgo.dev/cursor/cursor_test_linux.go b/vendor/atomicgo.dev/cursor/cursor_test_linux.go index 25179b23..6a37f406 100644 --- a/vendor/atomicgo.dev/cursor/cursor_test_linux.go +++ b/vendor/atomicgo.dev/cursor/cursor_test_linux.go @@ -6,75 +6,93 @@ import ( "testing" ) +// TestCustomIOWriter tests the cursor functions with a custom Writer. 
func TestCustomIOWriter(t *testing.T) { tmpFile, err := os.CreateTemp("", "testingTmpFile-") + defer os.Remove(tmpFile.Name()) + if err != nil { log.Fatal(err) } - defer os.Remove(tmpFile.Name()) w := tmpFile SetTarget(w) Up(2) + expected := "\x1b[2A" actual := getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) Down(2) + expected = "\x1b[2B" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) Right(2) + expected = "\x1b[2C" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) Left(2) + expected = "\x1b[2D" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) Hide() + expected = "\x1b[?25l" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) Show() + expected = "\x1b[?25h" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) ClearLine() + expected = "\x1b[2K" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } clearFile(t, w) HorizontalAbsolute(3) + expected = "\x1b[4G" actual = getFileContent(t, w.Name()) + if expected != actual { t.Errorf("wanted: %v, got %v", expected, actual) } @@ -82,6 +100,7 @@ func TestCustomIOWriter(t *testing.T) { func getFileContent(t *testing.T, fileName string) string { t.Helper() + content, err := os.ReadFile(fileName) if err != nil { t.Errorf("failed to read file contents: %s", err) @@ -94,12 +113,14 @@ func getFileContent(t *testing.T, fileName string) string { func clearFile(t *testing.T, file *os.File) { t.Helper() + err := file.Truncate(0) if err != nil { t.Errorf("failed to clear file") return } + _, err = file.Seek(0, 0) if err != nil { t.Errorf("failed to clear file") diff --git a/vendor/atomicgo.dev/cursor/cursor_windows.go b/vendor/atomicgo.dev/cursor/cursor_windows.go index 0a3be0af..ebe2bc59 100644 --- a/vendor/atomicgo.dev/cursor/cursor_windows.go +++ b/vendor/atomicgo.dev/cursor/cursor_windows.go @@ -1,46 +1,35 @@ +//go:build windows +// +build windows + package cursor import ( - "os" "syscall" "unsafe" ) -var target Writer = os.Stdout - -// SetTarget allows for any arbitrary Writer to be used -func SetTarget(w Writer) { - target = w -} - // Up moves the cursor n lines up relative to the current position. -func Up(n int) { - move(0, -n) - height += n +func (c *Cursor) Up(n int) { + c.move(0, -n) } // Down moves the cursor n lines down relative to the current position. -func Down(n int) { - move(0, n) - if height-n <= 0 { - height = 0 - } else { - height -= n - } +func (c *Cursor) Down(n int) { + c.move(0, n) } // Right moves the cursor n characters to the right relative to the current position. -func Right(n int) { - move(n, 0) +func (c *Cursor) Right(n int) { + c.move(n, 0) } // Left moves the cursor n characters to the left relative to the current position. 
-func Left(n int) { - move(-n, 0) +func (c *Cursor) Left(n int) { + c.move(-n, 0) } -func move(x int, y int) { - handle := syscall.Handle(target.Fd()) +func (c *Cursor) move(x int, y int) { + handle := syscall.Handle(c.writer.Fd()) var csbi consoleScreenBufferInfo _, _, _ = procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) @@ -54,8 +43,8 @@ func move(x int, y int) { // HorizontalAbsolute moves the cursor to n horizontally. // The position n is absolute to the start of the line. -func HorizontalAbsolute(n int) { - handle := syscall.Handle(target.Fd()) +func (c *Cursor) HorizontalAbsolute(n int) { + handle := syscall.Handle(c.writer.Fd()) var csbi consoleScreenBufferInfo _, _, _ = procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) @@ -74,8 +63,8 @@ func HorizontalAbsolute(n int) { // Show the cursor if it was hidden previously. // Don't forget to show the cursor at least at the end of your application. // Otherwise the user might have a terminal with a permanently hidden cursor, until he reopens the terminal. -func Show() { - handle := syscall.Handle(target.Fd()) +func (c *Cursor) Show() { + handle := syscall.Handle(c.writer.Fd()) var cci consoleCursorInfo _, _, _ = procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&cci))) @@ -87,8 +76,8 @@ func Show() { // Hide the cursor. // Don't forget to show the cursor at least at the end of your application with Show. // Otherwise the user might have a terminal with a permanently hidden cursor, until he reopens the terminal. -func Hide() { - handle := syscall.Handle(target.Fd()) +func (c *Cursor) Hide() { + handle := syscall.Handle(c.writer.Fd()) var cci consoleCursorInfo _, _, _ = procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&cci))) @@ -97,9 +86,9 @@ func Hide() { _, _, _ = procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&cci))) } -// ClearLine clears the current line and moves the cursor to it's start position. -func ClearLine() { - handle := syscall.Handle(target.Fd()) +// ClearLine clears the current line and moves the cursor to its start position. +func (c *Cursor) ClearLine() { + handle := syscall.Handle(c.writer.Fd()) var csbi consoleScreenBufferInfo _, _, _ = procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) @@ -110,3 +99,20 @@ func ClearLine() { x = csbi.size.x _, _, _ = procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(x), uintptr(*(*int32)(unsafe.Pointer(&cursor))), uintptr(unsafe.Pointer(&w))) } + +// Clear clears the current position and moves the cursor to the left. +func (c *Cursor) Clear() { + handle := syscall.Handle(c.writer.Fd()) + + var csbi consoleScreenBufferInfo + _, _, _ = procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + + var w uint32 + cursor := csbi.cursorPosition + _, _, _ = procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(1), uintptr(*(*int32)(unsafe.Pointer(&cursor))), uintptr(unsafe.Pointer(&w))) + + if cursor.x > 0 { + cursor.x-- + } + _, _, _ = procSetConsoleCursorPosition.Call(uintptr(handle), uintptr(*(*int32)(unsafe.Pointer(&cursor)))) +} diff --git a/vendor/atomicgo.dev/cursor/go.work b/vendor/atomicgo.dev/cursor/go.work new file mode 100644 index 00000000..c71ff3ba --- /dev/null +++ b/vendor/atomicgo.dev/cursor/go.work @@ -0,0 +1,10 @@ +go 1.18 + +use . 
+ +// replace git.neotel.at/go/c5rest => /home/rl/work/c5rest +// replace git.neotel.at/go/c5db => /home/rl/work/c5db + +// replace github.com/pterm/pterm => H:/work/github.com/pterm/pterm + +// replace atomicgo.dev/cursor => H:/github.com/atomicgo.dev/cursor diff --git a/vendor/atomicgo.dev/cursor/utils.go b/vendor/atomicgo.dev/cursor/utils.go index cde36686..4b75f09e 100644 --- a/vendor/atomicgo.dev/cursor/utils.go +++ b/vendor/atomicgo.dev/cursor/utils.go @@ -1,16 +1,92 @@ package cursor -import "io" +import ( + "io" + "os" +) -var height int +// +// Helpers for global cursor handling on os.Stdout +// + +var autoheight int +var cursor = &Cursor{writer: os.Stdout} + +// Writer is an expanded io.Writer interface with a file descriptor. +type Writer interface { + io.Writer + Fd() uintptr +} + +// SetTarget sets to output target of the default curser to the +// provided cursor.Writer (wrapping io.Writer). +func SetTarget(w Writer) { + cursor = cursor.WithWriter(w) +} + +// Up moves the cursor n lines up relative to the current position. +func Up(n int) { + cursor.Up(n) + autoheight += n +} + +// Down moves the cursor n lines down relative to the current position. +func Down(n int) { + cursor.Down(n) + + if autoheight > 0 { + autoheight -= n + } +} + +// Right moves the cursor n characters to the right relative to the current position. +func Right(n int) { + cursor.Right(n) +} + +// Left moves the cursor n characters to the left relative to the current position. +func Left(n int) { + cursor.Left(n) +} + +// HorizontalAbsolute moves the cursor to n horizontally. +// The position n is absolute to the start of the line. +func HorizontalAbsolute(n int) { + cursor.HorizontalAbsolute(n) +} + +// Show the cursor if it was hidden previously. +// Don't forget to show the cursor at least at the end of your application. +// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. +func Show() { + cursor.Show() +} + +// Hide the cursor. +// Don't forget to show the cursor at least at the end of your application with Show. +// Otherwise the user might have a terminal with a permanently hidden cursor, until they reopen the terminal. +func Hide() { + cursor.Hide() +} + +// ClearLine clears the current line and moves the cursor to it's start position. +func ClearLine() { + cursor.ClearLine() +} + +// Clear clears the current position and moves the cursor to the left. +func Clear() { + cursor.Clear() +} // Bottom moves the cursor to the bottom of the terminal. // This is done by calculating how many lines were moved by Up and Down. func Bottom() { - if height > 0 { - Down(height) + if autoheight > 0 { + Down(autoheight) StartOfLine() - height = 0 + + autoheight = 0 } } @@ -73,9 +149,3 @@ func ClearLinesDown(n int) { DownAndClear(1) } } - -// Writer is an expanded io.Writer interface with a file descriptor. 
-type Writer interface { - io.Writer - Fd() uintptr -} diff --git a/vendor/atomicgo.dev/schedule/.gitignore b/vendor/atomicgo.dev/schedule/.gitignore new file mode 100644 index 00000000..7e5f3f45 --- /dev/null +++ b/vendor/atomicgo.dev/schedule/.gitignore @@ -0,0 +1,40 @@ +# Go template + +## Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +## Test binary, built with `go test -c` +*.test + +## Output of the go coverage tool, specifically when used with LiteIDE +*.out + +## Dependency directories (remove the comment below to include it) +vendor/ + +# IDEs + +## IntelliJ +.idea +*.iml +out +gen + +## Visual Studio Code +.vscode +*.code-workspace + +# Operating System Files + +## macOS +### General +.DS_Store + +# Other + +## Experimenting folder +experimenting diff --git a/vendor/atomicgo.dev/schedule/.golangci.yml b/vendor/atomicgo.dev/schedule/.golangci.yml new file mode 100644 index 00000000..796ca35c --- /dev/null +++ b/vendor/atomicgo.dev/schedule/.golangci.yml @@ -0,0 +1,99 @@ +linters-settings: + gocritic: + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + disabled-checks: + - dupImport + - ifElseChain + - octalLiteral + - whyNoLint + - wrapperFunc + - exitAfterDefer + - hugeParam + - ptrToRefParam + - paramTypeCombine + - unnamedResult +linters: + disable-all: true + enable: + # default linters + - errcheck + - gosimple + - govet + - ineffassign + - staticcheck + - typecheck + - unused + # additional linters + - asasalint + - asciicheck + - bidichk + - bodyclose + - containedctx + - contextcheck + - decorder + - dupl + - durationcheck + - errchkjson + - errname + - errorlint + - exhaustive + - exhaustruct + - exportloopref + - forcetypeassert + - gocheckcompilerdirectives + - gocritic + - godot + - godox + - goerr113 + - gofmt + - goprintffuncname + - gosec + - gosmopolitan + - importas + - ireturn + - nakedret + - nestif + - nilerr + - nilnil + - prealloc + - predeclared + - revive + - rowserrcheck + - tagalign + - tenv + - thelper + - tparallel + - unconvert + - unparam + - usestdlibvars + - wastedassign + - whitespace + - wrapcheck + - wsl + - gocyclo + - misspell +issues: + include: + - EXC0012 + - EXC0014 + exclude-rules: + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + - gocritic + - linters: + - gocritic + text: "unnecessaryDefer:" + - linters: + - gocritic + text: "preferDecodeRune:" +service: + golangci-lint-version: 1.53.x diff --git a/vendor/atomicgo.dev/schedule/LICENSE b/vendor/atomicgo.dev/schedule/LICENSE new file mode 100644 index 00000000..b42989ef --- /dev/null +++ b/vendor/atomicgo.dev/schedule/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Marvin Wendt (MarvinJWendt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/atomicgo.dev/schedule/README.md b/vendor/atomicgo.dev/schedule/README.md new file mode 100644 index 00000000..3f7f9a84 --- /dev/null +++ b/vendor/atomicgo.dev/schedule/README.md @@ -0,0 +1,281 @@ +

+# AtomicGo | schedule
+
+[badges: Downloads, Latest Release, Tests, Coverage, Unit test count, License: MIT, Go report]
+
+---
+
+Documentation | Contributing | Code of Conduct
+
+---
+
+    go get atomicgo.dev/schedule
+
+ + + + + +# schedule + +```go +import "atomicgo.dev/schedule" +``` + +Package schedule provides a simple scheduler for Go. + +It can run a function at a given time, in a given duration, or repeatedly at a given interval. + +## Index + +- [type Task](<#Task>) + - [func After\(d time.Duration, task func\(\)\) \*Task](<#After>) + - [func At\(t time.Time, task func\(\)\) \*Task](<#At>) + - [func Every\(interval time.Duration, task func\(\) bool\) \*Task](<#Every>) + - [func \(s \*Task\) ExecutesIn\(\) time.Duration](<#Task.ExecutesIn>) + - [func \(s \*Task\) IsActive\(\) bool](<#Task.IsActive>) + - [func \(s \*Task\) NextExecutionTime\(\) time.Time](<#Task.NextExecutionTime>) + - [func \(s \*Task\) StartedAt\(\) time.Time](<#Task.StartedAt>) + - [func \(s \*Task\) Stop\(\)](<#Task.Stop>) + - [func \(s \*Task\) Wait\(\)](<#Task.Wait>) + + + +## type [Task]() + +Task holds information about the running task and can be used to stop running tasks. + +```go +type Task struct { + // contains filtered or unexported fields +} +``` + + +### func [After]() + +```go +func After(d time.Duration, task func()) *Task +``` + +After executes the task after the given duration. The function is non\-blocking. If you want to wait for the task to be executed, use the Task.Wait method. + +
Example +

+ + + +```go +package main + +import ( + "fmt" + "time" + + "atomicgo.dev/schedule" +) + +func main() { + task := schedule.After(5*time.Second, func() { + fmt.Println("5 seconds are over!") + }) + + fmt.Println("Some stuff happening...") + + task.Wait() +} +``` + +

+
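A task created with `After` can also be cancelled before it fires. A minimal sketch, using only the `Task.Stop` and `Task.IsActive` methods documented below:

```go
package main

import (
	"fmt"
	"time"

	"atomicgo.dev/schedule"
)

func main() {
	// Schedule a task far in the future, then cancel it.
	task := schedule.After(time.Hour, func() {
		fmt.Println("this never prints")
	})

	// Stop closes the task's stop channel, so the scheduled
	// function is never executed.
	task.Stop()

	fmt.Println("active:", task.IsActive()) // false
}
```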
+ + +### func [At]() + +```go +func At(t time.Time, task func()) *Task +``` + +At executes the task at the given time. The function is non\-blocking. If you want to wait for the task to be executed, use the Task.Wait method. + +
Example +

+ + + +```go +package main + +import ( + "fmt" + "time" + + "atomicgo.dev/schedule" +) + +func main() { + task := schedule.At(time.Now().Add(5*time.Second), func() { + fmt.Println("5 seconds are over!") + }) + + fmt.Println("Some stuff happening...") + + task.Wait() +} +``` + +

+
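A minimal sketch of inspecting a pending task with the `Task.NextExecutionTime` and `Task.ExecutesIn` accessors documented below:

```go
package main

import (
	"fmt"
	"time"

	"atomicgo.dev/schedule"
)

func main() {
	at := time.Now().Add(2 * time.Second)

	task := schedule.At(at, func() {
		fmt.Println("executed at", time.Now())
	})

	// The task records its scheduled time, so callers can inspect it.
	fmt.Println("runs at:", task.NextExecutionTime())
	fmt.Println("runs in:", task.ExecutesIn().Round(time.Millisecond))

	task.Wait()
}
```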
+ + +### func [Every]() + +```go +func Every(interval time.Duration, task func() bool) *Task +``` + +Every executes the task in the given interval, as long as the task function returns true. The function is non\-blocking. If you want to wait for the task to be executed, use the Task.Wait method. + +
Example +

+ + + +```go +package main + +import ( + "fmt" + "time" + + "atomicgo.dev/schedule" +) + +func main() { + task := schedule.Every(time.Second, func() bool { + fmt.Println("1 second is over!") + return true // return false to stop the task + }) + + fmt.Println("Some stuff happening...") + + time.Sleep(10 * time.Second) + + task.Stop() +} +``` + +

+
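A minimal sketch of stopping a repeating task from the outside and checking its state with the `Task.StartedAt` and `Task.IsActive` accessors documented below:

```go
package main

import (
	"fmt"
	"time"

	"atomicgo.dev/schedule"
)

func main() {
	task := schedule.Every(500*time.Millisecond, func() bool {
		fmt.Println("tick")
		return true
	})

	time.Sleep(2 * time.Second)
	task.Stop()

	fmt.Println("started at:", task.StartedAt().Format(time.RFC3339))
	fmt.Println("active:", task.IsActive()) // false after Stop
}
```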
+ + +### func \(\*Task\) [ExecutesIn]() + +```go +func (s *Task) ExecutesIn() time.Duration +``` + +ExecutesIn returns the duration until the next execution. + + +### func \(\*Task\) [IsActive]() + +```go +func (s *Task) IsActive() bool +``` + +IsActive returns true if the scheduler is active. + + +### func \(\*Task\) [NextExecutionTime]() + +```go +func (s *Task) NextExecutionTime() time.Time +``` + +NextExecutionTime returns the time when the next execution will happen. + + +### func \(\*Task\) [StartedAt]() + +```go +func (s *Task) StartedAt() time.Time +``` + +StartedAt returns the time when the scheduler was started. + + +### func \(\*Task\) [Stop]() + +```go +func (s *Task) Stop() +``` + +Stop stops the scheduler. + + +### func \(\*Task\) [Wait]() + +```go +func (s *Task) Wait() +``` + +Wait blocks until the scheduler is stopped. After and At will stop automatically after the task is executed. + +Generated by [gomarkdoc]() + + + + +--- + +> [AtomicGo.dev](https://atomicgo.dev)  ·  +> with ❤️ by [@MarvinJWendt](https://github.com/MarvinJWendt) | +> [MarvinJWendt.com](https://marvinjwendt.com) diff --git a/vendor/atomicgo.dev/schedule/codecov.yml b/vendor/atomicgo.dev/schedule/codecov.yml new file mode 100644 index 00000000..bfdc9877 --- /dev/null +++ b/vendor/atomicgo.dev/schedule/codecov.yml @@ -0,0 +1,8 @@ +coverage: + status: + project: + default: + informational: true + patch: + default: + informational: true diff --git a/vendor/atomicgo.dev/schedule/doc.go b/vendor/atomicgo.dev/schedule/doc.go new file mode 100644 index 00000000..4801fdb5 --- /dev/null +++ b/vendor/atomicgo.dev/schedule/doc.go @@ -0,0 +1,6 @@ +/* +Package schedule provides a simple scheduler for Go. + +It can run a function at a given time, in a given duration, or repeatedly at a given interval. +*/ +package schedule diff --git a/vendor/atomicgo.dev/schedule/schedule.go b/vendor/atomicgo.dev/schedule/schedule.go new file mode 100644 index 00000000..635d29d3 --- /dev/null +++ b/vendor/atomicgo.dev/schedule/schedule.go @@ -0,0 +1,116 @@ +package schedule + +import "time" + +// Task holds information about the running task and can be used to stop running tasks. +type Task struct { + stop chan struct{} + nextExecution time.Time + startedAt time.Time +} + +// newTask creates a new Task. +func newTask() *Task { + return &Task{ + stop: make(chan struct{}), + startedAt: time.Now(), + } +} + +// StartedAt returns the time when the scheduler was started. +func (s *Task) StartedAt() time.Time { + return s.startedAt +} + +// NextExecutionTime returns the time when the next execution will happen. +func (s *Task) NextExecutionTime() time.Time { + return s.nextExecution +} + +// ExecutesIn returns the duration until the next execution. +func (s *Task) ExecutesIn() time.Duration { + return time.Until(s.nextExecution) +} + +// IsActive returns true if the scheduler is active. +func (s *Task) IsActive() bool { + select { + case <-s.stop: + return false + default: + return true + } +} + +// Wait blocks until the scheduler is stopped. +// After and At will stop automatically after the task is executed. +func (s *Task) Wait() { + <-s.stop +} + +// Stop stops the scheduler. +func (s *Task) Stop() { + close(s.stop) +} + +// After executes the task after the given duration. +// The function is non-blocking. If you want to wait for the task to be executed, use the Task.Wait method. 
+func After(d time.Duration, task func()) *Task { + scheduler := newTask() + scheduler.nextExecution = time.Now().Add(d) + + go func() { + select { + case <-time.After(d): + task() + scheduler.Stop() + case <-scheduler.stop: + return + } + }() + + return scheduler +} + +// At executes the task at the given time. +// The function is non-blocking. If you want to wait for the task to be executed, use the Task.Wait method. +func At(t time.Time, task func()) *Task { + scheduler := newTask() + scheduler.nextExecution = t + + go func() { + select { + case <-time.After(time.Until(t)): + task() + scheduler.Stop() + case <-scheduler.stop: + return + } + }() + + return scheduler +} + +// Every executes the task in the given interval, as long as the task function returns true. +// The function is non-blocking. If you want to wait for the task to be executed, use the Task.Wait method. +func Every(interval time.Duration, task func() bool) *Task { + scheduler := newTask() + scheduler.nextExecution = time.Now().Add(interval) + + ticker := time.NewTicker(interval) + + go func() { + for { + select { + case <-ticker.C: + task() + scheduler.nextExecution = time.Now().Add(interval) + case <-scheduler.stop: + ticker.Stop() + return + } + } + }() + + return scheduler +} diff --git a/vendor/bitbucket.org/creachadair/shell/LICENSE b/vendor/bitbucket.org/creachadair/shell/LICENSE deleted file mode 100644 index 10d72735..00000000 --- a/vendor/bitbucket.org/creachadair/shell/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2015, Michael J. Fromberger -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/bitbucket.org/creachadair/shell/README.md b/vendor/bitbucket.org/creachadair/shell/README.md deleted file mode 100644 index 73282bed..00000000 --- a/vendor/bitbucket.org/creachadair/shell/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# shell - -http://godoc.org/bitbucket.org/creachadair/shell - -The `shell` package implements basic shell command-line splitting. 
- - diff --git a/vendor/bitbucket.org/creachadair/shell/bitbucket-pipelines.yml b/vendor/bitbucket.org/creachadair/shell/bitbucket-pipelines.yml deleted file mode 100644 index 8acd906c..00000000 --- a/vendor/bitbucket.org/creachadair/shell/bitbucket-pipelines.yml +++ /dev/null @@ -1,23 +0,0 @@ -definitions: - steps: - - step: &Verify - script: - - PACKAGE_PATH="${GOPATH}/src/bitbucket.org/${BITBUCKET_REPO_OWNER}/${BITBUCKET_REPO_SLUG}" - - mkdir -pv "${PACKAGE_PATH}" - - tar -cO --exclude-vcs --exclude=bitbucket-pipelines.yml . | tar -xv -C "${PACKAGE_PATH}" - - cd "${PACKAGE_PATH}" - - go version # log the version of Go we are using in this step - - export GO111MODULE=on # enable modules inside $GOPATH - - go get -v ./... - - go build -v ./... - - go test -v -race -cpu=1,4 ./... - - go vet -v ./... - -pipelines: - default: # run on each push - - step: - image: golang:1.16 - <<: *Verify - - step: - image: golang:1.17 - <<: *Verify diff --git a/vendor/bitbucket.org/creachadair/shell/shell.go b/vendor/bitbucket.org/creachadair/shell/shell.go deleted file mode 100644 index e4f8650f..00000000 --- a/vendor/bitbucket.org/creachadair/shell/shell.go +++ /dev/null @@ -1,325 +0,0 @@ -// Package shell supports splitting and joining of shell command strings. -// -// The Split function divides a string into whitespace-separated fields, -// respecting single and double quotation marks as defined by the Shell Command -// Language section of IEEE Std 1003.1 2013. The Quote function quotes -// characters that would otherwise be subject to shell evaluation, and the Join -// function concatenates quoted strings with spaces between them. -// -// The relationship between Split and Join is that given -// -// fields, ok := Split(Join(ss)) -// -// the following relationship will hold: -// -// fields == ss && ok -// -package shell - -import ( - "bufio" - "bytes" - "io" - "strings" -) - -// These characters must be quoted to escape special meaning. This list -// doesn't include the single quote. -const mustQuote = "|&;<>()$`\\\"\t\n" - -// These characters should be quoted to escape special meaning, since in some -// contexts they are special (e.g., "x=y" in command position, "*" for globs). -const shouldQuote = `*?[#~=%` - -// These are the separator characters in unquoted text. -const spaces = " \t\n" - -const allQuote = mustQuote + shouldQuote + spaces - -type state int - -const ( - stNone state = iota - stBreak - stBreakQ - stWord - stWordQ - stSingle - stDouble - stDoubleQ -) - -type class int - -const ( - clOther class = iota - clBreak - clNewline - clQuote - clSingle - clDouble -) - -type action int - -const ( - drop action = iota - push - xpush - emit -) - -// N.B. Benchmarking shows that array lookup is substantially faster than map -// lookup here, but it requires caution when changing the state machine. In -// particular: -// -// 1. The state and action values must be small integers. -// 2. The update table must completely cover the state values. -// 3. Each action slice must completely cover the action values. 
-// -var update = [...][]struct { - state - action -}{ - stNone: {}, - stBreak: { - clBreak: {stBreak, drop}, - clNewline: {stBreak, drop}, - clQuote: {stBreakQ, drop}, - clSingle: {stSingle, drop}, - clDouble: {stDouble, drop}, - clOther: {stWord, push}, - }, - stBreakQ: { - clBreak: {stWord, push}, - clNewline: {stBreak, drop}, - clQuote: {stWord, push}, - clSingle: {stWord, push}, - clDouble: {stWord, push}, - clOther: {stWord, push}, - }, - stWord: { - clBreak: {stBreak, emit}, - clNewline: {stBreak, emit}, - clQuote: {stWordQ, drop}, - clSingle: {stSingle, drop}, - clDouble: {stDouble, drop}, - clOther: {stWord, push}, - }, - stWordQ: { - clBreak: {stWord, push}, - clNewline: {stWord, drop}, - clQuote: {stWord, push}, - clSingle: {stWord, push}, - clDouble: {stWord, push}, - clOther: {stWord, push}, - }, - stSingle: { - clBreak: {stSingle, push}, - clNewline: {stSingle, push}, - clQuote: {stSingle, push}, - clSingle: {stWord, drop}, - clDouble: {stSingle, push}, - clOther: {stSingle, push}, - }, - stDouble: { - clBreak: {stDouble, push}, - clNewline: {stDouble, push}, - clQuote: {stDoubleQ, drop}, - clSingle: {stDouble, push}, - clDouble: {stWord, drop}, - clOther: {stDouble, push}, - }, - stDoubleQ: { - clBreak: {stDouble, xpush}, - clNewline: {stDouble, drop}, - clQuote: {stDouble, push}, - clSingle: {stDouble, xpush}, - clDouble: {stDouble, push}, - clOther: {stDouble, xpush}, - }, -} - -var classOf = [256]class{ - ' ': clBreak, - '\t': clBreak, - '\n': clNewline, - '\\': clQuote, - '\'': clSingle, - '"': clDouble, -} - -// A Scanner partitions input from a reader into tokens divided on space, tab, -// and newline characters. Single and double quotation marks are handled as -// described in http://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02. -type Scanner struct { - buf *bufio.Reader - cur bytes.Buffer - st state - err error -} - -// NewScanner returns a Scanner that reads input from r. -func NewScanner(r io.Reader) *Scanner { - return &Scanner{ - buf: bufio.NewReader(r), - st: stBreak, - } -} - -// Next advances the scanner and reports whether there are any further tokens -// to be consumed. -func (s *Scanner) Next() bool { - if s.err != nil { - return false - } - s.cur.Reset() - for { - c, err := s.buf.ReadByte() - s.err = err - if err == io.EOF { - break - } else if err != nil { - return false - } - next := update[s.st][classOf[c]] - s.st = next.state - switch next.action { - case push: - s.cur.WriteByte(c) - case xpush: - s.cur.Write([]byte{'\\', c}) - case emit: - return true // s.cur has a complete token - case drop: - continue - default: - panic("unknown action") - } - } - return s.st != stBreak -} - -// Text returns the text of the current token, or "" if there is none. -func (s *Scanner) Text() string { return s.cur.String() } - -// Err returns the error, if any, that resulted from the most recent action. -func (s *Scanner) Err() error { return s.err } - -// Complete reports whether the current token is complete, meaning that it is -// unquoted or its quotes were balanced. -func (s *Scanner) Complete() bool { return s.st == stBreak || s.st == stWord } - -// Rest returns an io.Reader for the remainder of the unconsumed input in s. -// After calling this method, Next will always return false. The remainder -// does not include the text of the current token at the time Rest is called. 
-func (s *Scanner) Rest() io.Reader { - s.st = stNone - s.cur.Reset() - s.err = io.EOF - return s.buf -} - -// Each calls f for each token in the scanner until the input is exhausted, f -// returns false, or an error occurs. -func (s *Scanner) Each(f func(tok string) bool) error { - for s.Next() { - if !f(s.Text()) { - return nil - } - } - if err := s.Err(); err != io.EOF { - return err - } - return nil -} - -// Split returns the remaining tokens in s, not including the current token if -// there is one. Any tokens already consumed are still returned, even if there -// is an error. -func (s *Scanner) Split() []string { - var tokens []string - for s.Next() { - tokens = append(tokens, s.Text()) - } - return tokens -} - -// Split partitions s into tokens divided on space, tab, and newline characters -// using a *Scanner. Leading and trailing whitespace are ignored. -// -// The Boolean flag reports whether the final token is "valid", meaning there -// were no unclosed quotations in the string. -func Split(s string) ([]string, bool) { - sc := NewScanner(strings.NewReader(s)) - ss := sc.Split() - return ss, sc.Complete() -} - -func quotable(s string) (hasQ, hasOther bool) { - const ( - quote = 1 - other = 2 - all = quote + other - ) - var v uint - for i := 0; i < len(s) && v < all; i++ { - if s[i] == '\'' { - v |= quote - } else if strings.IndexByte(allQuote, s[i]) >= 0 { - v |= other - } - } - return v"e != 0, v&other != 0 -} - -// Quote returns a copy of s in which shell metacharacters are quoted to -// protect them from evaluation. -func Quote(s string) string { - var buf bytes.Buffer - return quote(s, &buf) -} - -// quote implements quotation, using the provided buffer as scratch space. The -// existing contents of the buffer are clobbered. -func quote(s string, buf *bytes.Buffer) string { - if s == "" { - return "''" - } - hasQ, hasOther := quotable(s) - if !hasQ && !hasOther { - return s // fast path: nothing needs quotation - } - - buf.Reset() - inq := false - for i := 0; i < len(s); i++ { - ch := s[i] - if ch == '\'' { - if inq { - buf.WriteByte('\'') - inq = false - } - buf.WriteByte('\\') - } else if !inq && hasOther { - buf.WriteByte('\'') - inq = true - } - buf.WriteByte(ch) - } - if inq { - buf.WriteByte('\'') - } - return buf.String() -} - -// Join quotes each element of ss with Quote and concatenates the resulting -// strings separated by spaces. 
-func Join(ss []string) string { - quoted := make([]string, len(ss)) - var buf bytes.Buffer - for i, s := range ss { - quoted[i] = quote(s, &buf) - } - return strings.Join(quoted, " ") -} diff --git a/vendor/github.com/bitfield/script/README.md b/vendor/github.com/bitfield/script/README.md index 4a9258d9..b57324e7 100644 --- a/vendor/github.com/bitfield/script/README.md +++ b/vendor/github.com/bitfield/script/README.md @@ -1,4 +1,7 @@ -[![Go Reference](https://pkg.go.dev/badge/github.com/bitfield/script.svg)](https://pkg.go.dev/github.com/bitfield/script)[![Go Report Card](https://goreportcard.com/badge/github.com/bitfield/script)](https://goreportcard.com/report/github.com/bitfield/script)[![Mentioned in Awesome Go](https://awesome.re/mentioned-badge-flat.svg)](https://github.com/avelino/awesome-go)[![CircleCI](https://circleci.com/gh/bitfield/script.svg?style=svg)](https://circleci.com/gh/bitfield/script) +[![Go Reference](https://pkg.go.dev/badge/github.com/bitfield/script.svg)](https://pkg.go.dev/github.com/bitfield/script) +[![Go Report Card](https://goreportcard.com/badge/github.com/bitfield/script)](https://goreportcard.com/report/github.com/bitfield/script) +[![Mentioned in Awesome Go](https://awesome.re/mentioned-badge-flat.svg)](https://github.com/avelino/awesome-go) +![Tests](https://github.com/bitfield/script/actions/workflows/test.yml/badge.svg) ```go import "github.com/bitfield/script" @@ -45,6 +48,7 @@ If you're already familiar with shell scripting and the Unix toolset, here is a | `sed` | [`Replace`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Replace) / [`ReplaceRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ReplaceRegexp) | | `sha256sum` | [`SHA256Sum`](https://pkg.go.dev/github.com/bitfield/script#Pipe.SHA256Sum) / [`SHA256Sums`](https://pkg.go.dev/github.com/bitfield/script#Pipe.SHA256Sums) | | `tail` | [`Last`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Last) | +| `tee` | [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee) | | `uniq -c` | [`Freq`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Freq) | | `wc -l` | [`CountLines`](https://pkg.go.dev/github.com/bitfield/script#Pipe.CountLines) | | `xargs` | [`ExecForEach`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ExecForEach) | @@ -99,6 +103,12 @@ What's that? You want to append that output to a file instead of printing it to script.Args().Concat().Match("Error").First(10).AppendFile("/var/log/errors.txt") ``` +And if we'd like to send the output to the terminal *as well as* to the file, we can do that: + +```go +script.Echo("data").Tee().AppendFile("data.txt") +``` + We're not limited to getting data only from files or standard input. We can get it from HTTP requests too: ```go @@ -301,6 +311,7 @@ Filters are methods on an existing pipe that also return a pipe, allowing you to | [`Replace`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Replace) | matching text replaced with given string | | [`ReplaceRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ReplaceRegexp) | matching text replaced with given string | | [`SHA256Sums`](https://pkg.go.dev/github.com/bitfield/script#Pipe.SHA256Sums) | SHA-256 hashes of each listed file | +| [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee) | input copied to supplied writers | Note that filters run concurrently, rather than producing nothing until each stage has fully read its input. This is convenient for executing long-running comands, for example. 
If you do need to wait for the pipeline to complete, call [`Wait`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Wait). @@ -325,6 +336,7 @@ Sinks are methods that return some data from a pipe, ending the pipeline and ext | Version | New | | ----------- | ------- | +| v0.22.0 | [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee), [`WithStderr`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithStderr) | | v0.21.0 | HTTP support: [`Do`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Do), [`Get`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Get), [`Post`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Post) | | v0.20.0 | [`JQ`](https://pkg.go.dev/github.com/bitfield/script#Pipe.JQ) | diff --git a/vendor/github.com/bitfield/script/script.go b/vendor/github.com/bitfield/script/script.go index f6a69e53..da7e2fe5 100644 --- a/vendor/github.com/bitfield/script/script.go +++ b/vendor/github.com/bitfield/script/script.go @@ -8,6 +8,7 @@ import ( "encoding/json" "fmt" "io" + "math" "net/http" "os" "os/exec" @@ -19,16 +20,16 @@ import ( "sync" "text/template" - "bitbucket.org/creachadair/shell" "github.com/itchyny/gojq" + "mvdan.cc/sh/v3/shell" ) // Pipe represents a pipe object with an associated [ReadAutoCloser]. type Pipe struct { // Reader is the underlying reader. - Reader ReadAutoCloser - stdout io.Writer - httpClient *http.Client + Reader ReadAutoCloser + stdout, stderr io.Writer + httpClient *http.Client // because pipe stages are concurrent, protect 'err' mu *sync.Mutex @@ -38,11 +39,7 @@ type Pipe struct { // Args creates a pipe containing the program's command-line arguments from // [os.Args], excluding the program name, one per line. func Args() *Pipe { - var s strings.Builder - for _, a := range os.Args[1:] { - s.WriteString(a + "\n") - } - return Echo(s.String()) + return Slice(os.Args[1:]) } // Do creates a pipe that makes the HTTP request req and produces the response. @@ -68,16 +65,15 @@ func Exec(cmdLine string) *Pipe { // File creates a pipe that reads from the file path. func File(path string) *Pipe { - p := NewPipe() f, err := os.Open(path) if err != nil { - return p.WithError(err) + return NewPipe().WithError(err) } - return p.WithReader(f) + return NewPipe().WithReader(f) } -// FindFiles creates a pipe listing all the files in the directory path and its -// subdirectories recursively, one per line, like Unix find(1). If path doesn't +// FindFiles creates a pipe listing all the files in the directory dir and its +// subdirectories recursively, one per line, like Unix find(1). If dir doesn't // exist or can't be read, the pipe's error status will be set. 
// // Each line of the output consists of a slash-separated path, starting with @@ -91,21 +87,21 @@ func File(path string) *Pipe { // // test/1.txt // test/2.txt -func FindFiles(path string) *Pipe { - var fileNames []string - walkFn := func(path string, info os.FileInfo, err error) error { +func FindFiles(dir string) *Pipe { + var paths []string + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { if err != nil { return err } if !info.IsDir() { - fileNames = append(fileNames, path) + paths = append(paths, path) } return nil - } - if err := filepath.Walk(path, walkFn); err != nil { + }) + if err != nil { return NewPipe().WithError(err) } - return Slice(fileNames) + return Slice(paths) } // Get creates a pipe that makes an HTTP GET request to URL, and produces the @@ -121,12 +117,11 @@ func Get(URL string) *Pipe { // // IfExists("/foo/bar").Exec("/usr/bin/something") func IfExists(path string) *Pipe { - p := NewPipe() _, err := os.Stat(path) if err != nil { - return p.WithError(err) + return NewPipe().WithError(err) } - return p + return NewPipe() } // ListFiles creates a pipe containing the files or directories specified by @@ -144,7 +139,7 @@ func ListFiles(path string) *Pipe { } return Slice(fileNames) } - files, err := os.ReadDir(path) + entries, err := os.ReadDir(path) if err != nil { // Check for the case where the path matches exactly one file s, err := os.Stat(path) @@ -156,11 +151,11 @@ func ListFiles(path string) *Pipe { } return NewPipe().WithError(err) } - fileNames := make([]string, len(files)) - for i, f := range files { - fileNames[i] = filepath.Join(path, f.Name()) + matches := make([]string, len(entries)) + for i, e := range entries { + matches[i] = filepath.Join(path, e.Name()) } - return Slice(fileNames) + return Slice(matches) } // NewPipe creates a new pipe with an empty reader (use [Pipe.WithReader] to @@ -168,8 +163,7 @@ func ListFiles(path string) *Pipe { func NewPipe() *Pipe { return &Pipe{ Reader: ReadAutoCloser{}, - mu: &sync.Mutex{}, - err: nil, + mu: new(sync.Mutex), stdout: os.Stdout, httpClient: http.DefaultClient, } @@ -199,8 +193,6 @@ func (p *Pipe) AppendFile(path string) (int64, error) { return p.writeOrAppendFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY) } -var exitStatusPattern = regexp.MustCompile(`exit status (\d+)$`) - // Basename reads paths from the pipe, one per line, and removes any leading // directory components from each. So, for example, /usr/local/bin/foo would // become just foo. This is the complementary operation to [Pipe.Dirname]. @@ -221,7 +213,7 @@ func (p *Pipe) Bytes() ([]byte, error) { if err != nil { p.SetError(err) } - return data, nil + return data, p.Error() } // Close closes the pipe's associated reader. This is a no-op if the reader is @@ -268,17 +260,15 @@ func (p *Pipe) Concat() *Pipe { var readers []io.Reader p.FilterScan(func(line string, w io.Writer) { input, err := os.Open(line) - if err != nil { - return // skip errors + if err == nil { + readers = append(readers, NewReadAutoCloser(input)) } - readers = append(readers, NewReadAutoCloser(input)) }).Wait() return p.WithReader(io.MultiReader(readers...)) } // CountLines returns the number of lines of input, or an error. 
-func (p *Pipe) CountLines() (int, error) { - lines := 0 +func (p *Pipe) CountLines() (lines int, err error) { p.FilterScan(func(line string, w io.Writer) { lines++ }).Wait() @@ -319,14 +309,14 @@ func (p *Pipe) Do(req *http.Request) *Pipe { return err } defer resp.Body.Close() - // Any HTTP 2xx status code is considered okay - if resp.StatusCode/100 != 2 { - return fmt.Errorf("unexpected HTTP response status: %s", resp.Status) - } _, err = io.Copy(w, resp.Body) if err != nil { return err } + // Any HTTP 2xx status code is considered okay + if resp.StatusCode/100 != 2 { + return fmt.Errorf("unexpected HTTP response status: %s", resp.Status) + } return nil }) } @@ -338,10 +328,10 @@ func (p *Pipe) Do(req *http.Request) *Pipe { // concurrently and don't do unnecessary reads on the input. func (p *Pipe) EachLine(process func(string, *strings.Builder)) *Pipe { return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) - output := strings.Builder{} + scanner := newScanner(r) + output := new(strings.Builder) for scanner.Scan() { - process(scanner.Text(), &output) + process(scanner.Text(), output) } fmt.Fprint(w, output.String()) return scanner.Err() @@ -354,7 +344,7 @@ func (p *Pipe) Echo(s string) *Pipe { if p.Error() != nil { return p } - return p.WithReader(NewReadAutoCloser(strings.NewReader(s))) + return p.WithReader(strings.NewReader(s)) } // Error returns any error present on the pipe, or nil otherwise. @@ -368,8 +358,9 @@ func (p *Pipe) Error() error { } // Exec runs cmdLine as an external command, sending it the contents of the -// pipe as input, and produces the command's combined output. The effect of -// this is to filter the contents of the pipe through the external command. +// pipe as input, and produces the command's standard output (see below for +// error output). The effect of this is to filter the contents of the pipe +// through the external command. // // # Error handling // @@ -380,19 +371,26 @@ func (p *Pipe) Error() error { // because [Pipe.String] is a no-op if the pipe's error status is set, if you // want output you will need to reset the error status before calling // [Pipe.String]. +// +// If the command writes to its standard error stream, this will also go to the +// pipe, along with its standard output. However, the standard error text can +// instead be redirected to a supplied writer, using [Pipe.WithStderr]. func (p *Pipe) Exec(cmdLine string) *Pipe { return p.Filter(func(r io.Reader, w io.Writer) error { - args, ok := shell.Split(cmdLine) // strings.Fields doesn't handle quotes - if !ok { - return fmt.Errorf("unbalanced quotes or backslashes in [%s]", cmdLine) + args, err := shell.Fields(cmdLine, nil) + if err != nil { + return err } cmd := exec.Command(args[0], args[1:]...) 
cmd.Stdin = r cmd.Stdout = w cmd.Stderr = w - err := cmd.Start() + if p.stderr != nil { + cmd.Stderr = p.stderr + } + err = cmd.Start() if err != nil { - fmt.Fprintln(w, err) + fmt.Fprintln(cmd.Stderr, err) return err } return cmd.Wait() @@ -413,29 +411,31 @@ func (p *Pipe) ExecForEach(cmdLine string) *Pipe { return p.WithError(err) } return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) + scanner := newScanner(r) for scanner.Scan() { - cmdLine := strings.Builder{} - err := tpl.Execute(&cmdLine, scanner.Text()) + cmdLine := new(strings.Builder) + err := tpl.Execute(cmdLine, scanner.Text()) if err != nil { return err } - // strings.Fields doesn't handle quotes - args, ok := shell.Split(cmdLine.String()) - if !ok { - return fmt.Errorf("unbalanced quotes or backslashes in [%s]", cmdLine.String()) + args, err := shell.Fields(cmdLine.String(), nil) + if err != nil { + return err } cmd := exec.Command(args[0], args[1:]...) cmd.Stdout = w cmd.Stderr = w + if p.stderr != nil { + cmd.Stderr = p.stderr + } err = cmd.Start() if err != nil { - fmt.Fprintln(w, err) + fmt.Fprintln(cmd.Stderr, err) continue } err = cmd.Wait() if err != nil { - fmt.Fprintln(w, err) + fmt.Fprintln(cmd.Stderr, err) continue } } @@ -443,6 +443,8 @@ func (p *Pipe) ExecForEach(cmdLine string) *Pipe { }) } +var exitStatusPattern = regexp.MustCompile(`exit status (\d+)$`) + // ExitStatus returns the integer exit status of a previous command (for // example run by [Pipe.Exec]). This will be zero unless the pipe's error // status is set and the error matches the pattern “exit status %d”. @@ -502,7 +504,7 @@ func (p *Pipe) FilterLine(filter func(string) string) *Pipe { // handling. func (p *Pipe) FilterScan(filter func(string, io.Writer)) *Pipe { return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) + scanner := newScanner(r) for scanner.Scan() { filter(scanner.Text(), w) } @@ -555,16 +557,16 @@ func (p *Pipe) Freq() *Pipe { count int } return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) + scanner := newScanner(r) for scanner.Scan() { freq[scanner.Text()]++ } freqs := make([]frequency, 0, len(freq)) - var maxCount int + max := 0 for line, count := range freq { freqs = append(freqs, frequency{line, count}) - if count > maxCount { - maxCount = count + if count > max { + max = count } } sort.Slice(freqs, func(i, j int) bool { @@ -574,7 +576,7 @@ func (p *Pipe) Freq() *Pipe { } return x > y }) - fieldWidth := len(strconv.Itoa(maxCount)) + fieldWidth := len(strconv.Itoa(max)) for _, item := range freqs { fmt.Fprintf(w, "%*d %s\n", fieldWidth, item.count, item.line) } @@ -597,14 +599,13 @@ func (p *Pipe) Get(URL string) *Pipe { // space-separated string, which will always end with a newline. func (p *Pipe) Join() *Pipe { return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) - var line string + scanner := newScanner(r) first := true for scanner.Scan() { if !first { fmt.Fprint(w, " ") } - line = scanner.Text() + line := scanner.Text() fmt.Fprint(w, line) first = false } @@ -659,7 +660,7 @@ func (p *Pipe) Last(n int) *Pipe { return NewPipe() } return p.Filter(func(r io.Reader, w io.Writer) error { - scanner := bufio.NewScanner(r) + scanner := newScanner(r) input := ring.New(n) for scanner.Scan() { input.Value = scanner.Text() @@ -703,16 +704,6 @@ func (p *Pipe) Post(URL string) *Pipe { return p.Do(req) } -// Read reads up to len(b) bytes from the pipe into b. 
It returns the number of -// bytes read and any error encountered. At end of file, or on a nil pipe, Read -// returns 0, [io.EOF]. -func (p *Pipe) Read(b []byte) (int, error) { - if p.Error() != nil { - return 0, p.Error() - } - return p.Reader.Read(b) -} - // Reject produces only lines that do not contain the string s. func (p *Pipe) Reject(s string) *Pipe { return p.FilterScan(func(line string, w io.Writer) { @@ -748,6 +739,16 @@ func (p *Pipe) ReplaceRegexp(re *regexp.Regexp, replace string) *Pipe { }) } +// Read reads up to len(b) bytes from the pipe into b. It returns the number of +// bytes read and any error encountered. At end of file, or on a nil pipe, Read +// returns 0, [io.EOF]. +func (p *Pipe) Read(b []byte) (int, error) { + if p.Error() != nil { + return 0, p.Error() + } + return p.Reader.Read(b) +} + // SetError sets the error err on the pipe. func (p *Pipe) SetError(err error) { if p.mu == nil { // uninitialised pipe @@ -833,11 +834,22 @@ func (p *Pipe) String() (string, error) { return string(data), p.Error() } +// Tee copies the pipe's contents to each of the supplied writers, like Unix +// tee(1). If no writers are supplied, the default is the pipe's standard +// output. +func (p *Pipe) Tee(writers ...io.Writer) *Pipe { + teeWriter := p.stdout + if len(writers) > 0 { + teeWriter = io.MultiWriter(writers...) + } + return p.WithReader(io.TeeReader(p.Reader, teeWriter)) +} + // Wait reads the pipe to completion and discards the result. This is mostly // useful for waiting until concurrent filters have completed (see // [Pipe.Filter]). func (p *Pipe) Wait() { - _, err := io.Copy(io.Discard, p) + _, err := io.ReadAll(p) if err != nil { p.SetError(err) } @@ -868,6 +880,14 @@ func (p *Pipe) WithReader(r io.Reader) *Pipe { return p } +// WithStderr redirects the standard error output for commands run via +// [Pipe.Exec] or [Pipe.ExecForEach] to the writer w, instead of going to the +// pipe as it normally would. +func (p *Pipe) WithStderr(w io.Writer) *Pipe { + p.stderr = w + return p +} + // WithStdout sets the pipe's standard output to the writer w, instead of the // default [os.Stdout]. func (p *Pipe) WithStdout(w io.Writer) *Pipe { @@ -894,9 +914,8 @@ func (p *Pipe) writeOrAppendFile(path string, mode int) (int64, error) { wrote, err := io.Copy(out, p) if err != nil { p.SetError(err) - return 0, err } - return wrote, nil + return wrote, p.Error() } // ReadAutoCloser wraps an [io.ReadCloser] so that it will be automatically @@ -939,3 +958,9 @@ func (ra ReadAutoCloser) Read(b []byte) (n int, err error) { } return n, err } + +func newScanner(r io.Reader) *bufio.Scanner { + scanner := bufio.NewScanner(r) + scanner.Buffer(make([]byte, 4096), math.MaxInt) + return scanner +} diff --git a/vendor/github.com/dustin/go-humanize/.travis.yml b/vendor/github.com/dustin/go-humanize/.travis.yml index ba95cdd1..ac12e485 100644 --- a/vendor/github.com/dustin/go-humanize/.travis.yml +++ b/vendor/github.com/dustin/go-humanize/.travis.yml @@ -1,12 +1,12 @@ sudo: false language: go +go_import_path: github.com/dustin/go-humanize go: - - 1.3.x - - 1.5.x - - 1.6.x - - 1.7.x - - 1.8.x - - 1.9.x + - 1.13.x + - 1.14.x + - 1.15.x + - 1.16.x + - stable - master matrix: allow_failures: @@ -15,7 +15,7 @@ matrix: install: - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). script: - - go get -t -v ./... - diff -u <(echo -n) <(gofmt -d -s .) - - go tool vet . + - go vet . + - go install -v -race ./... 
- go test -v -race ./... diff --git a/vendor/github.com/dustin/go-humanize/README.markdown b/vendor/github.com/dustin/go-humanize/README.markdown index 91b4ae56..7d0b16b3 100644 --- a/vendor/github.com/dustin/go-humanize/README.markdown +++ b/vendor/github.com/dustin/go-humanize/README.markdown @@ -5,7 +5,7 @@ Just a few functions for helping humanize times and sizes. `go get` it as `github.com/dustin/go-humanize`, import it as `"github.com/dustin/go-humanize"`, use it as `humanize`. -See [godoc](https://godoc.org/github.com/dustin/go-humanize) for +See [godoc](https://pkg.go.dev/github.com/dustin/go-humanize) for complete documentation. ## Sizes diff --git a/vendor/github.com/dustin/go-humanize/bigbytes.go b/vendor/github.com/dustin/go-humanize/bigbytes.go index 1a2bf617..3b015fd5 100644 --- a/vendor/github.com/dustin/go-humanize/bigbytes.go +++ b/vendor/github.com/dustin/go-humanize/bigbytes.go @@ -28,6 +28,10 @@ var ( BigZiByte = (&big.Int{}).Mul(BigEiByte, bigIECExp) // BigYiByte is 1,024 z bytes in bit.Ints BigYiByte = (&big.Int{}).Mul(BigZiByte, bigIECExp) + // BigRiByte is 1,024 y bytes in bit.Ints + BigRiByte = (&big.Int{}).Mul(BigYiByte, bigIECExp) + // BigQiByte is 1,024 r bytes in bit.Ints + BigQiByte = (&big.Int{}).Mul(BigRiByte, bigIECExp) ) var ( @@ -51,6 +55,10 @@ var ( BigZByte = (&big.Int{}).Mul(BigEByte, bigSIExp) // BigYByte is 1,000 SI z bytes in big.Ints BigYByte = (&big.Int{}).Mul(BigZByte, bigSIExp) + // BigRByte is 1,000 SI y bytes in big.Ints + BigRByte = (&big.Int{}).Mul(BigYByte, bigSIExp) + // BigQByte is 1,000 SI r bytes in big.Ints + BigQByte = (&big.Int{}).Mul(BigRByte, bigSIExp) ) var bigBytesSizeTable = map[string]*big.Int{ @@ -71,6 +79,10 @@ var bigBytesSizeTable = map[string]*big.Int{ "zb": BigZByte, "yib": BigYiByte, "yb": BigYByte, + "rib": BigRiByte, + "rb": BigRByte, + "qib": BigQiByte, + "qb": BigQByte, // Without suffix "": BigByte, "ki": BigKiByte, @@ -89,6 +101,10 @@ var bigBytesSizeTable = map[string]*big.Int{ "zi": BigZiByte, "y": BigYByte, "yi": BigYiByte, + "r": BigRByte, + "ri": BigRiByte, + "q": BigQByte, + "qi": BigQiByte, } var ten = big.NewInt(10) @@ -115,7 +131,7 @@ func humanateBigBytes(s, base *big.Int, sizes []string) string { // // BigBytes(82854982) -> 83 MB func BigBytes(s *big.Int) string { - sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"} + sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB", "RB", "QB"} return humanateBigBytes(s, bigSIExp, sizes) } @@ -125,7 +141,7 @@ func BigBytes(s *big.Int) string { // // BigIBytes(82854982) -> 79 MiB func BigIBytes(s *big.Int) string { - sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"} + sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "RiB", "QiB"} return humanateBigBytes(s, bigIECExp, sizes) } diff --git a/vendor/github.com/dustin/go-humanize/commaf.go b/vendor/github.com/dustin/go-humanize/commaf.go index 620690de..2bc83a03 100644 --- a/vendor/github.com/dustin/go-humanize/commaf.go +++ b/vendor/github.com/dustin/go-humanize/commaf.go @@ -1,3 +1,4 @@ +//go:build go1.6 // +build go1.6 package humanize diff --git a/vendor/github.com/dustin/go-humanize/ftoa.go b/vendor/github.com/dustin/go-humanize/ftoa.go index 1c62b640..bce923f3 100644 --- a/vendor/github.com/dustin/go-humanize/ftoa.go +++ b/vendor/github.com/dustin/go-humanize/ftoa.go @@ -6,6 +6,9 @@ import ( ) func stripTrailingZeros(s string) string { + if !strings.ContainsRune(s, '.') { + return s + } offset := len(s) - 1 for offset > 0 
{ if s[offset] == '.' { diff --git a/vendor/github.com/dustin/go-humanize/number.go b/vendor/github.com/dustin/go-humanize/number.go index dec61865..6470d0d4 100644 --- a/vendor/github.com/dustin/go-humanize/number.go +++ b/vendor/github.com/dustin/go-humanize/number.go @@ -73,7 +73,7 @@ func FormatFloat(format string, n float64) string { if n > math.MaxFloat64 { return "Infinity" } - if n < -math.MaxFloat64 { + if n < (0.0 - math.MaxFloat64) { return "-Infinity" } diff --git a/vendor/github.com/dustin/go-humanize/si.go b/vendor/github.com/dustin/go-humanize/si.go index ae659e0e..8b850198 100644 --- a/vendor/github.com/dustin/go-humanize/si.go +++ b/vendor/github.com/dustin/go-humanize/si.go @@ -8,6 +8,8 @@ import ( ) var siPrefixTable = map[float64]string{ + -30: "q", // quecto + -27: "r", // ronto -24: "y", // yocto -21: "z", // zepto -18: "a", // atto @@ -25,6 +27,8 @@ var siPrefixTable = map[float64]string{ 18: "E", // exa 21: "Z", // zetta 24: "Y", // yotta + 27: "R", // ronna + 30: "Q", // quetta } var revSIPrefixTable = revfmap(siPrefixTable) diff --git a/vendor/github.com/golang/protobuf/proto/buffer.go b/vendor/github.com/golang/protobuf/proto/buffer.go new file mode 100644 index 00000000..e810e6fe --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/buffer.go @@ -0,0 +1,324 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "errors" + "fmt" + + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + WireVarint = 0 + WireFixed32 = 5 + WireFixed64 = 1 + WireBytes = 2 + WireStartGroup = 3 + WireEndGroup = 4 +) + +// EncodeVarint returns the varint encoded bytes of v. +func EncodeVarint(v uint64) []byte { + return protowire.AppendVarint(nil, v) +} + +// SizeVarint returns the length of the varint encoded bytes of v. +// This is equal to len(EncodeVarint(v)). +func SizeVarint(v uint64) int { + return protowire.SizeVarint(v) +} + +// DecodeVarint parses a varint encoded integer from b, +// returning the integer value and the length of the varint. +// It returns (0, 0) if there is a parse error. +func DecodeVarint(b []byte) (uint64, int) { + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, 0 + } + return v, n +} + +// Buffer is a buffer for encoding and decoding the protobuf wire format. +// It may be reused between invocations to reduce memory usage. +type Buffer struct { + buf []byte + idx int + deterministic bool +} + +// NewBuffer allocates a new Buffer initialized with buf, +// where the contents of buf are considered the unread portion of the buffer. +func NewBuffer(buf []byte) *Buffer { + return &Buffer{buf: buf} +} + +// SetDeterministic specifies whether to use deterministic serialization. +// +// Deterministic serialization guarantees that for a given binary, equal +// messages will always be serialized to the same bytes. This implies: +// +// - Repeated serialization of a message will return the same bytes. +// - Different processes of the same binary (which may be executing on +// different machines) will serialize equal messages to the same bytes. +// +// Note that the deterministic serialization is NOT canonical across +// languages. It is not guaranteed to remain stable over time. It is unstable +// across different builds with schema changes due to unknown fields. 
+// Users who need canonical serialization (e.g., persistent storage in a +// canonical form, fingerprinting, etc.) should define their own +// canonicalization specification and implement their own serializer rather +// than relying on this API. +// +// If deterministic serialization is requested, map entries will be sorted +// by keys in lexographical order. This is an implementation detail and +// subject to change. +func (b *Buffer) SetDeterministic(deterministic bool) { + b.deterministic = deterministic +} + +// SetBuf sets buf as the internal buffer, +// where the contents of buf are considered the unread portion of the buffer. +func (b *Buffer) SetBuf(buf []byte) { + b.buf = buf + b.idx = 0 +} + +// Reset clears the internal buffer of all written and unread data. +func (b *Buffer) Reset() { + b.buf = b.buf[:0] + b.idx = 0 +} + +// Bytes returns the internal buffer. +func (b *Buffer) Bytes() []byte { + return b.buf +} + +// Unread returns the unread portion of the buffer. +func (b *Buffer) Unread() []byte { + return b.buf[b.idx:] +} + +// Marshal appends the wire-format encoding of m to the buffer. +func (b *Buffer) Marshal(m Message) error { + var err error + b.buf, err = marshalAppend(b.buf, m, b.deterministic) + return err +} + +// Unmarshal parses the wire-format message in the buffer and +// places the decoded results in m. +// It does not reset m before unmarshaling. +func (b *Buffer) Unmarshal(m Message) error { + err := UnmarshalMerge(b.Unread(), m) + b.idx = len(b.buf) + return err +} + +type unknownFields struct{ XXX_unrecognized protoimpl.UnknownFields } + +func (m *unknownFields) String() string { panic("not implemented") } +func (m *unknownFields) Reset() { panic("not implemented") } +func (m *unknownFields) ProtoMessage() { panic("not implemented") } + +// DebugPrint dumps the encoded bytes of b with a header and footer including s +// to stdout. This is only intended for debugging. +func (*Buffer) DebugPrint(s string, b []byte) { + m := MessageReflect(new(unknownFields)) + m.SetUnknown(b) + b, _ = prototext.MarshalOptions{AllowPartial: true, Indent: "\t"}.Marshal(m.Interface()) + fmt.Printf("==== %s ====\n%s==== %s ====\n", s, b, s) +} + +// EncodeVarint appends an unsigned varint encoding to the buffer. +func (b *Buffer) EncodeVarint(v uint64) error { + b.buf = protowire.AppendVarint(b.buf, v) + return nil +} + +// EncodeZigzag32 appends a 32-bit zig-zag varint encoding to the buffer. +func (b *Buffer) EncodeZigzag32(v uint64) error { + return b.EncodeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) +} + +// EncodeZigzag64 appends a 64-bit zig-zag varint encoding to the buffer. +func (b *Buffer) EncodeZigzag64(v uint64) error { + return b.EncodeVarint(uint64((uint64(v) << 1) ^ uint64((int64(v) >> 63)))) +} + +// EncodeFixed32 appends a 32-bit little-endian integer to the buffer. +func (b *Buffer) EncodeFixed32(v uint64) error { + b.buf = protowire.AppendFixed32(b.buf, uint32(v)) + return nil +} + +// EncodeFixed64 appends a 64-bit little-endian integer to the buffer. +func (b *Buffer) EncodeFixed64(v uint64) error { + b.buf = protowire.AppendFixed64(b.buf, uint64(v)) + return nil +} + +// EncodeRawBytes appends a length-prefixed raw bytes to the buffer. +func (b *Buffer) EncodeRawBytes(v []byte) error { + b.buf = protowire.AppendBytes(b.buf, v) + return nil +} + +// EncodeStringBytes appends a length-prefixed raw bytes to the buffer. +// It does not validate whether v contains valid UTF-8. 
+func (b *Buffer) EncodeStringBytes(v string) error { + b.buf = protowire.AppendString(b.buf, v) + return nil +} + +// EncodeMessage appends a length-prefixed encoded message to the buffer. +func (b *Buffer) EncodeMessage(m Message) error { + var err error + b.buf = protowire.AppendVarint(b.buf, uint64(Size(m))) + b.buf, err = marshalAppend(b.buf, m, b.deterministic) + return err +} + +// DecodeVarint consumes an encoded unsigned varint from the buffer. +func (b *Buffer) DecodeVarint() (uint64, error) { + v, n := protowire.ConsumeVarint(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeZigzag32 consumes an encoded 32-bit zig-zag varint from the buffer. +func (b *Buffer) DecodeZigzag32() (uint64, error) { + v, err := b.DecodeVarint() + if err != nil { + return 0, err + } + return uint64((uint32(v) >> 1) ^ uint32((int32(v&1)<<31)>>31)), nil +} + +// DecodeZigzag64 consumes an encoded 64-bit zig-zag varint from the buffer. +func (b *Buffer) DecodeZigzag64() (uint64, error) { + v, err := b.DecodeVarint() + if err != nil { + return 0, err + } + return uint64((uint64(v) >> 1) ^ uint64((int64(v&1)<<63)>>63)), nil +} + +// DecodeFixed32 consumes a 32-bit little-endian integer from the buffer. +func (b *Buffer) DecodeFixed32() (uint64, error) { + v, n := protowire.ConsumeFixed32(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeFixed64 consumes a 64-bit little-endian integer from the buffer. +func (b *Buffer) DecodeFixed64() (uint64, error) { + v, n := protowire.ConsumeFixed64(b.buf[b.idx:]) + if n < 0 { + return 0, protowire.ParseError(n) + } + b.idx += n + return uint64(v), nil +} + +// DecodeRawBytes consumes a length-prefixed raw bytes from the buffer. +// If alloc is specified, it returns a copy the raw bytes +// rather than a sub-slice of the buffer. +func (b *Buffer) DecodeRawBytes(alloc bool) ([]byte, error) { + v, n := protowire.ConsumeBytes(b.buf[b.idx:]) + if n < 0 { + return nil, protowire.ParseError(n) + } + b.idx += n + if alloc { + v = append([]byte(nil), v...) + } + return v, nil +} + +// DecodeStringBytes consumes a length-prefixed raw bytes from the buffer. +// It does not validate whether the raw bytes contain valid UTF-8. +func (b *Buffer) DecodeStringBytes() (string, error) { + v, n := protowire.ConsumeString(b.buf[b.idx:]) + if n < 0 { + return "", protowire.ParseError(n) + } + b.idx += n + return v, nil +} + +// DecodeMessage consumes a length-prefixed message from the buffer. +// It does not reset m before unmarshaling. +func (b *Buffer) DecodeMessage(m Message) error { + v, err := b.DecodeRawBytes(false) + if err != nil { + return err + } + return UnmarshalMerge(v, m) +} + +// DecodeGroup consumes a message group from the buffer. +// It assumes that the start group marker has already been consumed and +// consumes all bytes until (and including the end group marker). +// It does not reset m before unmarshaling. +func (b *Buffer) DecodeGroup(m Message) error { + v, n, err := consumeGroup(b.buf[b.idx:]) + if err != nil { + return err + } + b.idx += n + return UnmarshalMerge(v, m) +} + +// consumeGroup parses b until it finds an end group marker, returning +// the raw bytes of the message (excluding the end group marker) and the +// the total length of the message (including the end group marker). 
+func consumeGroup(b []byte) ([]byte, int, error) { + b0 := b + depth := 1 // assume this follows a start group marker + for { + _, wtyp, tagLen := protowire.ConsumeTag(b) + if tagLen < 0 { + return nil, 0, protowire.ParseError(tagLen) + } + b = b[tagLen:] + + var valLen int + switch wtyp { + case protowire.VarintType: + _, valLen = protowire.ConsumeVarint(b) + case protowire.Fixed32Type: + _, valLen = protowire.ConsumeFixed32(b) + case protowire.Fixed64Type: + _, valLen = protowire.ConsumeFixed64(b) + case protowire.BytesType: + _, valLen = protowire.ConsumeBytes(b) + case protowire.StartGroupType: + depth++ + case protowire.EndGroupType: + depth-- + default: + return nil, 0, errors.New("proto: cannot parse reserved wire type") + } + if valLen < 0 { + return nil, 0, protowire.ParseError(valLen) + } + b = b[valLen:] + + if depth == 0 { + return b0[:len(b0)-len(b)-tagLen], len(b0) - len(b), nil + } + } +} diff --git a/vendor/github.com/golang/protobuf/proto/clone.go b/vendor/github.com/golang/protobuf/proto/clone.go deleted file mode 100644 index 3cd3249f..00000000 --- a/vendor/github.com/golang/protobuf/proto/clone.go +++ /dev/null @@ -1,253 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer deep copy and merge. -// TODO: RawMessage. - -package proto - -import ( - "fmt" - "log" - "reflect" - "strings" -) - -// Clone returns a deep copy of a protocol buffer. -func Clone(src Message) Message { - in := reflect.ValueOf(src) - if in.IsNil() { - return src - } - out := reflect.New(in.Type().Elem()) - dst := out.Interface().(Message) - Merge(dst, src) - return dst -} - -// Merger is the interface representing objects that can merge messages of the same type. -type Merger interface { - // Merge merges src into this message. - // Required and optional fields that are set in src will be set to that value in dst. - // Elements of repeated fields will be appended. 
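The Buffer methods added in the buffer.go hunk above are now thin wrappers over protowire rather than hand-rolled codecs. A minimal round-trip sketch of that API, assuming the vendored package is importable as `github.com/golang/protobuf/proto`:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	b := proto.NewBuffer(nil)

	// Encode* appends to the internal buffer; Decode* consumes from the
	// unread portion, so one Buffer can be used for a round trip.
	_ = b.EncodeVarint(300)                 // wire bytes: ac 02
	_ = b.EncodeZigzag64(uint64(int64(-5))) // zig-zag maps -5 to 9 before varint encoding

	v, _ := b.DecodeVarint()
	z, _ := b.DecodeZigzag64()
	fmt.Println(v, int64(z)) // 300 -5
}
```

The varint, zig-zag, and fixed-width encoders shown in the hunk always return nil; the error results are kept only for signature compatibility with the old implementation.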
- // - // Merge may panic if called with a different argument type than the receiver. - Merge(src Message) -} - -// generatedMerger is the custom merge method that generated protos will have. -// We must add this method since a generate Merge method will conflict with -// many existing protos that have a Merge data field already defined. -type generatedMerger interface { - XXX_Merge(src Message) -} - -// Merge merges src into dst. -// Required and optional fields that are set in src will be set to that value in dst. -// Elements of repeated fields will be appended. -// Merge panics if src and dst are not the same type, or if dst is nil. -func Merge(dst, src Message) { - if m, ok := dst.(Merger); ok { - m.Merge(src) - return - } - - in := reflect.ValueOf(src) - out := reflect.ValueOf(dst) - if out.IsNil() { - panic("proto: nil destination") - } - if in.Type() != out.Type() { - panic(fmt.Sprintf("proto.Merge(%T, %T) type mismatch", dst, src)) - } - if in.IsNil() { - return // Merge from nil src is a noop - } - if m, ok := dst.(generatedMerger); ok { - m.XXX_Merge(src) - return - } - mergeStruct(out.Elem(), in.Elem()) -} - -func mergeStruct(out, in reflect.Value) { - sprop := GetProperties(in.Type()) - for i := 0; i < in.NumField(); i++ { - f := in.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - mergeAny(out.Field(i), in.Field(i), false, sprop.Prop[i]) - } - - if emIn, err := extendable(in.Addr().Interface()); err == nil { - emOut, _ := extendable(out.Addr().Interface()) - mIn, muIn := emIn.extensionsRead() - if mIn != nil { - mOut := emOut.extensionsWrite() - muIn.Lock() - mergeExtension(mOut, mIn) - muIn.Unlock() - } - } - - uf := in.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return - } - uin := uf.Bytes() - if len(uin) > 0 { - out.FieldByName("XXX_unrecognized").SetBytes(append([]byte(nil), uin...)) - } -} - -// mergeAny performs a merge between two values of the same type. -// viaPtr indicates whether the values were indirected through a pointer (implying proto2). -// prop is set if this is a struct field (it may be nil). -func mergeAny(out, in reflect.Value, viaPtr bool, prop *Properties) { - if in.Type() == protoMessageType { - if !in.IsNil() { - if out.IsNil() { - out.Set(reflect.ValueOf(Clone(in.Interface().(Message)))) - } else { - Merge(out.Interface().(Message), in.Interface().(Message)) - } - } - return - } - switch in.Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - if !viaPtr && isProto3Zero(in) { - return - } - out.Set(in) - case reflect.Interface: - // Probably a oneof field; copy non-nil values. - if in.IsNil() { - return - } - // Allocate destination if it is not set, or set to a different type. - // Otherwise we will merge as normal. - if out.IsNil() || out.Elem().Type() != in.Elem().Type() { - out.Set(reflect.New(in.Elem().Elem().Type())) // interface -> *T -> T -> new(T) - } - mergeAny(out.Elem(), in.Elem(), false, nil) - case reflect.Map: - if in.Len() == 0 { - return - } - if out.IsNil() { - out.Set(reflect.MakeMap(in.Type())) - } - // For maps with value types of *T or []byte we need to deep copy each value. 
- elemKind := in.Type().Elem().Kind() - for _, key := range in.MapKeys() { - var val reflect.Value - switch elemKind { - case reflect.Ptr: - val = reflect.New(in.Type().Elem().Elem()) - mergeAny(val, in.MapIndex(key), false, nil) - case reflect.Slice: - val = in.MapIndex(key) - val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) - default: - val = in.MapIndex(key) - } - out.SetMapIndex(key, val) - } - case reflect.Ptr: - if in.IsNil() { - return - } - if out.IsNil() { - out.Set(reflect.New(in.Elem().Type())) - } - mergeAny(out.Elem(), in.Elem(), true, nil) - case reflect.Slice: - if in.IsNil() { - return - } - if in.Type().Elem().Kind() == reflect.Uint8 { - // []byte is a scalar bytes field, not a repeated field. - - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value, and should not - // be merged. - if prop != nil && prop.proto3 && in.Len() == 0 { - return - } - - // Make a deep copy. - // Append to []byte{} instead of []byte(nil) so that we never end up - // with a nil result. - out.SetBytes(append([]byte{}, in.Bytes()...)) - return - } - n := in.Len() - if out.IsNil() { - out.Set(reflect.MakeSlice(in.Type(), 0, n)) - } - switch in.Type().Elem().Kind() { - case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int32, reflect.Int64, - reflect.String, reflect.Uint32, reflect.Uint64: - out.Set(reflect.AppendSlice(out, in)) - default: - for i := 0; i < n; i++ { - x := reflect.Indirect(reflect.New(in.Type().Elem())) - mergeAny(x, in.Index(i), false, nil) - out.Set(reflect.Append(out, x)) - } - } - case reflect.Struct: - mergeStruct(out, in) - default: - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to copy %v", in) - } -} - -func mergeExtension(out, in map[int32]Extension) { - for extNum, eIn := range in { - eOut := Extension{desc: eIn.desc} - if eIn.value != nil { - v := reflect.New(reflect.TypeOf(eIn.value)).Elem() - mergeAny(v, reflect.ValueOf(eIn.value), false, nil) - eOut.value = v.Interface() - } - if eIn.enc != nil { - eOut.enc = make([]byte, len(eIn.enc)) - copy(eOut.enc, eIn.enc) - } - - out[extNum] = eOut - } -} diff --git a/vendor/github.com/golang/protobuf/proto/decode.go b/vendor/github.com/golang/protobuf/proto/decode.go deleted file mode 100644 index d9aa3c42..00000000 --- a/vendor/github.com/golang/protobuf/proto/decode.go +++ /dev/null @@ -1,428 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
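The reflection-based clone.go deleted above documented Merge's contract: set scalar fields overwrite the destination, repeated elements are appended. The replacement (not shown in this hunk) delegates to protobuf-go, which keeps that contract, so callers of the legacy proto.Merge should see the same behavior. A small sketch using a well-known type, assuming google.golang.org/protobuf's structpb package is available via the new indirect dependency:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	dst := &structpb.ListValue{Values: []*structpb.Value{structpb.NewStringValue("a")}}
	src := &structpb.ListValue{Values: []*structpb.Value{structpb.NewStringValue("b")}}

	// Repeated elements are appended, matching the contract documented in the
	// reflection-based Merge that this change deletes.
	proto.Merge(dst, src)
	fmt.Println(len(dst.GetValues())) // 2
}
```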
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for decoding protocol buffer data to construct in-memory representations. - */ - -import ( - "errors" - "fmt" - "io" -) - -// errOverflow is returned when an integer is too large to be represented. -var errOverflow = errors.New("proto: integer overflow") - -// ErrInternalBadWireType is returned by generated code when an incorrect -// wire type is encountered. It does not get returned to user code. -var ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") - -// DecodeVarint reads a varint-encoded integer from the slice. -// It returns the integer and the number of bytes consumed, or -// zero if there is not enough. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func DecodeVarint(buf []byte) (x uint64, n int) { - for shift := uint(0); shift < 64; shift += 7 { - if n >= len(buf) { - return 0, 0 - } - b := uint64(buf[n]) - n++ - x |= (b & 0x7F) << shift - if (b & 0x80) == 0 { - return x, n - } - } - - // The number is too large to represent in a 64-bit value. - return 0, 0 -} - -func (p *Buffer) decodeVarintSlow() (x uint64, err error) { - i := p.index - l := len(p.buf) - - for shift := uint(0); shift < 64; shift += 7 { - if i >= l { - err = io.ErrUnexpectedEOF - return - } - b := p.buf[i] - i++ - x |= (uint64(b) & 0x7F) << shift - if b < 0x80 { - p.index = i - return - } - } - - // The number is too large to represent in a 64-bit value. - err = errOverflow - return -} - -// DecodeVarint reads a varint-encoded integer from the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func (p *Buffer) DecodeVarint() (x uint64, err error) { - i := p.index - buf := p.buf - - if i >= len(buf) { - return 0, io.ErrUnexpectedEOF - } else if buf[i] < 0x80 { - p.index++ - return uint64(buf[i]), nil - } else if len(buf)-i < 10 { - return p.decodeVarintSlow() - } - - var b uint64 - // we already checked the first byte - x = uint64(buf[i]) - 0x80 - i++ - - b = uint64(buf[i]) - i++ - x += b << 7 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 7 - - b = uint64(buf[i]) - i++ - x += b << 14 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 14 - - b = uint64(buf[i]) - i++ - x += b << 21 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 21 - - b = uint64(buf[i]) - i++ - x += b << 28 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 28 - - b = uint64(buf[i]) - i++ - x += b << 35 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 35 - - b = uint64(buf[i]) - i++ - x += b << 42 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 42 - - b = uint64(buf[i]) - i++ - x += b << 49 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 49 - - b = uint64(buf[i]) - i++ - x += b << 56 - if b&0x80 == 0 { - goto done - } - x -= 0x80 << 56 - - b = uint64(buf[i]) - i++ - x += b << 63 - if b&0x80 == 0 { - goto done - } - // x -= 0x80 << 63 // Always zero. 
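The unrolled fast path being deleted in this decode.go hunk (it continues below) implements standard base-128 varint decoding; the replacement code paths lean on protowire for the same thing. A worked example of the byte layout, assuming protowire is importable from the google.golang.org/protobuf dependency:

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// 300 = 0b1_0010_1100. Split into 7-bit groups, least significant first,
	// with the continuation bit (0x80) set on every byte except the last:
	//   0x2c | 0x80 = 0xac, then 0x02.
	raw := []byte{0xac, 0x02}

	v, n := protowire.ConsumeVarint(raw)
	fmt.Println(v, n) // 300 2
}
```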
- - return 0, errOverflow - -done: - p.index = i - return x, nil -} - -// DecodeFixed64 reads a 64-bit integer from the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. -func (p *Buffer) DecodeFixed64() (x uint64, err error) { - // x, err already 0 - i := p.index + 8 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-8]) - x |= uint64(p.buf[i-7]) << 8 - x |= uint64(p.buf[i-6]) << 16 - x |= uint64(p.buf[i-5]) << 24 - x |= uint64(p.buf[i-4]) << 32 - x |= uint64(p.buf[i-3]) << 40 - x |= uint64(p.buf[i-2]) << 48 - x |= uint64(p.buf[i-1]) << 56 - return -} - -// DecodeFixed32 reads a 32-bit integer from the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) DecodeFixed32() (x uint64, err error) { - // x, err already 0 - i := p.index + 4 - if i < 0 || i > len(p.buf) { - err = io.ErrUnexpectedEOF - return - } - p.index = i - - x = uint64(p.buf[i-4]) - x |= uint64(p.buf[i-3]) << 8 - x |= uint64(p.buf[i-2]) << 16 - x |= uint64(p.buf[i-1]) << 24 - return -} - -// DecodeZigzag64 reads a zigzag-encoded 64-bit integer -// from the Buffer. -// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) DecodeZigzag64() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = (x >> 1) ^ uint64((int64(x&1)<<63)>>63) - return -} - -// DecodeZigzag32 reads a zigzag-encoded 32-bit integer -// from the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) DecodeZigzag32() (x uint64, err error) { - x, err = p.DecodeVarint() - if err != nil { - return - } - x = uint64((uint32(x) >> 1) ^ uint32((int32(x&1)<<31)>>31)) - return -} - -// DecodeRawBytes reads a count-delimited byte buffer from the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. -func (p *Buffer) DecodeRawBytes(alloc bool) (buf []byte, err error) { - n, err := p.DecodeVarint() - if err != nil { - return nil, err - } - - nb := int(n) - if nb < 0 { - return nil, fmt.Errorf("proto: bad byte length %d", nb) - } - end := p.index + nb - if end < p.index || end > len(p.buf) { - return nil, io.ErrUnexpectedEOF - } - - if !alloc { - // todo: check if can get more uses of alloc=false - buf = p.buf[p.index:end] - p.index += nb - return - } - - buf = make([]byte, nb) - copy(buf, p.buf[p.index:]) - p.index += nb - return -} - -// DecodeStringBytes reads an encoded string from the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) DecodeStringBytes() (s string, err error) { - buf, err := p.DecodeRawBytes(false) - if err != nil { - return - } - return string(buf), nil -} - -// Unmarshaler is the interface representing objects that can -// unmarshal themselves. The argument points to data that may be -// overwritten, so implementations should not keep references to the -// buffer. -// Unmarshal implementations should not clear the receiver. -// Any unmarshaled data should be merged into the receiver. -// Callers of Unmarshal that do not want to retain existing data -// should Reset the receiver before calling Unmarshal. -type Unmarshaler interface { - Unmarshal([]byte) error -} - -// newUnmarshaler is the interface representing objects that can -// unmarshal themselves. The semantics are identical to Unmarshaler. -// -// This exists to support protoc-gen-go generated messages. 
-// The proto package will stop type-asserting to this interface in the future. -// -// DO NOT DEPEND ON THIS. -type newUnmarshaler interface { - XXX_Unmarshal([]byte) error -} - -// Unmarshal parses the protocol buffer representation in buf and places the -// decoded result in pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// Unmarshal resets pb before starting to unmarshal, so any -// existing data in pb is always removed. Use UnmarshalMerge -// to preserve and append to existing data. -func Unmarshal(buf []byte, pb Message) error { - pb.Reset() - if u, ok := pb.(newUnmarshaler); ok { - return u.XXX_Unmarshal(buf) - } - if u, ok := pb.(Unmarshaler); ok { - return u.Unmarshal(buf) - } - return NewBuffer(buf).Unmarshal(pb) -} - -// UnmarshalMerge parses the protocol buffer representation in buf and -// writes the decoded result to pb. If the struct underlying pb does not match -// the data in buf, the results can be unpredictable. -// -// UnmarshalMerge merges into existing data in pb. -// Most code should use Unmarshal instead. -func UnmarshalMerge(buf []byte, pb Message) error { - if u, ok := pb.(newUnmarshaler); ok { - return u.XXX_Unmarshal(buf) - } - if u, ok := pb.(Unmarshaler); ok { - // NOTE: The history of proto have unfortunately been inconsistent - // whether Unmarshaler should or should not implicitly clear itself. - // Some implementations do, most do not. - // Thus, calling this here may or may not do what people want. - // - // See https://github.com/golang/protobuf/issues/424 - return u.Unmarshal(buf) - } - return NewBuffer(buf).Unmarshal(pb) -} - -// DecodeMessage reads a count-delimited message from the Buffer. -func (p *Buffer) DecodeMessage(pb Message) error { - enc, err := p.DecodeRawBytes(false) - if err != nil { - return err - } - return NewBuffer(enc).Unmarshal(pb) -} - -// DecodeGroup reads a tag-delimited group from the Buffer. -// StartGroup tag is already consumed. This function consumes -// EndGroup tag. -func (p *Buffer) DecodeGroup(pb Message) error { - b := p.buf[p.index:] - x, y := findEndGroup(b) - if x < 0 { - return io.ErrUnexpectedEOF - } - err := Unmarshal(b[:x], pb) - p.index += y - return err -} - -// Unmarshal parses the protocol buffer representation in the -// Buffer and places the decoded result in pb. If the struct -// underlying pb does not match the data in the buffer, the results can be -// unpredictable. -// -// Unlike proto.Unmarshal, this does not reset pb before starting to unmarshal. -func (p *Buffer) Unmarshal(pb Message) error { - // If the object can unmarshal itself, let it. - if u, ok := pb.(newUnmarshaler); ok { - err := u.XXX_Unmarshal(p.buf[p.index:]) - p.index = len(p.buf) - return err - } - if u, ok := pb.(Unmarshaler); ok { - // NOTE: The history of proto have unfortunately been inconsistent - // whether Unmarshaler should or should not implicitly clear itself. - // Some implementations do, most do not. - // Thus, calling this here may or may not do what people want. - // - // See https://github.com/golang/protobuf/issues/424 - err := u.Unmarshal(p.buf[p.index:]) - p.index = len(p.buf) - return err - } - - // Slow workaround for messages that aren't Unmarshalers. - // This includes some hand-coded .pb.go files and - // bootstrap protos. - // TODO: fix all of those and then add Unmarshal to - // the Message interface. Then: - // The cast above and code below can be deleted. - // The old unmarshaler can be deleted. 
- // Clients can call Unmarshal directly (can already do that, actually). - var info InternalMessageInfo - err := info.Unmarshal(pb, p.buf[p.index:]) - p.index = len(p.buf) - return err -} diff --git a/vendor/github.com/golang/protobuf/proto/defaults.go b/vendor/github.com/golang/protobuf/proto/defaults.go new file mode 100644 index 00000000..d399bf06 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/defaults.go @@ -0,0 +1,63 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" +) + +// SetDefaults sets unpopulated scalar fields to their default values. +// Fields within a oneof are not set even if they have a default value. +// SetDefaults is recursively called upon any populated message fields. +func SetDefaults(m Message) { + if m != nil { + setDefaults(MessageReflect(m)) + } +} + +func setDefaults(m protoreflect.Message) { + fds := m.Descriptor().Fields() + for i := 0; i < fds.Len(); i++ { + fd := fds.Get(i) + if !m.Has(fd) { + if fd.HasDefault() && fd.ContainingOneof() == nil { + v := fd.Default() + if fd.Kind() == protoreflect.BytesKind { + v = protoreflect.ValueOf(append([]byte(nil), v.Bytes()...)) // copy the default bytes + } + m.Set(fd, v) + } + continue + } + } + + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + switch { + // Handle singular message. + case fd.Cardinality() != protoreflect.Repeated: + if fd.Message() != nil { + setDefaults(m.Get(fd).Message()) + } + // Handle list of messages. + case fd.IsList(): + if fd.Message() != nil { + ls := m.Get(fd).List() + for i := 0; i < ls.Len(); i++ { + setDefaults(ls.Get(i).Message()) + } + } + // Handle map of messages. + case fd.IsMap(): + if fd.MapValue().Message() != nil { + ms := m.Get(fd).Map() + ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool { + setDefaults(v.Message()) + return true + }) + } + } + return true + }) +} diff --git a/vendor/github.com/golang/protobuf/proto/deprecated.go b/vendor/github.com/golang/protobuf/proto/deprecated.go new file mode 100644 index 00000000..e8db57e0 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/deprecated.go @@ -0,0 +1,113 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "encoding/json" + "errors" + "fmt" + "strconv" + + protoV2 "google.golang.org/protobuf/proto" +) + +var ( + // Deprecated: No longer returned. + ErrNil = errors.New("proto: Marshal called with nil") + + // Deprecated: No longer returned. + ErrTooLarge = errors.New("proto: message encodes to over 2 GB") + + // Deprecated: No longer returned. + ErrInternalBadWireType = errors.New("proto: internal error: bad wiretype for oneof") +) + +// Deprecated: Do not use. +type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 } + +// Deprecated: Do not use. +func GetStats() Stats { return Stats{} } + +// Deprecated: Do not use. +func MarshalMessageSet(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func UnmarshalMessageSet([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: Do not use. 
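SetDefaults, added in the defaults.go hunk above, now walks the message via protoreflect instead of generated defaults tables. A fragment (not a complete program) sketching the caller-visible behavior; `examplepb.Config` is a hypothetical proto2 message used only for illustration:

```go
// Hypothetical proto2 definition, assumed to be generated into package examplepb:
//
//	message Config { optional int32 retries = 1 [default = 3]; }

cfg := &examplepb.Config{}
proto.SetDefaults(cfg)        // fills unset scalar fields with their declared defaults, recursively
fmt.Println(cfg.GetRetries()) // 3 — the declared default, since retries was never set
```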
+func MarshalMessageSetJSON(interface{}) ([]byte, error) { + return nil, errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func UnmarshalMessageSetJSON([]byte, interface{}) error { + return errors.New("proto: not implemented") +} + +// Deprecated: Do not use. +func RegisterMessageSetType(Message, int32, string) {} + +// Deprecated: Do not use. +func EnumName(m map[int32]string, v int32) string { + s, ok := m[v] + if ok { + return s + } + return strconv.Itoa(int(v)) +} + +// Deprecated: Do not use. +func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { + if data[0] == '"' { + // New style: enums are strings. + var repr string + if err := json.Unmarshal(data, &repr); err != nil { + return -1, err + } + val, ok := m[repr] + if !ok { + return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) + } + return val, nil + } + // Old style: enums are ints. + var val int32 + if err := json.Unmarshal(data, &val); err != nil { + return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) + } + return val, nil +} + +// Deprecated: Do not use; this type existed for intenal-use only. +type InternalMessageInfo struct{} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) DiscardUnknown(m Message) { + DiscardUnknown(m) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Marshal(b []byte, m Message, deterministic bool) ([]byte, error) { + return protoV2.MarshalOptions{Deterministic: deterministic}.MarshalAppend(b, MessageV2(m)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Merge(dst, src Message) { + protoV2.Merge(MessageV2(dst), MessageV2(src)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Size(m Message) int { + return protoV2.Size(MessageV2(m)) +} + +// Deprecated: Do not use; this method existed for intenal-use only. +func (*InternalMessageInfo) Unmarshal(m Message, b []byte) error { + return protoV2.UnmarshalOptions{Merge: true}.Unmarshal(b, MessageV2(m)) +} diff --git a/vendor/github.com/golang/protobuf/proto/discard.go b/vendor/github.com/golang/protobuf/proto/discard.go index dea2617c..2187e877 100644 --- a/vendor/github.com/golang/protobuf/proto/discard.go +++ b/vendor/github.com/golang/protobuf/proto/discard.go @@ -1,48 +1,13 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2017 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
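The deprecated.go additions above keep EnumName and UnmarshalJSONEnum as plain map lookups so older generated code keeps compiling. They are deprecated, but the retained behavior is easy to pin down; a runnable sketch:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
)

func main() {
	names := map[int32]string{0: "STATE_UNKNOWN", 1: "STATE_ACTIVE"}
	values := map[string]int32{"STATE_UNKNOWN": 0, "STATE_ACTIVE": 1}

	fmt.Println(proto.EnumName(names, 1)) // STATE_ACTIVE
	fmt.Println(proto.EnumName(names, 7)) // 7 (unknown values fall back to the number)

	// Both JSON spellings are accepted: the string name or the raw integer.
	v, err := proto.UnmarshalJSONEnum(values, []byte(`"STATE_ACTIVE"`), "State")
	fmt.Println(v, err) // 1 <nil>
	v, err = proto.UnmarshalJSONEnum(values, []byte(`1`), "State")
	fmt.Println(v, err) // 1 <nil>
}
```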
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package proto import ( - "fmt" - "reflect" - "strings" - "sync" - "sync/atomic" + "google.golang.org/protobuf/reflect/protoreflect" ) -type generatedDiscarder interface { - XXX_DiscardUnknown() -} - // DiscardUnknown recursively discards all unknown fields from this message // and all embedded messages. // @@ -51,300 +16,43 @@ type generatedDiscarder interface { // marshal to be able to produce a message that continues to have those // unrecognized fields. To avoid this, DiscardUnknown is used to // explicitly clear the unknown fields after unmarshaling. -// -// For proto2 messages, the unknown fields of message extensions are only -// discarded from messages that have been accessed via GetExtension. func DiscardUnknown(m Message) { - if m, ok := m.(generatedDiscarder); ok { - m.XXX_DiscardUnknown() - return - } - // TODO: Dynamically populate a InternalMessageInfo for legacy messages, - // but the master branch has no implementation for InternalMessageInfo, - // so it would be more work to replicate that approach. - discardLegacy(m) -} - -// DiscardUnknown recursively discards all unknown fields. -func (a *InternalMessageInfo) DiscardUnknown(m Message) { - di := atomicLoadDiscardInfo(&a.discard) - if di == nil { - di = getDiscardInfo(reflect.TypeOf(m).Elem()) - atomicStoreDiscardInfo(&a.discard, di) - } - di.discard(toPointer(&m)) -} - -type discardInfo struct { - typ reflect.Type - - initialized int32 // 0: only typ is valid, 1: everything is valid - lock sync.Mutex - - fields []discardFieldInfo - unrecognized field -} - -type discardFieldInfo struct { - field field // Offset of field, guaranteed to be valid - discard func(src pointer) -} - -var ( - discardInfoMap = map[reflect.Type]*discardInfo{} - discardInfoLock sync.Mutex -) - -func getDiscardInfo(t reflect.Type) *discardInfo { - discardInfoLock.Lock() - defer discardInfoLock.Unlock() - di := discardInfoMap[t] - if di == nil { - di = &discardInfo{typ: t} - discardInfoMap[t] = di + if m != nil { + discardUnknown(MessageReflect(m)) } - return di } -func (di *discardInfo) discard(src pointer) { - if src.isNil() { - return // Nothing to do. - } - - if atomic.LoadInt32(&di.initialized) == 0 { - di.computeDiscardInfo() - } - - for _, fi := range di.fields { - sfp := src.offset(fi.field) - fi.discard(sfp) - } - - // For proto2 messages, only discard unknown fields in message extensions - // that have been accessed via GetExtension. - if em, err := extendable(src.asPointerTo(di.typ).Interface()); err == nil { - // Ignore lock since DiscardUnknown is not concurrency safe. 
- emm, _ := em.extensionsRead() - for _, mx := range emm { - if m, ok := mx.value.(Message); ok { - DiscardUnknown(m) +func discardUnknown(m protoreflect.Message) { + m.Range(func(fd protoreflect.FieldDescriptor, val protoreflect.Value) bool { + switch { + // Handle singular message. + case fd.Cardinality() != protoreflect.Repeated: + if fd.Message() != nil { + discardUnknown(m.Get(fd).Message()) } - } - } - - if di.unrecognized.IsValid() { - *src.offset(di.unrecognized).toBytes() = nil - } -} - -func (di *discardInfo) computeDiscardInfo() { - di.lock.Lock() - defer di.lock.Unlock() - if di.initialized != 0 { - return - } - t := di.typ - n := t.NumField() - - for i := 0; i < n; i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - - dfi := discardFieldInfo{field: toField(&f)} - tf := f.Type - - // Unwrap tf to get its most basic type. - var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic(fmt.Sprintf("%v.%s cannot be a slice of pointers to primitive types", t, f.Name)) - } - - switch tf.Kind() { - case reflect.Struct: - switch { - case !isPointer: - panic(fmt.Sprintf("%v.%s cannot be a direct struct value", t, f.Name)) - case isSlice: // E.g., []*pb.T - di := getDiscardInfo(tf) - dfi.discard = func(src pointer) { - sps := src.getPointerSlice() - for _, sp := range sps { - if !sp.isNil() { - di.discard(sp) - } - } - } - default: // E.g., *pb.T - di := getDiscardInfo(tf) - dfi.discard = func(src pointer) { - sp := src.getPointer() - if !sp.isNil() { - di.discard(sp) - } + // Handle list of messages. + case fd.IsList(): + if fd.Message() != nil { + ls := m.Get(fd).List() + for i := 0; i < ls.Len(); i++ { + discardUnknown(ls.Get(i).Message()) } } - case reflect.Map: - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%v.%s cannot be a pointer to a map or a slice of map values", t, f.Name)) - default: // E.g., map[K]V - if tf.Elem().Kind() == reflect.Ptr { // Proto struct (e.g., *T) - dfi.discard = func(src pointer) { - sm := src.asPointerTo(tf).Elem() - if sm.Len() == 0 { - return - } - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - DiscardUnknown(val.Interface().(Message)) - } - } - } else { - dfi.discard = func(pointer) {} // Noop - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%v.%s cannot be a pointer to a interface or a slice of interface values", t, f.Name)) - default: // E.g., interface{} - // TODO: Make this faster? - dfi.discard = func(src pointer) { - su := src.asPointerTo(tf).Elem() - if !su.IsNil() { - sv := su.Elem().Elem().Field(0) - if sv.Kind() == reflect.Ptr && sv.IsNil() { - return - } - switch sv.Type().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - DiscardUnknown(sv.Interface().(Message)) - } - } - } + // Handle map of messages. 
+ case fd.IsMap(): + if fd.MapValue().Message() != nil { + ms := m.Get(fd).Map() + ms.Range(func(_ protoreflect.MapKey, v protoreflect.Value) bool { + discardUnknown(v.Message()) + return true + }) } - default: - continue - } - di.fields = append(di.fields, dfi) - } - - di.unrecognized = invalidField - if f, ok := t.FieldByName("XXX_unrecognized"); ok { - if f.Type != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - di.unrecognized = toField(&f) - } - - atomic.StoreInt32(&di.initialized, 1) -} - -func discardLegacy(m Message) { - v := reflect.ValueOf(m) - if v.Kind() != reflect.Ptr || v.IsNil() { - return - } - v = v.Elem() - if v.Kind() != reflect.Struct { - return - } - t := v.Type() - - for i := 0; i < v.NumField(); i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue } - vf := v.Field(i) - tf := f.Type + return true + }) - // Unwrap tf to get its most basic type. - var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic(fmt.Sprintf("%T.%s cannot be a slice of pointers to primitive types", m, f.Name)) - } - - switch tf.Kind() { - case reflect.Struct: - switch { - case !isPointer: - panic(fmt.Sprintf("%T.%s cannot be a direct struct value", m, f.Name)) - case isSlice: // E.g., []*pb.T - for j := 0; j < vf.Len(); j++ { - discardLegacy(vf.Index(j).Interface().(Message)) - } - default: // E.g., *pb.T - discardLegacy(vf.Interface().(Message)) - } - case reflect.Map: - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%T.%s cannot be a pointer to a map or a slice of map values", m, f.Name)) - default: // E.g., map[K]V - tv := vf.Type().Elem() - if tv.Kind() == reflect.Ptr && tv.Implements(protoMessageType) { // Proto struct (e.g., *T) - for _, key := range vf.MapKeys() { - val := vf.MapIndex(key) - discardLegacy(val.Interface().(Message)) - } - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic(fmt.Sprintf("%T.%s cannot be a pointer to a interface or a slice of interface values", m, f.Name)) - default: // E.g., test_proto.isCommunique_Union interface - if !vf.IsNil() && f.Tag.Get("protobuf_oneof") != "" { - vf = vf.Elem() // E.g., *test_proto.Communique_Msg - if !vf.IsNil() { - vf = vf.Elem() // E.g., test_proto.Communique_Msg - vf = vf.Field(0) // E.g., Proto struct (e.g., *T) or primitive value - if vf.Kind() == reflect.Ptr { - discardLegacy(vf.Interface().(Message)) - } - } - } - } - } - } - - if vf := v.FieldByName("XXX_unrecognized"); vf.IsValid() { - if vf.Type() != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - vf.Set(reflect.ValueOf([]byte(nil))) - } - - // For proto2 messages, only discard unknown fields in message extensions - // that have been accessed via GetExtension. - if em, err := extendable(m); err == nil { - // Ignore lock since discardLegacy is not concurrency safe. - emm, _ := em.extensionsRead() - for _, mx := range emm { - if m, ok := mx.value.(Message); ok { - discardLegacy(m) - } - } + // Discard unknown fields. 
+ if len(m.GetUnknown()) > 0 { + m.SetUnknown(nil) } } diff --git a/vendor/github.com/golang/protobuf/proto/encode.go b/vendor/github.com/golang/protobuf/proto/encode.go deleted file mode 100644 index 3abfed2c..00000000 --- a/vendor/github.com/golang/protobuf/proto/encode.go +++ /dev/null @@ -1,203 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - -import ( - "errors" - "reflect" -) - -var ( - // errRepeatedHasNil is the error returned if Marshal is called with - // a struct with a repeated field containing a nil element. - errRepeatedHasNil = errors.New("proto: repeated field has nil element") - - // errOneofHasNil is the error returned if Marshal is called with - // a struct with a oneof field containing a nil element. - errOneofHasNil = errors.New("proto: oneof field has nil value") - - // ErrNil is the error returned if Marshal is called with nil. - ErrNil = errors.New("proto: Marshal called with nil") - - // ErrTooLarge is the error returned if Marshal is called with a - // message that encodes to >2GB. - ErrTooLarge = errors.New("proto: message encodes to over 2 GB") -) - -// The fundamental encoders that put bytes on the wire. -// Those that take integer types all accept uint64 and are -// therefore of type valueEncoder. - -const maxVarintBytes = 10 // maximum length of a varint - -// EncodeVarint returns the varint encoding of x. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -// Not used by the package itself, but helpful to clients -// wishing to use the same encoding. 
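DiscardUnknown, rewritten in the discard.go hunk above, now clears unknown fields through protoreflect instead of the old reflection tables. A runnable sketch using emptypb.Empty (every field fed to it is unknown by definition), assuming the google.golang.org/protobuf packages pulled in by this update are available:

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	"google.golang.org/protobuf/encoding/protowire"
	"google.golang.org/protobuf/types/known/emptypb"
)

func main() {
	// Hand-build a field that Empty does not declare: field 1, varint 42.
	raw := protowire.AppendTag(nil, 1, protowire.VarintType)
	raw = protowire.AppendVarint(raw, 42)

	m := &emptypb.Empty{}
	if err := proto.Unmarshal(raw, m); err != nil {
		panic(err)
	}

	fmt.Println(len(proto.MessageReflect(m).GetUnknown()) > 0) // true: the field was kept as unknown
	proto.DiscardUnknown(m)
	fmt.Println(len(proto.MessageReflect(m).GetUnknown()) > 0) // false: DiscardUnknown dropped it
}
```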
-func EncodeVarint(x uint64) []byte { - var buf [maxVarintBytes]byte - var n int - for n = 0; x > 127; n++ { - buf[n] = 0x80 | uint8(x&0x7F) - x >>= 7 - } - buf[n] = uint8(x) - n++ - return buf[0:n] -} - -// EncodeVarint writes a varint-encoded integer to the Buffer. -// This is the format for the -// int32, int64, uint32, uint64, bool, and enum -// protocol buffer types. -func (p *Buffer) EncodeVarint(x uint64) error { - for x >= 1<<7 { - p.buf = append(p.buf, uint8(x&0x7f|0x80)) - x >>= 7 - } - p.buf = append(p.buf, uint8(x)) - return nil -} - -// SizeVarint returns the varint encoding size of an integer. -func SizeVarint(x uint64) int { - switch { - case x < 1<<7: - return 1 - case x < 1<<14: - return 2 - case x < 1<<21: - return 3 - case x < 1<<28: - return 4 - case x < 1<<35: - return 5 - case x < 1<<42: - return 6 - case x < 1<<49: - return 7 - case x < 1<<56: - return 8 - case x < 1<<63: - return 9 - } - return 10 -} - -// EncodeFixed64 writes a 64-bit integer to the Buffer. -// This is the format for the -// fixed64, sfixed64, and double protocol buffer types. -func (p *Buffer) EncodeFixed64(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24), - uint8(x>>32), - uint8(x>>40), - uint8(x>>48), - uint8(x>>56)) - return nil -} - -// EncodeFixed32 writes a 32-bit integer to the Buffer. -// This is the format for the -// fixed32, sfixed32, and float protocol buffer types. -func (p *Buffer) EncodeFixed32(x uint64) error { - p.buf = append(p.buf, - uint8(x), - uint8(x>>8), - uint8(x>>16), - uint8(x>>24)) - return nil -} - -// EncodeZigzag64 writes a zigzag-encoded 64-bit integer -// to the Buffer. -// This is the format used for the sint64 protocol buffer type. -func (p *Buffer) EncodeZigzag64(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((x << 1) ^ uint64((int64(x) >> 63)))) -} - -// EncodeZigzag32 writes a zigzag-encoded 32-bit integer -// to the Buffer. -// This is the format used for the sint32 protocol buffer type. -func (p *Buffer) EncodeZigzag32(x uint64) error { - // use signed number to get arithmetic right shift. - return p.EncodeVarint(uint64((uint32(x) << 1) ^ uint32((int32(x) >> 31)))) -} - -// EncodeRawBytes writes a count-delimited byte buffer to the Buffer. -// This is the format used for the bytes protocol buffer -// type and for embedded messages. -func (p *Buffer) EncodeRawBytes(b []byte) error { - p.EncodeVarint(uint64(len(b))) - p.buf = append(p.buf, b...) - return nil -} - -// EncodeStringBytes writes an encoded string to the Buffer. -// This is the format used for the proto2 string type. -func (p *Buffer) EncodeStringBytes(s string) error { - p.EncodeVarint(uint64(len(s))) - p.buf = append(p.buf, s...) - return nil -} - -// Marshaler is the interface representing objects that can marshal themselves. -type Marshaler interface { - Marshal() ([]byte, error) -} - -// EncodeMessage writes the protocol buffer to the Buffer, -// prefixed by a varint-encoded length. -func (p *Buffer) EncodeMessage(pb Message) error { - siz := Size(pb) - p.EncodeVarint(uint64(siz)) - return p.Marshal(pb) -} - -// All protocol buffer fields are nillable, but be careful. 
-func isNil(v reflect.Value) bool { - switch v.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return v.IsNil() - } - return false -} diff --git a/vendor/github.com/golang/protobuf/proto/equal.go b/vendor/github.com/golang/protobuf/proto/equal.go deleted file mode 100644 index d4db5a1c..00000000 --- a/vendor/github.com/golang/protobuf/proto/equal.go +++ /dev/null @@ -1,300 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2011 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Protocol buffer comparison. - -package proto - -import ( - "bytes" - "log" - "reflect" - "strings" -) - -/* -Equal returns true iff protocol buffers a and b are equal. -The arguments must both be pointers to protocol buffer structs. - -Equality is defined in this way: - - Two messages are equal iff they are the same type, - corresponding fields are equal, unknown field sets - are equal, and extensions sets are equal. - - Two set scalar fields are equal iff their values are equal. - If the fields are of a floating-point type, remember that - NaN != x for all x, including NaN. If the message is defined - in a proto3 .proto file, fields are not "set"; specifically, - zero length proto3 "bytes" fields are equal (nil == {}). - - Two repeated fields are equal iff their lengths are the same, - and their corresponding elements are equal. Note a "bytes" field, - although represented by []byte, is not a repeated field and the - rule for the scalar fields described above applies. - - Two unset fields are equal. - - Two unknown field sets are equal if their current - encoded state is equal. - - Two extension sets are equal iff they have corresponding - elements that are pairwise equal. - - Two map fields are equal iff their lengths are the same, - and they contain the same set of elements. Zero-length map - fields are equal. - - Every other combination of things are not equal. 
- -The return value is undefined if a and b are not protocol buffers. -*/ -func Equal(a, b Message) bool { - if a == nil || b == nil { - return a == b - } - v1, v2 := reflect.ValueOf(a), reflect.ValueOf(b) - if v1.Type() != v2.Type() { - return false - } - if v1.Kind() == reflect.Ptr { - if v1.IsNil() { - return v2.IsNil() - } - if v2.IsNil() { - return false - } - v1, v2 = v1.Elem(), v2.Elem() - } - if v1.Kind() != reflect.Struct { - return false - } - return equalStruct(v1, v2) -} - -// v1 and v2 are known to have the same type. -func equalStruct(v1, v2 reflect.Value) bool { - sprop := GetProperties(v1.Type()) - for i := 0; i < v1.NumField(); i++ { - f := v1.Type().Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - f1, f2 := v1.Field(i), v2.Field(i) - if f.Type.Kind() == reflect.Ptr { - if n1, n2 := f1.IsNil(), f2.IsNil(); n1 && n2 { - // both unset - continue - } else if n1 != n2 { - // set/unset mismatch - return false - } - f1, f2 = f1.Elem(), f2.Elem() - } - if !equalAny(f1, f2, sprop.Prop[i]) { - return false - } - } - - if em1 := v1.FieldByName("XXX_InternalExtensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_InternalExtensions") - if !equalExtensions(v1.Type(), em1.Interface().(XXX_InternalExtensions), em2.Interface().(XXX_InternalExtensions)) { - return false - } - } - - if em1 := v1.FieldByName("XXX_extensions"); em1.IsValid() { - em2 := v2.FieldByName("XXX_extensions") - if !equalExtMap(v1.Type(), em1.Interface().(map[int32]Extension), em2.Interface().(map[int32]Extension)) { - return false - } - } - - uf := v1.FieldByName("XXX_unrecognized") - if !uf.IsValid() { - return true - } - - u1 := uf.Bytes() - u2 := v2.FieldByName("XXX_unrecognized").Bytes() - return bytes.Equal(u1, u2) -} - -// v1 and v2 are known to have the same type. -// prop may be nil. -func equalAny(v1, v2 reflect.Value, prop *Properties) bool { - if v1.Type() == protoMessageType { - m1, _ := v1.Interface().(Message) - m2, _ := v2.Interface().(Message) - return Equal(m1, m2) - } - switch v1.Kind() { - case reflect.Bool: - return v1.Bool() == v2.Bool() - case reflect.Float32, reflect.Float64: - return v1.Float() == v2.Float() - case reflect.Int32, reflect.Int64: - return v1.Int() == v2.Int() - case reflect.Interface: - // Probably a oneof field; compare the inner values. - n1, n2 := v1.IsNil(), v2.IsNil() - if n1 || n2 { - return n1 == n2 - } - e1, e2 := v1.Elem(), v2.Elem() - if e1.Type() != e2.Type() { - return false - } - return equalAny(e1, e2, nil) - case reflect.Map: - if v1.Len() != v2.Len() { - return false - } - for _, key := range v1.MapKeys() { - val2 := v2.MapIndex(key) - if !val2.IsValid() { - // This key was not found in the second map. - return false - } - if !equalAny(v1.MapIndex(key), val2, nil) { - return false - } - } - return true - case reflect.Ptr: - // Maps may have nil values in them, so check for nil. - if v1.IsNil() && v2.IsNil() { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return equalAny(v1.Elem(), v2.Elem(), prop) - case reflect.Slice: - if v1.Type().Elem().Kind() == reflect.Uint8 { - // short circuit: []byte - - // Edge case: if this is in a proto3 message, a zero length - // bytes field is considered the zero value. 
- if prop != nil && prop.proto3 && v1.Len() == 0 && v2.Len() == 0 { - return true - } - if v1.IsNil() != v2.IsNil() { - return false - } - return bytes.Equal(v1.Interface().([]byte), v2.Interface().([]byte)) - } - - if v1.Len() != v2.Len() { - return false - } - for i := 0; i < v1.Len(); i++ { - if !equalAny(v1.Index(i), v2.Index(i), prop) { - return false - } - } - return true - case reflect.String: - return v1.Interface().(string) == v2.Interface().(string) - case reflect.Struct: - return equalStruct(v1, v2) - case reflect.Uint32, reflect.Uint64: - return v1.Uint() == v2.Uint() - } - - // unknown type, so not a protocol buffer - log.Printf("proto: don't know how to compare %v", v1) - return false -} - -// base is the struct type that the extensions are based on. -// x1 and x2 are InternalExtensions. -func equalExtensions(base reflect.Type, x1, x2 XXX_InternalExtensions) bool { - em1, _ := x1.extensionsRead() - em2, _ := x2.extensionsRead() - return equalExtMap(base, em1, em2) -} - -func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool { - if len(em1) != len(em2) { - return false - } - - for extNum, e1 := range em1 { - e2, ok := em2[extNum] - if !ok { - return false - } - - m1, m2 := e1.value, e2.value - - if m1 == nil && m2 == nil { - // Both have only encoded form. - if bytes.Equal(e1.enc, e2.enc) { - continue - } - // The bytes are different, but the extensions might still be - // equal. We need to decode them to compare. - } - - if m1 != nil && m2 != nil { - // Both are unencoded. - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - continue - } - - // At least one is encoded. To do a semantically correct comparison - // we need to unmarshal them first. - var desc *ExtensionDesc - if m := extensionMaps[base]; m != nil { - desc = m[extNum] - } - if desc == nil { - // If both have only encoded form and the bytes are the same, - // it is handled above. We get here when the bytes are different. - // We don't know how to decode it, so just compare them as byte - // slices. - log.Printf("proto: don't know how to compare extension %d of %v", extNum, base) - return false - } - var err error - if m1 == nil { - m1, err = decodeExtension(e1.enc, desc) - } - if m2 == nil && err == nil { - m2, err = decodeExtension(e2.enc, desc) - } - if err != nil { - // The encoded form is invalid. - log.Printf("proto: badly encoded extension %d of %v: %v", extNum, base, err) - return false - } - if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) { - return false - } - } - - return true -} diff --git a/vendor/github.com/golang/protobuf/proto/extensions.go b/vendor/github.com/golang/protobuf/proto/extensions.go index 816a3b9d..42fc120c 100644 --- a/vendor/github.com/golang/protobuf/proto/extensions.go +++ b/vendor/github.com/golang/protobuf/proto/extensions.go @@ -1,543 +1,356 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package proto -/* - * Types and routines for supporting protocol buffer extensions. - */ - import ( "errors" "fmt" - "io" "reflect" - "strconv" - "sync" -) - -// ErrMissingExtension is the error returned by GetExtension if the named extension is not in the message. -var ErrMissingExtension = errors.New("proto: missing extension") -// ExtensionRange represents a range of message extensions for a protocol buffer. -// Used in code generated by the protocol compiler. -type ExtensionRange struct { - Start, End int32 // both inclusive -} - -// extendableProto is an interface implemented by any protocol buffer generated by the current -// proto compiler that may be extended. -type extendableProto interface { - Message - ExtensionRangeArray() []ExtensionRange - extensionsWrite() map[int32]Extension - extensionsRead() (map[int32]Extension, sync.Locker) -} - -// extendableProtoV1 is an interface implemented by a protocol buffer generated by the previous -// version of the proto compiler that may be extended. -type extendableProtoV1 interface { - Message - ExtensionRangeArray() []ExtensionRange - ExtensionMap() map[int32]Extension -} - -// extensionAdapter is a wrapper around extendableProtoV1 that implements extendableProto. -type extensionAdapter struct { - extendableProtoV1 -} + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoiface" + "google.golang.org/protobuf/runtime/protoimpl" +) -func (e extensionAdapter) extensionsWrite() map[int32]Extension { - return e.ExtensionMap() -} +type ( + // ExtensionDesc represents an extension descriptor and + // is used to interact with an extension field in a message. + // + // Variables of this type are generated in code by protoc-gen-go. + ExtensionDesc = protoimpl.ExtensionInfo -func (e extensionAdapter) extensionsRead() (map[int32]Extension, sync.Locker) { - return e.ExtensionMap(), notLocker{} -} + // ExtensionRange represents a range of message extensions. + // Used in code generated by protoc-gen-go. + ExtensionRange = protoiface.ExtensionRangeV1 -// notLocker is a sync.Locker whose Lock and Unlock methods are nops. -type notLocker struct{} + // Deprecated: Do not use; this is an internal type. 
+ Extension = protoimpl.ExtensionFieldV1 -func (n notLocker) Lock() {} -func (n notLocker) Unlock() {} + // Deprecated: Do not use; this is an internal type. + XXX_InternalExtensions = protoimpl.ExtensionFields +) -// extendable returns the extendableProto interface for the given generated proto message. -// If the proto message has the old extension format, it returns a wrapper that implements -// the extendableProto interface. -func extendable(p interface{}) (extendableProto, error) { - switch p := p.(type) { - case extendableProto: - if isNilPtr(p) { - return nil, fmt.Errorf("proto: nil %T is not extendable", p) - } - return p, nil - case extendableProtoV1: - if isNilPtr(p) { - return nil, fmt.Errorf("proto: nil %T is not extendable", p) - } - return extensionAdapter{p}, nil - } - // Don't allocate a specific error containing %T: - // this is the hot path for Clone and MarshalText. - return nil, errNotExtendable -} +// ErrMissingExtension reports whether the extension was not present. +var ErrMissingExtension = errors.New("proto: missing extension") var errNotExtendable = errors.New("proto: not an extendable proto.Message") -func isNilPtr(x interface{}) bool { - v := reflect.ValueOf(x) - return v.Kind() == reflect.Ptr && v.IsNil() -} - -// XXX_InternalExtensions is an internal representation of proto extensions. -// -// Each generated message struct type embeds an anonymous XXX_InternalExtensions field, -// thus gaining the unexported 'extensions' method, which can be called only from the proto package. -// -// The methods of XXX_InternalExtensions are not concurrency safe in general, -// but calls to logically read-only methods such as has and get may be executed concurrently. -type XXX_InternalExtensions struct { - // The struct must be indirect so that if a user inadvertently copies a - // generated message and its embedded XXX_InternalExtensions, they - // avoid the mayhem of a copied mutex. - // - // The mutex serializes all logically read-only operations to p.extensionMap. - // It is up to the client to ensure that write operations to p.extensionMap are - // mutually exclusive with other accesses. - p *struct { - mu sync.Mutex - extensionMap map[int32]Extension +// HasExtension reports whether the extension field is present in m +// either as an explicitly populated field or as an unknown field. +func HasExtension(m Message, xt *ExtensionDesc) (has bool) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return false } -} -// extensionsWrite returns the extension map, creating it on first use. -func (e *XXX_InternalExtensions) extensionsWrite() map[int32]Extension { - if e.p == nil { - e.p = new(struct { - mu sync.Mutex - extensionMap map[int32]Extension + // Check whether any populated known field matches the field number. + xtd := xt.TypeDescriptor() + if isValidExtension(mr.Descriptor(), xtd) { + has = mr.Has(xtd) + } else { + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + has = int32(fd.Number()) == xt.Field + return !has }) - e.p.extensionMap = make(map[int32]Extension) } - return e.p.extensionMap -} -// extensionsRead returns the extensions map for read-only use. It may be nil. -// The caller must hold the returned mutex's lock when accessing Elements within the map. -func (e *XXX_InternalExtensions) extensionsRead() (map[int32]Extension, sync.Locker) { - if e.p == nil { - return nil, nil + // Check whether any unknown field matches the field number. 
+ for b := mr.GetUnknown(); !has && len(b) > 0; { + num, _, n := protowire.ConsumeField(b) + has = int32(num) == xt.Field + b = b[n:] } - return e.p.extensionMap, &e.p.mu -} - -// ExtensionDesc represents an extension specification. -// Used in generated code from the protocol compiler. -type ExtensionDesc struct { - ExtendedType Message // nil pointer to the type that is being extended - ExtensionType interface{} // nil pointer to the extension type - Field int32 // field number - Name string // fully-qualified name of extension, for text formatting - Tag string // protobuf tag style - Filename string // name of the file in which the extension is defined -} - -func (ed *ExtensionDesc) repeated() bool { - t := reflect.TypeOf(ed.ExtensionType) - return t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 -} - -// Extension represents an extension in a message. -type Extension struct { - // When an extension is stored in a message using SetExtension - // only desc and value are set. When the message is marshaled - // enc will be set to the encoded form of the message. - // - // When a message is unmarshaled and contains extensions, each - // extension will have only enc set. When such an extension is - // accessed using GetExtension (or GetExtensions) desc and value - // will be set. - desc *ExtensionDesc - value interface{} - enc []byte + return has } -// SetRawExtension is for testing only. -func SetRawExtension(base Message, id int32, b []byte) { - epb, err := extendable(base) - if err != nil { +// ClearExtension removes the extension field from m +// either as an explicitly populated field or as an unknown field. +func ClearExtension(m Message, xt *ExtensionDesc) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { return } - extmap := epb.extensionsWrite() - extmap[id] = Extension{enc: b} -} -// isExtensionField returns true iff the given field number is in an extension range. -func isExtensionField(pb extendableProto, field int32) bool { - for _, er := range pb.ExtensionRangeArray() { - if er.Start <= field && field <= er.End { + xtd := xt.TypeDescriptor() + if isValidExtension(mr.Descriptor(), xtd) { + mr.Clear(xtd) + } else { + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + if int32(fd.Number()) == xt.Field { + mr.Clear(fd) + return false + } return true - } - } - return false -} - -// checkExtensionTypes checks that the given extension is valid for pb. -func checkExtensionTypes(pb extendableProto, extension *ExtensionDesc) error { - var pbi interface{} = pb - // Check the extended type. - if ea, ok := pbi.(extensionAdapter); ok { - pbi = ea.extendableProtoV1 - } - if a, b := reflect.TypeOf(pbi), reflect.TypeOf(extension.ExtendedType); a != b { - return fmt.Errorf("proto: bad extended type; %v does not extend %v", b, a) - } - // Check the range. - if !isExtensionField(pb, extension.Field) { - return errors.New("proto: bad extension number; not in declared ranges") - } - return nil -} - -// extPropKey is sufficient to uniquely identify an extension. -type extPropKey struct { - base reflect.Type - field int32 -} - -var extProp = struct { - sync.RWMutex - m map[extPropKey]*Properties -}{ - m: make(map[extPropKey]*Properties), -} - -func extensionProperties(ed *ExtensionDesc) *Properties { - key := extPropKey{base: reflect.TypeOf(ed.ExtendedType), field: ed.Field} - - extProp.RLock() - if prop, ok := extProp.m[key]; ok { - extProp.RUnlock() - return prop - } - extProp.RUnlock() - - extProp.Lock() - defer extProp.Unlock() - // Check again. 
- if prop, ok := extProp.m[key]; ok { - return prop - } - - prop := new(Properties) - prop.Init(reflect.TypeOf(ed.ExtensionType), "unknown_name", ed.Tag, nil) - extProp.m[key] = prop - return prop -} - -// HasExtension returns whether the given extension is present in pb. -func HasExtension(pb Message, extension *ExtensionDesc) bool { - // TODO: Check types, field numbers, etc.? - epb, err := extendable(pb) - if err != nil { - return false - } - extmap, mu := epb.extensionsRead() - if extmap == nil { - return false + }) } - mu.Lock() - _, ok := extmap[extension.Field] - mu.Unlock() - return ok + clearUnknown(mr, fieldNum(xt.Field)) } -// ClearExtension removes the given extension from pb. -func ClearExtension(pb Message, extension *ExtensionDesc) { - epb, err := extendable(pb) - if err != nil { +// ClearAllExtensions clears all extensions from m. +// This includes populated fields and unknown fields in the extension range. +func ClearAllExtensions(m Message) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { return } - // TODO: Check types, field numbers, etc.? - extmap := epb.extensionsWrite() - delete(extmap, extension.Field) + + mr.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + if fd.IsExtension() { + mr.Clear(fd) + } + return true + }) + clearUnknown(mr, mr.Descriptor().ExtensionRanges()) } -// GetExtension retrieves a proto2 extended field from pb. +// GetExtension retrieves a proto2 extended field from m. // // If the descriptor is type complete (i.e., ExtensionDesc.ExtensionType is non-nil), // then GetExtension parses the encoded field and returns a Go value of the specified type. // If the field is not present, then the default value is returned (if one is specified), // otherwise ErrMissingExtension is reported. // -// If the descriptor is not type complete (i.e., ExtensionDesc.ExtensionType is nil), -// then GetExtension returns the raw encoded bytes of the field extension. -func GetExtension(pb Message, extension *ExtensionDesc) (interface{}, error) { - epb, err := extendable(pb) - if err != nil { - return nil, err - } - - if extension.ExtendedType != nil { - // can only check type if this is a complete descriptor - if err := checkExtensionTypes(epb, extension); err != nil { - return nil, err +// If the descriptor is type incomplete (i.e., ExtensionDesc.ExtensionType is nil), +// then GetExtension returns the raw encoded bytes for the extension field. +func GetExtension(m Message, xt *ExtensionDesc) (interface{}, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return nil, errNotExtendable + } + + // Retrieve the unknown fields for this extension field. + var bo protoreflect.RawFields + for bi := mr.GetUnknown(); len(bi) > 0; { + num, _, n := protowire.ConsumeField(bi) + if int32(num) == xt.Field { + bo = append(bo, bi[:n]...) } + bi = bi[n:] } - emap, mu := epb.extensionsRead() - if emap == nil { - return defaultExtensionValue(extension) - } - mu.Lock() - defer mu.Unlock() - e, ok := emap[extension.Field] - if !ok { - // defaultExtensionValue returns the default value or - // ErrMissingExtension if there is no default. - return defaultExtensionValue(extension) + // For type incomplete descriptors, only retrieve the unknown fields. + if xt.ExtensionType == nil { + return []byte(bo), nil } - if e.value != nil { - // Already decoded. Check the descriptor, though. - if e.desc != extension { - // This shouldn't happen. 
If it does, it means that - // GetExtension was called twice with two different - // descriptors with the same field number. - return nil, errors.New("proto: descriptor conflict") + // If the extension field only exists as unknown fields, unmarshal it. + // This is rarely done since proto.Unmarshal eagerly unmarshals extensions. + xtd := xt.TypeDescriptor() + if !isValidExtension(mr.Descriptor(), xtd) { + return nil, fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m) + } + if !mr.Has(xtd) && len(bo) > 0 { + m2 := mr.New() + if err := (proto.UnmarshalOptions{ + Resolver: extensionResolver{xt}, + }.Unmarshal(bo, m2.Interface())); err != nil { + return nil, err + } + if m2.Has(xtd) { + mr.Set(xtd, m2.Get(xtd)) + clearUnknown(mr, fieldNum(xt.Field)) } - return e.value, nil } - if extension.ExtensionType == nil { - // incomplete descriptor - return e.enc, nil + // Check whether the message has the extension field set or a default. + var pv protoreflect.Value + switch { + case mr.Has(xtd): + pv = mr.Get(xtd) + case xtd.HasDefault(): + pv = xtd.Default() + default: + return nil, ErrMissingExtension } - v, err := decodeExtension(e.enc, extension) - if err != nil { - return nil, err + v := xt.InterfaceOf(pv) + rv := reflect.ValueOf(v) + if isScalarKind(rv.Kind()) { + rv2 := reflect.New(rv.Type()) + rv2.Elem().Set(rv) + v = rv2.Interface() } - - // Remember the decoded version and drop the encoded version. - // That way it is safe to mutate what we return. - e.value = v - e.desc = extension - e.enc = nil - emap[extension.Field] = e - return e.value, nil + return v, nil } -// defaultExtensionValue returns the default value for extension. -// If no default for an extension is defined ErrMissingExtension is returned. -func defaultExtensionValue(extension *ExtensionDesc) (interface{}, error) { - if extension.ExtensionType == nil { - // incomplete descriptor, so no default - return nil, ErrMissingExtension - } - - t := reflect.TypeOf(extension.ExtensionType) - props := extensionProperties(extension) +// extensionResolver is a custom extension resolver that stores a single +// extension type that takes precedence over the global registry. +type extensionResolver struct{ xt protoreflect.ExtensionType } - sf, _, err := fieldDefault(t, props) - if err != nil { - return nil, err +func (r extensionResolver) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) { + if xtd := r.xt.TypeDescriptor(); xtd.FullName() == field { + return r.xt, nil } + return protoregistry.GlobalTypes.FindExtensionByName(field) +} - if sf == nil || sf.value == nil { - // There is no default value. - return nil, ErrMissingExtension +func (r extensionResolver) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) { + if xtd := r.xt.TypeDescriptor(); xtd.ContainingMessage().FullName() == message && xtd.Number() == field { + return r.xt, nil } + return protoregistry.GlobalTypes.FindExtensionByNumber(message, field) +} - if t.Kind() != reflect.Ptr { - // We do not need to return a Ptr, we can directly return sf.value. - return sf.value, nil +// GetExtensions returns a list of the extensions values present in m, +// corresponding with the provided list of extension descriptors, xts. +// If an extension is missing in m, the corresponding value is nil. 
+func GetExtensions(m Message, xts []*ExtensionDesc) ([]interface{}, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return nil, errNotExtendable } - // We need to return an interface{} that is a pointer to sf.value. - value := reflect.New(t).Elem() - value.Set(reflect.New(value.Type().Elem())) - if sf.kind == reflect.Int32 { - // We may have an int32 or an enum, but the underlying data is int32. - // Since we can't set an int32 into a non int32 reflect.value directly - // set it as a int32. - value.Elem().SetInt(int64(sf.value.(int32))) - } else { - value.Elem().Set(reflect.ValueOf(sf.value)) + vs := make([]interface{}, len(xts)) + for i, xt := range xts { + v, err := GetExtension(m, xt) + if err != nil { + if err == ErrMissingExtension { + continue + } + return vs, err + } + vs[i] = v } - return value.Interface(), nil + return vs, nil } -// decodeExtension decodes an extension encoded in b. -func decodeExtension(b []byte, extension *ExtensionDesc) (interface{}, error) { - t := reflect.TypeOf(extension.ExtensionType) - unmarshal := typeUnmarshaler(t, extension.Tag) - - // t is a pointer to a struct, pointer to basic type or a slice. - // Allocate space to store the pointer/slice. - value := reflect.New(t).Elem() +// SetExtension sets an extension field in m to the provided value. +func SetExtension(m Message, xt *ExtensionDesc, v interface{}) error { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return errNotExtendable + } - var err error - for { - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF + rv := reflect.ValueOf(v) + if reflect.TypeOf(v) != reflect.TypeOf(xt.ExtensionType) { + return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", v, xt.ExtensionType) + } + if rv.Kind() == reflect.Ptr { + if rv.IsNil() { + return fmt.Errorf("proto: SetExtension called with nil value of type %T", v) } - b = b[n:] - wire := int(x) & 7 - - b, err = unmarshal(b, valToPointer(value.Addr()), wire) - if err != nil { - return nil, err + if isScalarKind(rv.Elem().Kind()) { + v = rv.Elem().Interface() } + } - if len(b) == 0 { - break - } + xtd := xt.TypeDescriptor() + if !isValidExtension(mr.Descriptor(), xtd) { + return fmt.Errorf("proto: bad extended type; %T does not extend %T", xt.ExtendedType, m) } - return value.Interface(), nil + mr.Set(xtd, xt.ValueOf(v)) + clearUnknown(mr, fieldNum(xt.Field)) + return nil } -// GetExtensions returns a slice of the extensions present in pb that are also listed in es. -// The returned slice has the same length as es; missing extensions will appear as nil elements. -func GetExtensions(pb Message, es []*ExtensionDesc) (extensions []interface{}, err error) { - epb, err := extendable(pb) - if err != nil { - return nil, err +// SetRawExtension inserts b into the unknown fields of m. +// +// Deprecated: Use Message.ProtoReflect.SetUnknown instead. +func SetRawExtension(m Message, fnum int32, b []byte) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return } - extensions = make([]interface{}, len(es)) - for i, e := range es { - extensions[i], err = GetExtension(epb, e) - if err == ErrMissingExtension { - err = nil - } - if err != nil { - return + + // Verify that the raw field is valid. 
+ for b0 := b; len(b0) > 0; { + num, _, n := protowire.ConsumeField(b0) + if int32(num) != fnum { + panic(fmt.Sprintf("mismatching field number: got %d, want %d", num, fnum)) } + b0 = b0[n:] } - return -} -// ExtensionDescs returns a new slice containing pb's extension descriptors, in undefined order. -// For non-registered extensions, ExtensionDescs returns an incomplete descriptor containing -// just the Field field, which defines the extension's field number. -func ExtensionDescs(pb Message) ([]*ExtensionDesc, error) { - epb, err := extendable(pb) - if err != nil { - return nil, err - } - registeredExtensions := RegisteredExtensions(pb) + ClearExtension(m, &ExtensionDesc{Field: fnum}) + mr.SetUnknown(append(mr.GetUnknown(), b...)) +} - emap, mu := epb.extensionsRead() - if emap == nil { - return nil, nil - } - mu.Lock() - defer mu.Unlock() - extensions := make([]*ExtensionDesc, 0, len(emap)) - for extid, e := range emap { - desc := e.desc - if desc == nil { - desc = registeredExtensions[extid] - if desc == nil { - desc = &ExtensionDesc{Field: extid} +// ExtensionDescs returns a list of extension descriptors found in m, +// containing descriptors for both populated extension fields in m and +// also unknown fields of m that are in the extension range. +// For the later case, an type incomplete descriptor is provided where only +// the ExtensionDesc.Field field is populated. +// The order of the extension descriptors is undefined. +func ExtensionDescs(m Message) ([]*ExtensionDesc, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() || mr.Descriptor().ExtensionRanges().Len() == 0 { + return nil, errNotExtendable + } + + // Collect a set of known extension descriptors. + extDescs := make(map[protoreflect.FieldNumber]*ExtensionDesc) + mr.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if fd.IsExtension() { + xt := fd.(protoreflect.ExtensionTypeDescriptor) + if xd, ok := xt.Type().(*ExtensionDesc); ok { + extDescs[fd.Number()] = xd } } - - extensions = append(extensions, desc) + return true + }) + + // Collect a set of unknown extension descriptors. + extRanges := mr.Descriptor().ExtensionRanges() + for b := mr.GetUnknown(); len(b) > 0; { + num, _, n := protowire.ConsumeField(b) + if extRanges.Has(num) && extDescs[num] == nil { + extDescs[num] = nil + } + b = b[n:] } - return extensions, nil -} -// SetExtension sets the specified extension of pb to the specified value. -func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error { - epb, err := extendable(pb) - if err != nil { - return err - } - if err := checkExtensionTypes(epb, extension); err != nil { - return err - } - typ := reflect.TypeOf(extension.ExtensionType) - if typ != reflect.TypeOf(value) { - return errors.New("proto: bad extension value type") - } - // nil extension values need to be caught early, because the - // encoder can't distinguish an ErrNil due to a nil extension - // from an ErrNil due to a missing field. Extensions are - // always optional, so the encoder would just swallow the error - // and drop all the extensions from the encoded message. - if reflect.ValueOf(value).IsNil() { - return fmt.Errorf("proto: SetExtension called with nil value of type %T", value) + // Transpose the set of descriptors into a list. 
+ var xts []*ExtensionDesc + for num, xt := range extDescs { + if xt == nil { + xt = &ExtensionDesc{Field: int32(num)} + } + xts = append(xts, xt) } + return xts, nil +} - extmap := epb.extensionsWrite() - extmap[extension.Field] = Extension{desc: extension, value: value} - return nil +// isValidExtension reports whether xtd is a valid extension descriptor for md. +func isValidExtension(md protoreflect.MessageDescriptor, xtd protoreflect.ExtensionTypeDescriptor) bool { + return xtd.ContainingMessage() == md && md.ExtensionRanges().Has(xtd.Number()) } -// ClearAllExtensions clears all extensions from pb. -func ClearAllExtensions(pb Message) { - epb, err := extendable(pb) - if err != nil { - return - } - m := epb.extensionsWrite() - for k := range m { - delete(m, k) +// isScalarKind reports whether k is a protobuf scalar kind (except bytes). +// This function exists for historical reasons since the representation of +// scalars differs between v1 and v2, where v1 uses *T and v2 uses T. +func isScalarKind(k reflect.Kind) bool { + switch k { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + return true + default: + return false } } -// A global registry of extensions. -// The generated code will register the generated descriptors by calling RegisterExtension. - -var extensionMaps = make(map[reflect.Type]map[int32]*ExtensionDesc) - -// RegisterExtension is called from the generated code. -func RegisterExtension(desc *ExtensionDesc) { - st := reflect.TypeOf(desc.ExtendedType).Elem() - m := extensionMaps[st] - if m == nil { - m = make(map[int32]*ExtensionDesc) - extensionMaps[st] = m +// clearUnknown removes unknown fields from m where remover.Has reports true. +func clearUnknown(m protoreflect.Message, remover interface { + Has(protoreflect.FieldNumber) bool +}) { + var bo protoreflect.RawFields + for bi := m.GetUnknown(); len(bi) > 0; { + num, _, n := protowire.ConsumeField(bi) + if !remover.Has(num) { + bo = append(bo, bi[:n]...) + } + bi = bi[n:] } - if _, ok := m[desc.Field]; ok { - panic("proto: duplicate extension registered: " + st.String() + " " + strconv.Itoa(int(desc.Field))) + if bi := m.GetUnknown(); len(bi) != len(bo) { + m.SetUnknown(bo) } - m[desc.Field] = desc } -// RegisteredExtensions returns a map of the registered extensions of a -// protocol buffer struct, indexed by the extension number. -// The argument pb should be a nil pointer to the struct type. -func RegisteredExtensions(pb Message) map[int32]*ExtensionDesc { - return extensionMaps[reflect.TypeOf(pb).Elem()] +type fieldNum protoreflect.FieldNumber + +func (n1 fieldNum) Has(n2 protoreflect.FieldNumber) bool { + return protoreflect.FieldNumber(n1) == n2 } diff --git a/vendor/github.com/golang/protobuf/proto/lib.go b/vendor/github.com/golang/protobuf/proto/lib.go deleted file mode 100644 index 75565cc6..00000000 --- a/vendor/github.com/golang/protobuf/proto/lib.go +++ /dev/null @@ -1,979 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* -Package proto converts data structures to and from the wire format of -protocol buffers. It works in concert with the Go source code generated -for .proto files by the protocol compiler. - -A summary of the properties of the protocol buffer interface -for a protocol buffer variable v: - - - Names are turned from camel_case to CamelCase for export. - - There are no methods on v to set fields; just treat - them as structure fields. - - There are getters that return a field's value if set, - and return the field's default value if unset. - The getters work even if the receiver is a nil message. - - The zero value for a struct is its correct initialization state. - All desired fields must be set before marshaling. - - A Reset() method will restore a protobuf struct to its zero state. - - Non-repeated fields are pointers to the values; nil means unset. - That is, optional or required field int32 f becomes F *int32. - - Repeated fields are slices. - - Helper functions are available to aid the setting of fields. - msg.Foo = proto.String("hello") // set field - - Constants are defined to hold the default values of all fields that - have them. They have the form Default_StructName_FieldName. - Because the getter methods handle defaulted values, - direct use of these constants should be rare. - - Enums are given type names and maps from names to values. - Enum values are prefixed by the enclosing message's name, or by the - enum's type name if it is a top-level enum. Enum types have a String - method, and a Enum method to assist in message construction. - - Nested messages, groups and enums have type names prefixed with the name of - the surrounding message type. - - Extensions are given descriptor names that start with E_, - followed by an underscore-delimited list of the nested messages - that contain it (if any) followed by the CamelCased name of the - extension field itself. HasExtension, ClearExtension, GetExtension - and SetExtension are functions for manipulating extensions. - - Oneof field sets are given a single field in their message, - with distinguished wrapper types for each possible field value. - - Marshal and Unmarshal are functions to encode and decode the wire format. 
- -When the .proto file specifies `syntax="proto3"`, there are some differences: - - - Non-repeated fields of non-message type are values instead of pointers. - - Enum types do not get an Enum method. - -The simplest way to describe this is to see an example. -Given file test.proto, containing - - package example; - - enum FOO { X = 17; } - - message Test { - required string label = 1; - optional int32 type = 2 [default=77]; - repeated int64 reps = 3; - optional group OptionalGroup = 4 { - required string RequiredField = 5; - } - oneof union { - int32 number = 6; - string name = 7; - } - } - -The resulting file, test.pb.go, is: - - package example - - import proto "github.com/golang/protobuf/proto" - import math "math" - - type FOO int32 - const ( - FOO_X FOO = 17 - ) - var FOO_name = map[int32]string{ - 17: "X", - } - var FOO_value = map[string]int32{ - "X": 17, - } - - func (x FOO) Enum() *FOO { - p := new(FOO) - *p = x - return p - } - func (x FOO) String() string { - return proto.EnumName(FOO_name, int32(x)) - } - func (x *FOO) UnmarshalJSON(data []byte) error { - value, err := proto.UnmarshalJSONEnum(FOO_value, data) - if err != nil { - return err - } - *x = FOO(value) - return nil - } - - type Test struct { - Label *string `protobuf:"bytes,1,req,name=label" json:"label,omitempty"` - Type *int32 `protobuf:"varint,2,opt,name=type,def=77" json:"type,omitempty"` - Reps []int64 `protobuf:"varint,3,rep,name=reps" json:"reps,omitempty"` - Optionalgroup *Test_OptionalGroup `protobuf:"group,4,opt,name=OptionalGroup" json:"optionalgroup,omitempty"` - // Types that are valid to be assigned to Union: - // *Test_Number - // *Test_Name - Union isTest_Union `protobuf_oneof:"union"` - XXX_unrecognized []byte `json:"-"` - } - func (m *Test) Reset() { *m = Test{} } - func (m *Test) String() string { return proto.CompactTextString(m) } - func (*Test) ProtoMessage() {} - - type isTest_Union interface { - isTest_Union() - } - - type Test_Number struct { - Number int32 `protobuf:"varint,6,opt,name=number"` - } - type Test_Name struct { - Name string `protobuf:"bytes,7,opt,name=name"` - } - - func (*Test_Number) isTest_Union() {} - func (*Test_Name) isTest_Union() {} - - func (m *Test) GetUnion() isTest_Union { - if m != nil { - return m.Union - } - return nil - } - const Default_Test_Type int32 = 77 - - func (m *Test) GetLabel() string { - if m != nil && m.Label != nil { - return *m.Label - } - return "" - } - - func (m *Test) GetType() int32 { - if m != nil && m.Type != nil { - return *m.Type - } - return Default_Test_Type - } - - func (m *Test) GetOptionalgroup() *Test_OptionalGroup { - if m != nil { - return m.Optionalgroup - } - return nil - } - - type Test_OptionalGroup struct { - RequiredField *string `protobuf:"bytes,5,req" json:"RequiredField,omitempty"` - } - func (m *Test_OptionalGroup) Reset() { *m = Test_OptionalGroup{} } - func (m *Test_OptionalGroup) String() string { return proto.CompactTextString(m) } - - func (m *Test_OptionalGroup) GetRequiredField() string { - if m != nil && m.RequiredField != nil { - return *m.RequiredField - } - return "" - } - - func (m *Test) GetNumber() int32 { - if x, ok := m.GetUnion().(*Test_Number); ok { - return x.Number - } - return 0 - } - - func (m *Test) GetName() string { - if x, ok := m.GetUnion().(*Test_Name); ok { - return x.Name - } - return "" - } - - func init() { - proto.RegisterEnum("example.FOO", FOO_name, FOO_value) - } - -To create and play with a Test object: - - package main - - import ( - "log" - - "github.com/golang/protobuf/proto" - pb 
"./example.pb" - ) - - func main() { - test := &pb.Test{ - Label: proto.String("hello"), - Type: proto.Int32(17), - Reps: []int64{1, 2, 3}, - Optionalgroup: &pb.Test_OptionalGroup{ - RequiredField: proto.String("good bye"), - }, - Union: &pb.Test_Name{"fred"}, - } - data, err := proto.Marshal(test) - if err != nil { - log.Fatal("marshaling error: ", err) - } - newTest := &pb.Test{} - err = proto.Unmarshal(data, newTest) - if err != nil { - log.Fatal("unmarshaling error: ", err) - } - // Now test and newTest contain the same data. - if test.GetLabel() != newTest.GetLabel() { - log.Fatalf("data mismatch %q != %q", test.GetLabel(), newTest.GetLabel()) - } - // Use a type switch to determine which oneof was set. - switch u := test.Union.(type) { - case *pb.Test_Number: // u.Number contains the number. - case *pb.Test_Name: // u.Name contains the string. - } - // etc. - } -*/ -package proto - -import ( - "encoding/json" - "fmt" - "log" - "reflect" - "sort" - "strconv" - "sync" -) - -// RequiredNotSetError is an error type returned by either Marshal or Unmarshal. -// Marshal reports this when a required field is not initialized. -// Unmarshal reports this when a required field is missing from the wire data. -type RequiredNotSetError struct{ field string } - -func (e *RequiredNotSetError) Error() string { - if e.field == "" { - return fmt.Sprintf("proto: required field not set") - } - return fmt.Sprintf("proto: required field %q not set", e.field) -} -func (e *RequiredNotSetError) RequiredNotSet() bool { - return true -} - -type invalidUTF8Error struct{ field string } - -func (e *invalidUTF8Error) Error() string { - if e.field == "" { - return "proto: invalid UTF-8 detected" - } - return fmt.Sprintf("proto: field %q contains invalid UTF-8", e.field) -} -func (e *invalidUTF8Error) InvalidUTF8() bool { - return true -} - -// errInvalidUTF8 is a sentinel error to identify fields with invalid UTF-8. -// This error should not be exposed to the external API as such errors should -// be recreated with the field information. -var errInvalidUTF8 = &invalidUTF8Error{} - -// isNonFatal reports whether the error is either a RequiredNotSet error -// or a InvalidUTF8 error. -func isNonFatal(err error) bool { - if re, ok := err.(interface{ RequiredNotSet() bool }); ok && re.RequiredNotSet() { - return true - } - if re, ok := err.(interface{ InvalidUTF8() bool }); ok && re.InvalidUTF8() { - return true - } - return false -} - -type nonFatal struct{ E error } - -// Merge merges err into nf and reports whether it was successful. -// Otherwise it returns false for any fatal non-nil errors. -func (nf *nonFatal) Merge(err error) (ok bool) { - if err == nil { - return true // not an error - } - if !isNonFatal(err) { - return false // fatal error - } - if nf.E == nil { - nf.E = err // store first instance of non-fatal error - } - return true -} - -// Message is implemented by generated protocol buffer messages. -type Message interface { - Reset() - String() string - ProtoMessage() -} - -// Stats records allocation details about the protocol buffer encoders -// and decoders. Useful for tuning the library itself. -type Stats struct { - Emalloc uint64 // mallocs in encode - Dmalloc uint64 // mallocs in decode - Encode uint64 // number of encodes - Decode uint64 // number of decodes - Chit uint64 // number of cache hits - Cmiss uint64 // number of cache misses - Size uint64 // number of sizes -} - -// Set to true to enable stats collection. 
-const collectStats = false - -var stats Stats - -// GetStats returns a copy of the global Stats structure. -func GetStats() Stats { return stats } - -// A Buffer is a buffer manager for marshaling and unmarshaling -// protocol buffers. It may be reused between invocations to -// reduce memory usage. It is not necessary to use a Buffer; -// the global functions Marshal and Unmarshal create a -// temporary Buffer and are fine for most applications. -type Buffer struct { - buf []byte // encode/decode byte stream - index int // read point - - deterministic bool -} - -// NewBuffer allocates a new Buffer and initializes its internal data to -// the contents of the argument slice. -func NewBuffer(e []byte) *Buffer { - return &Buffer{buf: e} -} - -// Reset resets the Buffer, ready for marshaling a new protocol buffer. -func (p *Buffer) Reset() { - p.buf = p.buf[0:0] // for reading/writing - p.index = 0 // for reading -} - -// SetBuf replaces the internal buffer with the slice, -// ready for unmarshaling the contents of the slice. -func (p *Buffer) SetBuf(s []byte) { - p.buf = s - p.index = 0 -} - -// Bytes returns the contents of the Buffer. -func (p *Buffer) Bytes() []byte { return p.buf } - -// SetDeterministic sets whether to use deterministic serialization. -// -// Deterministic serialization guarantees that for a given binary, equal -// messages will always be serialized to the same bytes. This implies: -// -// - Repeated serialization of a message will return the same bytes. -// - Different processes of the same binary (which may be executing on -// different machines) will serialize equal messages to the same bytes. -// -// Note that the deterministic serialization is NOT canonical across -// languages. It is not guaranteed to remain stable over time. It is unstable -// across different builds with schema changes due to unknown fields. -// Users who need canonical serialization (e.g., persistent storage in a -// canonical form, fingerprinting, etc.) should define their own -// canonicalization specification and implement their own serializer rather -// than relying on this API. -// -// If deterministic serialization is requested, map entries will be sorted -// by keys in lexographical order. This is an implementation detail and -// subject to change. -func (p *Buffer) SetDeterministic(deterministic bool) { - p.deterministic = deterministic -} - -/* - * Helper routines for simplifying the creation of optional fields of basic type. - */ - -// Bool is a helper routine that allocates a new bool value -// to store v and returns a pointer to it. -func Bool(v bool) *bool { - return &v -} - -// Int32 is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it. -func Int32(v int32) *int32 { - return &v -} - -// Int is a helper routine that allocates a new int32 value -// to store v and returns a pointer to it, but unlike Int32 -// its argument value is an int. -func Int(v int) *int32 { - p := new(int32) - *p = int32(v) - return p -} - -// Int64 is a helper routine that allocates a new int64 value -// to store v and returns a pointer to it. -func Int64(v int64) *int64 { - return &v -} - -// Float32 is a helper routine that allocates a new float32 value -// to store v and returns a pointer to it. -func Float32(v float32) *float32 { - return &v -} - -// Float64 is a helper routine that allocates a new float64 value -// to store v and returns a pointer to it. 
-func Float64(v float64) *float64 { - return &v -} - -// Uint32 is a helper routine that allocates a new uint32 value -// to store v and returns a pointer to it. -func Uint32(v uint32) *uint32 { - return &v -} - -// Uint64 is a helper routine that allocates a new uint64 value -// to store v and returns a pointer to it. -func Uint64(v uint64) *uint64 { - return &v -} - -// String is a helper routine that allocates a new string value -// to store v and returns a pointer to it. -func String(v string) *string { - return &v -} - -// EnumName is a helper function to simplify printing protocol buffer enums -// by name. Given an enum map and a value, it returns a useful string. -func EnumName(m map[int32]string, v int32) string { - s, ok := m[v] - if ok { - return s - } - return strconv.Itoa(int(v)) -} - -// UnmarshalJSONEnum is a helper function to simplify recovering enum int values -// from their JSON-encoded representation. Given a map from the enum's symbolic -// names to its int values, and a byte buffer containing the JSON-encoded -// value, it returns an int32 that can be cast to the enum type by the caller. -// -// The function can deal with both JSON representations, numeric and symbolic. -func UnmarshalJSONEnum(m map[string]int32, data []byte, enumName string) (int32, error) { - if data[0] == '"' { - // New style: enums are strings. - var repr string - if err := json.Unmarshal(data, &repr); err != nil { - return -1, err - } - val, ok := m[repr] - if !ok { - return 0, fmt.Errorf("unrecognized enum %s value %q", enumName, repr) - } - return val, nil - } - // Old style: enums are ints. - var val int32 - if err := json.Unmarshal(data, &val); err != nil { - return 0, fmt.Errorf("cannot unmarshal %#q into enum %s", data, enumName) - } - return val, nil -} - -// DebugPrint dumps the encoded data in b in a debugging format with a header -// including the string s. Used in testing but made available for general debugging. 
-func (p *Buffer) DebugPrint(s string, b []byte) { - var u uint64 - - obuf := p.buf - index := p.index - p.buf = b - p.index = 0 - depth := 0 - - fmt.Printf("\n--- %s ---\n", s) - -out: - for { - for i := 0; i < depth; i++ { - fmt.Print(" ") - } - - index := p.index - if index == len(p.buf) { - break - } - - op, err := p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: fetching op err %v\n", index, err) - break out - } - tag := op >> 3 - wire := op & 7 - - switch wire { - default: - fmt.Printf("%3d: t=%3d unknown wire=%d\n", - index, tag, wire) - break out - - case WireBytes: - var r []byte - - r, err = p.DecodeRawBytes(false) - if err != nil { - break out - } - fmt.Printf("%3d: t=%3d bytes [%d]", index, tag, len(r)) - if len(r) <= 6 { - for i := 0; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } else { - for i := 0; i < 3; i++ { - fmt.Printf(" %.2x", r[i]) - } - fmt.Printf(" ..") - for i := len(r) - 3; i < len(r); i++ { - fmt.Printf(" %.2x", r[i]) - } - } - fmt.Printf("\n") - - case WireFixed32: - u, err = p.DecodeFixed32() - if err != nil { - fmt.Printf("%3d: t=%3d fix32 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix32 %d\n", index, tag, u) - - case WireFixed64: - u, err = p.DecodeFixed64() - if err != nil { - fmt.Printf("%3d: t=%3d fix64 err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d fix64 %d\n", index, tag, u) - - case WireVarint: - u, err = p.DecodeVarint() - if err != nil { - fmt.Printf("%3d: t=%3d varint err %v\n", index, tag, err) - break out - } - fmt.Printf("%3d: t=%3d varint %d\n", index, tag, u) - - case WireStartGroup: - fmt.Printf("%3d: t=%3d start\n", index, tag) - depth++ - - case WireEndGroup: - depth-- - fmt.Printf("%3d: t=%3d end\n", index, tag) - } - } - - if depth != 0 { - fmt.Printf("%3d: start-end not balanced %d\n", p.index, depth) - } - fmt.Printf("\n") - - p.buf = obuf - p.index = index -} - -// SetDefaults sets unset protocol buffer fields to their default values. -// It only modifies fields that are both unset and have defined defaults. -// It recursively sets default values in any non-nil sub-messages. -func SetDefaults(pb Message) { - setDefaults(reflect.ValueOf(pb), true, false) -} - -// v is a pointer to a struct. -func setDefaults(v reflect.Value, recur, zeros bool) { - v = v.Elem() - - defaultMu.RLock() - dm, ok := defaults[v.Type()] - defaultMu.RUnlock() - if !ok { - dm = buildDefaultMessage(v.Type()) - defaultMu.Lock() - defaults[v.Type()] = dm - defaultMu.Unlock() - } - - for _, sf := range dm.scalars { - f := v.Field(sf.index) - if !f.IsNil() { - // field already set - continue - } - dv := sf.value - if dv == nil && !zeros { - // no explicit default, and don't want to set zeros - continue - } - fptr := f.Addr().Interface() // **T - // TODO: Consider batching the allocations we do here. 
- switch sf.kind { - case reflect.Bool: - b := new(bool) - if dv != nil { - *b = dv.(bool) - } - *(fptr.(**bool)) = b - case reflect.Float32: - f := new(float32) - if dv != nil { - *f = dv.(float32) - } - *(fptr.(**float32)) = f - case reflect.Float64: - f := new(float64) - if dv != nil { - *f = dv.(float64) - } - *(fptr.(**float64)) = f - case reflect.Int32: - // might be an enum - if ft := f.Type(); ft != int32PtrType { - // enum - f.Set(reflect.New(ft.Elem())) - if dv != nil { - f.Elem().SetInt(int64(dv.(int32))) - } - } else { - // int32 field - i := new(int32) - if dv != nil { - *i = dv.(int32) - } - *(fptr.(**int32)) = i - } - case reflect.Int64: - i := new(int64) - if dv != nil { - *i = dv.(int64) - } - *(fptr.(**int64)) = i - case reflect.String: - s := new(string) - if dv != nil { - *s = dv.(string) - } - *(fptr.(**string)) = s - case reflect.Uint8: - // exceptional case: []byte - var b []byte - if dv != nil { - db := dv.([]byte) - b = make([]byte, len(db)) - copy(b, db) - } else { - b = []byte{} - } - *(fptr.(*[]byte)) = b - case reflect.Uint32: - u := new(uint32) - if dv != nil { - *u = dv.(uint32) - } - *(fptr.(**uint32)) = u - case reflect.Uint64: - u := new(uint64) - if dv != nil { - *u = dv.(uint64) - } - *(fptr.(**uint64)) = u - default: - log.Printf("proto: can't set default for field %v (sf.kind=%v)", f, sf.kind) - } - } - - for _, ni := range dm.nested { - f := v.Field(ni) - // f is *T or []*T or map[T]*T - switch f.Kind() { - case reflect.Ptr: - if f.IsNil() { - continue - } - setDefaults(f, recur, zeros) - - case reflect.Slice: - for i := 0; i < f.Len(); i++ { - e := f.Index(i) - if e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - - case reflect.Map: - for _, k := range f.MapKeys() { - e := f.MapIndex(k) - if e.IsNil() { - continue - } - setDefaults(e, recur, zeros) - } - } - } -} - -var ( - // defaults maps a protocol buffer struct type to a slice of the fields, - // with its scalar fields set to their proto-declared non-zero default values. - defaultMu sync.RWMutex - defaults = make(map[reflect.Type]defaultMessage) - - int32PtrType = reflect.TypeOf((*int32)(nil)) -) - -// defaultMessage represents information about the default values of a message. -type defaultMessage struct { - scalars []scalarField - nested []int // struct field index of nested messages -} - -type scalarField struct { - index int // struct field index - kind reflect.Kind // element type (the T in *T or []T) - value interface{} // the proto-declared default value, or nil -} - -// t is a struct type. -func buildDefaultMessage(t reflect.Type) (dm defaultMessage) { - sprop := GetProperties(t) - for _, prop := range sprop.Prop { - fi, ok := sprop.decoderTags.get(prop.Tag) - if !ok { - // XXX_unrecognized - continue - } - ft := t.Field(fi).Type - - sf, nested, err := fieldDefault(ft, prop) - switch { - case err != nil: - log.Print(err) - case nested: - dm.nested = append(dm.nested, fi) - case sf != nil: - sf.index = fi - dm.scalars = append(dm.scalars, *sf) - } - } - - return dm -} - -// fieldDefault returns the scalarField for field type ft. -// sf will be nil if the field can not have a default. -// nestedMessage will be true if this is a nested message. -// Note that sf.index is not set on return. 
-func fieldDefault(ft reflect.Type, prop *Properties) (sf *scalarField, nestedMessage bool, err error) { - var canHaveDefault bool - switch ft.Kind() { - case reflect.Ptr: - if ft.Elem().Kind() == reflect.Struct { - nestedMessage = true - } else { - canHaveDefault = true // proto2 scalar field - } - - case reflect.Slice: - switch ft.Elem().Kind() { - case reflect.Ptr: - nestedMessage = true // repeated message - case reflect.Uint8: - canHaveDefault = true // bytes field - } - - case reflect.Map: - if ft.Elem().Kind() == reflect.Ptr { - nestedMessage = true // map with message values - } - } - - if !canHaveDefault { - if nestedMessage { - return nil, true, nil - } - return nil, false, nil - } - - // We now know that ft is a pointer or slice. - sf = &scalarField{kind: ft.Elem().Kind()} - - // scalar fields without defaults - if !prop.HasDefault { - return sf, false, nil - } - - // a scalar field: either *T or []byte - switch ft.Elem().Kind() { - case reflect.Bool: - x, err := strconv.ParseBool(prop.Default) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default bool %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Float32: - x, err := strconv.ParseFloat(prop.Default, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float32 %q: %v", prop.Default, err) - } - sf.value = float32(x) - case reflect.Float64: - x, err := strconv.ParseFloat(prop.Default, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default float64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.Int32: - x, err := strconv.ParseInt(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int32 %q: %v", prop.Default, err) - } - sf.value = int32(x) - case reflect.Int64: - x, err := strconv.ParseInt(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default int64 %q: %v", prop.Default, err) - } - sf.value = x - case reflect.String: - sf.value = prop.Default - case reflect.Uint8: - // []byte (not *uint8) - sf.value = []byte(prop.Default) - case reflect.Uint32: - x, err := strconv.ParseUint(prop.Default, 10, 32) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint32 %q: %v", prop.Default, err) - } - sf.value = uint32(x) - case reflect.Uint64: - x, err := strconv.ParseUint(prop.Default, 10, 64) - if err != nil { - return nil, false, fmt.Errorf("proto: bad default uint64 %q: %v", prop.Default, err) - } - sf.value = x - default: - return nil, false, fmt.Errorf("proto: unhandled def kind %v", ft.Elem().Kind()) - } - - return sf, false, nil -} - -// mapKeys returns a sort.Interface to be used for sorting the map keys. -// Map fields may have key types of non-float scalars, strings and enums. -func mapKeys(vs []reflect.Value) sort.Interface { - s := mapKeySorter{vs: vs} - - // Type specialization per https://developers.google.com/protocol-buffers/docs/proto#maps. 
- if len(vs) == 0 { - return s - } - switch vs[0].Kind() { - case reflect.Int32, reflect.Int64: - s.less = func(a, b reflect.Value) bool { return a.Int() < b.Int() } - case reflect.Uint32, reflect.Uint64: - s.less = func(a, b reflect.Value) bool { return a.Uint() < b.Uint() } - case reflect.Bool: - s.less = func(a, b reflect.Value) bool { return !a.Bool() && b.Bool() } // false < true - case reflect.String: - s.less = func(a, b reflect.Value) bool { return a.String() < b.String() } - default: - panic(fmt.Sprintf("unsupported map key type: %v", vs[0].Kind())) - } - - return s -} - -type mapKeySorter struct { - vs []reflect.Value - less func(a, b reflect.Value) bool -} - -func (s mapKeySorter) Len() int { return len(s.vs) } -func (s mapKeySorter) Swap(i, j int) { s.vs[i], s.vs[j] = s.vs[j], s.vs[i] } -func (s mapKeySorter) Less(i, j int) bool { - return s.less(s.vs[i], s.vs[j]) -} - -// isProto3Zero reports whether v is a zero proto3 value. -func isProto3Zero(v reflect.Value) bool { - switch v.Kind() { - case reflect.Bool: - return !v.Bool() - case reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint32, reflect.Uint64: - return v.Uint() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.String: - return v.String() == "" - } - return false -} - -// ProtoPackageIsVersion2 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion2 = true - -// ProtoPackageIsVersion1 is referenced from generated protocol buffer files -// to assert that that code is compatible with this version of the proto package. -const ProtoPackageIsVersion1 = true - -// InternalMessageInfo is a type used internally by generated .pb.go files. -// This type is not intended to be used by non-generated code. -// This type is not subject to any compatibility guarantee. -type InternalMessageInfo struct { - marshal *marshalInfo - unmarshal *unmarshalInfo - merge *mergeInfo - discard *discardInfo -} diff --git a/vendor/github.com/golang/protobuf/proto/message_set.go b/vendor/github.com/golang/protobuf/proto/message_set.go deleted file mode 100644 index 3b6ca41d..00000000 --- a/vendor/github.com/golang/protobuf/proto/message_set.go +++ /dev/null @@ -1,314 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -/* - * Support for message sets. - */ - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "reflect" - "sort" - "sync" -) - -// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. -// A message type ID is required for storing a protocol buffer in a message set. -var errNoMessageTypeID = errors.New("proto does not have a message type ID") - -// The first two types (_MessageSet_Item and messageSet) -// model what the protocol compiler produces for the following protocol message: -// message MessageSet { -// repeated group Item = 1 { -// required int32 type_id = 2; -// required string message = 3; -// }; -// } -// That is the MessageSet wire format. We can't use a proto to generate these -// because that would introduce a circular dependency between it and this package. - -type _MessageSet_Item struct { - TypeId *int32 `protobuf:"varint,2,req,name=type_id"` - Message []byte `protobuf:"bytes,3,req,name=message"` -} - -type messageSet struct { - Item []*_MessageSet_Item `protobuf:"group,1,rep"` - XXX_unrecognized []byte - // TODO: caching? -} - -// Make sure messageSet is a Message. -var _ Message = (*messageSet)(nil) - -// messageTypeIder is an interface satisfied by a protocol buffer type -// that may be stored in a MessageSet. -type messageTypeIder interface { - MessageTypeId() int32 -} - -func (ms *messageSet) find(pb Message) *_MessageSet_Item { - mti, ok := pb.(messageTypeIder) - if !ok { - return nil - } - id := mti.MessageTypeId() - for _, item := range ms.Item { - if *item.TypeId == id { - return item - } - } - return nil -} - -func (ms *messageSet) Has(pb Message) bool { - return ms.find(pb) != nil -} - -func (ms *messageSet) Unmarshal(pb Message) error { - if item := ms.find(pb); item != nil { - return Unmarshal(item.Message, pb) - } - if _, ok := pb.(messageTypeIder); !ok { - return errNoMessageTypeID - } - return nil // TODO: return error instead? -} - -func (ms *messageSet) Marshal(pb Message) error { - msg, err := Marshal(pb) - if err != nil { - return err - } - if item := ms.find(pb); item != nil { - // reuse existing item - item.Message = msg - return nil - } - - mti, ok := pb.(messageTypeIder) - if !ok { - return errNoMessageTypeID - } - - mtid := mti.MessageTypeId() - ms.Item = append(ms.Item, &_MessageSet_Item{ - TypeId: &mtid, - Message: msg, - }) - return nil -} - -func (ms *messageSet) Reset() { *ms = messageSet{} } -func (ms *messageSet) String() string { return CompactTextString(ms) } -func (*messageSet) ProtoMessage() {} - -// Support for the message_set_wire_format message option. - -func skipVarint(buf []byte) []byte { - i := 0 - for ; buf[i]&0x80 != 0; i++ { - } - return buf[i+1:] -} - -// MarshalMessageSet encodes the extension map represented by m in the message set wire format. -// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option. 
-func MarshalMessageSet(exts interface{}) ([]byte, error) { - return marshalMessageSet(exts, false) -} - -// marshaMessageSet implements above function, with the opt to turn on / off deterministic during Marshal. -func marshalMessageSet(exts interface{}, deterministic bool) ([]byte, error) { - switch exts := exts.(type) { - case *XXX_InternalExtensions: - var u marshalInfo - siz := u.sizeMessageSet(exts) - b := make([]byte, 0, siz) - return u.appendMessageSet(b, exts, deterministic) - - case map[int32]Extension: - // This is an old-style extension map. - // Wrap it in a new-style XXX_InternalExtensions. - ie := XXX_InternalExtensions{ - p: &struct { - mu sync.Mutex - extensionMap map[int32]Extension - }{ - extensionMap: exts, - }, - } - - var u marshalInfo - siz := u.sizeMessageSet(&ie) - b := make([]byte, 0, siz) - return u.appendMessageSet(b, &ie, deterministic) - - default: - return nil, errors.New("proto: not an extension map") - } -} - -// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. -// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option. -func UnmarshalMessageSet(buf []byte, exts interface{}) error { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - m = exts.extensionsWrite() - case map[int32]Extension: - m = exts - default: - return errors.New("proto: not an extension map") - } - - ms := new(messageSet) - if err := Unmarshal(buf, ms); err != nil { - return err - } - for _, item := range ms.Item { - id := *item.TypeId - msg := item.Message - - // Restore wire type and field number varint, plus length varint. - // Be careful to preserve duplicate items. - b := EncodeVarint(uint64(id)<<3 | WireBytes) - if ext, ok := m[id]; ok { - // Existing data; rip off the tag and length varint - // so we join the new data correctly. - // We can assume that ext.enc is set because we are unmarshaling. - o := ext.enc[len(b):] // skip wire type and field number - _, n := DecodeVarint(o) // calculate length of length varint - o = o[n:] // skip length varint - msg = append(o, msg...) // join old data and new data - } - b = append(b, EncodeVarint(uint64(len(msg)))...) - b = append(b, msg...) - - m[id] = Extension{enc: b} - } - return nil -} - -// MarshalMessageSetJSON encodes the extension map represented by m in JSON format. -// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func MarshalMessageSetJSON(exts interface{}) ([]byte, error) { - var m map[int32]Extension - switch exts := exts.(type) { - case *XXX_InternalExtensions: - var mu sync.Locker - m, mu = exts.extensionsRead() - if m != nil { - // Keep the extensions map locked until we're done marshaling to prevent - // races between marshaling and unmarshaling the lazily-{en,de}coded - // values. - mu.Lock() - defer mu.Unlock() - } - case map[int32]Extension: - m = exts - default: - return nil, errors.New("proto: not an extension map") - } - var b bytes.Buffer - b.WriteByte('{') - - // Process the map in key order for deterministic output. - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) // int32Slice defined in text.go - - for i, id := range ids { - ext := m[id] - msd, ok := messageSetMap[id] - if !ok { - // Unknown type; we can't render it, so skip it. 
- continue - } - - if i > 0 && b.Len() > 1 { - b.WriteByte(',') - } - - fmt.Fprintf(&b, `"[%s]":`, msd.name) - - x := ext.value - if x == nil { - x = reflect.New(msd.t.Elem()).Interface() - if err := Unmarshal(ext.enc, x.(Message)); err != nil { - return nil, err - } - } - d, err := json.Marshal(x) - if err != nil { - return nil, err - } - b.Write(d) - } - b.WriteByte('}') - return b.Bytes(), nil -} - -// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format. -// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option. -func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error { - // Common-case fast path. - if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) { - return nil - } - - // This is fairly tricky, and it's not clear that it is needed. - return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented") -} - -// A global registry of types that can be used in a MessageSet. - -var messageSetMap = make(map[int32]messageSetDesc) - -type messageSetDesc struct { - t reflect.Type // pointer to struct - name string -} - -// RegisterMessageSetType is called from the generated code. -func RegisterMessageSetType(m Message, fieldNum int32, name string) { - messageSetMap[fieldNum] = messageSetDesc{ - t: reflect.TypeOf(m), - name: name, - } -} diff --git a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go b/vendor/github.com/golang/protobuf/proto/pointer_reflect.go deleted file mode 100644 index b6cad908..00000000 --- a/vendor/github.com/golang/protobuf/proto/pointer_reflect.go +++ /dev/null @@ -1,357 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build purego appengine js - -// This file contains an implementation of proto field accesses using package reflect. 
-// It is slower than the code in pointer_unsafe.go but it avoids package unsafe and can -// be used on App Engine. - -package proto - -import ( - "reflect" - "sync" -) - -const unsafeAllowed = false - -// A field identifies a field in a struct, accessible from a pointer. -// In this implementation, a field is identified by the sequence of field indices -// passed to reflect's FieldByIndex. -type field []int - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return f.Index -} - -// invalidField is an invalid field identifier. -var invalidField = field(nil) - -// zeroField is a noop when calling pointer.offset. -var zeroField = field([]int{}) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { return f != nil } - -// The pointer type is for the table-driven decoder. -// The implementation here uses a reflect.Value of pointer type to -// create a generic pointer. In pointer_unsafe.go we use unsafe -// instead of reflect to implement the same (but faster) interface. -type pointer struct { - v reflect.Value -} - -// toPointer converts an interface of pointer type to a pointer -// that points to the same target. -func toPointer(i *Message) pointer { - return pointer{v: reflect.ValueOf(*i)} -} - -// toAddrPointer converts an interface to a pointer that points to -// the interface data. -func toAddrPointer(i *interface{}, isptr bool) pointer { - v := reflect.ValueOf(*i) - u := reflect.New(v.Type()) - u.Elem().Set(v) - return pointer{v: u} -} - -// valToPointer converts v to a pointer. v must be of pointer type. -func valToPointer(v reflect.Value) pointer { - return pointer{v: v} -} - -// offset converts from a pointer to a structure to a pointer to -// one of its fields. -func (p pointer) offset(f field) pointer { - return pointer{v: p.v.Elem().FieldByIndex(f).Addr()} -} - -func (p pointer) isNil() bool { - return p.v.IsNil() -} - -// grow updates the slice s in place to make it one element longer. -// s must be addressable. -// Returns the (addressable) new element. -func grow(s reflect.Value) reflect.Value { - n, m := s.Len(), s.Cap() - if n < m { - s.SetLen(n + 1) - } else { - s.Set(reflect.Append(s, reflect.Zero(s.Type().Elem()))) - } - return s.Index(n) -} - -func (p pointer) toInt64() *int64 { - return p.v.Interface().(*int64) -} -func (p pointer) toInt64Ptr() **int64 { - return p.v.Interface().(**int64) -} -func (p pointer) toInt64Slice() *[]int64 { - return p.v.Interface().(*[]int64) -} - -var int32ptr = reflect.TypeOf((*int32)(nil)) - -func (p pointer) toInt32() *int32 { - return p.v.Convert(int32ptr).Interface().(*int32) -} - -// The toInt32Ptr/Slice methods don't work because of enums. -// Instead, we must use set/get methods for the int32ptr/slice case. -/* - func (p pointer) toInt32Ptr() **int32 { - return p.v.Interface().(**int32) -} - func (p pointer) toInt32Slice() *[]int32 { - return p.v.Interface().(*[]int32) -} -*/ -func (p pointer) getInt32Ptr() *int32 { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - return p.v.Elem().Interface().(*int32) - } - // an enum - return p.v.Elem().Convert(int32PtrType).Interface().(*int32) -} -func (p pointer) setInt32Ptr(v int32) { - // Allocate value in a *int32. Possibly convert that to a *enum. - // Then assign it to a **int32 or **enum. - // Note: we can convert *int32 to *enum, but we can't convert - // **int32 to **enum! 
- p.v.Elem().Set(reflect.ValueOf(&v).Convert(p.v.Type().Elem())) -} - -// getInt32Slice copies []int32 from p as a new slice. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) getInt32Slice() []int32 { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - return p.v.Elem().Interface().([]int32) - } - // an enum - // Allocate a []int32, then assign []enum's values into it. - // Note: we can't convert []enum to []int32. - slice := p.v.Elem() - s := make([]int32, slice.Len()) - for i := 0; i < slice.Len(); i++ { - s[i] = int32(slice.Index(i).Int()) - } - return s -} - -// setInt32Slice copies []int32 into p as a new slice. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) setInt32Slice(v []int32) { - if p.v.Type().Elem().Elem() == reflect.TypeOf(int32(0)) { - // raw int32 type - p.v.Elem().Set(reflect.ValueOf(v)) - return - } - // an enum - // Allocate a []enum, then assign []int32's values into it. - // Note: we can't convert []enum to []int32. - slice := reflect.MakeSlice(p.v.Type().Elem(), len(v), cap(v)) - for i, x := range v { - slice.Index(i).SetInt(int64(x)) - } - p.v.Elem().Set(slice) -} -func (p pointer) appendInt32Slice(v int32) { - grow(p.v.Elem()).SetInt(int64(v)) -} - -func (p pointer) toUint64() *uint64 { - return p.v.Interface().(*uint64) -} -func (p pointer) toUint64Ptr() **uint64 { - return p.v.Interface().(**uint64) -} -func (p pointer) toUint64Slice() *[]uint64 { - return p.v.Interface().(*[]uint64) -} -func (p pointer) toUint32() *uint32 { - return p.v.Interface().(*uint32) -} -func (p pointer) toUint32Ptr() **uint32 { - return p.v.Interface().(**uint32) -} -func (p pointer) toUint32Slice() *[]uint32 { - return p.v.Interface().(*[]uint32) -} -func (p pointer) toBool() *bool { - return p.v.Interface().(*bool) -} -func (p pointer) toBoolPtr() **bool { - return p.v.Interface().(**bool) -} -func (p pointer) toBoolSlice() *[]bool { - return p.v.Interface().(*[]bool) -} -func (p pointer) toFloat64() *float64 { - return p.v.Interface().(*float64) -} -func (p pointer) toFloat64Ptr() **float64 { - return p.v.Interface().(**float64) -} -func (p pointer) toFloat64Slice() *[]float64 { - return p.v.Interface().(*[]float64) -} -func (p pointer) toFloat32() *float32 { - return p.v.Interface().(*float32) -} -func (p pointer) toFloat32Ptr() **float32 { - return p.v.Interface().(**float32) -} -func (p pointer) toFloat32Slice() *[]float32 { - return p.v.Interface().(*[]float32) -} -func (p pointer) toString() *string { - return p.v.Interface().(*string) -} -func (p pointer) toStringPtr() **string { - return p.v.Interface().(**string) -} -func (p pointer) toStringSlice() *[]string { - return p.v.Interface().(*[]string) -} -func (p pointer) toBytes() *[]byte { - return p.v.Interface().(*[]byte) -} -func (p pointer) toBytesSlice() *[][]byte { - return p.v.Interface().(*[][]byte) -} -func (p pointer) toExtensions() *XXX_InternalExtensions { - return p.v.Interface().(*XXX_InternalExtensions) -} -func (p pointer) toOldExtensions() *map[int32]Extension { - return p.v.Interface().(*map[int32]Extension) -} -func (p pointer) getPointer() pointer { - return pointer{v: p.v.Elem()} -} -func (p pointer) setPointer(q pointer) { - p.v.Elem().Set(q.v) -} -func (p pointer) appendPointer(q pointer) { - grow(p.v.Elem()).Set(q.v) -} - -// getPointerSlice copies []*T from p as a new []pointer. -// This behavior differs from the implementation in pointer_unsafe.go. 
-func (p pointer) getPointerSlice() []pointer { - if p.v.IsNil() { - return nil - } - n := p.v.Elem().Len() - s := make([]pointer, n) - for i := 0; i < n; i++ { - s[i] = pointer{v: p.v.Elem().Index(i)} - } - return s -} - -// setPointerSlice copies []pointer into p as a new []*T. -// This behavior differs from the implementation in pointer_unsafe.go. -func (p pointer) setPointerSlice(v []pointer) { - if v == nil { - p.v.Elem().Set(reflect.New(p.v.Elem().Type()).Elem()) - return - } - s := reflect.MakeSlice(p.v.Elem().Type(), 0, len(v)) - for _, p := range v { - s = reflect.Append(s, p.v) - } - p.v.Elem().Set(s) -} - -// getInterfacePointer returns a pointer that points to the -// interface data of the interface pointed by p. -func (p pointer) getInterfacePointer() pointer { - if p.v.Elem().IsNil() { - return pointer{v: p.v.Elem()} - } - return pointer{v: p.v.Elem().Elem().Elem().Field(0).Addr()} // *interface -> interface -> *struct -> struct -} - -func (p pointer) asPointerTo(t reflect.Type) reflect.Value { - // TODO: check that p.v.Type().Elem() == t? - return p.v -} - -func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} -func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { - atomicLock.Lock() - defer atomicLock.Unlock() - return *p -} -func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { - atomicLock.Lock() - defer atomicLock.Unlock() - *p = v -} - -var atomicLock sync.Mutex diff --git a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go b/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go deleted file mode 100644 index d55a335d..00000000 --- a/vendor/github.com/golang/protobuf/proto/pointer_unsafe.go +++ /dev/null @@ -1,308 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2012 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// +build !purego,!appengine,!js - -// This file contains the implementation of the proto field accesses using package unsafe. - -package proto - -import ( - "reflect" - "sync/atomic" - "unsafe" -) - -const unsafeAllowed = true - -// A field identifies a field in a struct, accessible from a pointer. -// In this implementation, a field is identified by its byte offset from the start of the struct. -type field uintptr - -// toField returns a field equivalent to the given reflect field. -func toField(f *reflect.StructField) field { - return field(f.Offset) -} - -// invalidField is an invalid field identifier. -const invalidField = ^field(0) - -// zeroField is a noop when calling pointer.offset. -const zeroField = field(0) - -// IsValid reports whether the field identifier is valid. -func (f field) IsValid() bool { - return f != invalidField -} - -// The pointer type below is for the new table-driven encoder/decoder. -// The implementation here uses unsafe.Pointer to create a generic pointer. -// In pointer_reflect.go we use reflect instead of unsafe to implement -// the same (but slower) interface. -type pointer struct { - p unsafe.Pointer -} - -// size of pointer -var ptrSize = unsafe.Sizeof(uintptr(0)) - -// toPointer converts an interface of pointer type to a pointer -// that points to the same target. -func toPointer(i *Message) pointer { - // Super-tricky - read pointer out of data word of interface value. - // Saves ~25ns over the equivalent: - // return valToPointer(reflect.ValueOf(*i)) - return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} -} - -// toAddrPointer converts an interface to a pointer that points to -// the interface data. -func toAddrPointer(i *interface{}, isptr bool) pointer { - // Super-tricky - read or get the address of data word of interface value. - if isptr { - // The interface is of pointer type, thus it is a direct interface. - // The data word is the pointer data itself. We take its address. - return pointer{p: unsafe.Pointer(uintptr(unsafe.Pointer(i)) + ptrSize)} - } - // The interface is not of pointer type. The data word is the pointer - // to the data. - return pointer{p: (*[2]unsafe.Pointer)(unsafe.Pointer(i))[1]} -} - -// valToPointer converts v to a pointer. v must be of pointer type. -func valToPointer(v reflect.Value) pointer { - return pointer{p: unsafe.Pointer(v.Pointer())} -} - -// offset converts from a pointer to a structure to a pointer to -// one of its fields. -func (p pointer) offset(f field) pointer { - // For safety, we should panic if !f.IsValid, however calling panic causes - // this to no longer be inlineable, which is a serious performance cost. 
- /* - if !f.IsValid() { - panic("invalid field") - } - */ - return pointer{p: unsafe.Pointer(uintptr(p.p) + uintptr(f))} -} - -func (p pointer) isNil() bool { - return p.p == nil -} - -func (p pointer) toInt64() *int64 { - return (*int64)(p.p) -} -func (p pointer) toInt64Ptr() **int64 { - return (**int64)(p.p) -} -func (p pointer) toInt64Slice() *[]int64 { - return (*[]int64)(p.p) -} -func (p pointer) toInt32() *int32 { - return (*int32)(p.p) -} - -// See pointer_reflect.go for why toInt32Ptr/Slice doesn't exist. -/* - func (p pointer) toInt32Ptr() **int32 { - return (**int32)(p.p) - } - func (p pointer) toInt32Slice() *[]int32 { - return (*[]int32)(p.p) - } -*/ -func (p pointer) getInt32Ptr() *int32 { - return *(**int32)(p.p) -} -func (p pointer) setInt32Ptr(v int32) { - *(**int32)(p.p) = &v -} - -// getInt32Slice loads a []int32 from p. -// The value returned is aliased with the original slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) getInt32Slice() []int32 { - return *(*[]int32)(p.p) -} - -// setInt32Slice stores a []int32 to p. -// The value set is aliased with the input slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) setInt32Slice(v []int32) { - *(*[]int32)(p.p) = v -} - -// TODO: Can we get rid of appendInt32Slice and use setInt32Slice instead? -func (p pointer) appendInt32Slice(v int32) { - s := (*[]int32)(p.p) - *s = append(*s, v) -} - -func (p pointer) toUint64() *uint64 { - return (*uint64)(p.p) -} -func (p pointer) toUint64Ptr() **uint64 { - return (**uint64)(p.p) -} -func (p pointer) toUint64Slice() *[]uint64 { - return (*[]uint64)(p.p) -} -func (p pointer) toUint32() *uint32 { - return (*uint32)(p.p) -} -func (p pointer) toUint32Ptr() **uint32 { - return (**uint32)(p.p) -} -func (p pointer) toUint32Slice() *[]uint32 { - return (*[]uint32)(p.p) -} -func (p pointer) toBool() *bool { - return (*bool)(p.p) -} -func (p pointer) toBoolPtr() **bool { - return (**bool)(p.p) -} -func (p pointer) toBoolSlice() *[]bool { - return (*[]bool)(p.p) -} -func (p pointer) toFloat64() *float64 { - return (*float64)(p.p) -} -func (p pointer) toFloat64Ptr() **float64 { - return (**float64)(p.p) -} -func (p pointer) toFloat64Slice() *[]float64 { - return (*[]float64)(p.p) -} -func (p pointer) toFloat32() *float32 { - return (*float32)(p.p) -} -func (p pointer) toFloat32Ptr() **float32 { - return (**float32)(p.p) -} -func (p pointer) toFloat32Slice() *[]float32 { - return (*[]float32)(p.p) -} -func (p pointer) toString() *string { - return (*string)(p.p) -} -func (p pointer) toStringPtr() **string { - return (**string)(p.p) -} -func (p pointer) toStringSlice() *[]string { - return (*[]string)(p.p) -} -func (p pointer) toBytes() *[]byte { - return (*[]byte)(p.p) -} -func (p pointer) toBytesSlice() *[][]byte { - return (*[][]byte)(p.p) -} -func (p pointer) toExtensions() *XXX_InternalExtensions { - return (*XXX_InternalExtensions)(p.p) -} -func (p pointer) toOldExtensions() *map[int32]Extension { - return (*map[int32]Extension)(p.p) -} - -// getPointerSlice loads []*T from p as a []pointer. -// The value returned is aliased with the original slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) getPointerSlice() []pointer { - // Super-tricky - p should point to a []*T where T is a - // message type. We load it as []pointer. - return *(*[]pointer)(p.p) -} - -// setPointerSlice stores []pointer into p as a []*T. 
-// The value set is aliased with the input slice. -// This behavior differs from the implementation in pointer_reflect.go. -func (p pointer) setPointerSlice(v []pointer) { - // Super-tricky - p should point to a []*T where T is a - // message type. We store it as []pointer. - *(*[]pointer)(p.p) = v -} - -// getPointer loads the pointer at p and returns it. -func (p pointer) getPointer() pointer { - return pointer{p: *(*unsafe.Pointer)(p.p)} -} - -// setPointer stores the pointer q at p. -func (p pointer) setPointer(q pointer) { - *(*unsafe.Pointer)(p.p) = q.p -} - -// append q to the slice pointed to by p. -func (p pointer) appendPointer(q pointer) { - s := (*[]unsafe.Pointer)(p.p) - *s = append(*s, q.p) -} - -// getInterfacePointer returns a pointer that points to the -// interface data of the interface pointed by p. -func (p pointer) getInterfacePointer() pointer { - // Super-tricky - read pointer out of data word of interface value. - return pointer{p: (*(*[2]unsafe.Pointer)(p.p))[1]} -} - -// asPointerTo returns a reflect.Value that is a pointer to an -// object of type t stored at p. -func (p pointer) asPointerTo(t reflect.Type) reflect.Value { - return reflect.NewAt(t, p.p) -} - -func atomicLoadUnmarshalInfo(p **unmarshalInfo) *unmarshalInfo { - return (*unmarshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreUnmarshalInfo(p **unmarshalInfo, v *unmarshalInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadMarshalInfo(p **marshalInfo) *marshalInfo { - return (*marshalInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreMarshalInfo(p **marshalInfo, v *marshalInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadMergeInfo(p **mergeInfo) *mergeInfo { - return (*mergeInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreMergeInfo(p **mergeInfo, v *mergeInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} -func atomicLoadDiscardInfo(p **discardInfo) *discardInfo { - return (*discardInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p)))) -} -func atomicStoreDiscardInfo(p **discardInfo, v *discardInfo) { - atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(v)) -} diff --git a/vendor/github.com/golang/protobuf/proto/properties.go b/vendor/github.com/golang/protobuf/proto/properties.go index 50b99b83..dcdc2202 100644 --- a/vendor/github.com/golang/protobuf/proto/properties.go +++ b/vendor/github.com/golang/protobuf/proto/properties.go @@ -1,163 +1,104 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package proto -/* - * Routines for encoding data into the wire format for protocol buffers. - */ - import ( "fmt" - "log" - "os" "reflect" - "sort" "strconv" "strings" "sync" -) - -const debug bool = false -// Constants that identify the encoding of a value on the wire. -const ( - WireVarint = 0 - WireFixed64 = 1 - WireBytes = 2 - WireStartGroup = 3 - WireEndGroup = 4 - WireFixed32 = 5 + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoimpl" ) -// tagMap is an optimization over map[int]int for typical protocol buffer -// use-cases. Encoded protocol buffers are often in tag order with small tag -// numbers. -type tagMap struct { - fastTags []int - slowTags map[int]int -} - -// tagMapFastLimit is the upper bound on the tag number that will be stored in -// the tagMap slice rather than its map. -const tagMapFastLimit = 1024 - -func (p *tagMap) get(t int) (int, bool) { - if t > 0 && t < tagMapFastLimit { - if t >= len(p.fastTags) { - return 0, false - } - fi := p.fastTags[t] - return fi, fi >= 0 - } - fi, ok := p.slowTags[t] - return fi, ok -} - -func (p *tagMap) put(t int, fi int) { - if t > 0 && t < tagMapFastLimit { - for len(p.fastTags) < t+1 { - p.fastTags = append(p.fastTags, -1) - } - p.fastTags[t] = fi - return - } - if p.slowTags == nil { - p.slowTags = make(map[int]int) - } - p.slowTags[t] = fi -} - -// StructProperties represents properties for all the fields of a struct. -// decoderTags and decoderOrigNames should only be used by the decoder. +// StructProperties represents protocol buffer type information for a +// generated protobuf message in the open-struct API. +// +// Deprecated: Do not use. type StructProperties struct { - Prop []*Properties // properties for each field - reqCount int // required count - decoderTags tagMap // map from proto tag to struct field number - decoderOrigNames map[string]int // map from original name to struct field number - order []int // list of struct field numbers in tag order + // Prop are the properties for each field. + // + // Fields belonging to a oneof are stored in OneofTypes instead, with a + // single Properties representing the parent oneof held here. + // + // The order of Prop matches the order of fields in the Go struct. + // Struct fields that are not related to protobufs have a "XXX_" prefix + // in the Properties.Name and must be ignored by the user. + Prop []*Properties // OneofTypes contains information about the oneof fields in this message. - // It is keyed by the original name of a field. + // It is keyed by the protobuf field name. 
OneofTypes map[string]*OneofProperties } -// OneofProperties represents information about a specific field in a oneof. -type OneofProperties struct { - Type reflect.Type // pointer to generated struct type for this oneof field - Field int // struct field number of the containing oneof in the message - Prop *Properties -} - -// Implement the sorting interface so we can sort the fields in tag order, as recommended by the spec. -// See encode.go, (*Buffer).enc_struct. - -func (sp *StructProperties) Len() int { return len(sp.order) } -func (sp *StructProperties) Less(i, j int) bool { - return sp.Prop[sp.order[i]].Tag < sp.Prop[sp.order[j]].Tag -} -func (sp *StructProperties) Swap(i, j int) { sp.order[i], sp.order[j] = sp.order[j], sp.order[i] } - -// Properties represents the protocol-specific behavior of a single struct field. +// Properties represents the type information for a protobuf message field. +// +// Deprecated: Do not use. type Properties struct { - Name string // name of the field, for error messages - OrigName string // original name before protocol compiler (always set) - JSONName string // name to use for JSON; determined by protoc - Wire string + // Name is a placeholder name with little meaningful semantic value. + // If the name has an "XXX_" prefix, the entire Properties must be ignored. + Name string + // OrigName is the protobuf field name or oneof name. + OrigName string + // JSONName is the JSON name for the protobuf field. + JSONName string + // Enum is a placeholder name for enums. + // For historical reasons, this is neither the Go name for the enum, + // nor the protobuf name for the enum. + Enum string // Deprecated: Do not use. + // Weak contains the full name of the weakly referenced message. + Weak string + // Wire is a string representation of the wire type. + Wire string + // WireType is the protobuf wire type for the field. WireType int - Tag int + // Tag is the protobuf field number. + Tag int + // Required reports whether this is a required field. Required bool + // Optional reports whether this is a optional field. Optional bool + // Repeated reports whether this is a repeated field. Repeated bool - Packed bool // relevant for repeated primitives only - Enum string // set for enum types only - proto3 bool // whether this is known to be a proto3 field - oneof bool // whether this is a oneof field - - Default string // default value - HasDefault bool // whether an explicit default was provided - - stype reflect.Type // set for struct types only - sprop *StructProperties // set for struct types only + // Packed reports whether this is a packed repeated field of scalars. + Packed bool + // Proto3 reports whether this field operates under the proto3 syntax. + Proto3 bool + // Oneof reports whether this field belongs within a oneof. + Oneof bool + + // Default is the default value in string form. + Default string + // HasDefault reports whether the field has a default value. + HasDefault bool + + // MapKeyProp is the properties for the key field for a map field. + MapKeyProp *Properties + // MapValProp is the properties for the value field for a map field. + MapValProp *Properties +} - mtype reflect.Type // set for map types only - MapKeyProp *Properties // set for map types only - MapValProp *Properties // set for map types only +// OneofProperties represents the type information for a protobuf oneof. +// +// Deprecated: Do not use. +type OneofProperties struct { + // Type is a pointer to the generated wrapper type for the field value. 
+ // This is nil for messages that are not in the open-struct API. + Type reflect.Type + // Field is the index into StructProperties.Prop for the containing oneof. + Field int + // Prop is the properties for the field. + Prop *Properties } // String formats the properties in the protobuf struct field tag style. func (p *Properties) String() string { s := p.Wire - s += "," - s += strconv.Itoa(p.Tag) + s += "," + strconv.Itoa(p.Tag) if p.Required { s += ",req" } @@ -171,18 +112,21 @@ func (p *Properties) String() string { s += ",packed" } s += ",name=" + p.OrigName - if p.JSONName != p.OrigName { + if p.JSONName != "" { s += ",json=" + p.JSONName } - if p.proto3 { + if len(p.Enum) > 0 { + s += ",enum=" + p.Enum + } + if len(p.Weak) > 0 { + s += ",weak=" + p.Weak + } + if p.Proto3 { s += ",proto3" } - if p.oneof { + if p.Oneof { s += ",oneof" } - if len(p.Enum) > 0 { - s += ",enum=" + p.Enum - } if p.HasDefault { s += ",def=" + p.Default } @@ -190,355 +134,173 @@ func (p *Properties) String() string { } // Parse populates p by parsing a string in the protobuf struct field tag style. -func (p *Properties) Parse(s string) { - // "bytes,49,opt,name=foo,def=hello!" - fields := strings.Split(s, ",") // breaks def=, but handled below. - if len(fields) < 2 { - fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s) - return - } - - p.Wire = fields[0] - switch p.Wire { - case "varint": - p.WireType = WireVarint - case "fixed32": - p.WireType = WireFixed32 - case "fixed64": - p.WireType = WireFixed64 - case "zigzag32": - p.WireType = WireVarint - case "zigzag64": - p.WireType = WireVarint - case "bytes", "group": - p.WireType = WireBytes - // no numeric converter for non-numeric types - default: - fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s) - return - } - - var err error - p.Tag, err = strconv.Atoi(fields[1]) - if err != nil { - return - } - -outer: - for i := 2; i < len(fields); i++ { - f := fields[i] - switch { - case f == "req": - p.Required = true - case f == "opt": +func (p *Properties) Parse(tag string) { + // For example: "bytes,49,opt,name=foo,def=hello!" 
+ for len(tag) > 0 { + i := strings.IndexByte(tag, ',') + if i < 0 { + i = len(tag) + } + switch s := tag[:i]; { + case strings.HasPrefix(s, "name="): + p.OrigName = s[len("name="):] + case strings.HasPrefix(s, "json="): + p.JSONName = s[len("json="):] + case strings.HasPrefix(s, "enum="): + p.Enum = s[len("enum="):] + case strings.HasPrefix(s, "weak="): + p.Weak = s[len("weak="):] + case strings.Trim(s, "0123456789") == "": + n, _ := strconv.ParseUint(s, 10, 32) + p.Tag = int(n) + case s == "opt": p.Optional = true - case f == "rep": + case s == "req": + p.Required = true + case s == "rep": p.Repeated = true - case f == "packed": + case s == "varint" || s == "zigzag32" || s == "zigzag64": + p.Wire = s + p.WireType = WireVarint + case s == "fixed32": + p.Wire = s + p.WireType = WireFixed32 + case s == "fixed64": + p.Wire = s + p.WireType = WireFixed64 + case s == "bytes": + p.Wire = s + p.WireType = WireBytes + case s == "group": + p.Wire = s + p.WireType = WireStartGroup + case s == "packed": p.Packed = true - case strings.HasPrefix(f, "name="): - p.OrigName = f[5:] - case strings.HasPrefix(f, "json="): - p.JSONName = f[5:] - case strings.HasPrefix(f, "enum="): - p.Enum = f[5:] - case f == "proto3": - p.proto3 = true - case f == "oneof": - p.oneof = true - case strings.HasPrefix(f, "def="): + case s == "proto3": + p.Proto3 = true + case s == "oneof": + p.Oneof = true + case strings.HasPrefix(s, "def="): + // The default tag is special in that everything afterwards is the + // default regardless of the presence of commas. p.HasDefault = true - p.Default = f[4:] // rest of string - if i+1 < len(fields) { - // Commas aren't escaped, and def is always last. - p.Default += "," + strings.Join(fields[i+1:], ",") - break outer - } - } - } -} - -var protoMessageType = reflect.TypeOf((*Message)(nil)).Elem() - -// setFieldProps initializes the field properties for submessages and maps. -func (p *Properties) setFieldProps(typ reflect.Type, f *reflect.StructField, lockGetProp bool) { - switch t1 := typ; t1.Kind() { - case reflect.Ptr: - if t1.Elem().Kind() == reflect.Struct { - p.stype = t1.Elem() - } - - case reflect.Slice: - if t2 := t1.Elem(); t2.Kind() == reflect.Ptr && t2.Elem().Kind() == reflect.Struct { - p.stype = t2.Elem() - } - - case reflect.Map: - p.mtype = t1 - p.MapKeyProp = &Properties{} - p.MapKeyProp.init(reflect.PtrTo(p.mtype.Key()), "Key", f.Tag.Get("protobuf_key"), nil, lockGetProp) - p.MapValProp = &Properties{} - vtype := p.mtype.Elem() - if vtype.Kind() != reflect.Ptr && vtype.Kind() != reflect.Slice { - // The value type is not a message (*T) or bytes ([]byte), - // so we need encoders for the pointer to this type. - vtype = reflect.PtrTo(vtype) - } - p.MapValProp.init(vtype, "Value", f.Tag.Get("protobuf_val"), nil, lockGetProp) - } - - if p.stype != nil { - if lockGetProp { - p.sprop = GetProperties(p.stype) - } else { - p.sprop = getPropertiesLocked(p.stype) + p.Default, i = tag[len("def="):], len(tag) } + tag = strings.TrimPrefix(tag[i:], ",") } } -var ( - marshalerType = reflect.TypeOf((*Marshaler)(nil)).Elem() -) - // Init populates the properties from a protocol buffer struct tag. +// +// Deprecated: Do not use. func (p *Properties) Init(typ reflect.Type, name, tag string, f *reflect.StructField) { - p.init(typ, name, tag, f, true) -} - -func (p *Properties) init(typ reflect.Type, name, tag string, f *reflect.StructField, lockGetProp bool) { - // "bytes,49,opt,def=hello!" 
p.Name = name p.OrigName = name if tag == "" { return } p.Parse(tag) - p.setFieldProps(typ, f, lockGetProp) + + if typ != nil && typ.Kind() == reflect.Map { + p.MapKeyProp = new(Properties) + p.MapKeyProp.Init(nil, "Key", f.Tag.Get("protobuf_key"), nil) + p.MapValProp = new(Properties) + p.MapValProp.Init(nil, "Value", f.Tag.Get("protobuf_val"), nil) + } } -var ( - propertiesMu sync.RWMutex - propertiesMap = make(map[reflect.Type]*StructProperties) -) +var propertiesCache sync.Map // map[reflect.Type]*StructProperties -// GetProperties returns the list of properties for the type represented by t. -// t must represent a generated struct type of a protocol message. +// GetProperties returns the list of properties for the type represented by t, +// which must be a generated protocol buffer message in the open-struct API, +// where protobuf message fields are represented by exported Go struct fields. +// +// Deprecated: Use protobuf reflection instead. func GetProperties(t reflect.Type) *StructProperties { - if t.Kind() != reflect.Struct { - panic("proto: type must have kind struct") - } - - // Most calls to GetProperties in a long-running program will be - // retrieving details for types we have seen before. - propertiesMu.RLock() - sprop, ok := propertiesMap[t] - propertiesMu.RUnlock() - if ok { - if collectStats { - stats.Chit++ - } - return sprop + if p, ok := propertiesCache.Load(t); ok { + return p.(*StructProperties) } - - propertiesMu.Lock() - sprop = getPropertiesLocked(t) - propertiesMu.Unlock() - return sprop + p, _ := propertiesCache.LoadOrStore(t, newProperties(t)) + return p.(*StructProperties) } -// getPropertiesLocked requires that propertiesMu is held. -func getPropertiesLocked(t reflect.Type) *StructProperties { - if prop, ok := propertiesMap[t]; ok { - if collectStats { - stats.Chit++ - } - return prop - } - if collectStats { - stats.Cmiss++ +func newProperties(t reflect.Type) *StructProperties { + if t.Kind() != reflect.Struct { + panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t)) } + var hasOneof bool prop := new(StructProperties) - // in case of recursive protos, fill this in now. - propertiesMap[t] = prop - - // build properties - prop.Prop = make([]*Properties, t.NumField()) - prop.order = make([]int, t.NumField()) + // Construct a list of properties for each field in the struct. for i := 0; i < t.NumField(); i++ { - f := t.Field(i) p := new(Properties) - name := f.Name - p.init(f.Type, name, f.Tag.Get("protobuf"), &f, false) + f := t.Field(i) + tagField := f.Tag.Get("protobuf") + p.Init(f.Type, f.Name, tagField, &f) - oneof := f.Tag.Get("protobuf_oneof") // special case - if oneof != "" { - // Oneof fields don't use the traditional protobuf tag. - p.OrigName = oneof - } - prop.Prop[i] = p - prop.order[i] = i - if debug { - print(i, " ", f.Name, " ", t.String(), " ") - if p.Tag > 0 { - print(p.String()) - } - print("\n") + tagOneof := f.Tag.Get("protobuf_oneof") + if tagOneof != "" { + hasOneof = true + p.OrigName = tagOneof } - } - // Re-order prop.order. - sort.Sort(prop) + // Rename unrelated struct fields with the "XXX_" prefix since so much + // user code simply checks for this to exclude special fields. 
+ if tagField == "" && tagOneof == "" && !strings.HasPrefix(p.Name, "XXX_") { + p.Name = "XXX_" + p.Name + p.OrigName = "XXX_" + p.OrigName + } else if p.Weak != "" { + p.Name = p.OrigName // avoid possible "XXX_" prefix on weak field + } - type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) + prop.Prop = append(prop.Prop, p) } - if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { - var oots []interface{} - _, _, _, oots = om.XXX_OneofFuncs() - // Interpret oneof metadata. + // Construct a mapping of oneof field names to properties. + if hasOneof { + var oneofWrappers []interface{} + if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofFuncs"); ok { + oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[3].Interface().([]interface{}) + } + if fn, ok := reflect.PtrTo(t).MethodByName("XXX_OneofWrappers"); ok { + oneofWrappers = fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[0].Interface().([]interface{}) + } + if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(protoreflect.ProtoMessage); ok { + if m, ok := m.ProtoReflect().(interface{ ProtoMessageInfo() *protoimpl.MessageInfo }); ok { + oneofWrappers = m.ProtoMessageInfo().OneofWrappers + } + } + prop.OneofTypes = make(map[string]*OneofProperties) - for _, oot := range oots { - oop := &OneofProperties{ - Type: reflect.ValueOf(oot).Type(), // *T + for _, wrapper := range oneofWrappers { + p := &OneofProperties{ + Type: reflect.ValueOf(wrapper).Type(), // *T Prop: new(Properties), } - sft := oop.Type.Elem().Field(0) - oop.Prop.Name = sft.Name - oop.Prop.Parse(sft.Tag.Get("protobuf")) - // There will be exactly one interface field that - // this new value is assignable to. - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if f.Type.Kind() != reflect.Interface { - continue + f := p.Type.Elem().Field(0) + p.Prop.Name = f.Name + p.Prop.Parse(f.Tag.Get("protobuf")) + + // Determine the struct field that contains this oneof. + // Each wrapper is assignable to exactly one parent field. + var foundOneof bool + for i := 0; i < t.NumField() && !foundOneof; i++ { + if p.Type.AssignableTo(t.Field(i).Type) { + p.Field = i + foundOneof = true } - if !oop.Type.AssignableTo(f.Type) { - continue - } - oop.Field = i - break } - prop.OneofTypes[oop.Prop.OrigName] = oop - } - } - - // build required counts - // build tags - reqCount := 0 - prop.decoderOrigNames = make(map[string]int) - for i, p := range prop.Prop { - if strings.HasPrefix(p.Name, "XXX_") { - // Internal fields should not appear in tags/origNames maps. - // They are handled specially when encoding and decoding. - continue - } - if p.Required { - reqCount++ + if !foundOneof { + panic(fmt.Sprintf("%v is not a generated message in the open-struct API", t)) + } + prop.OneofTypes[p.Prop.OrigName] = p } - prop.decoderTags.put(p.Tag, i) - prop.decoderOrigNames[p.OrigName] = i } - prop.reqCount = reqCount return prop } -// A global registry of enum types. -// The generated code will register the generated maps by calling RegisterEnum. - -var enumValueMaps = make(map[string]map[string]int32) - -// RegisterEnum is called from the generated code to install the enum descriptor -// maps into the global table to aid parsing text format protocol buffers. 
-func RegisterEnum(typeName string, unusedNameMap map[int32]string, valueMap map[string]int32) { - if _, ok := enumValueMaps[typeName]; ok { - panic("proto: duplicate enum registered: " + typeName) - } - enumValueMaps[typeName] = valueMap -} - -// EnumValueMap returns the mapping from names to integers of the -// enum type enumType, or a nil if not found. -func EnumValueMap(enumType string) map[string]int32 { - return enumValueMaps[enumType] -} - -// A registry of all linked message types. -// The string is a fully-qualified proto name ("pkg.Message"). -var ( - protoTypedNils = make(map[string]Message) // a map from proto names to typed nil pointers - protoMapTypes = make(map[string]reflect.Type) // a map from proto names to map types - revProtoTypes = make(map[reflect.Type]string) -) - -// RegisterType is called from generated code and maps from the fully qualified -// proto name to the type (pointer to struct) of the protocol buffer. -func RegisterType(x Message, name string) { - if _, ok := protoTypedNils[name]; ok { - // TODO: Some day, make this a panic. - log.Printf("proto: duplicate proto type registered: %s", name) - return - } - t := reflect.TypeOf(x) - if v := reflect.ValueOf(x); v.Kind() == reflect.Ptr && v.Pointer() == 0 { - // Generated code always calls RegisterType with nil x. - // This check is just for extra safety. - protoTypedNils[name] = x - } else { - protoTypedNils[name] = reflect.Zero(t).Interface().(Message) - } - revProtoTypes[t] = name -} - -// RegisterMapType is called from generated code and maps from the fully qualified -// proto name to the native map type of the proto map definition. -func RegisterMapType(x interface{}, name string) { - if reflect.TypeOf(x).Kind() != reflect.Map { - panic(fmt.Sprintf("RegisterMapType(%T, %q); want map", x, name)) - } - if _, ok := protoMapTypes[name]; ok { - log.Printf("proto: duplicate proto type registered: %s", name) - return - } - t := reflect.TypeOf(x) - protoMapTypes[name] = t - revProtoTypes[t] = name -} - -// MessageName returns the fully-qualified proto name for the given message type. -func MessageName(x Message) string { - type xname interface { - XXX_MessageName() string - } - if m, ok := x.(xname); ok { - return m.XXX_MessageName() - } - return revProtoTypes[reflect.TypeOf(x)] -} - -// MessageType returns the message type (pointer to struct) for a named message. -// The type is not guaranteed to implement proto.Message if the name refers to a -// map entry. -func MessageType(name string) reflect.Type { - if t, ok := protoTypedNils[name]; ok { - return reflect.TypeOf(t) - } - return protoMapTypes[name] -} - -// A registry of all linked proto files. -var ( - protoFiles = make(map[string][]byte) // file name => fileDescriptor -) - -// RegisterFile is called from generated code and maps from the -// full file name of a .proto file to its compressed FileDescriptorProto. -func RegisterFile(filename string, fileDescriptor []byte) { - protoFiles[filename] = fileDescriptor -} - -// FileDescriptor returns the compressed FileDescriptorProto for a .proto file. 
-func FileDescriptor(filename string) []byte { return protoFiles[filename] } +func (sp *StructProperties) Len() int { return len(sp.Prop) } +func (sp *StructProperties) Less(i, j int) bool { return false } +func (sp *StructProperties) Swap(i, j int) { return } diff --git a/vendor/github.com/golang/protobuf/proto/proto.go b/vendor/github.com/golang/protobuf/proto/proto.go new file mode 100644 index 00000000..5aee89c3 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/proto.go @@ -0,0 +1,167 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package proto provides functionality for handling protocol buffer messages. +// In particular, it provides marshaling and unmarshaling between a protobuf +// message and the binary wire format. +// +// See https://developers.google.com/protocol-buffers/docs/gotutorial for +// more information. +// +// Deprecated: Use the "google.golang.org/protobuf/proto" package instead. +package proto + +import ( + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" + "google.golang.org/protobuf/runtime/protoimpl" +) + +const ( + ProtoPackageIsVersion1 = true + ProtoPackageIsVersion2 = true + ProtoPackageIsVersion3 = true + ProtoPackageIsVersion4 = true +) + +// GeneratedEnum is any enum type generated by protoc-gen-go +// which is a named int32 kind. +// This type exists for documentation purposes. +type GeneratedEnum interface{} + +// GeneratedMessage is any message type generated by protoc-gen-go +// which is a pointer to a named struct kind. +// This type exists for documentation purposes. +type GeneratedMessage interface{} + +// Message is a protocol buffer message. +// +// This is the v1 version of the message interface and is marginally better +// than an empty interface as it lacks any method to programatically interact +// with the contents of the message. +// +// A v2 message is declared in "google.golang.org/protobuf/proto".Message and +// exposes protobuf reflection as a first-class feature of the interface. +// +// To convert a v1 message to a v2 message, use the MessageV2 function. +// To convert a v2 message to a v1 message, use the MessageV1 function. +type Message = protoiface.MessageV1 + +// MessageV1 converts either a v1 or v2 message to a v1 message. +// It returns nil if m is nil. +func MessageV1(m GeneratedMessage) protoiface.MessageV1 { + return protoimpl.X.ProtoMessageV1Of(m) +} + +// MessageV2 converts either a v1 or v2 message to a v2 message. +// It returns nil if m is nil. +func MessageV2(m GeneratedMessage) protoV2.Message { + return protoimpl.X.ProtoMessageV2Of(m) +} + +// MessageReflect returns a reflective view for a message. +// It returns nil if m is nil. +func MessageReflect(m Message) protoreflect.Message { + return protoimpl.X.MessageOf(m) +} + +// Marshaler is implemented by messages that can marshal themselves. +// This interface is used by the following functions: Size, Marshal, +// Buffer.Marshal, and Buffer.EncodeMessage. +// +// Deprecated: Do not implement. +type Marshaler interface { + // Marshal formats the encoded bytes of the message. + // It should be deterministic and emit valid protobuf wire data. + // The caller takes ownership of the returned buffer. + Marshal() ([]byte, error) +} + +// Unmarshaler is implemented by messages that can unmarshal themselves. 
+// This interface is used by the following functions: Unmarshal, UnmarshalMerge, +// Buffer.Unmarshal, Buffer.DecodeMessage, and Buffer.DecodeGroup. +// +// Deprecated: Do not implement. +type Unmarshaler interface { + // Unmarshal parses the encoded bytes of the protobuf wire input. + // The provided buffer is only valid for during method call. + // It should not reset the receiver message. + Unmarshal([]byte) error +} + +// Merger is implemented by messages that can merge themselves. +// This interface is used by the following functions: Clone and Merge. +// +// Deprecated: Do not implement. +type Merger interface { + // Merge merges the contents of src into the receiver message. + // It clones all data structures in src such that it aliases no mutable + // memory referenced by src. + Merge(src Message) +} + +// RequiredNotSetError is an error type returned when +// marshaling or unmarshaling a message with missing required fields. +type RequiredNotSetError struct { + err error +} + +func (e *RequiredNotSetError) Error() string { + if e.err != nil { + return e.err.Error() + } + return "proto: required field not set" +} +func (e *RequiredNotSetError) RequiredNotSet() bool { + return true +} + +func checkRequiredNotSet(m protoV2.Message) error { + if err := protoV2.CheckInitialized(m); err != nil { + return &RequiredNotSetError{err: err} + } + return nil +} + +// Clone returns a deep copy of src. +func Clone(src Message) Message { + return MessageV1(protoV2.Clone(MessageV2(src))) +} + +// Merge merges src into dst, which must be messages of the same type. +// +// Populated scalar fields in src are copied to dst, while populated +// singular messages in src are merged into dst by recursively calling Merge. +// The elements of every list field in src is appended to the corresponded +// list fields in dst. The entries of every map field in src is copied into +// the corresponding map field in dst, possibly replacing existing entries. +// The unknown fields of src are appended to the unknown fields of dst. +func Merge(dst, src Message) { + protoV2.Merge(MessageV2(dst), MessageV2(src)) +} + +// Equal reports whether two messages are equal. +// If two messages marshal to the same bytes under deterministic serialization, +// then Equal is guaranteed to report true. +// +// Two messages are equal if they are the same protobuf message type, +// have the same set of populated known and extension field values, +// and the same set of unknown fields values. +// +// Scalar values are compared with the equivalent of the == operator in Go, +// except bytes values which are compared using bytes.Equal and +// floating point values which specially treat NaNs as equal. +// Message values are compared by recursively calling Equal. +// Lists are equal if each element value is also equal. +// Maps are equal if they have the same set of keys, where the pair of values +// for each key is also equal. +func Equal(x, y Message) bool { + return protoV2.Equal(MessageV2(x), MessageV2(y)) +} + +func isMessageSet(md protoreflect.MessageDescriptor) bool { + ms, ok := md.(interface{ IsMessageSet() bool }) + return ok && ms.IsMessageSet() +} diff --git a/vendor/github.com/golang/protobuf/proto/registry.go b/vendor/github.com/golang/protobuf/proto/registry.go new file mode 100644 index 00000000..066b4323 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/registry.go @@ -0,0 +1,317 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "bytes" + "compress/gzip" + "fmt" + "io/ioutil" + "reflect" + "strings" + "sync" + + "google.golang.org/protobuf/reflect/protodesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoimpl" +) + +// filePath is the path to the proto source file. +type filePath = string // e.g., "google/protobuf/descriptor.proto" + +// fileDescGZIP is the compressed contents of the encoded FileDescriptorProto. +type fileDescGZIP = []byte + +var fileCache sync.Map // map[filePath]fileDescGZIP + +// RegisterFile is called from generated code to register the compressed +// FileDescriptorProto with the file path for a proto source file. +// +// Deprecated: Use protoregistry.GlobalFiles.RegisterFile instead. +func RegisterFile(s filePath, d fileDescGZIP) { + // Decompress the descriptor. + zr, err := gzip.NewReader(bytes.NewReader(d)) + if err != nil { + panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err)) + } + b, err := ioutil.ReadAll(zr) + if err != nil { + panic(fmt.Sprintf("proto: invalid compressed file descriptor: %v", err)) + } + + // Construct a protoreflect.FileDescriptor from the raw descriptor. + // Note that DescBuilder.Build automatically registers the constructed + // file descriptor with the v2 registry. + protoimpl.DescBuilder{RawDescriptor: b}.Build() + + // Locally cache the raw descriptor form for the file. + fileCache.Store(s, d) +} + +// FileDescriptor returns the compressed FileDescriptorProto given the file path +// for a proto source file. It returns nil if not found. +// +// Deprecated: Use protoregistry.GlobalFiles.FindFileByPath instead. +func FileDescriptor(s filePath) fileDescGZIP { + if v, ok := fileCache.Load(s); ok { + return v.(fileDescGZIP) + } + + // Find the descriptor in the v2 registry. + var b []byte + if fd, _ := protoregistry.GlobalFiles.FindFileByPath(s); fd != nil { + b, _ = Marshal(protodesc.ToFileDescriptorProto(fd)) + } + + // Locally cache the raw descriptor form for the file. + if len(b) > 0 { + v, _ := fileCache.LoadOrStore(s, protoimpl.X.CompressGZIP(b)) + return v.(fileDescGZIP) + } + return nil +} + +// enumName is the name of an enum. For historical reasons, the enum name is +// neither the full Go name nor the full protobuf name of the enum. +// The name is the dot-separated combination of just the proto package that the +// enum is declared within followed by the Go type name of the generated enum. +type enumName = string // e.g., "my.proto.package.GoMessage_GoEnum" + +// enumsByName maps enum values by name to their numeric counterpart. +type enumsByName = map[string]int32 + +// enumsByNumber maps enum values by number to their name counterpart. +type enumsByNumber = map[int32]string + +var enumCache sync.Map // map[enumName]enumsByName +var numFilesCache sync.Map // map[protoreflect.FullName]int + +// RegisterEnum is called from the generated code to register the mapping of +// enum value names to enum numbers for the enum identified by s. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterEnum instead. 
+func RegisterEnum(s enumName, _ enumsByNumber, m enumsByName) { + if _, ok := enumCache.Load(s); ok { + panic("proto: duplicate enum registered: " + s) + } + enumCache.Store(s, m) + + // This does not forward registration to the v2 registry since this API + // lacks sufficient information to construct a complete v2 enum descriptor. +} + +// EnumValueMap returns the mapping from enum value names to enum numbers for +// the enum of the given name. It returns nil if not found. +// +// Deprecated: Use protoregistry.GlobalTypes.FindEnumByName instead. +func EnumValueMap(s enumName) enumsByName { + if v, ok := enumCache.Load(s); ok { + return v.(enumsByName) + } + + // Check whether the cache is stale. If the number of files in the current + // package differs, then it means that some enums may have been recently + // registered upstream that we do not know about. + var protoPkg protoreflect.FullName + if i := strings.LastIndexByte(s, '.'); i >= 0 { + protoPkg = protoreflect.FullName(s[:i]) + } + v, _ := numFilesCache.Load(protoPkg) + numFiles, _ := v.(int) + if protoregistry.GlobalFiles.NumFilesByPackage(protoPkg) == numFiles { + return nil // cache is up-to-date; was not found earlier + } + + // Update the enum cache for all enums declared in the given proto package. + numFiles = 0 + protoregistry.GlobalFiles.RangeFilesByPackage(protoPkg, func(fd protoreflect.FileDescriptor) bool { + walkEnums(fd, func(ed protoreflect.EnumDescriptor) { + name := protoimpl.X.LegacyEnumName(ed) + if _, ok := enumCache.Load(name); !ok { + m := make(enumsByName) + evs := ed.Values() + for i := evs.Len() - 1; i >= 0; i-- { + ev := evs.Get(i) + m[string(ev.Name())] = int32(ev.Number()) + } + enumCache.LoadOrStore(name, m) + } + }) + numFiles++ + return true + }) + numFilesCache.Store(protoPkg, numFiles) + + // Check cache again for enum map. + if v, ok := enumCache.Load(s); ok { + return v.(enumsByName) + } + return nil +} + +// walkEnums recursively walks all enums declared in d. +func walkEnums(d interface { + Enums() protoreflect.EnumDescriptors + Messages() protoreflect.MessageDescriptors +}, f func(protoreflect.EnumDescriptor)) { + eds := d.Enums() + for i := eds.Len() - 1; i >= 0; i-- { + f(eds.Get(i)) + } + mds := d.Messages() + for i := mds.Len() - 1; i >= 0; i-- { + walkEnums(mds.Get(i), f) + } +} + +// messageName is the full name of protobuf message. +type messageName = string + +var messageTypeCache sync.Map // map[messageName]reflect.Type + +// RegisterType is called from generated code to register the message Go type +// for a message of the given name. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterMessage instead. +func RegisterType(m Message, s messageName) { + mt := protoimpl.X.LegacyMessageTypeOf(m, protoreflect.FullName(s)) + if err := protoregistry.GlobalTypes.RegisterMessage(mt); err != nil { + panic(err) + } + messageTypeCache.Store(s, reflect.TypeOf(m)) +} + +// RegisterMapType is called from generated code to register the Go map type +// for a protobuf message representing a map entry. +// +// Deprecated: Do not use. +func RegisterMapType(m interface{}, s messageName) { + t := reflect.TypeOf(m) + if t.Kind() != reflect.Map { + panic(fmt.Sprintf("invalid map kind: %v", t)) + } + if _, ok := messageTypeCache.Load(s); ok { + panic(fmt.Errorf("proto: duplicate proto message registered: %s", s)) + } + messageTypeCache.Store(s, t) +} + +// MessageType returns the message type for a named message. +// It returns nil if not found. 
+// +// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead. +func MessageType(s messageName) reflect.Type { + if v, ok := messageTypeCache.Load(s); ok { + return v.(reflect.Type) + } + + // Derive the message type from the v2 registry. + var t reflect.Type + if mt, _ := protoregistry.GlobalTypes.FindMessageByName(protoreflect.FullName(s)); mt != nil { + t = messageGoType(mt) + } + + // If we could not get a concrete type, it is possible that it is a + // pseudo-message for a map entry. + if t == nil { + d, _ := protoregistry.GlobalFiles.FindDescriptorByName(protoreflect.FullName(s)) + if md, _ := d.(protoreflect.MessageDescriptor); md != nil && md.IsMapEntry() { + kt := goTypeForField(md.Fields().ByNumber(1)) + vt := goTypeForField(md.Fields().ByNumber(2)) + t = reflect.MapOf(kt, vt) + } + } + + // Locally cache the message type for the given name. + if t != nil { + v, _ := messageTypeCache.LoadOrStore(s, t) + return v.(reflect.Type) + } + return nil +} + +func goTypeForField(fd protoreflect.FieldDescriptor) reflect.Type { + switch k := fd.Kind(); k { + case protoreflect.EnumKind: + if et, _ := protoregistry.GlobalTypes.FindEnumByName(fd.Enum().FullName()); et != nil { + return enumGoType(et) + } + return reflect.TypeOf(protoreflect.EnumNumber(0)) + case protoreflect.MessageKind, protoreflect.GroupKind: + if mt, _ := protoregistry.GlobalTypes.FindMessageByName(fd.Message().FullName()); mt != nil { + return messageGoType(mt) + } + return reflect.TypeOf((*protoreflect.Message)(nil)).Elem() + default: + return reflect.TypeOf(fd.Default().Interface()) + } +} + +func enumGoType(et protoreflect.EnumType) reflect.Type { + return reflect.TypeOf(et.New(0)) +} + +func messageGoType(mt protoreflect.MessageType) reflect.Type { + return reflect.TypeOf(MessageV1(mt.Zero().Interface())) +} + +// MessageName returns the full protobuf name for the given message type. +// +// Deprecated: Use protoreflect.MessageDescriptor.FullName instead. +func MessageName(m Message) messageName { + if m == nil { + return "" + } + if m, ok := m.(interface{ XXX_MessageName() messageName }); ok { + return m.XXX_MessageName() + } + return messageName(protoimpl.X.MessageDescriptorOf(m).FullName()) +} + +// RegisterExtension is called from the generated code to register +// the extension descriptor. +// +// Deprecated: Use protoregistry.GlobalTypes.RegisterExtension instead. +func RegisterExtension(d *ExtensionDesc) { + if err := protoregistry.GlobalTypes.RegisterExtension(d); err != nil { + panic(err) + } +} + +type extensionsByNumber = map[int32]*ExtensionDesc + +var extensionCache sync.Map // map[messageName]extensionsByNumber + +// RegisteredExtensions returns a map of the registered extensions for the +// provided protobuf message, indexed by the extension field number. +// +// Deprecated: Use protoregistry.GlobalTypes.RangeExtensionsByMessage instead. +func RegisteredExtensions(m Message) extensionsByNumber { + // Check whether the cache is stale. If the number of extensions for + // the given message differs, then it means that some extensions were + // recently registered upstream that we do not know about. + s := MessageName(m) + v, _ := extensionCache.Load(s) + xs, _ := v.(extensionsByNumber) + if protoregistry.GlobalTypes.NumExtensionsByMessage(protoreflect.FullName(s)) == len(xs) { + return xs // cache is up-to-date + } + + // Cache is stale, re-compute the extensions map. 
+ xs = make(extensionsByNumber) + protoregistry.GlobalTypes.RangeExtensionsByMessage(protoreflect.FullName(s), func(xt protoreflect.ExtensionType) bool { + if xd, ok := xt.(*ExtensionDesc); ok { + xs[int32(xt.TypeDescriptor().Number())] = xd + } else { + // TODO: This implies that the protoreflect.ExtensionType is a + // custom type not generated by protoc-gen-go. We could try and + // convert the type to an ExtensionDesc. + } + return true + }) + extensionCache.Store(s, xs) + return xs +} diff --git a/vendor/github.com/golang/protobuf/proto/table_marshal.go b/vendor/github.com/golang/protobuf/proto/table_marshal.go deleted file mode 100644 index b1679449..00000000 --- a/vendor/github.com/golang/protobuf/proto/table_marshal.go +++ /dev/null @@ -1,2767 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "errors" - "fmt" - "math" - "reflect" - "sort" - "strconv" - "strings" - "sync" - "sync/atomic" - "unicode/utf8" -) - -// a sizer takes a pointer to a field and the size of its tag, computes the size of -// the encoded data. -type sizer func(pointer, int) int - -// a marshaler takes a byte slice, a pointer to a field, and its tag (in wire format), -// marshals the field to the end of the slice, returns the slice and error (if any). -type marshaler func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) - -// marshalInfo is the information used for marshaling a message. 
-type marshalInfo struct { - typ reflect.Type - fields []*marshalFieldInfo - unrecognized field // offset of XXX_unrecognized - extensions field // offset of XXX_InternalExtensions - v1extensions field // offset of XXX_extensions - sizecache field // offset of XXX_sizecache - initialized int32 // 0 -- only typ is set, 1 -- fully initialized - messageset bool // uses message set wire format - hasmarshaler bool // has custom marshaler - sync.RWMutex // protect extElems map, also for initialization - extElems map[int32]*marshalElemInfo // info of extension elements -} - -// marshalFieldInfo is the information used for marshaling a field of a message. -type marshalFieldInfo struct { - field field - wiretag uint64 // tag in wire format - tagsize int // size of tag in wire format - sizer sizer - marshaler marshaler - isPointer bool - required bool // field is required - name string // name of the field, for error reporting - oneofElems map[reflect.Type]*marshalElemInfo // info of oneof elements -} - -// marshalElemInfo is the information used for marshaling an extension or oneof element. -type marshalElemInfo struct { - wiretag uint64 // tag in wire format - tagsize int // size of tag in wire format - sizer sizer - marshaler marshaler - isptr bool // elem is pointer typed, thus interface of this type is a direct interface (extension only) -} - -var ( - marshalInfoMap = map[reflect.Type]*marshalInfo{} - marshalInfoLock sync.Mutex -) - -// getMarshalInfo returns the information to marshal a given type of message. -// The info it returns may not necessarily initialized. -// t is the type of the message (NOT the pointer to it). -func getMarshalInfo(t reflect.Type) *marshalInfo { - marshalInfoLock.Lock() - u, ok := marshalInfoMap[t] - if !ok { - u = &marshalInfo{typ: t} - marshalInfoMap[t] = u - } - marshalInfoLock.Unlock() - return u -} - -// Size is the entry point from generated code, -// and should be ONLY called by generated code. -// It computes the size of encoded data of msg. -// a is a pointer to a place to store cached marshal info. -func (a *InternalMessageInfo) Size(msg Message) int { - u := getMessageMarshalInfo(msg, a) - ptr := toPointer(&msg) - if ptr.isNil() { - // We get here if msg is a typed nil ((*SomeMessage)(nil)), - // so it satisfies the interface, and msg == nil wouldn't - // catch it. We don't want crash in this case. - return 0 - } - return u.size(ptr) -} - -// Marshal is the entry point from generated code, -// and should be ONLY called by generated code. -// It marshals msg to the end of b. -// a is a pointer to a place to store cached marshal info. -func (a *InternalMessageInfo) Marshal(b []byte, msg Message, deterministic bool) ([]byte, error) { - u := getMessageMarshalInfo(msg, a) - ptr := toPointer(&msg) - if ptr.isNil() { - // We get here if msg is a typed nil ((*SomeMessage)(nil)), - // so it satisfies the interface, and msg == nil wouldn't - // catch it. We don't want crash in this case. - return b, ErrNil - } - return u.marshal(b, ptr, deterministic) -} - -func getMessageMarshalInfo(msg interface{}, a *InternalMessageInfo) *marshalInfo { - // u := a.marshal, but atomically. - // We use an atomic here to ensure memory consistency. - u := atomicLoadMarshalInfo(&a.marshal) - if u == nil { - // Get marshal information from type of message. - t := reflect.ValueOf(msg).Type() - if t.Kind() != reflect.Ptr { - panic(fmt.Sprintf("cannot handle non-pointer message type %v", t)) - } - u = getMarshalInfo(t.Elem()) - // Store it in the cache for later users. 
- // a.marshal = u, but atomically. - atomicStoreMarshalInfo(&a.marshal, u) - } - return u -} - -// size is the main function to compute the size of the encoded data of a message. -// ptr is the pointer to the message. -func (u *marshalInfo) size(ptr pointer) int { - if atomic.LoadInt32(&u.initialized) == 0 { - u.computeMarshalInfo() - } - - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - if u.hasmarshaler { - m := ptr.asPointerTo(u.typ).Interface().(Marshaler) - b, _ := m.Marshal() - return len(b) - } - - n := 0 - for _, f := range u.fields { - if f.isPointer && ptr.offset(f.field).getPointer().isNil() { - // nil pointer always marshals to nothing - continue - } - n += f.sizer(ptr.offset(f.field), f.tagsize) - } - if u.extensions.IsValid() { - e := ptr.offset(u.extensions).toExtensions() - if u.messageset { - n += u.sizeMessageSet(e) - } else { - n += u.sizeExtensions(e) - } - } - if u.v1extensions.IsValid() { - m := *ptr.offset(u.v1extensions).toOldExtensions() - n += u.sizeV1Extensions(m) - } - if u.unrecognized.IsValid() { - s := *ptr.offset(u.unrecognized).toBytes() - n += len(s) - } - // cache the result for use in marshal - if u.sizecache.IsValid() { - atomic.StoreInt32(ptr.offset(u.sizecache).toInt32(), int32(n)) - } - return n -} - -// cachedsize gets the size from cache. If there is no cache (i.e. message is not generated), -// fall back to compute the size. -func (u *marshalInfo) cachedsize(ptr pointer) int { - if u.sizecache.IsValid() { - return int(atomic.LoadInt32(ptr.offset(u.sizecache).toInt32())) - } - return u.size(ptr) -} - -// marshal is the main function to marshal a message. It takes a byte slice and appends -// the encoded data to the end of the slice, returns the slice and error (if any). -// ptr is the pointer to the message. -// If deterministic is true, map is marshaled in deterministic order. -func (u *marshalInfo) marshal(b []byte, ptr pointer, deterministic bool) ([]byte, error) { - if atomic.LoadInt32(&u.initialized) == 0 { - u.computeMarshalInfo() - } - - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - if u.hasmarshaler { - m := ptr.asPointerTo(u.typ).Interface().(Marshaler) - b1, err := m.Marshal() - b = append(b, b1...) - return b, err - } - - var err, errLater error - // The old marshaler encodes extensions at beginning. - if u.extensions.IsValid() { - e := ptr.offset(u.extensions).toExtensions() - if u.messageset { - b, err = u.appendMessageSet(b, e, deterministic) - } else { - b, err = u.appendExtensions(b, e, deterministic) - } - if err != nil { - return b, err - } - } - if u.v1extensions.IsValid() { - m := *ptr.offset(u.v1extensions).toOldExtensions() - b, err = u.appendV1Extensions(b, m, deterministic) - if err != nil { - return b, err - } - } - for _, f := range u.fields { - if f.required { - if ptr.offset(f.field).getPointer().isNil() { - // Required field is not set. - // We record the error but keep going, to give a complete marshaling. - if errLater == nil { - errLater = &RequiredNotSetError{f.name} - } - continue - } - } - if f.isPointer && ptr.offset(f.field).getPointer().isNil() { - // nil pointer always marshals to nothing - continue - } - b, err = f.marshaler(b, ptr.offset(f.field), f.wiretag, deterministic) - if err != nil { - if err1, ok := err.(*RequiredNotSetError); ok { - // Required field in submessage is not set. - // We record the error but keep going, to give a complete marshaling. 
- if errLater == nil { - errLater = &RequiredNotSetError{f.name + "." + err1.field} - } - continue - } - if err == errRepeatedHasNil { - err = errors.New("proto: repeated field " + f.name + " has nil element") - } - if err == errInvalidUTF8 { - if errLater == nil { - fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name - errLater = &invalidUTF8Error{fullName} - } - continue - } - return b, err - } - } - if u.unrecognized.IsValid() { - s := *ptr.offset(u.unrecognized).toBytes() - b = append(b, s...) - } - return b, errLater -} - -// computeMarshalInfo initializes the marshal info. -func (u *marshalInfo) computeMarshalInfo() { - u.Lock() - defer u.Unlock() - if u.initialized != 0 { // non-atomic read is ok as it is protected by the lock - return - } - - t := u.typ - u.unrecognized = invalidField - u.extensions = invalidField - u.v1extensions = invalidField - u.sizecache = invalidField - - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - if reflect.PtrTo(t).Implements(marshalerType) { - u.hasmarshaler = true - atomic.StoreInt32(&u.initialized, 1) - return - } - - // get oneof implementers - var oneofImplementers []interface{} - if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok { - _, _, _, oneofImplementers = m.XXX_OneofFuncs() - } - - n := t.NumField() - - // deal with XXX fields first - for i := 0; i < t.NumField(); i++ { - f := t.Field(i) - if !strings.HasPrefix(f.Name, "XXX_") { - continue - } - switch f.Name { - case "XXX_sizecache": - u.sizecache = toField(&f) - case "XXX_unrecognized": - u.unrecognized = toField(&f) - case "XXX_InternalExtensions": - u.extensions = toField(&f) - u.messageset = f.Tag.Get("protobuf_messageset") == "1" - case "XXX_extensions": - u.v1extensions = toField(&f) - case "XXX_NoUnkeyedLiteral": - // nothing to do - default: - panic("unknown XXX field: " + f.Name) - } - n-- - } - - // normal fields - fields := make([]marshalFieldInfo, n) // batch allocation - u.fields = make([]*marshalFieldInfo, 0, n) - for i, j := 0, 0; i < t.NumField(); i++ { - f := t.Field(i) - - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - field := &fields[j] - j++ - field.name = f.Name - u.fields = append(u.fields, field) - if f.Tag.Get("protobuf_oneof") != "" { - field.computeOneofFieldInfo(&f, oneofImplementers) - continue - } - if f.Tag.Get("protobuf") == "" { - // field has no tag (not in generated message), ignore it - u.fields = u.fields[:len(u.fields)-1] - j-- - continue - } - field.computeMarshalFieldInfo(&f) - } - - // fields are marshaled in tag order on the wire. - sort.Sort(byTag(u.fields)) - - atomic.StoreInt32(&u.initialized, 1) -} - -// helper for sorting fields by tag -type byTag []*marshalFieldInfo - -func (a byTag) Len() int { return len(a) } -func (a byTag) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a byTag) Less(i, j int) bool { return a[i].wiretag < a[j].wiretag } - -// getExtElemInfo returns the information to marshal an extension element. -// The info it returns is initialized. 
-func (u *marshalInfo) getExtElemInfo(desc *ExtensionDesc) *marshalElemInfo { - // get from cache first - u.RLock() - e, ok := u.extElems[desc.Field] - u.RUnlock() - if ok { - return e - } - - t := reflect.TypeOf(desc.ExtensionType) // pointer or slice to basic type or struct - tags := strings.Split(desc.Tag, ",") - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - sizer, marshaler := typeMarshaler(t, tags, false, false) - e = &marshalElemInfo{ - wiretag: uint64(tag)<<3 | wt, - tagsize: SizeVarint(uint64(tag) << 3), - sizer: sizer, - marshaler: marshaler, - isptr: t.Kind() == reflect.Ptr, - } - - // update cache - u.Lock() - if u.extElems == nil { - u.extElems = make(map[int32]*marshalElemInfo) - } - u.extElems[desc.Field] = e - u.Unlock() - return e -} - -// computeMarshalFieldInfo fills up the information to marshal a field. -func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) { - // parse protobuf tag of the field. - // tag has format of "bytes,49,opt,name=foo,def=hello!" - tags := strings.Split(f.Tag.Get("protobuf"), ",") - if tags[0] == "" { - return - } - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - if tags[2] == "req" { - fi.required = true - } - fi.setTag(f, tag, wt) - fi.setMarshaler(f, tags) -} - -func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) { - fi.field = toField(f) - fi.wiretag = 1<<31 - 1 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire. - fi.isPointer = true - fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f) - fi.oneofElems = make(map[reflect.Type]*marshalElemInfo) - - ityp := f.Type // interface type - for _, o := range oneofImplementers { - t := reflect.TypeOf(o) - if !t.Implements(ityp) { - continue - } - sf := t.Elem().Field(0) // oneof implementer is a struct with a single field - tags := strings.Split(sf.Tag.Get("protobuf"), ",") - tag, err := strconv.Atoi(tags[1]) - if err != nil { - panic("tag is not an integer") - } - wt := wiretype(tags[0]) - sizer, marshaler := typeMarshaler(sf.Type, tags, false, true) // oneof should not omit any zero value - fi.oneofElems[t.Elem()] = &marshalElemInfo{ - wiretag: uint64(tag)<<3 | wt, - tagsize: SizeVarint(uint64(tag) << 3), - sizer: sizer, - marshaler: marshaler, - } - } -} - -type oneofMessage interface { - XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{}) -} - -// wiretype returns the wire encoding of the type. -func wiretype(encoding string) uint64 { - switch encoding { - case "fixed32": - return WireFixed32 - case "fixed64": - return WireFixed64 - case "varint", "zigzag32", "zigzag64": - return WireVarint - case "bytes": - return WireBytes - case "group": - return WireStartGroup - } - panic("unknown wire type " + encoding) -} - -// setTag fills up the tag (in wire format) and its size in the info of a field. -func (fi *marshalFieldInfo) setTag(f *reflect.StructField, tag int, wt uint64) { - fi.field = toField(f) - fi.wiretag = uint64(tag)<<3 | wt - fi.tagsize = SizeVarint(uint64(tag) << 3) -} - -// setMarshaler fills up the sizer and marshaler in the info of a field. 
-func (fi *marshalFieldInfo) setMarshaler(f *reflect.StructField, tags []string) { - switch f.Type.Kind() { - case reflect.Map: - // map field - fi.isPointer = true - fi.sizer, fi.marshaler = makeMapMarshaler(f) - return - case reflect.Ptr, reflect.Slice: - fi.isPointer = true - } - fi.sizer, fi.marshaler = typeMarshaler(f.Type, tags, true, false) -} - -// typeMarshaler returns the sizer and marshaler of a given field. -// t is the type of the field. -// tags is the generated "protobuf" tag of the field. -// If nozero is true, zero value is not marshaled to the wire. -// If oneof is true, it is a oneof field. -func typeMarshaler(t reflect.Type, tags []string, nozero, oneof bool) (sizer, marshaler) { - encoding := tags[0] - - pointer := false - slice := false - if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { - slice = true - t = t.Elem() - } - if t.Kind() == reflect.Ptr { - pointer = true - t = t.Elem() - } - - packed := false - proto3 := false - validateUTF8 := true - for i := 2; i < len(tags); i++ { - if tags[i] == "packed" { - packed = true - } - if tags[i] == "proto3" { - proto3 = true - } - } - validateUTF8 = validateUTF8 && proto3 - - switch t.Kind() { - case reflect.Bool: - if pointer { - return sizeBoolPtr, appendBoolPtr - } - if slice { - if packed { - return sizeBoolPackedSlice, appendBoolPackedSlice - } - return sizeBoolSlice, appendBoolSlice - } - if nozero { - return sizeBoolValueNoZero, appendBoolValueNoZero - } - return sizeBoolValue, appendBoolValue - case reflect.Uint32: - switch encoding { - case "fixed32": - if pointer { - return sizeFixed32Ptr, appendFixed32Ptr - } - if slice { - if packed { - return sizeFixed32PackedSlice, appendFixed32PackedSlice - } - return sizeFixed32Slice, appendFixed32Slice - } - if nozero { - return sizeFixed32ValueNoZero, appendFixed32ValueNoZero - } - return sizeFixed32Value, appendFixed32Value - case "varint": - if pointer { - return sizeVarint32Ptr, appendVarint32Ptr - } - if slice { - if packed { - return sizeVarint32PackedSlice, appendVarint32PackedSlice - } - return sizeVarint32Slice, appendVarint32Slice - } - if nozero { - return sizeVarint32ValueNoZero, appendVarint32ValueNoZero - } - return sizeVarint32Value, appendVarint32Value - } - case reflect.Int32: - switch encoding { - case "fixed32": - if pointer { - return sizeFixedS32Ptr, appendFixedS32Ptr - } - if slice { - if packed { - return sizeFixedS32PackedSlice, appendFixedS32PackedSlice - } - return sizeFixedS32Slice, appendFixedS32Slice - } - if nozero { - return sizeFixedS32ValueNoZero, appendFixedS32ValueNoZero - } - return sizeFixedS32Value, appendFixedS32Value - case "varint": - if pointer { - return sizeVarintS32Ptr, appendVarintS32Ptr - } - if slice { - if packed { - return sizeVarintS32PackedSlice, appendVarintS32PackedSlice - } - return sizeVarintS32Slice, appendVarintS32Slice - } - if nozero { - return sizeVarintS32ValueNoZero, appendVarintS32ValueNoZero - } - return sizeVarintS32Value, appendVarintS32Value - case "zigzag32": - if pointer { - return sizeZigzag32Ptr, appendZigzag32Ptr - } - if slice { - if packed { - return sizeZigzag32PackedSlice, appendZigzag32PackedSlice - } - return sizeZigzag32Slice, appendZigzag32Slice - } - if nozero { - return sizeZigzag32ValueNoZero, appendZigzag32ValueNoZero - } - return sizeZigzag32Value, appendZigzag32Value - } - case reflect.Uint64: - switch encoding { - case "fixed64": - if pointer { - return sizeFixed64Ptr, appendFixed64Ptr - } - if slice { - if packed { - return sizeFixed64PackedSlice, 
appendFixed64PackedSlice - } - return sizeFixed64Slice, appendFixed64Slice - } - if nozero { - return sizeFixed64ValueNoZero, appendFixed64ValueNoZero - } - return sizeFixed64Value, appendFixed64Value - case "varint": - if pointer { - return sizeVarint64Ptr, appendVarint64Ptr - } - if slice { - if packed { - return sizeVarint64PackedSlice, appendVarint64PackedSlice - } - return sizeVarint64Slice, appendVarint64Slice - } - if nozero { - return sizeVarint64ValueNoZero, appendVarint64ValueNoZero - } - return sizeVarint64Value, appendVarint64Value - } - case reflect.Int64: - switch encoding { - case "fixed64": - if pointer { - return sizeFixedS64Ptr, appendFixedS64Ptr - } - if slice { - if packed { - return sizeFixedS64PackedSlice, appendFixedS64PackedSlice - } - return sizeFixedS64Slice, appendFixedS64Slice - } - if nozero { - return sizeFixedS64ValueNoZero, appendFixedS64ValueNoZero - } - return sizeFixedS64Value, appendFixedS64Value - case "varint": - if pointer { - return sizeVarintS64Ptr, appendVarintS64Ptr - } - if slice { - if packed { - return sizeVarintS64PackedSlice, appendVarintS64PackedSlice - } - return sizeVarintS64Slice, appendVarintS64Slice - } - if nozero { - return sizeVarintS64ValueNoZero, appendVarintS64ValueNoZero - } - return sizeVarintS64Value, appendVarintS64Value - case "zigzag64": - if pointer { - return sizeZigzag64Ptr, appendZigzag64Ptr - } - if slice { - if packed { - return sizeZigzag64PackedSlice, appendZigzag64PackedSlice - } - return sizeZigzag64Slice, appendZigzag64Slice - } - if nozero { - return sizeZigzag64ValueNoZero, appendZigzag64ValueNoZero - } - return sizeZigzag64Value, appendZigzag64Value - } - case reflect.Float32: - if pointer { - return sizeFloat32Ptr, appendFloat32Ptr - } - if slice { - if packed { - return sizeFloat32PackedSlice, appendFloat32PackedSlice - } - return sizeFloat32Slice, appendFloat32Slice - } - if nozero { - return sizeFloat32ValueNoZero, appendFloat32ValueNoZero - } - return sizeFloat32Value, appendFloat32Value - case reflect.Float64: - if pointer { - return sizeFloat64Ptr, appendFloat64Ptr - } - if slice { - if packed { - return sizeFloat64PackedSlice, appendFloat64PackedSlice - } - return sizeFloat64Slice, appendFloat64Slice - } - if nozero { - return sizeFloat64ValueNoZero, appendFloat64ValueNoZero - } - return sizeFloat64Value, appendFloat64Value - case reflect.String: - if validateUTF8 { - if pointer { - return sizeStringPtr, appendUTF8StringPtr - } - if slice { - return sizeStringSlice, appendUTF8StringSlice - } - if nozero { - return sizeStringValueNoZero, appendUTF8StringValueNoZero - } - return sizeStringValue, appendUTF8StringValue - } - if pointer { - return sizeStringPtr, appendStringPtr - } - if slice { - return sizeStringSlice, appendStringSlice - } - if nozero { - return sizeStringValueNoZero, appendStringValueNoZero - } - return sizeStringValue, appendStringValue - case reflect.Slice: - if slice { - return sizeBytesSlice, appendBytesSlice - } - if oneof { - // Oneof bytes field may also have "proto3" tag. - // We want to marshal it as a oneof field. Do this - // check before the proto3 check. 
- return sizeBytesOneof, appendBytesOneof - } - if proto3 { - return sizeBytes3, appendBytes3 - } - return sizeBytes, appendBytes - case reflect.Struct: - switch encoding { - case "group": - if slice { - return makeGroupSliceMarshaler(getMarshalInfo(t)) - } - return makeGroupMarshaler(getMarshalInfo(t)) - case "bytes": - if slice { - return makeMessageSliceMarshaler(getMarshalInfo(t)) - } - return makeMessageMarshaler(getMarshalInfo(t)) - } - } - panic(fmt.Sprintf("unknown or mismatched type: type: %v, wire type: %v", t, encoding)) -} - -// Below are functions to size/marshal a specific type of a field. -// They are stored in the field's info, and called by function pointers. -// They have type sizer or marshaler. - -func sizeFixed32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFixed32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFixed32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFixed32Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - return (4 + tagsize) * len(s) -} -func sizeFixed32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFixedS32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFixedS32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFixedS32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFixedS32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - return (4 + tagsize) * len(s) -} -func sizeFixedS32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFloat32Value(_ pointer, tagsize int) int { - return 4 + tagsize -} -func sizeFloat32ValueNoZero(ptr pointer, tagsize int) int { - v := math.Float32bits(*ptr.toFloat32()) - if v == 0 { - return 0 - } - return 4 + tagsize -} -func sizeFloat32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toFloat32Ptr() - if p == nil { - return 0 - } - return 4 + tagsize -} -func sizeFloat32Slice(ptr pointer, tagsize int) int { - s := *ptr.toFloat32Slice() - return (4 + tagsize) * len(s) -} -func sizeFloat32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toFloat32Slice() - if len(s) == 0 { - return 0 - } - return 4*len(s) + SizeVarint(uint64(4*len(s))) + tagsize -} -func sizeFixed64Value(_ pointer, tagsize int) int { - return 8 + tagsize -} -func sizeFixed64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - if v == 0 { - return 0 - } - return 8 + tagsize -} -func sizeFixed64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFixed64Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - return (8 + tagsize) * len(s) -} -func sizeFixed64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeFixedS64Value(_ pointer, tagsize int) int { - return 8 + tagsize -} -func sizeFixedS64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return 8 
+ tagsize -} -func sizeFixedS64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFixedS64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - return (8 + tagsize) * len(s) -} -func sizeFixedS64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeFloat64Value(_ pointer, tagsize int) int { - return 8 + tagsize -} -func sizeFloat64ValueNoZero(ptr pointer, tagsize int) int { - v := math.Float64bits(*ptr.toFloat64()) - if v == 0 { - return 0 - } - return 8 + tagsize -} -func sizeFloat64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toFloat64Ptr() - if p == nil { - return 0 - } - return 8 + tagsize -} -func sizeFloat64Slice(ptr pointer, tagsize int) int { - s := *ptr.toFloat64Slice() - return (8 + tagsize) * len(s) -} -func sizeFloat64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toFloat64Slice() - if len(s) == 0 { - return 0 - } - return 8*len(s) + SizeVarint(uint64(8*len(s))) + tagsize -} -func sizeVarint32Value(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarint32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint32() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarint32Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint32Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarint32Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarint32PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarintS32Value(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarintS32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarintS32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarint64Value(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - return SizeVarint(v) + tagsize -} -func sizeVarint64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toUint64() - if v == 0 { - return 0 - } - return SizeVarint(v) + tagsize -} -func sizeVarint64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toUint64Ptr() - if p == nil { - return 0 - } - return SizeVarint(*p) + tagsize -} -func sizeVarint64Slice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(v) + tagsize - } - return n -} -func sizeVarint64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s 
{ - n += SizeVarint(v) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeVarintS64Value(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v)) + tagsize -} -func sizeVarintS64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - return SizeVarint(uint64(*p)) + tagsize -} -func sizeVarintS64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) + tagsize - } - return n -} -func sizeVarintS64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeZigzag32Value(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt32() - if v == 0 { - return 0 - } - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32Ptr(ptr pointer, tagsize int) int { - p := ptr.getInt32Ptr() - if p == nil { - return 0 - } - v := *p - return SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize -} -func sizeZigzag32Slice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) + tagsize - } - return n -} -func sizeZigzag32PackedSlice(ptr pointer, tagsize int) int { - s := ptr.getInt32Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeZigzag64Value(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64ValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toInt64() - if v == 0 { - return 0 - } - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64Ptr(ptr pointer, tagsize int) int { - p := *ptr.toInt64Ptr() - if p == nil { - return 0 - } - v := *p - return SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize -} -func sizeZigzag64Slice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1)^uint64((int64(v)>>63))) + tagsize - } - return n -} -func sizeZigzag64PackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return 0 - } - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) - } - return n + SizeVarint(uint64(n)) + tagsize -} -func sizeBoolValue(_ pointer, tagsize int) int { - return 1 + tagsize -} -func sizeBoolValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toBool() - if !v { - return 0 - } - return 1 + tagsize -} -func sizeBoolPtr(ptr pointer, tagsize int) int { - p := *ptr.toBoolPtr() - if p == nil { - return 0 - } - return 1 + tagsize -} -func sizeBoolSlice(ptr pointer, tagsize int) int { - s := *ptr.toBoolSlice() - return (1 + tagsize) * len(s) -} -func sizeBoolPackedSlice(ptr pointer, tagsize int) int { - s := *ptr.toBoolSlice() - if len(s) == 0 { - return 0 - } - return len(s) + 
SizeVarint(uint64(len(s))) + tagsize -} -func sizeStringValue(ptr pointer, tagsize int) int { - v := *ptr.toString() - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringValueNoZero(ptr pointer, tagsize int) int { - v := *ptr.toString() - if v == "" { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringPtr(ptr pointer, tagsize int) int { - p := *ptr.toStringPtr() - if p == nil { - return 0 - } - v := *p - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeStringSlice(ptr pointer, tagsize int) int { - s := *ptr.toStringSlice() - n := 0 - for _, v := range s { - n += len(v) + SizeVarint(uint64(len(v))) + tagsize - } - return n -} -func sizeBytes(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - if v == nil { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytes3(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - if len(v) == 0 { - return 0 - } - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytesOneof(ptr pointer, tagsize int) int { - v := *ptr.toBytes() - return len(v) + SizeVarint(uint64(len(v))) + tagsize -} -func sizeBytesSlice(ptr pointer, tagsize int) int { - s := *ptr.toBytesSlice() - n := 0 - for _, v := range s { - n += len(v) + SizeVarint(uint64(len(v))) + tagsize - } - return n -} - -// appendFixed32 appends an encoded fixed32 to b. -func appendFixed32(b []byte, v uint32) []byte { - b = append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24)) - return b -} - -// appendFixed64 appends an encoded fixed64 to b. -func appendFixed64(b []byte, v uint64) []byte { - b = append(b, - byte(v), - byte(v>>8), - byte(v>>16), - byte(v>>24), - byte(v>>32), - byte(v>>40), - byte(v>>48), - byte(v>>56)) - return b -} - -// appendVarint appends an encoded varint to b. -func appendVarint(b []byte, v uint64) []byte { - // TODO: make 1-byte (maybe 2-byte) case inline-able, once we - // have non-leaf inliner. 
- switch { - case v < 1<<7: - b = append(b, byte(v)) - case v < 1<<14: - b = append(b, - byte(v&0x7f|0x80), - byte(v>>7)) - case v < 1<<21: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte(v>>14)) - case v < 1<<28: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte(v>>21)) - case v < 1<<35: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte(v>>28)) - case v < 1<<42: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte(v>>35)) - case v < 1<<49: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte(v>>42)) - case v < 1<<56: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte(v>>49)) - case v < 1<<63: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte((v>>49)&0x7f|0x80), - byte(v>>56)) - default: - b = append(b, - byte(v&0x7f|0x80), - byte((v>>7)&0x7f|0x80), - byte((v>>14)&0x7f|0x80), - byte((v>>21)&0x7f|0x80), - byte((v>>28)&0x7f|0x80), - byte((v>>35)&0x7f|0x80), - byte((v>>42)&0x7f|0x80), - byte((v>>49)&0x7f|0x80), - byte((v>>56)&0x7f|0x80), - 1) - } - return b -} - -func appendFixed32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFixed32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFixed32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, *p) - return b, nil -} -func appendFixed32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - } - return b, nil -} -func appendFixed32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, v) - } - return b, nil -} -func appendFixedS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - return b, nil -} -func appendFixedS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - return b, nil -} -func appendFixedS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(*p)) - return b, nil -} -func appendFixedS32Slice(b []byte, ptr pointer, 
wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, uint32(v)) - } - return b, nil -} -func appendFixedS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, uint32(v)) - } - return b, nil -} -func appendFloat32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float32bits(*ptr.toFloat32()) - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFloat32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float32bits(*ptr.toFloat32()) - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, v) - return b, nil -} -func appendFloat32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toFloat32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed32(b, math.Float32bits(*p)) - return b, nil -} -func appendFloat32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed32(b, math.Float32bits(v)) - } - return b, nil -} -func appendFloat32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(4*len(s))) - for _, v := range s { - b = appendFixed32(b, math.Float32bits(v)) - } - return b, nil -} -func appendFixed64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFixed64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFixed64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, *p) - return b, nil -} -func appendFixed64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - } - return b, nil -} -func appendFixed64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, v) - } - return b, nil -} -func appendFixedS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - return b, nil -} -func appendFixedS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - return b, nil -} -func appendFixedS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, 
nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(*p)) - return b, nil -} -func appendFixedS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, uint64(v)) - } - return b, nil -} -func appendFixedS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, uint64(v)) - } - return b, nil -} -func appendFloat64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float64bits(*ptr.toFloat64()) - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFloat64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := math.Float64bits(*ptr.toFloat64()) - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, v) - return b, nil -} -func appendFloat64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toFloat64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendFixed64(b, math.Float64bits(*p)) - return b, nil -} -func appendFloat64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendFixed64(b, math.Float64bits(v)) - } - return b, nil -} -func appendFloat64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toFloat64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(8*len(s))) - for _, v := range s { - b = appendFixed64(b, math.Float64bits(v)) - } - return b, nil -} -func appendVarint32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarint32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarint32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarint32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarint32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() 
- if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarintS32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarint64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - return b, nil -} -func appendVarint64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toUint64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - return b, nil -} -func appendVarint64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toUint64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, *p) - return b, nil -} -func appendVarint64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, v) - } - return b, nil -} -func appendVarint64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toUint64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(v) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, v) - } - return b, nil -} -func appendVarintS64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - return b, nil -} -func appendVarintS64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(*p)) - return b, nil -} -func appendVarintS64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func appendVarintS64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v)) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v)) - } - return b, nil -} -func 
appendZigzag32Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt32() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := ptr.getInt32Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - v := *p - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - return b, nil -} -func appendZigzag32Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - } - return b, nil -} -func appendZigzag32PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := ptr.getInt32Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64((uint32(v) << 1) ^ uint32((int32(v) >> 31)))) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64((uint32(v)<<1)^uint32((int32(v)>>31)))) - } - return b, nil -} -func appendZigzag64Value(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64ValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toInt64() - if v == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64Ptr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toInt64Ptr() - if p == nil { - return b, nil - } - b = appendVarint(b, wiretag) - v := *p - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - return b, nil -} -func appendZigzag64Slice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - } - return b, nil -} -func appendZigzag64PackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toInt64Slice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - // compute size - n := 0 - for _, v := range s { - n += SizeVarint(uint64(v<<1) ^ uint64((int64(v) >> 63))) - } - b = appendVarint(b, uint64(n)) - for _, v := range s { - b = appendVarint(b, uint64(v<<1)^uint64((int64(v)>>63))) - } - return b, nil -} -func appendBoolValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBool() - b = appendVarint(b, wiretag) - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - return b, nil -} -func appendBoolValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBool() - if !v { - return b, nil - } - b = appendVarint(b, wiretag) - b = append(b, 1) - return b, nil -} - -func appendBoolPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toBoolPtr() - if p == nil { - 
return b, nil - } - b = appendVarint(b, wiretag) - if *p { - b = append(b, 1) - } else { - b = append(b, 0) - } - return b, nil -} -func appendBoolSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBoolSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - } - return b, nil -} -func appendBoolPackedSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBoolSlice() - if len(s) == 0 { - return b, nil - } - b = appendVarint(b, wiretag&^7|WireBytes) - b = appendVarint(b, uint64(len(s))) - for _, v := range s { - if v { - b = append(b, 1) - } else { - b = append(b, 0) - } - } - return b, nil -} -func appendStringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toString() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toString() - if v == "" { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - p := *ptr.toStringPtr() - if p == nil { - return b, nil - } - v := *p - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendStringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toStringSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - return b, nil -} -func appendUTF8StringValue(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - v := *ptr.toString() - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringValueNoZero(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - v := *ptr.toString() - if v == "" { - return b, nil - } - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringPtr(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - p := *ptr.toStringPtr() - if p == nil { - return b, nil - } - v := *p - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendUTF8StringSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - var invalidUTF8 bool - s := *ptr.toStringSlice() - for _, v := range s { - if !utf8.ValidString(v) { - invalidUTF8 = true - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - if invalidUTF8 { - return b, errInvalidUTF8 - } - return b, nil -} -func appendBytes(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - if v == nil { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) 
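// Illustrative sketch (not part of the vendored code being removed above), standard
// library only: the appendUTF8String* helpers validate with utf8.ValidString but
// still write the bytes, returning errInvalidUTF8 so the caller can decide what to
// do. This snippet only demonstrates the validity check itself.
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	for _, s := range []string{"héllo", string([]byte{0xff, 0xfe})} {
		fmt.Printf("%q valid UTF-8: %v\n", s, utf8.ValidString(s))
	}
}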
- return b, nil -} -func appendBytes3(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - if len(v) == 0 { - return b, nil - } - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendBytesOneof(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - v := *ptr.toBytes() - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - return b, nil -} -func appendBytesSlice(b []byte, ptr pointer, wiretag uint64, _ bool) ([]byte, error) { - s := *ptr.toBytesSlice() - for _, v := range s { - b = appendVarint(b, wiretag) - b = appendVarint(b, uint64(len(v))) - b = append(b, v...) - } - return b, nil -} - -// makeGroupMarshaler returns the sizer and marshaler for a group. -// u is the marshal info of the underlying message. -func makeGroupMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - p := ptr.getPointer() - if p.isNil() { - return 0 - } - return u.size(p) + 2*tagsize - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - p := ptr.getPointer() - if p.isNil() { - return b, nil - } - var err error - b = appendVarint(b, wiretag) // start group - b, err = u.marshal(b, p, deterministic) - b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group - return b, err - } -} - -// makeGroupSliceMarshaler returns the sizer and marshaler for a group slice. -// u is the marshal info of the underlying message. -func makeGroupSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getPointerSlice() - n := 0 - for _, v := range s { - if v.isNil() { - continue - } - n += u.size(v) + 2*tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getPointerSlice() - var err error - var nerr nonFatal - for _, v := range s { - if v.isNil() { - return b, errRepeatedHasNil - } - b = appendVarint(b, wiretag) // start group - b, err = u.marshal(b, v, deterministic) - b = appendVarint(b, wiretag+(WireEndGroup-WireStartGroup)) // end group - if !nerr.Merge(err) { - if err == ErrNil { - err = errRepeatedHasNil - } - return b, err - } - } - return b, nerr.E - } -} - -// makeMessageMarshaler returns the sizer and marshaler for a message field. -// u is the marshal info of the message. -func makeMessageMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - p := ptr.getPointer() - if p.isNil() { - return 0 - } - siz := u.size(p) - return siz + SizeVarint(uint64(siz)) + tagsize - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - p := ptr.getPointer() - if p.isNil() { - return b, nil - } - b = appendVarint(b, wiretag) - siz := u.cachedsize(p) - b = appendVarint(b, uint64(siz)) - return u.marshal(b, p, deterministic) - } -} - -// makeMessageSliceMarshaler returns the sizer and marshaler for a message slice. -// u is the marshal info of the message. 
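// Illustrative sketch (not part of the vendored code being removed above) of the two
// framings handled by makeMessageMarshaler and makeGroupMarshaler: an embedded
// message is length-prefixed (wire type 2), while a group is bracketed by start-group
// (wire type 3) and end-group (wire type 4) keys with no length prefix. The helper
// names and sample payload here are made up for illustration.
package main

import "fmt"

func appendVarint(b []byte, v uint64) []byte {
	for v >= 0x80 {
		b = append(b, byte(v)|0x80)
		v >>= 7
	}
	return append(b, byte(v))
}

// frameMessage writes field `num` as a length-delimited submessage.
func frameMessage(num int, body []byte) []byte {
	b := appendVarint(nil, uint64(num)<<3|2)
	b = appendVarint(b, uint64(len(body)))
	return append(b, body...)
}

// frameGroup writes field `num` as a group: start tag, body, end tag.
func frameGroup(num int, body []byte) []byte {
	b := appendVarint(nil, uint64(num)<<3|3)
	b = append(b, body...)
	return appendVarint(b, uint64(num)<<3|4)
}

func main() {
	body := []byte{0x08, 0x2a} // field 1, varint 42
	fmt.Printf("message: % x\ngroup:   % x\n", frameMessage(2, body), frameGroup(2, body))
}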
-func makeMessageSliceMarshaler(u *marshalInfo) (sizer, marshaler) { - return func(ptr pointer, tagsize int) int { - s := ptr.getPointerSlice() - n := 0 - for _, v := range s { - if v.isNil() { - continue - } - siz := u.size(v) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, wiretag uint64, deterministic bool) ([]byte, error) { - s := ptr.getPointerSlice() - var err error - var nerr nonFatal - for _, v := range s { - if v.isNil() { - return b, errRepeatedHasNil - } - b = appendVarint(b, wiretag) - siz := u.cachedsize(v) - b = appendVarint(b, uint64(siz)) - b, err = u.marshal(b, v, deterministic) - - if !nerr.Merge(err) { - if err == ErrNil { - err = errRepeatedHasNil - } - return b, err - } - } - return b, nerr.E - } -} - -// makeMapMarshaler returns the sizer and marshaler for a map field. -// f is the pointer to the reflect data structure of the field. -func makeMapMarshaler(f *reflect.StructField) (sizer, marshaler) { - // figure out key and value type - t := f.Type - keyType := t.Key() - valType := t.Elem() - keyTags := strings.Split(f.Tag.Get("protobuf_key"), ",") - valTags := strings.Split(f.Tag.Get("protobuf_val"), ",") - keySizer, keyMarshaler := typeMarshaler(keyType, keyTags, false, false) // don't omit zero value in map - valSizer, valMarshaler := typeMarshaler(valType, valTags, false, false) // don't omit zero value in map - keyWireTag := 1<<3 | wiretype(keyTags[0]) - valWireTag := 2<<3 | wiretype(valTags[0]) - - // We create an interface to get the addresses of the map key and value. - // If value is pointer-typed, the interface is a direct interface, the - // idata itself is the value. Otherwise, the idata is the pointer to the - // value. - // Key cannot be pointer-typed. - valIsPtr := valType.Kind() == reflect.Ptr - - // If value is a message with nested maps, calling - // valSizer in marshal may be quadratic. We should use - // cached version in marshal (but not in size). - // If value is not message type, we don't have size cache, - // but it cannot be nested either. Just use valSizer. - valCachedSizer := valSizer - if valIsPtr && valType.Elem().Kind() == reflect.Struct { - u := getMarshalInfo(valType.Elem()) - valCachedSizer = func(ptr pointer, tagsize int) int { - // Same as message sizer, but use cache. 
- p := ptr.getPointer() - if p.isNil() { - return 0 - } - siz := u.cachedsize(p) - return siz + SizeVarint(uint64(siz)) + tagsize - } - } - return func(ptr pointer, tagsize int) int { - m := ptr.asPointerTo(t).Elem() // the map - n := 0 - for _, k := range m.MapKeys() { - ki := k.Interface() - vi := m.MapIndex(k).Interface() - kaddr := toAddrPointer(&ki, false) // pointer to key - vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value - siz := keySizer(kaddr, 1) + valSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) - n += siz + SizeVarint(uint64(siz)) + tagsize - } - return n - }, - func(b []byte, ptr pointer, tag uint64, deterministic bool) ([]byte, error) { - m := ptr.asPointerTo(t).Elem() // the map - var err error - keys := m.MapKeys() - if len(keys) > 1 && deterministic { - sort.Sort(mapKeys(keys)) - } - - var nerr nonFatal - for _, k := range keys { - ki := k.Interface() - vi := m.MapIndex(k).Interface() - kaddr := toAddrPointer(&ki, false) // pointer to key - vaddr := toAddrPointer(&vi, valIsPtr) // pointer to value - b = appendVarint(b, tag) - siz := keySizer(kaddr, 1) + valCachedSizer(vaddr, 1) // tag of key = 1 (size=1), tag of val = 2 (size=1) - b = appendVarint(b, uint64(siz)) - b, err = keyMarshaler(b, kaddr, keyWireTag, deterministic) - if !nerr.Merge(err) { - return b, err - } - b, err = valMarshaler(b, vaddr, valWireTag, deterministic) - if err != ErrNil && !nerr.Merge(err) { // allow nil value in map - return b, err - } - } - return b, nerr.E - } -} - -// makeOneOfMarshaler returns the sizer and marshaler for a oneof field. -// fi is the marshal info of the field. -// f is the pointer to the reflect data structure of the field. -func makeOneOfMarshaler(fi *marshalFieldInfo, f *reflect.StructField) (sizer, marshaler) { - // Oneof field is an interface. We need to get the actual data type on the fly. - t := f.Type - return func(ptr pointer, _ int) int { - p := ptr.getInterfacePointer() - if p.isNil() { - return 0 - } - v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct - telem := v.Type() - e := fi.oneofElems[telem] - return e.sizer(p, e.tagsize) - }, - func(b []byte, ptr pointer, _ uint64, deterministic bool) ([]byte, error) { - p := ptr.getInterfacePointer() - if p.isNil() { - return b, nil - } - v := ptr.asPointerTo(t).Elem().Elem().Elem() // *interface -> interface -> *struct -> struct - telem := v.Type() - if telem.Field(0).Type.Kind() == reflect.Ptr && p.getPointer().isNil() { - return b, errOneofHasNil - } - e := fi.oneofElems[telem] - return e.marshaler(b, p, e.wiretag, deterministic) - } -} - -// sizeExtensions computes the size of encoded data for a XXX_InternalExtensions field. -func (u *marshalInfo) sizeExtensions(ext *XXX_InternalExtensions) int { - m, mu := ext.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - - n := 0 - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, ei.tagsize) - } - mu.Unlock() - return n -} - -// appendExtensions marshals a XXX_InternalExtensions field to the end of byte slice b. 
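// Illustrative sketch (not part of the vendored code being removed above):
// makeMapMarshaler writes each map entry as a tiny message (key = field 1,
// value = field 2) and, when deterministic output is requested, sorts the keys
// first. This standalone snippet shows only the sorted-entry ordering; the map
// and its values are made up.
package main

import (
	"fmt"
	"sort"
)

func main() {
	m := map[string]int32{"b": 2, "a": 1, "c": 3}

	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys) // reproducible entry order, independent of map iteration

	for _, k := range keys {
		fmt.Printf("entry{1: %q, 2: %d}\n", k, m[k])
	}
}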
-func (u *marshalInfo) appendExtensions(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { - m, mu := ext.extensionsRead() - if m == nil { - return b, nil - } - mu.Lock() - defer mu.Unlock() - - var err error - var nerr nonFatal - - // Fast-path for common cases: zero or one extensions. - // Don't bother sorting the keys. - if len(m) <= 1 { - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E - } - - // Sort the keys to provide a deterministic encoding. - // Not sure this is required, but the old code does it. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, k := range keys { - e := m[int32(k)] - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// message set format is: -// message MessageSet { -// repeated group Item = 1 { -// required int32 type_id = 2; -// required string message = 3; -// }; -// } - -// sizeMessageSet computes the size of encoded data for a XXX_InternalExtensions field -// in message set format (above). -func (u *marshalInfo) sizeMessageSet(ext *XXX_InternalExtensions) int { - m, mu := ext.extensionsRead() - if m == nil { - return 0 - } - mu.Lock() - - n := 0 - for id, e := range m { - n += 2 // start group, end group. tag = 1 (size=1) - n += SizeVarint(uint64(id)) + 1 // type_id, tag = 2 (size=1) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - siz := len(msgWithLen) - n += siz + 1 // message, tag = 3 (size=1) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, 1) // message, tag = 3 (size=1) - } - mu.Unlock() - return n -} - -// appendMessageSet marshals a XXX_InternalExtensions field in message set format (above) -// to the end of byte slice b. -func (u *marshalInfo) appendMessageSet(b []byte, ext *XXX_InternalExtensions, deterministic bool) ([]byte, error) { - m, mu := ext.extensionsRead() - if m == nil { - return b, nil - } - mu.Lock() - defer mu.Unlock() - - var err error - var nerr nonFatal - - // Fast-path for common cases: zero or one extensions. - // Don't bother sorting the keys. - if len(m) <= 1 { - for id, e := range m { - b = append(b, 1<<3|WireStartGroup) - b = append(b, 2<<3|WireVarint) - b = appendVarint(b, uint64(id)) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. 
- msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - b = append(b, 3<<3|WireBytes) - b = append(b, msgWithLen...) - b = append(b, 1<<3|WireEndGroup) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) - if !nerr.Merge(err) { - return b, err - } - b = append(b, 1<<3|WireEndGroup) - } - return b, nerr.E - } - - // Sort the keys to provide a deterministic encoding. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - for _, id := range keys { - e := m[int32(id)] - b = append(b, 1<<3|WireStartGroup) - b = append(b, 2<<3|WireVarint) - b = appendVarint(b, uint64(id)) - - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - msgWithLen := skipVarint(e.enc) // skip old tag, but leave the length varint - b = append(b, 3<<3|WireBytes) - b = append(b, msgWithLen...) - b = append(b, 1<<3|WireEndGroup) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, 3<<3|WireBytes, deterministic) - b = append(b, 1<<3|WireEndGroup) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// sizeV1Extensions computes the size of encoded data for a V1-API extension field. -func (u *marshalInfo) sizeV1Extensions(m map[int32]Extension) int { - if m == nil { - return 0 - } - - n := 0 - for _, e := range m { - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - n += len(e.enc) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - n += ei.sizer(p, ei.tagsize) - } - return n -} - -// appendV1Extensions marshals a V1-API extension field to the end of byte slice b. -func (u *marshalInfo) appendV1Extensions(b []byte, m map[int32]Extension, deterministic bool) ([]byte, error) { - if m == nil { - return b, nil - } - - // Sort the keys to provide a deterministic encoding. - keys := make([]int, 0, len(m)) - for k := range m { - keys = append(keys, int(k)) - } - sort.Ints(keys) - - var err error - var nerr nonFatal - for _, k := range keys { - e := m[int32(k)] - if e.value == nil || e.desc == nil { - // Extension is only in its encoded form. - b = append(b, e.enc...) - continue - } - - // We don't skip extensions that have an encoded form set, - // because the extension value may have been mutated after - // the last time this function was called. - - ei := u.getExtElemInfo(e.desc) - v := e.value - p := toAddrPointer(&v, ei.isptr) - b, err = ei.marshaler(b, p, ei.wiretag, deterministic) - if !nerr.Merge(err) { - return b, err - } - } - return b, nerr.E -} - -// newMarshaler is the interface representing objects that can marshal themselves. -// -// This exists to support protoc-gen-go generated messages. -// The proto package will stop type-asserting to this interface in the future. -// -// DO NOT DEPEND ON THIS. 
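// Illustrative sketch (not part of the vendored code being removed above) of the
// MessageSet item layout that sizeMessageSet/appendMessageSet handle: each
// extension becomes a group (field 1) containing a varint type_id (field 2) and
// the length-prefixed message (field 3). The helper name and values are made up.
package main

import "fmt"

func appendVarint(b []byte, v uint64) []byte {
	for v >= 0x80 {
		b = append(b, byte(v)|0x80)
		v >>= 7
	}
	return append(b, byte(v))
}

func appendMessageSetItem(b []byte, typeID int32, msg []byte) []byte {
	b = append(b, 1<<3|3) // field 1, start group
	b = append(b, 2<<3|0) // field 2, varint type_id
	b = appendVarint(b, uint64(typeID))
	b = append(b, 3<<3|2) // field 3, length-delimited message
	b = appendVarint(b, uint64(len(msg)))
	b = append(b, msg...)
	return append(b, 1<<3|4) // field 1, end group
}

func main() {
	fmt.Printf("% x\n", appendMessageSetItem(nil, 100, []byte{0x08, 0x01}))
}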
-type newMarshaler interface { - XXX_Size() int - XXX_Marshal(b []byte, deterministic bool) ([]byte, error) -} - -// Size returns the encoded size of a protocol buffer message. -// This is the main entry point. -func Size(pb Message) int { - if m, ok := pb.(newMarshaler); ok { - return m.XXX_Size() - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - b, _ := m.Marshal() - return len(b) - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return 0 - } - var info InternalMessageInfo - return info.Size(pb) -} - -// Marshal takes a protocol buffer message -// and encodes it into the wire format, returning the data. -// This is the main entry point. -func Marshal(pb Message) ([]byte, error) { - if m, ok := pb.(newMarshaler); ok { - siz := m.XXX_Size() - b := make([]byte, 0, siz) - return m.XXX_Marshal(b, false) - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - return m.Marshal() - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return nil, ErrNil - } - var info InternalMessageInfo - siz := info.Size(pb) - b := make([]byte, 0, siz) - return info.Marshal(b, pb, false) -} - -// Marshal takes a protocol buffer message -// and encodes it into the wire format, writing the result to the -// Buffer. -// This is an alternative entry point. It is not necessary to use -// a Buffer for most applications. -func (p *Buffer) Marshal(pb Message) error { - var err error - if m, ok := pb.(newMarshaler); ok { - siz := m.XXX_Size() - p.grow(siz) // make sure buf has enough capacity - p.buf, err = m.XXX_Marshal(p.buf, p.deterministic) - return err - } - if m, ok := pb.(Marshaler); ok { - // If the message can marshal itself, let it do it, for compatibility. - // NOTE: This is not efficient. - b, err := m.Marshal() - p.buf = append(p.buf, b...) - return err - } - // in case somehow we didn't generate the wrapper - if pb == nil { - return ErrNil - } - var info InternalMessageInfo - siz := info.Size(pb) - p.grow(siz) // make sure buf has enough capacity - p.buf, err = info.Marshal(p.buf, pb, p.deterministic) - return err -} - -// grow grows the buffer's capacity, if necessary, to guarantee space for -// another n bytes. After grow(n), at least n bytes can be written to the -// buffer without another allocation. -func (p *Buffer) grow(n int) { - need := len(p.buf) + n - if need <= cap(p.buf) { - return - } - newCap := len(p.buf) * 2 - if newCap < need { - newCap = need - } - p.buf = append(make([]byte, 0, newCap), p.buf...) -} diff --git a/vendor/github.com/golang/protobuf/proto/table_merge.go b/vendor/github.com/golang/protobuf/proto/table_merge.go deleted file mode 100644 index 5525def6..00000000 --- a/vendor/github.com/golang/protobuf/proto/table_merge.go +++ /dev/null @@ -1,654 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
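// Illustrative sketch (not part of the vendored code being removed above) of the
// capacity policy in Buffer.grow: at least double the current length, but never
// less than the pending write needs, so repeated appends stay amortized O(n).
// Standalone, standard library only.
package main

import "fmt"

func grow(buf []byte, n int) []byte {
	need := len(buf) + n
	if need <= cap(buf) {
		return buf
	}
	newCap := 2 * len(buf)
	if newCap < need {
		newCap = need
	}
	return append(make([]byte, 0, newCap), buf...)
}

func main() {
	b := make([]byte, 3, 4)
	b = grow(b, 10)
	fmt.Println(len(b), cap(b)) // 3 13
}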
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "fmt" - "reflect" - "strings" - "sync" - "sync/atomic" -) - -// Merge merges the src message into dst. -// This assumes that dst and src of the same type and are non-nil. -func (a *InternalMessageInfo) Merge(dst, src Message) { - mi := atomicLoadMergeInfo(&a.merge) - if mi == nil { - mi = getMergeInfo(reflect.TypeOf(dst).Elem()) - atomicStoreMergeInfo(&a.merge, mi) - } - mi.merge(toPointer(&dst), toPointer(&src)) -} - -type mergeInfo struct { - typ reflect.Type - - initialized int32 // 0: only typ is valid, 1: everything is valid - lock sync.Mutex - - fields []mergeFieldInfo - unrecognized field // Offset of XXX_unrecognized -} - -type mergeFieldInfo struct { - field field // Offset of field, guaranteed to be valid - - // isPointer reports whether the value in the field is a pointer. - // This is true for the following situations: - // * Pointer to struct - // * Pointer to basic type (proto2 only) - // * Slice (first value in slice header is a pointer) - // * String (first value in string header is a pointer) - isPointer bool - - // basicWidth reports the width of the field assuming that it is directly - // embedded in the struct (as is the case for basic types in proto3). - // The possible values are: - // 0: invalid - // 1: bool - // 4: int32, uint32, float32 - // 8: int64, uint64, float64 - basicWidth int - - // Where dst and src are pointers to the types being merged. - merge func(dst, src pointer) -} - -var ( - mergeInfoMap = map[reflect.Type]*mergeInfo{} - mergeInfoLock sync.Mutex -) - -func getMergeInfo(t reflect.Type) *mergeInfo { - mergeInfoLock.Lock() - defer mergeInfoLock.Unlock() - mi := mergeInfoMap[t] - if mi == nil { - mi = &mergeInfo{typ: t} - mergeInfoMap[t] = mi - } - return mi -} - -// merge merges src into dst assuming they are both of type *mi.typ. -func (mi *mergeInfo) merge(dst, src pointer) { - if dst.isNil() { - panic("proto: nil destination") - } - if src.isNil() { - return // Nothing to do. - } - - if atomic.LoadInt32(&mi.initialized) == 0 { - mi.computeMergeInfo() - } - - for _, fi := range mi.fields { - sfp := src.offset(fi.field) - - // As an optimization, we can avoid the merge function call cost - // if we know for sure that the source will have no effect - // by checking if it is the zero value. 
- if unsafeAllowed { - if fi.isPointer && sfp.getPointer().isNil() { // Could be slice or string - continue - } - if fi.basicWidth > 0 { - switch { - case fi.basicWidth == 1 && !*sfp.toBool(): - continue - case fi.basicWidth == 4 && *sfp.toUint32() == 0: - continue - case fi.basicWidth == 8 && *sfp.toUint64() == 0: - continue - } - } - } - - dfp := dst.offset(fi.field) - fi.merge(dfp, sfp) - } - - // TODO: Make this faster? - out := dst.asPointerTo(mi.typ).Elem() - in := src.asPointerTo(mi.typ).Elem() - if emIn, err := extendable(in.Addr().Interface()); err == nil { - emOut, _ := extendable(out.Addr().Interface()) - mIn, muIn := emIn.extensionsRead() - if mIn != nil { - mOut := emOut.extensionsWrite() - muIn.Lock() - mergeExtension(mOut, mIn) - muIn.Unlock() - } - } - - if mi.unrecognized.IsValid() { - if b := *src.offset(mi.unrecognized).toBytes(); len(b) > 0 { - *dst.offset(mi.unrecognized).toBytes() = append([]byte(nil), b...) - } - } -} - -func (mi *mergeInfo) computeMergeInfo() { - mi.lock.Lock() - defer mi.lock.Unlock() - if mi.initialized != 0 { - return - } - t := mi.typ - n := t.NumField() - - props := GetProperties(t) - for i := 0; i < n; i++ { - f := t.Field(i) - if strings.HasPrefix(f.Name, "XXX_") { - continue - } - - mfi := mergeFieldInfo{field: toField(&f)} - tf := f.Type - - // As an optimization, we can avoid the merge function call cost - // if we know for sure that the source will have no effect - // by checking if it is the zero value. - if unsafeAllowed { - switch tf.Kind() { - case reflect.Ptr, reflect.Slice, reflect.String: - // As a special case, we assume slices and strings are pointers - // since we know that the first field in the SliceSlice or - // StringHeader is a data pointer. - mfi.isPointer = true - case reflect.Bool: - mfi.basicWidth = 1 - case reflect.Int32, reflect.Uint32, reflect.Float32: - mfi.basicWidth = 4 - case reflect.Int64, reflect.Uint64, reflect.Float64: - mfi.basicWidth = 8 - } - } - - // Unwrap tf to get at its most basic type. - var isPointer, isSlice bool - if tf.Kind() == reflect.Slice && tf.Elem().Kind() != reflect.Uint8 { - isSlice = true - tf = tf.Elem() - } - if tf.Kind() == reflect.Ptr { - isPointer = true - tf = tf.Elem() - } - if isPointer && isSlice && tf.Kind() != reflect.Struct { - panic("both pointer and slice for basic type in " + tf.Name()) - } - - switch tf.Kind() { - case reflect.Int32: - switch { - case isSlice: // E.g., []int32 - mfi.merge = func(dst, src pointer) { - // NOTE: toInt32Slice is not defined (see pointer_reflect.go). - /* - sfsp := src.toInt32Slice() - if *sfsp != nil { - dfsp := dst.toInt32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []int64{} - } - } - */ - sfs := src.getInt32Slice() - if sfs != nil { - dfs := dst.getInt32Slice() - dfs = append(dfs, sfs...) - if dfs == nil { - dfs = []int32{} - } - dst.setInt32Slice(dfs) - } - } - case isPointer: // E.g., *int32 - mfi.merge = func(dst, src pointer) { - // NOTE: toInt32Ptr is not defined (see pointer_reflect.go). 
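// Illustrative sketch (not part of the vendored code being removed above) of the
// merge rules these generated closures implement: scalar fields are copied only
// when the source value is non-zero, and repeated fields are appended. The
// `example` type is hypothetical, used only to show the behaviour.
package main

import "fmt"

type example struct {
	Name  string
	Count int64
	Tags  []string
}

func merge(dst, src *example) {
	if src.Name != "" { // scalar: source wins only when set
		dst.Name = src.Name
	}
	if src.Count != 0 {
		dst.Count = src.Count
	}
	dst.Tags = append(dst.Tags, src.Tags...) // repeated: concatenate
}

func main() {
	dst := &example{Name: "a", Tags: []string{"x"}}
	src := &example{Count: 7, Tags: []string{"y"}}
	merge(dst, src)
	fmt.Printf("%+v\n", *dst) // {Name:a Count:7 Tags:[x y]}
}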
- /* - sfpp := src.toInt32Ptr() - if *sfpp != nil { - dfpp := dst.toInt32Ptr() - if *dfpp == nil { - *dfpp = Int32(**sfpp) - } else { - **dfpp = **sfpp - } - } - */ - sfp := src.getInt32Ptr() - if sfp != nil { - dfp := dst.getInt32Ptr() - if dfp == nil { - dst.setInt32Ptr(*sfp) - } else { - *dfp = *sfp - } - } - } - default: // E.g., int32 - mfi.merge = func(dst, src pointer) { - if v := *src.toInt32(); v != 0 { - *dst.toInt32() = v - } - } - } - case reflect.Int64: - switch { - case isSlice: // E.g., []int64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toInt64Slice() - if *sfsp != nil { - dfsp := dst.toInt64Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []int64{} - } - } - } - case isPointer: // E.g., *int64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toInt64Ptr() - if *sfpp != nil { - dfpp := dst.toInt64Ptr() - if *dfpp == nil { - *dfpp = Int64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., int64 - mfi.merge = func(dst, src pointer) { - if v := *src.toInt64(); v != 0 { - *dst.toInt64() = v - } - } - } - case reflect.Uint32: - switch { - case isSlice: // E.g., []uint32 - mfi.merge = func(dst, src pointer) { - sfsp := src.toUint32Slice() - if *sfsp != nil { - dfsp := dst.toUint32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []uint32{} - } - } - } - case isPointer: // E.g., *uint32 - mfi.merge = func(dst, src pointer) { - sfpp := src.toUint32Ptr() - if *sfpp != nil { - dfpp := dst.toUint32Ptr() - if *dfpp == nil { - *dfpp = Uint32(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., uint32 - mfi.merge = func(dst, src pointer) { - if v := *src.toUint32(); v != 0 { - *dst.toUint32() = v - } - } - } - case reflect.Uint64: - switch { - case isSlice: // E.g., []uint64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toUint64Slice() - if *sfsp != nil { - dfsp := dst.toUint64Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []uint64{} - } - } - } - case isPointer: // E.g., *uint64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toUint64Ptr() - if *sfpp != nil { - dfpp := dst.toUint64Ptr() - if *dfpp == nil { - *dfpp = Uint64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., uint64 - mfi.merge = func(dst, src pointer) { - if v := *src.toUint64(); v != 0 { - *dst.toUint64() = v - } - } - } - case reflect.Float32: - switch { - case isSlice: // E.g., []float32 - mfi.merge = func(dst, src pointer) { - sfsp := src.toFloat32Slice() - if *sfsp != nil { - dfsp := dst.toFloat32Slice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []float32{} - } - } - } - case isPointer: // E.g., *float32 - mfi.merge = func(dst, src pointer) { - sfpp := src.toFloat32Ptr() - if *sfpp != nil { - dfpp := dst.toFloat32Ptr() - if *dfpp == nil { - *dfpp = Float32(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., float32 - mfi.merge = func(dst, src pointer) { - if v := *src.toFloat32(); v != 0 { - *dst.toFloat32() = v - } - } - } - case reflect.Float64: - switch { - case isSlice: // E.g., []float64 - mfi.merge = func(dst, src pointer) { - sfsp := src.toFloat64Slice() - if *sfsp != nil { - dfsp := dst.toFloat64Slice() - *dfsp = append(*dfsp, *sfsp...) 
- if *dfsp == nil { - *dfsp = []float64{} - } - } - } - case isPointer: // E.g., *float64 - mfi.merge = func(dst, src pointer) { - sfpp := src.toFloat64Ptr() - if *sfpp != nil { - dfpp := dst.toFloat64Ptr() - if *dfpp == nil { - *dfpp = Float64(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., float64 - mfi.merge = func(dst, src pointer) { - if v := *src.toFloat64(); v != 0 { - *dst.toFloat64() = v - } - } - } - case reflect.Bool: - switch { - case isSlice: // E.g., []bool - mfi.merge = func(dst, src pointer) { - sfsp := src.toBoolSlice() - if *sfsp != nil { - dfsp := dst.toBoolSlice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []bool{} - } - } - } - case isPointer: // E.g., *bool - mfi.merge = func(dst, src pointer) { - sfpp := src.toBoolPtr() - if *sfpp != nil { - dfpp := dst.toBoolPtr() - if *dfpp == nil { - *dfpp = Bool(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., bool - mfi.merge = func(dst, src pointer) { - if v := *src.toBool(); v { - *dst.toBool() = v - } - } - } - case reflect.String: - switch { - case isSlice: // E.g., []string - mfi.merge = func(dst, src pointer) { - sfsp := src.toStringSlice() - if *sfsp != nil { - dfsp := dst.toStringSlice() - *dfsp = append(*dfsp, *sfsp...) - if *dfsp == nil { - *dfsp = []string{} - } - } - } - case isPointer: // E.g., *string - mfi.merge = func(dst, src pointer) { - sfpp := src.toStringPtr() - if *sfpp != nil { - dfpp := dst.toStringPtr() - if *dfpp == nil { - *dfpp = String(**sfpp) - } else { - **dfpp = **sfpp - } - } - } - default: // E.g., string - mfi.merge = func(dst, src pointer) { - if v := *src.toString(); v != "" { - *dst.toString() = v - } - } - } - case reflect.Slice: - isProto3 := props.Prop[i].proto3 - switch { - case isPointer: - panic("bad pointer in byte slice case in " + tf.Name()) - case tf.Elem().Kind() != reflect.Uint8: - panic("bad element kind in byte slice case in " + tf.Name()) - case isSlice: // E.g., [][]byte - mfi.merge = func(dst, src pointer) { - sbsp := src.toBytesSlice() - if *sbsp != nil { - dbsp := dst.toBytesSlice() - for _, sb := range *sbsp { - if sb == nil { - *dbsp = append(*dbsp, nil) - } else { - *dbsp = append(*dbsp, append([]byte{}, sb...)) - } - } - if *dbsp == nil { - *dbsp = [][]byte{} - } - } - } - default: // E.g., []byte - mfi.merge = func(dst, src pointer) { - sbp := src.toBytes() - if *sbp != nil { - dbp := dst.toBytes() - if !isProto3 || len(*sbp) > 0 { - *dbp = append([]byte{}, *sbp...) 
- } - } - } - } - case reflect.Struct: - switch { - case !isPointer: - panic(fmt.Sprintf("message field %s without pointer", tf)) - case isSlice: // E.g., []*pb.T - mi := getMergeInfo(tf) - mfi.merge = func(dst, src pointer) { - sps := src.getPointerSlice() - if sps != nil { - dps := dst.getPointerSlice() - for _, sp := range sps { - var dp pointer - if !sp.isNil() { - dp = valToPointer(reflect.New(tf)) - mi.merge(dp, sp) - } - dps = append(dps, dp) - } - if dps == nil { - dps = []pointer{} - } - dst.setPointerSlice(dps) - } - } - default: // E.g., *pb.T - mi := getMergeInfo(tf) - mfi.merge = func(dst, src pointer) { - sp := src.getPointer() - if !sp.isNil() { - dp := dst.getPointer() - if dp.isNil() { - dp = valToPointer(reflect.New(tf)) - dst.setPointer(dp) - } - mi.merge(dp, sp) - } - } - } - case reflect.Map: - switch { - case isPointer || isSlice: - panic("bad pointer or slice in map case in " + tf.Name()) - default: // E.g., map[K]V - mfi.merge = func(dst, src pointer) { - sm := src.asPointerTo(tf).Elem() - if sm.Len() == 0 { - return - } - dm := dst.asPointerTo(tf).Elem() - if dm.IsNil() { - dm.Set(reflect.MakeMap(tf)) - } - - switch tf.Elem().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - val = reflect.ValueOf(Clone(val.Interface().(Message))) - dm.SetMapIndex(key, val) - } - case reflect.Slice: // E.g. Bytes type (e.g., []byte) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - val = reflect.ValueOf(append([]byte{}, val.Bytes()...)) - dm.SetMapIndex(key, val) - } - default: // Basic type (e.g., string) - for _, key := range sm.MapKeys() { - val := sm.MapIndex(key) - dm.SetMapIndex(key, val) - } - } - } - } - case reflect.Interface: - // Must be oneof field. - switch { - case isPointer || isSlice: - panic("bad pointer or slice in interface case in " + tf.Name()) - default: // E.g., interface{} - // TODO: Make this faster? - mfi.merge = func(dst, src pointer) { - su := src.asPointerTo(tf).Elem() - if !su.IsNil() { - du := dst.asPointerTo(tf).Elem() - typ := su.Elem().Type() - if du.IsNil() || du.Elem().Type() != typ { - du.Set(reflect.New(typ.Elem())) // Initialize interface if empty - } - sv := su.Elem().Elem().Field(0) - if sv.Kind() == reflect.Ptr && sv.IsNil() { - return - } - dv := du.Elem().Elem().Field(0) - if dv.Kind() == reflect.Ptr && dv.IsNil() { - dv.Set(reflect.New(sv.Type().Elem())) // Initialize proto message if empty - } - switch sv.Type().Kind() { - case reflect.Ptr: // Proto struct (e.g., *T) - Merge(dv.Interface().(Message), sv.Interface().(Message)) - case reflect.Slice: // E.g. 
Bytes type (e.g., []byte) - dv.Set(reflect.ValueOf(append([]byte{}, sv.Bytes()...))) - default: // Basic type (e.g., string) - dv.Set(sv) - } - } - } - } - default: - panic(fmt.Sprintf("merger not found for type:%s", tf)) - } - mi.fields = append(mi.fields, mfi) - } - - mi.unrecognized = invalidField - if f, ok := t.FieldByName("XXX_unrecognized"); ok { - if f.Type != reflect.TypeOf([]byte{}) { - panic("expected XXX_unrecognized to be of type []byte") - } - mi.unrecognized = toField(&f) - } - - atomic.StoreInt32(&mi.initialized, 1) -} diff --git a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go b/vendor/github.com/golang/protobuf/proto/table_unmarshal.go deleted file mode 100644 index ebf1caa5..00000000 --- a/vendor/github.com/golang/protobuf/proto/table_unmarshal.go +++ /dev/null @@ -1,2051 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -import ( - "errors" - "fmt" - "io" - "math" - "reflect" - "strconv" - "strings" - "sync" - "sync/atomic" - "unicode/utf8" -) - -// Unmarshal is the entry point from the generated .pb.go files. -// This function is not intended to be used by non-generated code. -// This function is not subject to any compatibility guarantee. -// msg contains a pointer to a protocol buffer struct. -// b is the data to be unmarshaled into the protocol buffer. -// a is a pointer to a place to store cached unmarshal information. -func (a *InternalMessageInfo) Unmarshal(msg Message, b []byte) error { - // Load the unmarshal information for this message type. - // The atomic load ensures memory consistency. - u := atomicLoadUnmarshalInfo(&a.unmarshal) - if u == nil { - // Slow path: find unmarshal info for msg, update a with it. - u = getUnmarshalInfo(reflect.TypeOf(msg).Elem()) - atomicStoreUnmarshalInfo(&a.unmarshal, u) - } - // Then do the unmarshaling. 
- err := u.unmarshal(toPointer(&msg), b) - return err -} - -type unmarshalInfo struct { - typ reflect.Type // type of the protobuf struct - - // 0 = only typ field is initialized - // 1 = completely initialized - initialized int32 - lock sync.Mutex // prevents double initialization - dense []unmarshalFieldInfo // fields indexed by tag # - sparse map[uint64]unmarshalFieldInfo // fields indexed by tag # - reqFields []string // names of required fields - reqMask uint64 // 1< 0 { - // Read tag and wire type. - // Special case 1 and 2 byte varints. - var x uint64 - if b[0] < 128 { - x = uint64(b[0]) - b = b[1:] - } else if len(b) >= 2 && b[1] < 128 { - x = uint64(b[0]&0x7f) + uint64(b[1])<<7 - b = b[2:] - } else { - var n int - x, n = decodeVarint(b) - if n == 0 { - return io.ErrUnexpectedEOF - } - b = b[n:] - } - tag := x >> 3 - wire := int(x) & 7 - - // Dispatch on the tag to one of the unmarshal* functions below. - var f unmarshalFieldInfo - if tag < uint64(len(u.dense)) { - f = u.dense[tag] - } else { - f = u.sparse[tag] - } - if fn := f.unmarshal; fn != nil { - var err error - b, err = fn(b, m.offset(f.field), wire) - if err == nil { - reqMask |= f.reqMask - continue - } - if r, ok := err.(*RequiredNotSetError); ok { - // Remember this error, but keep parsing. We need to produce - // a full parse even if a required field is missing. - if errLater == nil { - errLater = r - } - reqMask |= f.reqMask - continue - } - if err != errInternalBadWireType { - if err == errInvalidUTF8 { - if errLater == nil { - fullName := revProtoTypes[reflect.PtrTo(u.typ)] + "." + f.name - errLater = &invalidUTF8Error{fullName} - } - continue - } - return err - } - // Fragments with bad wire type are treated as unknown fields. - } - - // Unknown tag. - if !u.unrecognized.IsValid() { - // Don't keep unrecognized data; just skip it. - var err error - b, err = skipField(b, wire) - if err != nil { - return err - } - continue - } - // Keep unrecognized data around. - // maybe in extensions, maybe in the unrecognized field. - z := m.offset(u.unrecognized).toBytes() - var emap map[int32]Extension - var e Extension - for _, r := range u.extensionRanges { - if uint64(r.Start) <= tag && tag <= uint64(r.End) { - if u.extensions.IsValid() { - mp := m.offset(u.extensions).toExtensions() - emap = mp.extensionsWrite() - e = emap[int32(tag)] - z = &e.enc - break - } - if u.oldExtensions.IsValid() { - p := m.offset(u.oldExtensions).toOldExtensions() - emap = *p - if emap == nil { - emap = map[int32]Extension{} - *p = emap - } - e = emap[int32(tag)] - z = &e.enc - break - } - panic("no extensions field available") - } - } - - // Use wire type to skip data. - var err error - b0 := b - b, err = skipField(b, wire) - if err != nil { - return err - } - *z = encodeVarint(*z, tag<<3|uint64(wire)) - *z = append(*z, b0[:len(b0)-len(b)]...) - - if emap != nil { - emap[int32(tag)] = e - } - } - if reqMask != u.reqMask && errLater == nil { - // A required field of this message is missing. - for _, n := range u.reqFields { - if reqMask&1 == 0 { - errLater = &RequiredNotSetError{n} - } - reqMask >>= 1 - } - } - return errLater -} - -// computeUnmarshalInfo fills in u with information for use -// in unmarshaling protocol buffers of type u.typ. -func (u *unmarshalInfo) computeUnmarshalInfo() { - u.lock.Lock() - defer u.lock.Unlock() - if u.initialized != 0 { - return - } - t := u.typ - n := t.NumField() - - // Set up the "not found" value for the unrecognized byte buffer. - // This is the default for proto3. 
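// Illustrative sketch (not part of the vendored code being removed above) of the
// field-key decoding done at the top of the unmarshal loop: each field starts
// with a varint whose low three bits are the wire type and whose remaining bits
// are the field number. decodeVarint here is a simplified stand-in.
package main

import "fmt"

func decodeVarint(b []byte) (uint64, int) {
	var x uint64
	for i := 0; i < len(b) && i < 10; i++ {
		x |= uint64(b[i]&0x7f) << (7 * uint(i))
		if b[i] < 0x80 {
			return x, i + 1
		}
	}
	return 0, 0 // truncated or over-long varint
}

func main() {
	b := []byte{0x1a, 0x02, 'h', 'i'} // key, then a 2-byte payload
	x, n := decodeVarint(b)
	fmt.Printf("field=%d wire=%d rest=% x\n", x>>3, x&7, b[n:])
}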
- u.unrecognized = invalidField - u.extensions = invalidField - u.oldExtensions = invalidField - - // List of the generated type and offset for each oneof field. - type oneofField struct { - ityp reflect.Type // interface type of oneof field - field field // offset in containing message - } - var oneofFields []oneofField - - for i := 0; i < n; i++ { - f := t.Field(i) - if f.Name == "XXX_unrecognized" { - // The byte slice used to hold unrecognized input is special. - if f.Type != reflect.TypeOf(([]byte)(nil)) { - panic("bad type for XXX_unrecognized field: " + f.Type.Name()) - } - u.unrecognized = toField(&f) - continue - } - if f.Name == "XXX_InternalExtensions" { - // Ditto here. - if f.Type != reflect.TypeOf(XXX_InternalExtensions{}) { - panic("bad type for XXX_InternalExtensions field: " + f.Type.Name()) - } - u.extensions = toField(&f) - if f.Tag.Get("protobuf_messageset") == "1" { - u.isMessageSet = true - } - continue - } - if f.Name == "XXX_extensions" { - // An older form of the extensions field. - if f.Type != reflect.TypeOf((map[int32]Extension)(nil)) { - panic("bad type for XXX_extensions field: " + f.Type.Name()) - } - u.oldExtensions = toField(&f) - continue - } - if f.Name == "XXX_NoUnkeyedLiteral" || f.Name == "XXX_sizecache" { - continue - } - - oneof := f.Tag.Get("protobuf_oneof") - if oneof != "" { - oneofFields = append(oneofFields, oneofField{f.Type, toField(&f)}) - // The rest of oneof processing happens below. - continue - } - - tags := f.Tag.Get("protobuf") - tagArray := strings.Split(tags, ",") - if len(tagArray) < 2 { - panic("protobuf tag not enough fields in " + t.Name() + "." + f.Name + ": " + tags) - } - tag, err := strconv.Atoi(tagArray[1]) - if err != nil { - panic("protobuf tag field not an integer: " + tagArray[1]) - } - - name := "" - for _, tag := range tagArray[3:] { - if strings.HasPrefix(tag, "name=") { - name = tag[5:] - } - } - - // Extract unmarshaling function from the field (its type and tags). - unmarshal := fieldUnmarshaler(&f) - - // Required field? - var reqMask uint64 - if tagArray[2] == "req" { - bit := len(u.reqFields) - u.reqFields = append(u.reqFields, name) - reqMask = uint64(1) << uint(bit) - // TODO: if we have more than 64 required fields, we end up - // not verifying that all required fields are present. - // Fix this, perhaps using a count of required fields? - } - - // Store the info in the correct slot in the message. - u.setTag(tag, toField(&f), unmarshal, reqMask, name) - } - - // Find any types associated with oneof fields. - // TODO: XXX_OneofFuncs returns more info than we need. Get rid of some of it? - fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("XXX_OneofFuncs") - if fn.IsValid() { - res := fn.Call(nil)[3] // last return value from XXX_OneofFuncs: []interface{} - for i := res.Len() - 1; i >= 0; i-- { - v := res.Index(i) // interface{} - tptr := reflect.ValueOf(v.Interface()).Type() // *Msg_X - typ := tptr.Elem() // Msg_X - - f := typ.Field(0) // oneof implementers have one field - baseUnmarshal := fieldUnmarshaler(&f) - tags := strings.Split(f.Tag.Get("protobuf"), ",") - fieldNum, err := strconv.Atoi(tags[1]) - if err != nil { - panic("protobuf tag field not an integer: " + tags[1]) - } - var name string - for _, tag := range tags { - if strings.HasPrefix(tag, "name=") { - name = strings.TrimPrefix(tag, "name=") - break - } - } - - // Find the oneof field that this struct implements. - // Might take O(n^2) to process all of the oneofs, but who cares. 
- for _, of := range oneofFields { - if tptr.Implements(of.ityp) { - // We have found the corresponding interface for this struct. - // That lets us know where this struct should be stored - // when we encounter it during unmarshaling. - unmarshal := makeUnmarshalOneof(typ, of.ityp, baseUnmarshal) - u.setTag(fieldNum, of.field, unmarshal, 0, name) - } - } - } - } - - // Get extension ranges, if any. - fn = reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray") - if fn.IsValid() { - if !u.extensions.IsValid() && !u.oldExtensions.IsValid() { - panic("a message with extensions, but no extensions field in " + t.Name()) - } - u.extensionRanges = fn.Call(nil)[0].Interface().([]ExtensionRange) - } - - // Explicitly disallow tag 0. This will ensure we flag an error - // when decoding a buffer of all zeros. Without this code, we - // would decode and skip an all-zero buffer of even length. - // [0 0] is [tag=0/wiretype=varint varint-encoded-0]. - u.setTag(0, zeroField, func(b []byte, f pointer, w int) ([]byte, error) { - return nil, fmt.Errorf("proto: %s: illegal tag 0 (wire type %d)", t, w) - }, 0, "") - - // Set mask for required field check. - u.reqMask = uint64(1)<= 0 && (tag < 16 || tag < 2*n) { // TODO: what are the right numbers here? - for len(u.dense) <= tag { - u.dense = append(u.dense, unmarshalFieldInfo{}) - } - u.dense[tag] = i - return - } - if u.sparse == nil { - u.sparse = map[uint64]unmarshalFieldInfo{} - } - u.sparse[uint64(tag)] = i -} - -// fieldUnmarshaler returns an unmarshaler for the given field. -func fieldUnmarshaler(f *reflect.StructField) unmarshaler { - if f.Type.Kind() == reflect.Map { - return makeUnmarshalMap(f) - } - return typeUnmarshaler(f.Type, f.Tag.Get("protobuf")) -} - -// typeUnmarshaler returns an unmarshaler for the given field type / field tag pair. -func typeUnmarshaler(t reflect.Type, tags string) unmarshaler { - tagArray := strings.Split(tags, ",") - encoding := tagArray[0] - name := "unknown" - proto3 := false - validateUTF8 := true - for _, tag := range tagArray[3:] { - if strings.HasPrefix(tag, "name=") { - name = tag[5:] - } - if tag == "proto3" { - proto3 = true - } - } - validateUTF8 = validateUTF8 && proto3 - - // Figure out packaging (pointer, slice, or both) - slice := false - pointer := false - if t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 { - slice = true - t = t.Elem() - } - if t.Kind() == reflect.Ptr { - pointer = true - t = t.Elem() - } - - // We'll never have both pointer and slice for basic types. 
- if pointer && slice && t.Kind() != reflect.Struct { - panic("both pointer and slice for basic type in " + t.Name()) - } - - switch t.Kind() { - case reflect.Bool: - if pointer { - return unmarshalBoolPtr - } - if slice { - return unmarshalBoolSlice - } - return unmarshalBoolValue - case reflect.Int32: - switch encoding { - case "fixed32": - if pointer { - return unmarshalFixedS32Ptr - } - if slice { - return unmarshalFixedS32Slice - } - return unmarshalFixedS32Value - case "varint": - // this could be int32 or enum - if pointer { - return unmarshalInt32Ptr - } - if slice { - return unmarshalInt32Slice - } - return unmarshalInt32Value - case "zigzag32": - if pointer { - return unmarshalSint32Ptr - } - if slice { - return unmarshalSint32Slice - } - return unmarshalSint32Value - } - case reflect.Int64: - switch encoding { - case "fixed64": - if pointer { - return unmarshalFixedS64Ptr - } - if slice { - return unmarshalFixedS64Slice - } - return unmarshalFixedS64Value - case "varint": - if pointer { - return unmarshalInt64Ptr - } - if slice { - return unmarshalInt64Slice - } - return unmarshalInt64Value - case "zigzag64": - if pointer { - return unmarshalSint64Ptr - } - if slice { - return unmarshalSint64Slice - } - return unmarshalSint64Value - } - case reflect.Uint32: - switch encoding { - case "fixed32": - if pointer { - return unmarshalFixed32Ptr - } - if slice { - return unmarshalFixed32Slice - } - return unmarshalFixed32Value - case "varint": - if pointer { - return unmarshalUint32Ptr - } - if slice { - return unmarshalUint32Slice - } - return unmarshalUint32Value - } - case reflect.Uint64: - switch encoding { - case "fixed64": - if pointer { - return unmarshalFixed64Ptr - } - if slice { - return unmarshalFixed64Slice - } - return unmarshalFixed64Value - case "varint": - if pointer { - return unmarshalUint64Ptr - } - if slice { - return unmarshalUint64Slice - } - return unmarshalUint64Value - } - case reflect.Float32: - if pointer { - return unmarshalFloat32Ptr - } - if slice { - return unmarshalFloat32Slice - } - return unmarshalFloat32Value - case reflect.Float64: - if pointer { - return unmarshalFloat64Ptr - } - if slice { - return unmarshalFloat64Slice - } - return unmarshalFloat64Value - case reflect.Map: - panic("map type in typeUnmarshaler in " + t.Name()) - case reflect.Slice: - if pointer { - panic("bad pointer in slice case in " + t.Name()) - } - if slice { - return unmarshalBytesSlice - } - return unmarshalBytesValue - case reflect.String: - if validateUTF8 { - if pointer { - return unmarshalUTF8StringPtr - } - if slice { - return unmarshalUTF8StringSlice - } - return unmarshalUTF8StringValue - } - if pointer { - return unmarshalStringPtr - } - if slice { - return unmarshalStringSlice - } - return unmarshalStringValue - case reflect.Struct: - // message or group field - if !pointer { - panic(fmt.Sprintf("message/group field %s:%s without pointer", t, encoding)) - } - switch encoding { - case "bytes": - if slice { - return makeUnmarshalMessageSlicePtr(getUnmarshalInfo(t), name) - } - return makeUnmarshalMessagePtr(getUnmarshalInfo(t), name) - case "group": - if slice { - return makeUnmarshalGroupSlicePtr(getUnmarshalInfo(t), name) - } - return makeUnmarshalGroupPtr(getUnmarshalInfo(t), name) - } - } - panic(fmt.Sprintf("unmarshaler not found type:%s encoding:%s", t, encoding)) -} - -// Below are all the unmarshalers for individual fields of various types. 
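// Illustrative sketch (not part of the vendored code being removed above) of the
// generated struct tag that fieldUnmarshaler/typeUnmarshaler read: comma-separated,
// encoding first, field number second, then options such as name= and proto3.
// The demo struct and its tag are hypothetical.
package main

import (
	"fmt"
	"reflect"
	"strconv"
	"strings"
)

type demo struct {
	Name string `protobuf:"bytes,1,opt,name=name,proto3"`
}

func main() {
	f, _ := reflect.TypeOf(demo{}).FieldByName("Name")
	parts := strings.Split(f.Tag.Get("protobuf"), ",")
	encoding := parts[0]
	num, _ := strconv.Atoi(parts[1])
	name, proto3 := "", false
	for _, opt := range parts[3:] {
		if strings.HasPrefix(opt, "name=") {
			name = strings.TrimPrefix(opt, "name=")
		}
		if opt == "proto3" {
			proto3 = true
		}
	}
	fmt.Println(encoding, num, name, proto3) // bytes 1 name true
}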
- -func unmarshalInt64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - *f.toInt64() = v - return b, nil -} - -func unmarshalInt64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - *f.toInt64Ptr() = &v - return b, nil -} - -func unmarshalInt64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - s := f.toInt64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x) - s := f.toInt64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalSint64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - *f.toInt64() = v - return b, nil -} - -func unmarshalSint64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - *f.toInt64Ptr() = &v - return b, nil -} - -func unmarshalSint64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - s := f.toInt64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int64(x>>1) ^ int64(x)<<63>>63 - s := f.toInt64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalUint64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - *f.toUint64() = v - return b, nil -} - -func unmarshalUint64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - *f.toUint64Ptr() = &v - return b, nil -} - -func unmarshalUint64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 
{ - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - s := f.toUint64Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint64(x) - s := f.toUint64Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalInt32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - *f.toInt32() = v - return b, nil -} - -func unmarshalInt32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.setInt32Ptr(v) - return b, nil -} - -func unmarshalInt32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.appendInt32Slice(v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x) - f.appendInt32Slice(v) - return b, nil -} - -func unmarshalSint32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - *f.toInt32() = v - return b, nil -} - -func unmarshalSint32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.setInt32Ptr(v) - return b, nil -} - -func unmarshalSint32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.appendInt32Slice(v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := int32(x>>1) ^ int32(x)<<31>>31 - f.appendInt32Slice(v) - return b, nil -} - -func unmarshalUint32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - *f.toUint32() = v - return b, nil -} - -func unmarshalUint32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - *f.toUint32Ptr() = &v - return b, nil -} - -func unmarshalUint32Slice(b []byte, f pointer, w int) 
([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - s := f.toUint32Slice() - *s = append(*s, v) - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - v := uint32(x) - s := f.toUint32Slice() - *s = append(*s, v) - return b, nil -} - -func unmarshalFixed64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - *f.toUint64() = v - return b[8:], nil -} - -func unmarshalFixed64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - *f.toUint64Ptr() = &v - return b[8:], nil -} - -func unmarshalFixed64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - s := f.toUint64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 - s := f.toUint64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFixedS64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - *f.toInt64() = v - return b[8:], nil -} - -func unmarshalFixedS64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - *f.toInt64Ptr() = &v - return b[8:], nil -} - -func unmarshalFixedS64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | 
int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - s := f.toInt64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := int64(b[0]) | int64(b[1])<<8 | int64(b[2])<<16 | int64(b[3])<<24 | int64(b[4])<<32 | int64(b[5])<<40 | int64(b[6])<<48 | int64(b[7])<<56 - s := f.toInt64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFixed32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - *f.toUint32() = v - return b[4:], nil -} - -func unmarshalFixed32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - *f.toUint32Ptr() = &v - return b[4:], nil -} - -func unmarshalFixed32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - s := f.toUint32Slice() - *s = append(*s, v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 - s := f.toUint32Slice() - *s = append(*s, v) - return b[4:], nil -} - -func unmarshalFixedS32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - *f.toInt32() = v - return b[4:], nil -} - -func unmarshalFixedS32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.setInt32Ptr(v) - return b[4:], nil -} - -func unmarshalFixedS32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.appendInt32Slice(v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := int32(b[0]) | int32(b[1])<<8 | int32(b[2])<<16 | int32(b[3])<<24 - f.appendInt32Slice(v) - return b[4:], nil -} - -func unmarshalBoolValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - // Note: any length varint is allowed, even though any sane - // encoder will use one byte. 
- // See https://github.com/golang/protobuf/issues/76 - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - // TODO: check if x>1? Tests seem to indicate no. - v := x != 0 - *f.toBool() = v - return b[n:], nil -} - -func unmarshalBoolPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - *f.toBoolPtr() = &v - return b[n:], nil -} - -func unmarshalBoolSlice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - x, n = decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - s := f.toBoolSlice() - *s = append(*s, v) - b = b[n:] - } - return res, nil - } - if w != WireVarint { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - v := x != 0 - s := f.toBoolSlice() - *s = append(*s, v) - return b[n:], nil -} - -func unmarshalFloat64Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - *f.toFloat64() = v - return b[8:], nil -} - -func unmarshalFloat64Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - *f.toFloat64Ptr() = &v - return b[8:], nil -} - -func unmarshalFloat64Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - s := f.toFloat64Slice() - *s = append(*s, v) - b = b[8:] - } - return res, nil - } - if w != WireFixed64 { - return b, errInternalBadWireType - } - if len(b) < 8 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float64frombits(uint64(b[0]) | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56) - s := f.toFloat64Slice() - *s = append(*s, v) - return b[8:], nil -} - -func unmarshalFloat32Value(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - *f.toFloat32() = v - return b[4:], nil -} - -func unmarshalFloat32Ptr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - 
v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - *f.toFloat32Ptr() = &v - return b[4:], nil -} - -func unmarshalFloat32Slice(b []byte, f pointer, w int) ([]byte, error) { - if w == WireBytes { // packed - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - res := b[x:] - b = b[:x] - for len(b) > 0 { - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - s := f.toFloat32Slice() - *s = append(*s, v) - b = b[4:] - } - return res, nil - } - if w != WireFixed32 { - return b, errInternalBadWireType - } - if len(b) < 4 { - return nil, io.ErrUnexpectedEOF - } - v := math.Float32frombits(uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24) - s := f.toFloat32Slice() - *s = append(*s, v) - return b[4:], nil -} - -func unmarshalStringValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toString() = v - return b[x:], nil -} - -func unmarshalStringPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toStringPtr() = &v - return b[x:], nil -} - -func unmarshalStringSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - s := f.toStringSlice() - *s = append(*s, v) - return b[x:], nil -} - -func unmarshalUTF8StringValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toString() = v - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -func unmarshalUTF8StringPtr(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - *f.toStringPtr() = &v - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -func unmarshalUTF8StringSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := string(b[:x]) - s := f.toStringSlice() - *s = append(*s, v) - if !utf8.ValidString(v) { - return b[x:], errInvalidUTF8 - } - return b[x:], nil -} - -var emptyBuf [0]byte - -func unmarshalBytesValue(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 
0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - // The use of append here is a trick which avoids the zeroing - // that would be required if we used a make/copy pair. - // We append to emptyBuf instead of nil because we want - // a non-nil result even when the length is 0. - v := append(emptyBuf[:], b[:x]...) - *f.toBytes() = v - return b[x:], nil -} - -func unmarshalBytesSlice(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := append(emptyBuf[:], b[:x]...) - s := f.toBytesSlice() - *s = append(*s, v) - return b[x:], nil -} - -func makeUnmarshalMessagePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - // First read the message field to see if something is there. - // The semantics of multiple submessages are weird. Instead of - // the last one winning (as it is for all other fields), multiple - // submessages are merged. - v := f.getPointer() - if v.isNil() { - v = valToPointer(reflect.New(sub.typ)) - f.setPointer(v) - } - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - return b[x:], err - } -} - -func makeUnmarshalMessageSlicePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireBytes { - return b, errInternalBadWireType - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - v := valToPointer(reflect.New(sub.typ)) - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - f.appendPointer(v) - return b[x:], err - } -} - -func makeUnmarshalGroupPtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireStartGroup { - return b, errInternalBadWireType - } - x, y := findEndGroup(b) - if x < 0 { - return nil, io.ErrUnexpectedEOF - } - v := f.getPointer() - if v.isNil() { - v = valToPointer(reflect.New(sub.typ)) - f.setPointer(v) - } - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." + r.field - } else { - return nil, err - } - } - return b[y:], err - } -} - -func makeUnmarshalGroupSlicePtr(sub *unmarshalInfo, name string) unmarshaler { - return func(b []byte, f pointer, w int) ([]byte, error) { - if w != WireStartGroup { - return b, errInternalBadWireType - } - x, y := findEndGroup(b) - if x < 0 { - return nil, io.ErrUnexpectedEOF - } - v := valToPointer(reflect.New(sub.typ)) - err := sub.unmarshal(v, b[:x]) - if err != nil { - if r, ok := err.(*RequiredNotSetError); ok { - r.field = name + "." 
+ r.field - } else { - return nil, err - } - } - f.appendPointer(v) - return b[y:], err - } -} - -func makeUnmarshalMap(f *reflect.StructField) unmarshaler { - t := f.Type - kt := t.Key() - vt := t.Elem() - unmarshalKey := typeUnmarshaler(kt, f.Tag.Get("protobuf_key")) - unmarshalVal := typeUnmarshaler(vt, f.Tag.Get("protobuf_val")) - return func(b []byte, f pointer, w int) ([]byte, error) { - // The map entry is a submessage. Figure out how big it is. - if w != WireBytes { - return nil, fmt.Errorf("proto: bad wiretype for map field: got %d want %d", w, WireBytes) - } - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - b = b[n:] - if x > uint64(len(b)) { - return nil, io.ErrUnexpectedEOF - } - r := b[x:] // unused data to return - b = b[:x] // data for map entry - - // Note: we could use #keys * #values ~= 200 functions - // to do map decoding without reflection. Probably not worth it. - // Maps will be somewhat slow. Oh well. - - // Read key and value from data. - var nerr nonFatal - k := reflect.New(kt) - v := reflect.New(vt) - for len(b) > 0 { - x, n := decodeVarint(b) - if n == 0 { - return nil, io.ErrUnexpectedEOF - } - wire := int(x) & 7 - b = b[n:] - - var err error - switch x >> 3 { - case 1: - b, err = unmarshalKey(b, valToPointer(k), wire) - case 2: - b, err = unmarshalVal(b, valToPointer(v), wire) - default: - err = errInternalBadWireType // skip unknown tag - } - - if nerr.Merge(err) { - continue - } - if err != errInternalBadWireType { - return nil, err - } - - // Skip past unknown fields. - b, err = skipField(b, wire) - if err != nil { - return nil, err - } - } - - // Get map, allocate if needed. - m := f.asPointerTo(t).Elem() // an addressable map[K]T - if m.IsNil() { - m.Set(reflect.MakeMap(t)) - } - - // Insert into map. - m.SetMapIndex(k.Elem(), v.Elem()) - - return r, nerr.E - } -} - -// makeUnmarshalOneof makes an unmarshaler for oneof fields. -// for: -// message Msg { -// oneof F { -// int64 X = 1; -// float64 Y = 2; -// } -// } -// typ is the type of the concrete entry for a oneof case (e.g. Msg_X). -// ityp is the interface type of the oneof field (e.g. isMsg_F). -// unmarshal is the unmarshaler for the base type of the oneof case (e.g. int64). -// Note that this function will be called once for each case in the oneof. -func makeUnmarshalOneof(typ, ityp reflect.Type, unmarshal unmarshaler) unmarshaler { - sf := typ.Field(0) - field0 := toField(&sf) - return func(b []byte, f pointer, w int) ([]byte, error) { - // Allocate holder for value. - v := reflect.New(typ) - - // Unmarshal data into holder. - // We unmarshal into the first field of the holder object. - var err error - var nerr nonFatal - b, err = unmarshal(b, valToPointer(v).offset(field0), w) - if !nerr.Merge(err) { - return nil, err - } - - // Write pointer to holder into target field. - f.asPointerTo(ityp).Elem().Set(v) - - return b, nerr.E - } -} - -// Error used by decode internally. -var errInternalBadWireType = errors.New("proto: internal error: bad wiretype") - -// skipField skips past a field of type wire and returns the remaining bytes. 
-func skipField(b []byte, wire int) ([]byte, error) { - switch wire { - case WireVarint: - _, k := decodeVarint(b) - if k == 0 { - return b, io.ErrUnexpectedEOF - } - b = b[k:] - case WireFixed32: - if len(b) < 4 { - return b, io.ErrUnexpectedEOF - } - b = b[4:] - case WireFixed64: - if len(b) < 8 { - return b, io.ErrUnexpectedEOF - } - b = b[8:] - case WireBytes: - m, k := decodeVarint(b) - if k == 0 || uint64(len(b)-k) < m { - return b, io.ErrUnexpectedEOF - } - b = b[uint64(k)+m:] - case WireStartGroup: - _, i := findEndGroup(b) - if i == -1 { - return b, io.ErrUnexpectedEOF - } - b = b[i:] - default: - return b, fmt.Errorf("proto: can't skip unknown wire type %d", wire) - } - return b, nil -} - -// findEndGroup finds the index of the next EndGroup tag. -// Groups may be nested, so the "next" EndGroup tag is the first -// unpaired EndGroup. -// findEndGroup returns the indexes of the start and end of the EndGroup tag. -// Returns (-1,-1) if it can't find one. -func findEndGroup(b []byte) (int, int) { - depth := 1 - i := 0 - for { - x, n := decodeVarint(b[i:]) - if n == 0 { - return -1, -1 - } - j := i - i += n - switch x & 7 { - case WireVarint: - _, k := decodeVarint(b[i:]) - if k == 0 { - return -1, -1 - } - i += k - case WireFixed32: - if len(b)-4 < i { - return -1, -1 - } - i += 4 - case WireFixed64: - if len(b)-8 < i { - return -1, -1 - } - i += 8 - case WireBytes: - m, k := decodeVarint(b[i:]) - if k == 0 { - return -1, -1 - } - i += k - if uint64(len(b)-i) < m { - return -1, -1 - } - i += int(m) - case WireStartGroup: - depth++ - case WireEndGroup: - depth-- - if depth == 0 { - return j, i - } - default: - return -1, -1 - } - } -} - -// encodeVarint appends a varint-encoded integer to b and returns the result. -func encodeVarint(b []byte, x uint64) []byte { - for x >= 1<<7 { - b = append(b, byte(x&0x7f|0x80)) - x >>= 7 - } - return append(b, byte(x)) -} - -// decodeVarint reads a varint-encoded integer from b. -// Returns the decoded integer and the number of bytes read. -// If there is an error, it returns 0,0. 
-func decodeVarint(b []byte) (uint64, int) { - var x, y uint64 - if len(b) <= 0 { - goto bad - } - x = uint64(b[0]) - if x < 0x80 { - return x, 1 - } - x -= 0x80 - - if len(b) <= 1 { - goto bad - } - y = uint64(b[1]) - x += y << 7 - if y < 0x80 { - return x, 2 - } - x -= 0x80 << 7 - - if len(b) <= 2 { - goto bad - } - y = uint64(b[2]) - x += y << 14 - if y < 0x80 { - return x, 3 - } - x -= 0x80 << 14 - - if len(b) <= 3 { - goto bad - } - y = uint64(b[3]) - x += y << 21 - if y < 0x80 { - return x, 4 - } - x -= 0x80 << 21 - - if len(b) <= 4 { - goto bad - } - y = uint64(b[4]) - x += y << 28 - if y < 0x80 { - return x, 5 - } - x -= 0x80 << 28 - - if len(b) <= 5 { - goto bad - } - y = uint64(b[5]) - x += y << 35 - if y < 0x80 { - return x, 6 - } - x -= 0x80 << 35 - - if len(b) <= 6 { - goto bad - } - y = uint64(b[6]) - x += y << 42 - if y < 0x80 { - return x, 7 - } - x -= 0x80 << 42 - - if len(b) <= 7 { - goto bad - } - y = uint64(b[7]) - x += y << 49 - if y < 0x80 { - return x, 8 - } - x -= 0x80 << 49 - - if len(b) <= 8 { - goto bad - } - y = uint64(b[8]) - x += y << 56 - if y < 0x80 { - return x, 9 - } - x -= 0x80 << 56 - - if len(b) <= 9 { - goto bad - } - y = uint64(b[9]) - x += y << 63 - if y < 2 { - return x, 10 - } - -bad: - return 0, 0 -} diff --git a/vendor/github.com/golang/protobuf/proto/text.go b/vendor/github.com/golang/protobuf/proto/text.go deleted file mode 100644 index 1aaee725..00000000 --- a/vendor/github.com/golang/protobuf/proto/text.go +++ /dev/null @@ -1,843 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for writing the text protocol buffer format. 
- -import ( - "bufio" - "bytes" - "encoding" - "errors" - "fmt" - "io" - "log" - "math" - "reflect" - "sort" - "strings" -) - -var ( - newline = []byte("\n") - spaces = []byte(" ") - endBraceNewline = []byte("}\n") - backslashN = []byte{'\\', 'n'} - backslashR = []byte{'\\', 'r'} - backslashT = []byte{'\\', 't'} - backslashDQ = []byte{'\\', '"'} - backslashBS = []byte{'\\', '\\'} - posInf = []byte("inf") - negInf = []byte("-inf") - nan = []byte("nan") -) - -type writer interface { - io.Writer - WriteByte(byte) error -} - -// textWriter is an io.Writer that tracks its indentation level. -type textWriter struct { - ind int - complete bool // if the current position is a complete line - compact bool // whether to write out as a one-liner - w writer -} - -func (w *textWriter) WriteString(s string) (n int, err error) { - if !strings.Contains(s, "\n") { - if !w.compact && w.complete { - w.writeIndent() - } - w.complete = false - return io.WriteString(w.w, s) - } - // WriteString is typically called without newlines, so this - // codepath and its copy are rare. We copy to avoid - // duplicating all of Write's logic here. - return w.Write([]byte(s)) -} - -func (w *textWriter) Write(p []byte) (n int, err error) { - newlines := bytes.Count(p, newline) - if newlines == 0 { - if !w.compact && w.complete { - w.writeIndent() - } - n, err = w.w.Write(p) - w.complete = false - return n, err - } - - frags := bytes.SplitN(p, newline, newlines+1) - if w.compact { - for i, frag := range frags { - if i > 0 { - if err := w.w.WriteByte(' '); err != nil { - return n, err - } - n++ - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - } - return n, nil - } - - for i, frag := range frags { - if w.complete { - w.writeIndent() - } - nn, err := w.w.Write(frag) - n += nn - if err != nil { - return n, err - } - if i+1 < len(frags) { - if err := w.w.WriteByte('\n'); err != nil { - return n, err - } - n++ - } - } - w.complete = len(frags[len(frags)-1]) == 0 - return n, nil -} - -func (w *textWriter) WriteByte(c byte) error { - if w.compact && c == '\n' { - c = ' ' - } - if !w.compact && w.complete { - w.writeIndent() - } - err := w.w.WriteByte(c) - w.complete = c == '\n' - return err -} - -func (w *textWriter) indent() { w.ind++ } - -func (w *textWriter) unindent() { - if w.ind == 0 { - log.Print("proto: textWriter unindented too far") - return - } - w.ind-- -} - -func writeName(w *textWriter, props *Properties) error { - if _, err := w.WriteString(props.OrigName); err != nil { - return err - } - if props.Wire != "group" { - return w.WriteByte(':') - } - return nil -} - -func requiresQuotes(u string) bool { - // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. - for _, ch := range u { - switch { - case ch == '.' || ch == '/' || ch == '_': - continue - case '0' <= ch && ch <= '9': - continue - case 'A' <= ch && ch <= 'Z': - continue - case 'a' <= ch && ch <= 'z': - continue - default: - return true - } - } - return false -} - -// isAny reports whether sv is a google.protobuf.Any message -func isAny(sv reflect.Value) bool { - type wkt interface { - XXX_WellKnownType() string - } - t, ok := sv.Addr().Interface().(wkt) - return ok && t.XXX_WellKnownType() == "Any" -} - -// writeProto3Any writes an expanded google.protobuf.Any message. -// -// It returns (false, nil) if sv value can't be unmarshaled (e.g. because -// required messages are not linked in). -// -// It returns (true, error) when sv was written in expanded format or an error -// was encountered. 
-func (tm *TextMarshaler) writeProto3Any(w *textWriter, sv reflect.Value) (bool, error) { - turl := sv.FieldByName("TypeUrl") - val := sv.FieldByName("Value") - if !turl.IsValid() || !val.IsValid() { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - b, ok := val.Interface().([]byte) - if !ok { - return true, errors.New("proto: invalid google.protobuf.Any message") - } - - parts := strings.Split(turl.String(), "/") - mt := MessageType(parts[len(parts)-1]) - if mt == nil { - return false, nil - } - m := reflect.New(mt.Elem()) - if err := Unmarshal(b, m.Interface().(Message)); err != nil { - return false, nil - } - w.Write([]byte("[")) - u := turl.String() - if requiresQuotes(u) { - writeString(w, u) - } else { - w.Write([]byte(u)) - } - if w.compact { - w.Write([]byte("]:<")) - } else { - w.Write([]byte("]: <\n")) - w.ind++ - } - if err := tm.writeStruct(w, m.Elem()); err != nil { - return true, err - } - if w.compact { - w.Write([]byte("> ")) - } else { - w.ind-- - w.Write([]byte(">\n")) - } - return true, nil -} - -func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error { - if tm.ExpandAny && isAny(sv) { - if canExpand, err := tm.writeProto3Any(w, sv); canExpand { - return err - } - } - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < sv.NumField(); i++ { - fv := sv.Field(i) - props := sprops.Prop[i] - name := st.Field(i).Name - - if name == "XXX_NoUnkeyedLiteral" { - continue - } - - if strings.HasPrefix(name, "XXX_") { - // There are two XXX_ fields: - // XXX_unrecognized []byte - // XXX_extensions map[int32]proto.Extension - // The first is handled here; - // the second is handled at the bottom of this function. - if name == "XXX_unrecognized" && !fv.IsNil() { - if err := writeUnknownStruct(w, fv.Interface().([]byte)); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Field not filled in. This could be an optional field or - // a required field that wasn't filled in. Either way, there - // isn't anything we can show for it. - continue - } - if fv.Kind() == reflect.Slice && fv.IsNil() { - // Repeated field that is empty, or a bytes field that is unused. - continue - } - - if props.Repeated && fv.Kind() == reflect.Slice { - // Repeated field. - for j := 0; j < fv.Len(); j++ { - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - v := fv.Index(j) - if v.Kind() == reflect.Ptr && v.IsNil() { - // A nil message in a repeated field is not valid, - // but we can handle that more gracefully than panicking. - if _, err := w.Write([]byte("\n")); err != nil { - return err - } - continue - } - if err := tm.writeAny(w, v, props); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if fv.Kind() == reflect.Map { - // Map fields are rendered as a repeated struct with key/value fields. 
- keys := fv.MapKeys() - sort.Sort(mapKeys(keys)) - for _, key := range keys { - val := fv.MapIndex(key) - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - // open struct - if err := w.WriteByte('<'); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - // key - if _, err := w.WriteString("key:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, key, props.MapKeyProp); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - // nil values aren't legal, but we can avoid panicking because of them. - if val.Kind() != reflect.Ptr || !val.IsNil() { - // value - if _, err := w.WriteString("value:"); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, val, props.MapValProp); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - // close struct - w.unindent() - if err := w.WriteByte('>'); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - } - continue - } - if props.proto3 && fv.Kind() == reflect.Slice && fv.Len() == 0 { - // empty bytes field - continue - } - if fv.Kind() != reflect.Ptr && fv.Kind() != reflect.Slice { - // proto3 non-repeated scalar field; skip if zero value - if isProto3Zero(fv) { - continue - } - } - - if fv.Kind() == reflect.Interface { - // Check if it is a oneof. - if st.Field(i).Tag.Get("protobuf_oneof") != "" { - // fv is nil, or holds a pointer to generated struct. - // That generated struct has exactly one field, - // which has a protobuf struct tag. - if fv.IsNil() { - continue - } - inner := fv.Elem().Elem() // interface -> *T -> T - tag := inner.Type().Field(0).Tag.Get("protobuf") - props = new(Properties) // Overwrite the outer props var, but not its pointee. - props.Parse(tag) - // Write the value in the oneof, not the oneof itself. - fv = inner.Field(0) - - // Special case to cope with malformed messages gracefully: - // If the value in the oneof is a nil pointer, don't panic - // in writeAny. - if fv.Kind() == reflect.Ptr && fv.IsNil() { - // Use errors.New so writeAny won't render quotes. - msg := errors.New("/* nil */") - fv = reflect.ValueOf(&msg).Elem() - } - } - } - - if err := writeName(w, props); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - - // Enums have a String method, so writeAny will work fine. - if err := tm.writeAny(w, fv, props); err != nil { - return err - } - - if err := w.WriteByte('\n'); err != nil { - return err - } - } - - // Extensions (the XXX_extensions field). - pv := sv.Addr() - if _, err := extendable(pv.Interface()); err == nil { - if err := tm.writeExtensions(w, pv); err != nil { - return err - } - } - - return nil -} - -// writeAny writes an arbitrary field. -func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error { - v = reflect.Indirect(v) - - // Floats have special cases. 
- if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 { - x := v.Float() - var b []byte - switch { - case math.IsInf(x, 1): - b = posInf - case math.IsInf(x, -1): - b = negInf - case math.IsNaN(x): - b = nan - } - if b != nil { - _, err := w.Write(b) - return err - } - // Other values are handled below. - } - - // We don't attempt to serialise every possible value type; only those - // that can occur in protocol buffers. - switch v.Kind() { - case reflect.Slice: - // Should only be a []byte; repeated fields are handled in writeStruct. - if err := writeString(w, string(v.Bytes())); err != nil { - return err - } - case reflect.String: - if err := writeString(w, v.String()); err != nil { - return err - } - case reflect.Struct: - // Required/optional group/message. - var bra, ket byte = '<', '>' - if props != nil && props.Wire == "group" { - bra, ket = '{', '}' - } - if err := w.WriteByte(bra); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte('\n'); err != nil { - return err - } - } - w.indent() - if v.CanAddr() { - // Calling v.Interface on a struct causes the reflect package to - // copy the entire struct. This is racy with the new Marshaler - // since we atomically update the XXX_sizecache. - // - // Thus, we retrieve a pointer to the struct if possible to avoid - // a race since v.Interface on the pointer doesn't copy the struct. - // - // If v is not addressable, then we are not worried about a race - // since it implies that the binary Marshaler cannot possibly be - // mutating this value. - v = v.Addr() - } - if etm, ok := v.Interface().(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = w.Write(text); err != nil { - return err - } - } else { - if v.Kind() == reflect.Ptr { - v = v.Elem() - } - if err := tm.writeStruct(w, v); err != nil { - return err - } - } - w.unindent() - if err := w.WriteByte(ket); err != nil { - return err - } - default: - _, err := fmt.Fprint(w, v.Interface()) - return err - } - return nil -} - -// equivalent to C's isprint. -func isprint(c byte) bool { - return c >= 0x20 && c < 0x7f -} - -// writeString writes a string in the protocol buffer text format. -// It is similar to strconv.Quote except we don't use Go escape sequences, -// we treat the string as a byte sequence, and we use octal escapes. -// These differences are to maintain interoperability with the other -// languages' implementations of the text format. -func writeString(w *textWriter, s string) error { - // use WriteByte here to get any needed indent - if err := w.WriteByte('"'); err != nil { - return err - } - // Loop over the bytes, not the runes. - for i := 0; i < len(s); i++ { - var err error - // Divergence from C++: we don't escape apostrophes. - // There's no need to escape them, and the C++ parser - // copes with a naked apostrophe. 
- switch c := s[i]; c { - case '\n': - _, err = w.w.Write(backslashN) - case '\r': - _, err = w.w.Write(backslashR) - case '\t': - _, err = w.w.Write(backslashT) - case '"': - _, err = w.w.Write(backslashDQ) - case '\\': - _, err = w.w.Write(backslashBS) - default: - if isprint(c) { - err = w.w.WriteByte(c) - } else { - _, err = fmt.Fprintf(w.w, "\\%03o", c) - } - } - if err != nil { - return err - } - } - return w.WriteByte('"') -} - -func writeUnknownStruct(w *textWriter, data []byte) (err error) { - if !w.compact { - if _, err := fmt.Fprintf(w, "/* %d unknown bytes */\n", len(data)); err != nil { - return err - } - } - b := NewBuffer(data) - for b.index < len(b.buf) { - x, err := b.DecodeVarint() - if err != nil { - _, err := fmt.Fprintf(w, "/* %v */\n", err) - return err - } - wire, tag := x&7, x>>3 - if wire == WireEndGroup { - w.unindent() - if _, err := w.Write(endBraceNewline); err != nil { - return err - } - continue - } - if _, err := fmt.Fprint(w, tag); err != nil { - return err - } - if wire != WireStartGroup { - if err := w.WriteByte(':'); err != nil { - return err - } - } - if !w.compact || wire == WireStartGroup { - if err := w.WriteByte(' '); err != nil { - return err - } - } - switch wire { - case WireBytes: - buf, e := b.DecodeRawBytes(false) - if e == nil { - _, err = fmt.Fprintf(w, "%q", buf) - } else { - _, err = fmt.Fprintf(w, "/* %v */", e) - } - case WireFixed32: - x, err = b.DecodeFixed32() - err = writeUnknownInt(w, x, err) - case WireFixed64: - x, err = b.DecodeFixed64() - err = writeUnknownInt(w, x, err) - case WireStartGroup: - err = w.WriteByte('{') - w.indent() - case WireVarint: - x, err = b.DecodeVarint() - err = writeUnknownInt(w, x, err) - default: - _, err = fmt.Fprintf(w, "/* unknown wire type %d */", wire) - } - if err != nil { - return err - } - if err = w.WriteByte('\n'); err != nil { - return err - } - } - return nil -} - -func writeUnknownInt(w *textWriter, x uint64, err error) error { - if err == nil { - _, err = fmt.Fprint(w, x) - } else { - _, err = fmt.Fprintf(w, "/* %v */", err) - } - return err -} - -type int32Slice []int32 - -func (s int32Slice) Len() int { return len(s) } -func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] } -func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } - -// writeExtensions writes all the extensions in pv. -// pv is assumed to be a pointer to a protocol message struct that is extendable. -func (tm *TextMarshaler) writeExtensions(w *textWriter, pv reflect.Value) error { - emap := extensionMaps[pv.Type().Elem()] - ep, _ := extendable(pv.Interface()) - - // Order the extensions by ID. - // This isn't strictly necessary, but it will give us - // canonical output, which will also make testing easier. - m, mu := ep.extensionsRead() - if m == nil { - return nil - } - mu.Lock() - ids := make([]int32, 0, len(m)) - for id := range m { - ids = append(ids, id) - } - sort.Sort(int32Slice(ids)) - mu.Unlock() - - for _, extNum := range ids { - ext := m[extNum] - var desc *ExtensionDesc - if emap != nil { - desc = emap[extNum] - } - if desc == nil { - // Unknown extension. - if err := writeUnknownStruct(w, ext.enc); err != nil { - return err - } - continue - } - - pb, err := GetExtension(ep, desc) - if err != nil { - return fmt.Errorf("failed getting extension: %v", err) - } - - // Repeated extensions will appear as a slice. 
- if !desc.repeated() { - if err := tm.writeExtension(w, desc.Name, pb); err != nil { - return err - } - } else { - v := reflect.ValueOf(pb) - for i := 0; i < v.Len(); i++ { - if err := tm.writeExtension(w, desc.Name, v.Index(i).Interface()); err != nil { - return err - } - } - } - } - return nil -} - -func (tm *TextMarshaler) writeExtension(w *textWriter, name string, pb interface{}) error { - if _, err := fmt.Fprintf(w, "[%s]:", name); err != nil { - return err - } - if !w.compact { - if err := w.WriteByte(' '); err != nil { - return err - } - } - if err := tm.writeAny(w, reflect.ValueOf(pb), nil); err != nil { - return err - } - if err := w.WriteByte('\n'); err != nil { - return err - } - return nil -} - -func (w *textWriter) writeIndent() { - if !w.complete { - return - } - remain := w.ind * 2 - for remain > 0 { - n := remain - if n > len(spaces) { - n = len(spaces) - } - w.w.Write(spaces[:n]) - remain -= n - } - w.complete = false -} - -// TextMarshaler is a configurable text format marshaler. -type TextMarshaler struct { - Compact bool // use compact text format (one line). - ExpandAny bool // expand google.protobuf.Any messages of known types -} - -// Marshal writes a given protocol buffer in text format. -// The only errors returned are from w. -func (tm *TextMarshaler) Marshal(w io.Writer, pb Message) error { - val := reflect.ValueOf(pb) - if pb == nil || val.IsNil() { - w.Write([]byte("")) - return nil - } - var bw *bufio.Writer - ww, ok := w.(writer) - if !ok { - bw = bufio.NewWriter(w) - ww = bw - } - aw := &textWriter{ - w: ww, - complete: true, - compact: tm.Compact, - } - - if etm, ok := pb.(encoding.TextMarshaler); ok { - text, err := etm.MarshalText() - if err != nil { - return err - } - if _, err = aw.Write(text); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil - } - // Dereference the received pointer so we don't have outer < and >. - v := reflect.Indirect(val) - if err := tm.writeStruct(aw, v); err != nil { - return err - } - if bw != nil { - return bw.Flush() - } - return nil -} - -// Text is the same as Marshal, but returns the string directly. -func (tm *TextMarshaler) Text(pb Message) string { - var buf bytes.Buffer - tm.Marshal(&buf, pb) - return buf.String() -} - -var ( - defaultTextMarshaler = TextMarshaler{} - compactTextMarshaler = TextMarshaler{Compact: true} -) - -// TODO: consider removing some of the Marshal functions below. - -// MarshalText writes a given protocol buffer in text format. -// The only errors returned are from w. -func MarshalText(w io.Writer, pb Message) error { return defaultTextMarshaler.Marshal(w, pb) } - -// MarshalTextString is the same as MarshalText, but returns the string directly. -func MarshalTextString(pb Message) string { return defaultTextMarshaler.Text(pb) } - -// CompactText writes a given protocol buffer in compact text format (one line). -func CompactText(w io.Writer, pb Message) error { return compactTextMarshaler.Marshal(w, pb) } - -// CompactTextString is the same as CompactText, but returns the string directly. -func CompactTextString(pb Message) string { return compactTextMarshaler.Text(pb) } diff --git a/vendor/github.com/golang/protobuf/proto/text_decode.go b/vendor/github.com/golang/protobuf/proto/text_decode.go new file mode 100644 index 00000000..47eb3e44 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_decode.go @@ -0,0 +1,801 @@ +// Copyright 2010 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "encoding" + "errors" + "fmt" + "reflect" + "strconv" + "strings" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/prototext" + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +const wrapTextUnmarshalV2 = false + +// ParseError is returned by UnmarshalText. +type ParseError struct { + Message string + + // Deprecated: Do not use. + Line, Offset int +} + +func (e *ParseError) Error() string { + if wrapTextUnmarshalV2 { + return e.Message + } + if e.Line == 1 { + return fmt.Sprintf("line 1.%d: %v", e.Offset, e.Message) + } + return fmt.Sprintf("line %d: %v", e.Line, e.Message) +} + +// UnmarshalText parses a proto text formatted string into m. +func UnmarshalText(s string, m Message) error { + if u, ok := m.(encoding.TextUnmarshaler); ok { + return u.UnmarshalText([]byte(s)) + } + + m.Reset() + mi := MessageV2(m) + + if wrapTextUnmarshalV2 { + err := prototext.UnmarshalOptions{ + AllowPartial: true, + }.Unmarshal([]byte(s), mi) + if err != nil { + return &ParseError{Message: err.Error()} + } + return checkRequiredNotSet(mi) + } else { + if err := newTextParser(s).unmarshalMessage(mi.ProtoReflect(), ""); err != nil { + return err + } + return checkRequiredNotSet(mi) + } +} + +type textParser struct { + s string // remaining input + done bool // whether the parsing is finished (success or error) + backed bool // whether back() was called + offset, line int + cur token +} + +type token struct { + value string + err *ParseError + line int // line number + offset int // byte number from start of input, not start of line + unquoted string // the unquoted version of value, if it was a quoted string +} + +func newTextParser(s string) *textParser { + p := new(textParser) + p.s = s + p.line = 1 + p.cur.line = 1 + return p +} + +func (p *textParser) unmarshalMessage(m protoreflect.Message, terminator string) (err error) { + md := m.Descriptor() + fds := md.Fields() + + // A struct is a sequence of "name: value", terminated by one of + // '>' or '}', or the end of the input. A name may also be + // "[extension]" or "[type/url]". + // + // The whole struct can also be an expanded Any message, like: + // [type/url] < ... struct contents ... > + seen := make(map[protoreflect.FieldNumber]bool) + for { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value == terminator { + break + } + if tok.value == "[" { + if err := p.unmarshalExtensionOrAny(m, seen); err != nil { + return err + } + continue + } + + // This is a normal, non-extension field. 
+ name := protoreflect.Name(tok.value) + fd := fds.ByName(name) + switch { + case fd == nil: + gd := fds.ByName(protoreflect.Name(strings.ToLower(string(name)))) + if gd != nil && gd.Kind() == protoreflect.GroupKind && gd.Message().Name() == name { + fd = gd + } + case fd.Kind() == protoreflect.GroupKind && fd.Message().Name() != name: + fd = nil + case fd.IsWeak() && fd.Message().IsPlaceholder(): + fd = nil + } + if fd == nil { + typeName := string(md.FullName()) + if m, ok := m.Interface().(Message); ok { + t := reflect.TypeOf(m) + if t.Kind() == reflect.Ptr { + typeName = t.Elem().String() + } + } + return p.errorf("unknown field name %q in %v", name, typeName) + } + if od := fd.ContainingOneof(); od != nil && m.WhichOneof(od) != nil { + return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, od.Name()) + } + if fd.Cardinality() != protoreflect.Repeated && seen[fd.Number()] { + return p.errorf("non-repeated field %q was repeated", fd.Name()) + } + seen[fd.Number()] = true + + // Consume any colon. + if err := p.checkForColon(fd); err != nil { + return err + } + + // Parse into the field. + v := m.Get(fd) + if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) { + v = m.Mutable(fd) + } + if v, err = p.unmarshalValue(v, fd); err != nil { + return err + } + m.Set(fd, v) + + if err := p.consumeOptionalSeparator(); err != nil { + return err + } + } + return nil +} + +func (p *textParser) unmarshalExtensionOrAny(m protoreflect.Message, seen map[protoreflect.FieldNumber]bool) error { + name, err := p.consumeExtensionOrAnyName() + if err != nil { + return err + } + + // If it contains a slash, it's an Any type URL. + if slashIdx := strings.LastIndex(name, "/"); slashIdx >= 0 { + tok := p.next() + if tok.err != nil { + return tok.err + } + // consume an optional colon + if tok.value == ":" { + tok = p.next() + if tok.err != nil { + return tok.err + } + } + + var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return p.errorf("expected '{' or '<', found %q", tok.value) + } + + mt, err := protoregistry.GlobalTypes.FindMessageByURL(name) + if err != nil { + return p.errorf("unrecognized message %q in google.protobuf.Any", name[slashIdx+len("/"):]) + } + m2 := mt.New() + if err := p.unmarshalMessage(m2, terminator); err != nil { + return err + } + b, err := protoV2.Marshal(m2.Interface()) + if err != nil { + return p.errorf("failed to marshal message of type %q: %v", name[slashIdx+len("/"):], err) + } + + urlFD := m.Descriptor().Fields().ByName("type_url") + valFD := m.Descriptor().Fields().ByName("value") + if seen[urlFD.Number()] { + return p.errorf("Any message unpacked multiple times, or %q already set", urlFD.Name()) + } + if seen[valFD.Number()] { + return p.errorf("Any message unpacked multiple times, or %q already set", valFD.Name()) + } + m.Set(urlFD, protoreflect.ValueOfString(name)) + m.Set(valFD, protoreflect.ValueOfBytes(b)) + seen[urlFD.Number()] = true + seen[valFD.Number()] = true + return nil + } + + xname := protoreflect.FullName(name) + xt, _ := protoregistry.GlobalTypes.FindExtensionByName(xname) + if xt == nil && isMessageSet(m.Descriptor()) { + xt, _ = protoregistry.GlobalTypes.FindExtensionByName(xname.Append("message_set_extension")) + } + if xt == nil { + return p.errorf("unrecognized extension %q", name) + } + fd := xt.TypeDescriptor() + if fd.ContainingMessage().FullName() != m.Descriptor().FullName() { + return p.errorf("extension field %q does not extend message %q", 
name, m.Descriptor().FullName()) + } + + if err := p.checkForColon(fd); err != nil { + return err + } + + v := m.Get(fd) + if !m.Has(fd) && (fd.IsList() || fd.IsMap() || fd.Message() != nil) { + v = m.Mutable(fd) + } + v, err = p.unmarshalValue(v, fd) + if err != nil { + return err + } + m.Set(fd, v) + return p.consumeOptionalSeparator() +} + +func (p *textParser) unmarshalValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "" { + return v, p.errorf("unexpected EOF") + } + + switch { + case fd.IsList(): + lv := v.List() + var err error + if tok.value == "[" { + // Repeated field with list notation, like [1,2,3]. + for { + vv := lv.NewElement() + vv, err = p.unmarshalSingularValue(vv, fd) + if err != nil { + return v, err + } + lv.Append(vv) + + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "]" { + break + } + if tok.value != "," { + return v, p.errorf("Expected ']' or ',' found %q", tok.value) + } + } + return v, nil + } + + // One value of the repeated field. + p.back() + vv := lv.NewElement() + vv, err = p.unmarshalSingularValue(vv, fd) + if err != nil { + return v, err + } + lv.Append(vv) + return v, nil + case fd.IsMap(): + // The map entry should be this sequence of tokens: + // < key : KEY value : VALUE > + // However, implementations may omit key or value, and technically + // we should support them in any order. + var terminator string + switch tok.value { + case "<": + terminator = ">" + case "{": + terminator = "}" + default: + return v, p.errorf("expected '{' or '<', found %q", tok.value) + } + + keyFD := fd.MapKey() + valFD := fd.MapValue() + + mv := v.Map() + kv := keyFD.Default() + vv := mv.NewValue() + for { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == terminator { + break + } + var err error + switch tok.value { + case "key": + if err := p.consumeToken(":"); err != nil { + return v, err + } + if kv, err = p.unmarshalSingularValue(kv, keyFD); err != nil { + return v, err + } + if err := p.consumeOptionalSeparator(); err != nil { + return v, err + } + case "value": + if err := p.checkForColon(valFD); err != nil { + return v, err + } + if vv, err = p.unmarshalSingularValue(vv, valFD); err != nil { + return v, err + } + if err := p.consumeOptionalSeparator(); err != nil { + return v, err + } + default: + p.back() + return v, p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) + } + } + mv.Set(kv.MapKey(), vv) + return v, nil + default: + p.back() + return p.unmarshalSingularValue(v, fd) + } +} + +func (p *textParser) unmarshalSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) (protoreflect.Value, error) { + tok := p.next() + if tok.err != nil { + return v, tok.err + } + if tok.value == "" { + return v, p.errorf("unexpected EOF") + } + + switch fd.Kind() { + case protoreflect.BoolKind: + switch tok.value { + case "true", "1", "t", "True": + return protoreflect.ValueOfBool(true), nil + case "false", "0", "f", "False": + return protoreflect.ValueOfBool(false), nil + } + case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind: + if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfInt32(int32(x)), nil + } + + // The C++ parser accepts large positive hex numbers that uses + // two's complement arithmetic to represent negative numbers. 
+ // This feature is here for backwards compatibility with C++. + if strings.HasPrefix(tok.value, "0x") { + if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfInt32(int32(-(int64(^x) + 1))), nil + } + } + case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: + if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfInt64(int64(x)), nil + } + + // The C++ parser accepts large positive hex numbers that uses + // two's complement arithmetic to represent negative numbers. + // This feature is here for backwards compatibility with C++. + if strings.HasPrefix(tok.value, "0x") { + if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfInt64(int64(-(int64(^x) + 1))), nil + } + } + case protoreflect.Uint32Kind, protoreflect.Fixed32Kind: + if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfUint32(uint32(x)), nil + } + case protoreflect.Uint64Kind, protoreflect.Fixed64Kind: + if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { + return protoreflect.ValueOfUint64(uint64(x)), nil + } + case protoreflect.FloatKind: + // Ignore 'f' for compatibility with output generated by C++, + // but don't remove 'f' when the value is "-inf" or "inf". + v := tok.value + if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" { + v = v[:len(v)-len("f")] + } + if x, err := strconv.ParseFloat(v, 32); err == nil { + return protoreflect.ValueOfFloat32(float32(x)), nil + } + case protoreflect.DoubleKind: + // Ignore 'f' for compatibility with output generated by C++, + // but don't remove 'f' when the value is "-inf" or "inf". + v := tok.value + if strings.HasSuffix(v, "f") && v != "-inf" && v != "inf" { + v = v[:len(v)-len("f")] + } + if x, err := strconv.ParseFloat(v, 64); err == nil { + return protoreflect.ValueOfFloat64(float64(x)), nil + } + case protoreflect.StringKind: + if isQuote(tok.value[0]) { + return protoreflect.ValueOfString(tok.unquoted), nil + } + case protoreflect.BytesKind: + if isQuote(tok.value[0]) { + return protoreflect.ValueOfBytes([]byte(tok.unquoted)), nil + } + case protoreflect.EnumKind: + if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { + return protoreflect.ValueOfEnum(protoreflect.EnumNumber(x)), nil + } + vd := fd.Enum().Values().ByName(protoreflect.Name(tok.value)) + if vd != nil { + return protoreflect.ValueOfEnum(vd.Number()), nil + } + case protoreflect.MessageKind, protoreflect.GroupKind: + var terminator string + switch tok.value { + case "{": + terminator = "}" + case "<": + terminator = ">" + default: + return v, p.errorf("expected '{' or '<', found %q", tok.value) + } + err := p.unmarshalMessage(v.Message(), terminator) + return v, err + default: + panic(fmt.Sprintf("invalid kind %v", fd.Kind())) + } + return v, p.errorf("invalid %v: %v", fd.Kind(), tok.value) +} + +// Consume a ':' from the input stream (if the next token is a colon), +// returning an error if a colon is needed but not present. +func (p *textParser) checkForColon(fd protoreflect.FieldDescriptor) *ParseError { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ":" { + if fd.Message() == nil { + return p.errorf("expected ':', found %q", tok.value) + } + p.back() + } + return nil +} + +// consumeExtensionOrAnyName consumes an extension name or an Any type URL and +// the following ']'. It returns the name or URL consumed. 
+func (p *textParser) consumeExtensionOrAnyName() (string, error) { + tok := p.next() + if tok.err != nil { + return "", tok.err + } + + // If extension name or type url is quoted, it's a single token. + if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { + name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) + if err != nil { + return "", err + } + return name, p.consumeToken("]") + } + + // Consume everything up to "]" + var parts []string + for tok.value != "]" { + parts = append(parts, tok.value) + tok = p.next() + if tok.err != nil { + return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) + } + if p.done && tok.value != "]" { + return "", p.errorf("unclosed type_url or extension name") + } + } + return strings.Join(parts, ""), nil +} + +// consumeOptionalSeparator consumes an optional semicolon or comma. +// It is used in unmarshalMessage to provide backward compatibility. +func (p *textParser) consumeOptionalSeparator() error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != ";" && tok.value != "," { + p.back() + } + return nil +} + +func (p *textParser) errorf(format string, a ...interface{}) *ParseError { + pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} + p.cur.err = pe + p.done = true + return pe +} + +func (p *textParser) skipWhitespace() { + i := 0 + for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { + if p.s[i] == '#' { + // comment; skip to end of line or input + for i < len(p.s) && p.s[i] != '\n' { + i++ + } + if i == len(p.s) { + break + } + } + if p.s[i] == '\n' { + p.line++ + } + i++ + } + p.offset += i + p.s = p.s[i:len(p.s)] + if len(p.s) == 0 { + p.done = true + } +} + +func (p *textParser) advance() { + // Skip whitespace + p.skipWhitespace() + if p.done { + return + } + + // Start of non-whitespace + p.cur.err = nil + p.cur.offset, p.cur.line = p.offset, p.line + p.cur.unquoted = "" + switch p.s[0] { + case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': + // Single symbol + p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] + case '"', '\'': + // Quoted string + i := 1 + for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { + if p.s[i] == '\\' && i+1 < len(p.s) { + // skip escaped char + i++ + } + i++ + } + if i >= len(p.s) || p.s[i] != p.s[0] { + p.errorf("unmatched quote") + return + } + unq, err := unquoteC(p.s[1:i], rune(p.s[0])) + if err != nil { + p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) + return + } + p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] + p.cur.unquoted = unq + default: + i := 0 + for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { + i++ + } + if i == 0 { + p.errorf("unexpected byte %#x", p.s[0]) + return + } + p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] + } + p.offset += len(p.cur.value) +} + +// Back off the parser by one token. Can only be done between calls to next(). +// It makes the next advance() a no-op. +func (p *textParser) back() { p.backed = true } + +// Advances the parser and returns the new current token. +func (p *textParser) next() *token { + if p.backed || p.done { + p.backed = false + return &p.cur + } + p.advance() + if p.done { + p.cur.value = "" + } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { + // Look for multiple quoted strings separated by whitespace, + // and concatenate them. 
+ cat := p.cur + for { + p.skipWhitespace() + if p.done || !isQuote(p.s[0]) { + break + } + p.advance() + if p.cur.err != nil { + return &p.cur + } + cat.value += " " + p.cur.value + cat.unquoted += p.cur.unquoted + } + p.done = false // parser may have seen EOF, but we want to return cat + p.cur = cat + } + return &p.cur +} + +func (p *textParser) consumeToken(s string) error { + tok := p.next() + if tok.err != nil { + return tok.err + } + if tok.value != s { + p.back() + return p.errorf("expected %q, found %q", s, tok.value) + } + return nil +} + +var errBadUTF8 = errors.New("proto: bad UTF-8") + +func unquoteC(s string, quote rune) (string, error) { + // This is based on C++'s tokenizer.cc. + // Despite its name, this is *not* parsing C syntax. + // For instance, "\0" is an invalid quoted string. + + // Avoid allocation in trivial cases. + simple := true + for _, r := range s { + if r == '\\' || r == quote { + simple = false + break + } + } + if simple { + return s, nil + } + + buf := make([]byte, 0, 3*len(s)/2) + for len(s) > 0 { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", errBadUTF8 + } + s = s[n:] + if r != '\\' { + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + } else { + buf = append(buf, string(r)...) + } + continue + } + + ch, tail, err := unescape(s) + if err != nil { + return "", err + } + buf = append(buf, ch...) + s = tail + } + return string(buf), nil +} + +func unescape(s string) (ch string, tail string, err error) { + r, n := utf8.DecodeRuneInString(s) + if r == utf8.RuneError && n == 1 { + return "", "", errBadUTF8 + } + s = s[n:] + switch r { + case 'a': + return "\a", s, nil + case 'b': + return "\b", s, nil + case 'f': + return "\f", s, nil + case 'n': + return "\n", s, nil + case 'r': + return "\r", s, nil + case 't': + return "\t", s, nil + case 'v': + return "\v", s, nil + case '?': + return "?", s, nil // trigraph workaround + case '\'', '"', '\\': + return string(r), s, nil + case '0', '1', '2', '3', '4', '5', '6', '7': + if len(s) < 2 { + return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) + } + ss := string(r) + s[:2] + s = s[2:] + i, err := strconv.ParseUint(ss, 8, 8) + if err != nil { + return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss) + } + return string([]byte{byte(i)}), s, nil + case 'x', 'X', 'u', 'U': + var n int + switch r { + case 'x', 'X': + n = 2 + case 'u': + n = 4 + case 'U': + n = 8 + } + if len(s) < n { + return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n) + } + ss := s[:n] + s = s[n:] + i, err := strconv.ParseUint(ss, 16, 64) + if err != nil { + return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss) + } + if r == 'x' || r == 'X' { + return string([]byte{byte(i)}), s, nil + } + if i > utf8.MaxRune { + return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) + } + return string(rune(i)), s, nil + } + return "", "", fmt.Errorf(`unknown escape \%c`, r) +} + +func isIdentOrNumberChar(c byte) bool { + switch { + case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': + return true + case '0' <= c && c <= '9': + return true + } + switch c { + case '-', '+', '.', '_': + return true + } + return false +} + +func isWhitespace(c byte) bool { + switch c { + case ' ', '\t', '\n', '\r': + return true + } + return false +} + +func isQuote(c byte) bool { + switch c { + case '"', '\'': + return true + } + return false +} diff --git a/vendor/github.com/golang/protobuf/proto/text_encode.go b/vendor/github.com/golang/protobuf/proto/text_encode.go 
new file mode 100644 index 00000000..a31134ee --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/text_encode.go @@ -0,0 +1,560 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "bytes" + "encoding" + "fmt" + "io" + "math" + "sort" + "strings" + + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +const wrapTextMarshalV2 = false + +// TextMarshaler is a configurable text format marshaler. +type TextMarshaler struct { + Compact bool // use compact text format (one line) + ExpandAny bool // expand google.protobuf.Any messages of known types +} + +// Marshal writes the proto text format of m to w. +func (tm *TextMarshaler) Marshal(w io.Writer, m Message) error { + b, err := tm.marshal(m) + if len(b) > 0 { + if _, err := w.Write(b); err != nil { + return err + } + } + return err +} + +// Text returns a proto text formatted string of m. +func (tm *TextMarshaler) Text(m Message) string { + b, _ := tm.marshal(m) + return string(b) +} + +func (tm *TextMarshaler) marshal(m Message) ([]byte, error) { + mr := MessageReflect(m) + if mr == nil || !mr.IsValid() { + return []byte(""), nil + } + + if wrapTextMarshalV2 { + if m, ok := m.(encoding.TextMarshaler); ok { + return m.MarshalText() + } + + opts := prototext.MarshalOptions{ + AllowPartial: true, + EmitUnknown: true, + } + if !tm.Compact { + opts.Indent = " " + } + if !tm.ExpandAny { + opts.Resolver = (*protoregistry.Types)(nil) + } + return opts.Marshal(mr.Interface()) + } else { + w := &textWriter{ + compact: tm.Compact, + expandAny: tm.ExpandAny, + complete: true, + } + + if m, ok := m.(encoding.TextMarshaler); ok { + b, err := m.MarshalText() + if err != nil { + return nil, err + } + w.Write(b) + return w.buf, nil + } + + err := w.writeMessage(mr) + return w.buf, err + } +} + +var ( + defaultTextMarshaler = TextMarshaler{} + compactTextMarshaler = TextMarshaler{Compact: true} +) + +// MarshalText writes the proto text format of m to w. +func MarshalText(w io.Writer, m Message) error { return defaultTextMarshaler.Marshal(w, m) } + +// MarshalTextString returns a proto text formatted string of m. +func MarshalTextString(m Message) string { return defaultTextMarshaler.Text(m) } + +// CompactText writes the compact proto text format of m to w. +func CompactText(w io.Writer, m Message) error { return compactTextMarshaler.Marshal(w, m) } + +// CompactTextString returns a compact proto text formatted string of m. +func CompactTextString(m Message) string { return compactTextMarshaler.Text(m) } + +var ( + newline = []byte("\n") + endBraceNewline = []byte("}\n") + posInf = []byte("inf") + negInf = []byte("-inf") + nan = []byte("nan") +) + +// textWriter is an io.Writer that tracks its indentation level. +type textWriter struct { + compact bool // same as TextMarshaler.Compact + expandAny bool // same as TextMarshaler.ExpandAny + complete bool // whether the current position is a complete line + indent int // indentation level; never negative + buf []byte +} + +func (w *textWriter) Write(p []byte) (n int, _ error) { + newlines := bytes.Count(p, newline) + if newlines == 0 { + if !w.compact && w.complete { + w.writeIndent() + } + w.buf = append(w.buf, p...) 
+ w.complete = false + return len(p), nil + } + + frags := bytes.SplitN(p, newline, newlines+1) + if w.compact { + for i, frag := range frags { + if i > 0 { + w.buf = append(w.buf, ' ') + n++ + } + w.buf = append(w.buf, frag...) + n += len(frag) + } + return n, nil + } + + for i, frag := range frags { + if w.complete { + w.writeIndent() + } + w.buf = append(w.buf, frag...) + n += len(frag) + if i+1 < len(frags) { + w.buf = append(w.buf, '\n') + n++ + } + } + w.complete = len(frags[len(frags)-1]) == 0 + return n, nil +} + +func (w *textWriter) WriteByte(c byte) error { + if w.compact && c == '\n' { + c = ' ' + } + if !w.compact && w.complete { + w.writeIndent() + } + w.buf = append(w.buf, c) + w.complete = c == '\n' + return nil +} + +func (w *textWriter) writeName(fd protoreflect.FieldDescriptor) { + if !w.compact && w.complete { + w.writeIndent() + } + w.complete = false + + if fd.Kind() != protoreflect.GroupKind { + w.buf = append(w.buf, fd.Name()...) + w.WriteByte(':') + } else { + // Use message type name for group field name. + w.buf = append(w.buf, fd.Message().Name()...) + } + + if !w.compact { + w.WriteByte(' ') + } +} + +func requiresQuotes(u string) bool { + // When type URL contains any characters except [0-9A-Za-z./\-]*, it must be quoted. + for _, ch := range u { + switch { + case ch == '.' || ch == '/' || ch == '_': + continue + case '0' <= ch && ch <= '9': + continue + case 'A' <= ch && ch <= 'Z': + continue + case 'a' <= ch && ch <= 'z': + continue + default: + return true + } + } + return false +} + +// writeProto3Any writes an expanded google.protobuf.Any message. +// +// It returns (false, nil) if sv value can't be unmarshaled (e.g. because +// required messages are not linked in). +// +// It returns (true, error) when sv was written in expanded format or an error +// was encountered. 
+func (w *textWriter) writeProto3Any(m protoreflect.Message) (bool, error) { + md := m.Descriptor() + fdURL := md.Fields().ByName("type_url") + fdVal := md.Fields().ByName("value") + + url := m.Get(fdURL).String() + mt, err := protoregistry.GlobalTypes.FindMessageByURL(url) + if err != nil { + return false, nil + } + + b := m.Get(fdVal).Bytes() + m2 := mt.New() + if err := proto.Unmarshal(b, m2.Interface()); err != nil { + return false, nil + } + w.Write([]byte("[")) + if requiresQuotes(url) { + w.writeQuotedString(url) + } else { + w.Write([]byte(url)) + } + if w.compact { + w.Write([]byte("]:<")) + } else { + w.Write([]byte("]: <\n")) + w.indent++ + } + if err := w.writeMessage(m2); err != nil { + return true, err + } + if w.compact { + w.Write([]byte("> ")) + } else { + w.indent-- + w.Write([]byte(">\n")) + } + return true, nil +} + +func (w *textWriter) writeMessage(m protoreflect.Message) error { + md := m.Descriptor() + if w.expandAny && md.FullName() == "google.protobuf.Any" { + if canExpand, err := w.writeProto3Any(m); canExpand { + return err + } + } + + fds := md.Fields() + for i := 0; i < fds.Len(); { + fd := fds.Get(i) + if od := fd.ContainingOneof(); od != nil { + fd = m.WhichOneof(od) + i += od.Fields().Len() + } else { + i++ + } + if fd == nil || !m.Has(fd) { + continue + } + + switch { + case fd.IsList(): + lv := m.Get(fd).List() + for j := 0; j < lv.Len(); j++ { + w.writeName(fd) + v := lv.Get(j) + if err := w.writeSingularValue(v, fd); err != nil { + return err + } + w.WriteByte('\n') + } + case fd.IsMap(): + kfd := fd.MapKey() + vfd := fd.MapValue() + mv := m.Get(fd).Map() + + type entry struct{ key, val protoreflect.Value } + var entries []entry + mv.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { + entries = append(entries, entry{k.Value(), v}) + return true + }) + sort.Slice(entries, func(i, j int) bool { + switch kfd.Kind() { + case protoreflect.BoolKind: + return !entries[i].key.Bool() && entries[j].key.Bool() + case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind, protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: + return entries[i].key.Int() < entries[j].key.Int() + case protoreflect.Uint32Kind, protoreflect.Fixed32Kind, protoreflect.Uint64Kind, protoreflect.Fixed64Kind: + return entries[i].key.Uint() < entries[j].key.Uint() + case protoreflect.StringKind: + return entries[i].key.String() < entries[j].key.String() + default: + panic("invalid kind") + } + }) + for _, entry := range entries { + w.writeName(fd) + w.WriteByte('<') + if !w.compact { + w.WriteByte('\n') + } + w.indent++ + w.writeName(kfd) + if err := w.writeSingularValue(entry.key, kfd); err != nil { + return err + } + w.WriteByte('\n') + w.writeName(vfd) + if err := w.writeSingularValue(entry.val, vfd); err != nil { + return err + } + w.WriteByte('\n') + w.indent-- + w.WriteByte('>') + w.WriteByte('\n') + } + default: + w.writeName(fd) + if err := w.writeSingularValue(m.Get(fd), fd); err != nil { + return err + } + w.WriteByte('\n') + } + } + + if b := m.GetUnknown(); len(b) > 0 { + w.writeUnknownFields(b) + } + return w.writeExtensions(m) +} + +func (w *textWriter) writeSingularValue(v protoreflect.Value, fd protoreflect.FieldDescriptor) error { + switch fd.Kind() { + case protoreflect.FloatKind, protoreflect.DoubleKind: + switch vf := v.Float(); { + case math.IsInf(vf, +1): + w.Write(posInf) + case math.IsInf(vf, -1): + w.Write(negInf) + case math.IsNaN(vf): + w.Write(nan) + default: + fmt.Fprint(w, v.Interface()) + } + case 
protoreflect.StringKind: + // NOTE: This does not validate UTF-8 for historical reasons. + w.writeQuotedString(string(v.String())) + case protoreflect.BytesKind: + w.writeQuotedString(string(v.Bytes())) + case protoreflect.MessageKind, protoreflect.GroupKind: + var bra, ket byte = '<', '>' + if fd.Kind() == protoreflect.GroupKind { + bra, ket = '{', '}' + } + w.WriteByte(bra) + if !w.compact { + w.WriteByte('\n') + } + w.indent++ + m := v.Message() + if m2, ok := m.Interface().(encoding.TextMarshaler); ok { + b, err := m2.MarshalText() + if err != nil { + return err + } + w.Write(b) + } else { + w.writeMessage(m) + } + w.indent-- + w.WriteByte(ket) + case protoreflect.EnumKind: + if ev := fd.Enum().Values().ByNumber(v.Enum()); ev != nil { + fmt.Fprint(w, ev.Name()) + } else { + fmt.Fprint(w, v.Enum()) + } + default: + fmt.Fprint(w, v.Interface()) + } + return nil +} + +// writeQuotedString writes a quoted string in the protocol buffer text format. +func (w *textWriter) writeQuotedString(s string) { + w.WriteByte('"') + for i := 0; i < len(s); i++ { + switch c := s[i]; c { + case '\n': + w.buf = append(w.buf, `\n`...) + case '\r': + w.buf = append(w.buf, `\r`...) + case '\t': + w.buf = append(w.buf, `\t`...) + case '"': + w.buf = append(w.buf, `\"`...) + case '\\': + w.buf = append(w.buf, `\\`...) + default: + if isPrint := c >= 0x20 && c < 0x7f; isPrint { + w.buf = append(w.buf, c) + } else { + w.buf = append(w.buf, fmt.Sprintf(`\%03o`, c)...) + } + } + } + w.WriteByte('"') +} + +func (w *textWriter) writeUnknownFields(b []byte) { + if !w.compact { + fmt.Fprintf(w, "/* %d unknown bytes */\n", len(b)) + } + + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return + } + b = b[n:] + + if wtyp == protowire.EndGroupType { + w.indent-- + w.Write(endBraceNewline) + continue + } + fmt.Fprint(w, num) + if wtyp != protowire.StartGroupType { + w.WriteByte(':') + } + if !w.compact || wtyp == protowire.StartGroupType { + w.WriteByte(' ') + } + switch wtyp { + case protowire.VarintType: + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.Fixed32Type: + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.Fixed64Type: + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprint(w, v) + case protowire.BytesType: + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return + } + b = b[n:] + fmt.Fprintf(w, "%q", v) + case protowire.StartGroupType: + w.WriteByte('{') + w.indent++ + default: + fmt.Fprintf(w, "/* unknown wire type %d */", wtyp) + } + w.WriteByte('\n') + } +} + +// writeExtensions writes all the extensions in m. +func (w *textWriter) writeExtensions(m protoreflect.Message) error { + md := m.Descriptor() + if md.ExtensionRanges().Len() == 0 { + return nil + } + + type ext struct { + desc protoreflect.FieldDescriptor + val protoreflect.Value + } + var exts []ext + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if fd.IsExtension() { + exts = append(exts, ext{fd, v}) + } + return true + }) + sort.Slice(exts, func(i, j int) bool { + return exts[i].desc.Number() < exts[j].desc.Number() + }) + + for _, ext := range exts { + // For message set, use the name of the message as the extension name. 
+ name := string(ext.desc.FullName()) + if isMessageSet(ext.desc.ContainingMessage()) { + name = strings.TrimSuffix(name, ".message_set_extension") + } + + if !ext.desc.IsList() { + if err := w.writeSingularExtension(name, ext.val, ext.desc); err != nil { + return err + } + } else { + lv := ext.val.List() + for i := 0; i < lv.Len(); i++ { + if err := w.writeSingularExtension(name, lv.Get(i), ext.desc); err != nil { + return err + } + } + } + } + return nil +} + +func (w *textWriter) writeSingularExtension(name string, v protoreflect.Value, fd protoreflect.FieldDescriptor) error { + fmt.Fprintf(w, "[%s]:", name) + if !w.compact { + w.WriteByte(' ') + } + if err := w.writeSingularValue(v, fd); err != nil { + return err + } + w.WriteByte('\n') + return nil +} + +func (w *textWriter) writeIndent() { + if !w.complete { + return + } + for i := 0; i < w.indent*2; i++ { + w.buf = append(w.buf, ' ') + } + w.complete = false +} diff --git a/vendor/github.com/golang/protobuf/proto/text_parser.go b/vendor/github.com/golang/protobuf/proto/text_parser.go deleted file mode 100644 index bb55a3af..00000000 --- a/vendor/github.com/golang/protobuf/proto/text_parser.go +++ /dev/null @@ -1,880 +0,0 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2010 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -package proto - -// Functions for parsing the Text protocol buffer format. -// TODO: message sets. 
- -import ( - "encoding" - "errors" - "fmt" - "reflect" - "strconv" - "strings" - "unicode/utf8" -) - -// Error string emitted when deserializing Any and fields are already set -const anyRepeatedlyUnpacked = "Any message unpacked multiple times, or %q already set" - -type ParseError struct { - Message string - Line int // 1-based line number - Offset int // 0-based byte offset from start of input -} - -func (p *ParseError) Error() string { - if p.Line == 1 { - // show offset only for first line - return fmt.Sprintf("line 1.%d: %v", p.Offset, p.Message) - } - return fmt.Sprintf("line %d: %v", p.Line, p.Message) -} - -type token struct { - value string - err *ParseError - line int // line number - offset int // byte number from start of input, not start of line - unquoted string // the unquoted version of value, if it was a quoted string -} - -func (t *token) String() string { - if t.err == nil { - return fmt.Sprintf("%q (line=%d, offset=%d)", t.value, t.line, t.offset) - } - return fmt.Sprintf("parse error: %v", t.err) -} - -type textParser struct { - s string // remaining input - done bool // whether the parsing is finished (success or error) - backed bool // whether back() was called - offset, line int - cur token -} - -func newTextParser(s string) *textParser { - p := new(textParser) - p.s = s - p.line = 1 - p.cur.line = 1 - return p -} - -func (p *textParser) errorf(format string, a ...interface{}) *ParseError { - pe := &ParseError{fmt.Sprintf(format, a...), p.cur.line, p.cur.offset} - p.cur.err = pe - p.done = true - return pe -} - -// Numbers and identifiers are matched by [-+._A-Za-z0-9] -func isIdentOrNumberChar(c byte) bool { - switch { - case 'A' <= c && c <= 'Z', 'a' <= c && c <= 'z': - return true - case '0' <= c && c <= '9': - return true - } - switch c { - case '-', '+', '.', '_': - return true - } - return false -} - -func isWhitespace(c byte) bool { - switch c { - case ' ', '\t', '\n', '\r': - return true - } - return false -} - -func isQuote(c byte) bool { - switch c { - case '"', '\'': - return true - } - return false -} - -func (p *textParser) skipWhitespace() { - i := 0 - for i < len(p.s) && (isWhitespace(p.s[i]) || p.s[i] == '#') { - if p.s[i] == '#' { - // comment; skip to end of line or input - for i < len(p.s) && p.s[i] != '\n' { - i++ - } - if i == len(p.s) { - break - } - } - if p.s[i] == '\n' { - p.line++ - } - i++ - } - p.offset += i - p.s = p.s[i:len(p.s)] - if len(p.s) == 0 { - p.done = true - } -} - -func (p *textParser) advance() { - // Skip whitespace - p.skipWhitespace() - if p.done { - return - } - - // Start of non-whitespace - p.cur.err = nil - p.cur.offset, p.cur.line = p.offset, p.line - p.cur.unquoted = "" - switch p.s[0] { - case '<', '>', '{', '}', ':', '[', ']', ';', ',', '/': - // Single symbol - p.cur.value, p.s = p.s[0:1], p.s[1:len(p.s)] - case '"', '\'': - // Quoted string - i := 1 - for i < len(p.s) && p.s[i] != p.s[0] && p.s[i] != '\n' { - if p.s[i] == '\\' && i+1 < len(p.s) { - // skip escaped char - i++ - } - i++ - } - if i >= len(p.s) || p.s[i] != p.s[0] { - p.errorf("unmatched quote") - return - } - unq, err := unquoteC(p.s[1:i], rune(p.s[0])) - if err != nil { - p.errorf("invalid quoted string %s: %v", p.s[0:i+1], err) - return - } - p.cur.value, p.s = p.s[0:i+1], p.s[i+1:len(p.s)] - p.cur.unquoted = unq - default: - i := 0 - for i < len(p.s) && isIdentOrNumberChar(p.s[i]) { - i++ - } - if i == 0 { - p.errorf("unexpected byte %#x", p.s[0]) - return - } - p.cur.value, p.s = p.s[0:i], p.s[i:len(p.s)] - } - p.offset += len(p.cur.value) -} 
- -var ( - errBadUTF8 = errors.New("proto: bad UTF-8") -) - -func unquoteC(s string, quote rune) (string, error) { - // This is based on C++'s tokenizer.cc. - // Despite its name, this is *not* parsing C syntax. - // For instance, "\0" is an invalid quoted string. - - // Avoid allocation in trivial cases. - simple := true - for _, r := range s { - if r == '\\' || r == quote { - simple = false - break - } - } - if simple { - return s, nil - } - - buf := make([]byte, 0, 3*len(s)/2) - for len(s) > 0 { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", errBadUTF8 - } - s = s[n:] - if r != '\\' { - if r < utf8.RuneSelf { - buf = append(buf, byte(r)) - } else { - buf = append(buf, string(r)...) - } - continue - } - - ch, tail, err := unescape(s) - if err != nil { - return "", err - } - buf = append(buf, ch...) - s = tail - } - return string(buf), nil -} - -func unescape(s string) (ch string, tail string, err error) { - r, n := utf8.DecodeRuneInString(s) - if r == utf8.RuneError && n == 1 { - return "", "", errBadUTF8 - } - s = s[n:] - switch r { - case 'a': - return "\a", s, nil - case 'b': - return "\b", s, nil - case 'f': - return "\f", s, nil - case 'n': - return "\n", s, nil - case 'r': - return "\r", s, nil - case 't': - return "\t", s, nil - case 'v': - return "\v", s, nil - case '?': - return "?", s, nil // trigraph workaround - case '\'', '"', '\\': - return string(r), s, nil - case '0', '1', '2', '3', '4', '5', '6', '7': - if len(s) < 2 { - return "", "", fmt.Errorf(`\%c requires 2 following digits`, r) - } - ss := string(r) + s[:2] - s = s[2:] - i, err := strconv.ParseUint(ss, 8, 8) - if err != nil { - return "", "", fmt.Errorf(`\%s contains non-octal digits`, ss) - } - return string([]byte{byte(i)}), s, nil - case 'x', 'X', 'u', 'U': - var n int - switch r { - case 'x', 'X': - n = 2 - case 'u': - n = 4 - case 'U': - n = 8 - } - if len(s) < n { - return "", "", fmt.Errorf(`\%c requires %d following digits`, r, n) - } - ss := s[:n] - s = s[n:] - i, err := strconv.ParseUint(ss, 16, 64) - if err != nil { - return "", "", fmt.Errorf(`\%c%s contains non-hexadecimal digits`, r, ss) - } - if r == 'x' || r == 'X' { - return string([]byte{byte(i)}), s, nil - } - if i > utf8.MaxRune { - return "", "", fmt.Errorf(`\%c%s is not a valid Unicode code point`, r, ss) - } - return string(i), s, nil - } - return "", "", fmt.Errorf(`unknown escape \%c`, r) -} - -// Back off the parser by one token. Can only be done between calls to next(). -// It makes the next advance() a no-op. -func (p *textParser) back() { p.backed = true } - -// Advances the parser and returns the new current token. -func (p *textParser) next() *token { - if p.backed || p.done { - p.backed = false - return &p.cur - } - p.advance() - if p.done { - p.cur.value = "" - } else if len(p.cur.value) > 0 && isQuote(p.cur.value[0]) { - // Look for multiple quoted strings separated by whitespace, - // and concatenate them. 
- cat := p.cur - for { - p.skipWhitespace() - if p.done || !isQuote(p.s[0]) { - break - } - p.advance() - if p.cur.err != nil { - return &p.cur - } - cat.value += " " + p.cur.value - cat.unquoted += p.cur.unquoted - } - p.done = false // parser may have seen EOF, but we want to return cat - p.cur = cat - } - return &p.cur -} - -func (p *textParser) consumeToken(s string) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != s { - p.back() - return p.errorf("expected %q, found %q", s, tok.value) - } - return nil -} - -// Return a RequiredNotSetError indicating which required field was not set. -func (p *textParser) missingRequiredFieldError(sv reflect.Value) *RequiredNotSetError { - st := sv.Type() - sprops := GetProperties(st) - for i := 0; i < st.NumField(); i++ { - if !isNil(sv.Field(i)) { - continue - } - - props := sprops.Prop[i] - if props.Required { - return &RequiredNotSetError{fmt.Sprintf("%v.%v", st, props.OrigName)} - } - } - return &RequiredNotSetError{fmt.Sprintf("%v.", st)} // should not happen -} - -// Returns the index in the struct for the named field, as well as the parsed tag properties. -func structFieldByName(sprops *StructProperties, name string) (int, *Properties, bool) { - i, ok := sprops.decoderOrigNames[name] - if ok { - return i, sprops.Prop[i], true - } - return -1, nil, false -} - -// Consume a ':' from the input stream (if the next token is a colon), -// returning an error if a colon is needed but not present. -func (p *textParser) checkForColon(props *Properties, typ reflect.Type) *ParseError { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ":" { - // Colon is optional when the field is a group or message. - needColon := true - switch props.Wire { - case "group": - needColon = false - case "bytes": - // A "bytes" field is either a message, a string, or a repeated field; - // those three become *T, *string and []T respectively, so we can check for - // this field being a pointer to a non-string. - if typ.Kind() == reflect.Ptr { - // *T or *string - if typ.Elem().Kind() == reflect.String { - break - } - } else if typ.Kind() == reflect.Slice { - // []T or []*T - if typ.Elem().Kind() != reflect.Ptr { - break - } - } else if typ.Kind() == reflect.String { - // The proto3 exception is for a string field, - // which requires a colon. - break - } - needColon = false - } - if needColon { - return p.errorf("expected ':', found %q", tok.value) - } - p.back() - } - return nil -} - -func (p *textParser) readStruct(sv reflect.Value, terminator string) error { - st := sv.Type() - sprops := GetProperties(st) - reqCount := sprops.reqCount - var reqFieldErr error - fieldSet := make(map[string]bool) - // A struct is a sequence of "name: value", terminated by one of - // '>' or '}', or the end of the input. A name may also be - // "[extension]" or "[type/url]". - // - // The whole struct can also be an expanded Any message, like: - // [type/url] < ... struct contents ... > - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - if tok.value == "[" { - // Looks like an extension or an Any. - // - // TODO: Check whether we need to handle - // namespace rooted names (e.g. ".something.Foo"). - extName, err := p.consumeExtName() - if err != nil { - return err - } - - if s := strings.LastIndex(extName, "/"); s >= 0 { - // If it contains a slash, it's an Any type URL. 
- messageName := extName[s+1:] - mt := MessageType(messageName) - if mt == nil { - return p.errorf("unrecognized message %q in google.protobuf.Any", messageName) - } - tok = p.next() - if tok.err != nil { - return tok.err - } - // consume an optional colon - if tok.value == ":" { - tok = p.next() - if tok.err != nil { - return tok.err - } - } - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - v := reflect.New(mt.Elem()) - if pe := p.readStruct(v.Elem(), terminator); pe != nil { - return pe - } - b, err := Marshal(v.Interface().(Message)) - if err != nil { - return p.errorf("failed to marshal message of type %q: %v", messageName, err) - } - if fieldSet["type_url"] { - return p.errorf(anyRepeatedlyUnpacked, "type_url") - } - if fieldSet["value"] { - return p.errorf(anyRepeatedlyUnpacked, "value") - } - sv.FieldByName("TypeUrl").SetString(extName) - sv.FieldByName("Value").SetBytes(b) - fieldSet["type_url"] = true - fieldSet["value"] = true - continue - } - - var desc *ExtensionDesc - // This could be faster, but it's functional. - // TODO: Do something smarter than a linear scan. - for _, d := range RegisteredExtensions(reflect.New(st).Interface().(Message)) { - if d.Name == extName { - desc = d - break - } - } - if desc == nil { - return p.errorf("unrecognized extension %q", extName) - } - - props := &Properties{} - props.Parse(desc.Tag) - - typ := reflect.TypeOf(desc.ExtensionType) - if err := p.checkForColon(props, typ); err != nil { - return err - } - - rep := desc.repeated() - - // Read the extension structure, and set it in - // the value we're constructing. - var ext reflect.Value - if !rep { - ext = reflect.New(typ).Elem() - } else { - ext = reflect.New(typ.Elem()).Elem() - } - if err := p.readAny(ext, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - ep := sv.Addr().Interface().(Message) - if !rep { - SetExtension(ep, desc, ext.Interface()) - } else { - old, err := GetExtension(ep, desc) - var sl reflect.Value - if err == nil { - sl = reflect.ValueOf(old) // existing slice - } else { - sl = reflect.MakeSlice(typ, 0, 1) - } - sl = reflect.Append(sl, ext) - SetExtension(ep, desc, sl.Interface()) - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - continue - } - - // This is a normal, non-extension field. - name := tok.value - var dst reflect.Value - fi, props, ok := structFieldByName(sprops, name) - if ok { - dst = sv.Field(fi) - } else if oop, ok := sprops.OneofTypes[name]; ok { - // It is a oneof. - props = oop.Prop - nv := reflect.New(oop.Type.Elem()) - dst = nv.Elem().Field(0) - field := sv.Field(oop.Field) - if !field.IsNil() { - return p.errorf("field '%s' would overwrite already parsed oneof '%s'", name, sv.Type().Field(oop.Field).Name) - } - field.Set(nv) - } - if !dst.IsValid() { - return p.errorf("unknown field name %q in %v", name, st) - } - - if dst.Kind() == reflect.Map { - // Consume any colon. - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Construct the map if it doesn't already exist. 
- if dst.IsNil() { - dst.Set(reflect.MakeMap(dst.Type())) - } - key := reflect.New(dst.Type().Key()).Elem() - val := reflect.New(dst.Type().Elem()).Elem() - - // The map entry should be this sequence of tokens: - // < key : KEY value : VALUE > - // However, implementations may omit key or value, and technically - // we should support them in any order. See b/28924776 for a time - // this went wrong. - - tok := p.next() - var terminator string - switch tok.value { - case "<": - terminator = ">" - case "{": - terminator = "}" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - for { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == terminator { - break - } - switch tok.value { - case "key": - if err := p.consumeToken(":"); err != nil { - return err - } - if err := p.readAny(key, props.MapKeyProp); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - case "value": - if err := p.checkForColon(props.MapValProp, dst.Type().Elem()); err != nil { - return err - } - if err := p.readAny(val, props.MapValProp); err != nil { - return err - } - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - default: - p.back() - return p.errorf(`expected "key", "value", or %q, found %q`, terminator, tok.value) - } - } - - dst.SetMapIndex(key, val) - continue - } - - // Check that it's not already set if it's not a repeated field. - if !props.Repeated && fieldSet[name] { - return p.errorf("non-repeated field %q was repeated", name) - } - - if err := p.checkForColon(props, dst.Type()); err != nil { - return err - } - - // Parse into the field. - fieldSet[name] = true - if err := p.readAny(dst, props); err != nil { - if _, ok := err.(*RequiredNotSetError); !ok { - return err - } - reqFieldErr = err - } - if props.Required { - reqCount-- - } - - if err := p.consumeOptionalSeparator(); err != nil { - return err - } - - } - - if reqCount > 0 { - return p.missingRequiredFieldError(sv) - } - return reqFieldErr -} - -// consumeExtName consumes extension name or expanded Any type URL and the -// following ']'. It returns the name or URL consumed. -func (p *textParser) consumeExtName() (string, error) { - tok := p.next() - if tok.err != nil { - return "", tok.err - } - - // If extension name or type url is quoted, it's a single token. - if len(tok.value) > 2 && isQuote(tok.value[0]) && tok.value[len(tok.value)-1] == tok.value[0] { - name, err := unquoteC(tok.value[1:len(tok.value)-1], rune(tok.value[0])) - if err != nil { - return "", err - } - return name, p.consumeToken("]") - } - - // Consume everything up to "]" - var parts []string - for tok.value != "]" { - parts = append(parts, tok.value) - tok = p.next() - if tok.err != nil { - return "", p.errorf("unrecognized type_url or extension name: %s", tok.err) - } - if p.done && tok.value != "]" { - return "", p.errorf("unclosed type_url or extension name") - } - } - return strings.Join(parts, ""), nil -} - -// consumeOptionalSeparator consumes an optional semicolon or comma. -// It is used in readStruct to provide backward compatibility. 
-func (p *textParser) consumeOptionalSeparator() error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value != ";" && tok.value != "," { - p.back() - } - return nil -} - -func (p *textParser) readAny(v reflect.Value, props *Properties) error { - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == "" { - return p.errorf("unexpected EOF") - } - - switch fv := v; fv.Kind() { - case reflect.Slice: - at := v.Type() - if at.Elem().Kind() == reflect.Uint8 { - // Special case for []byte - if tok.value[0] != '"' && tok.value[0] != '\'' { - // Deliberately written out here, as the error after - // this switch statement would write "invalid []byte: ...", - // which is not as user-friendly. - return p.errorf("invalid string: %v", tok.value) - } - bytes := []byte(tok.unquoted) - fv.Set(reflect.ValueOf(bytes)) - return nil - } - // Repeated field. - if tok.value == "[" { - // Repeated field with list notation, like [1,2,3]. - for { - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - err := p.readAny(fv.Index(fv.Len()-1), props) - if err != nil { - return err - } - tok := p.next() - if tok.err != nil { - return tok.err - } - if tok.value == "]" { - break - } - if tok.value != "," { - return p.errorf("Expected ']' or ',' found %q", tok.value) - } - } - return nil - } - // One value of the repeated field. - p.back() - fv.Set(reflect.Append(fv, reflect.New(at.Elem()).Elem())) - return p.readAny(fv.Index(fv.Len()-1), props) - case reflect.Bool: - // true/1/t/True or false/f/0/False. - switch tok.value { - case "true", "1", "t", "True": - fv.SetBool(true) - return nil - case "false", "0", "f", "False": - fv.SetBool(false) - return nil - } - case reflect.Float32, reflect.Float64: - v := tok.value - // Ignore 'f' for compatibility with output generated by C++, but don't - // remove 'f' when the value is "-inf" or "inf". - if strings.HasSuffix(v, "f") && tok.value != "-inf" && tok.value != "inf" { - v = v[:len(v)-1] - } - if f, err := strconv.ParseFloat(v, fv.Type().Bits()); err == nil { - fv.SetFloat(f) - return nil - } - case reflect.Int32: - if x, err := strconv.ParseInt(tok.value, 0, 32); err == nil { - fv.SetInt(x) - return nil - } - - if len(props.Enum) == 0 { - break - } - m, ok := enumValueMaps[props.Enum] - if !ok { - break - } - x, ok := m[tok.value] - if !ok { - break - } - fv.SetInt(int64(x)) - return nil - case reflect.Int64: - if x, err := strconv.ParseInt(tok.value, 0, 64); err == nil { - fv.SetInt(x) - return nil - } - - case reflect.Ptr: - // A basic field (indirected through pointer), or a repeated message/group - p.back() - fv.Set(reflect.New(fv.Type().Elem())) - return p.readAny(fv.Elem(), props) - case reflect.String: - if tok.value[0] == '"' || tok.value[0] == '\'' { - fv.SetString(tok.unquoted) - return nil - } - case reflect.Struct: - var terminator string - switch tok.value { - case "{": - terminator = "}" - case "<": - terminator = ">" - default: - return p.errorf("expected '{' or '<', found %q", tok.value) - } - // TODO: Handle nested messages which implement encoding.TextUnmarshaler. - return p.readStruct(fv, terminator) - case reflect.Uint32: - if x, err := strconv.ParseUint(tok.value, 0, 32); err == nil { - fv.SetUint(uint64(x)) - return nil - } - case reflect.Uint64: - if x, err := strconv.ParseUint(tok.value, 0, 64); err == nil { - fv.SetUint(x) - return nil - } - } - return p.errorf("invalid %v: %v", v.Type(), tok.value) -} - -// UnmarshalText reads a protocol buffer in Text format. 
UnmarshalText resets pb -// before starting to unmarshal, so any existing data in pb is always removed. -// If a required field is not set and no other error occurs, -// UnmarshalText returns *RequiredNotSetError. -func UnmarshalText(s string, pb Message) error { - if um, ok := pb.(encoding.TextUnmarshaler); ok { - return um.UnmarshalText([]byte(s)) - } - pb.Reset() - v := reflect.ValueOf(pb) - return newTextParser(s).readStruct(v.Elem(), "") -} diff --git a/vendor/github.com/golang/protobuf/proto/wire.go b/vendor/github.com/golang/protobuf/proto/wire.go new file mode 100644 index 00000000..d7c28da5 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/wire.go @@ -0,0 +1,78 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + protoV2 "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/runtime/protoiface" +) + +// Size returns the size in bytes of the wire-format encoding of m. +func Size(m Message) int { + if m == nil { + return 0 + } + mi := MessageV2(m) + return protoV2.Size(mi) +} + +// Marshal returns the wire-format encoding of m. +func Marshal(m Message) ([]byte, error) { + b, err := marshalAppend(nil, m, false) + if b == nil { + b = zeroBytes + } + return b, err +} + +var zeroBytes = make([]byte, 0, 0) + +func marshalAppend(buf []byte, m Message, deterministic bool) ([]byte, error) { + if m == nil { + return nil, ErrNil + } + mi := MessageV2(m) + nbuf, err := protoV2.MarshalOptions{ + Deterministic: deterministic, + AllowPartial: true, + }.MarshalAppend(buf, mi) + if err != nil { + return buf, err + } + if len(buf) == len(nbuf) { + if !mi.ProtoReflect().IsValid() { + return buf, ErrNil + } + } + return nbuf, checkRequiredNotSet(mi) +} + +// Unmarshal parses a wire-format message in b and places the decoded results in m. +// +// Unmarshal resets m before starting to unmarshal, so any existing data in m is always +// removed. Use UnmarshalMerge to preserve and append to existing data. +func Unmarshal(b []byte, m Message) error { + m.Reset() + return UnmarshalMerge(b, m) +} + +// UnmarshalMerge parses a wire-format message in b and places the decoded results in m. +func UnmarshalMerge(b []byte, m Message) error { + mi := MessageV2(m) + out, err := protoV2.UnmarshalOptions{ + AllowPartial: true, + Merge: true, + }.UnmarshalState(protoiface.UnmarshalInput{ + Buf: b, + Message: mi.ProtoReflect(), + }) + if err != nil { + return err + } + if out.Flags&protoiface.UnmarshalInitialized > 0 { + return nil + } + return checkRequiredNotSet(mi) +} diff --git a/vendor/github.com/golang/protobuf/proto/wrappers.go b/vendor/github.com/golang/protobuf/proto/wrappers.go new file mode 100644 index 00000000..398e3485 --- /dev/null +++ b/vendor/github.com/golang/protobuf/proto/wrappers.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +// Bool stores v in a new bool value and returns a pointer to it. +func Bool(v bool) *bool { return &v } + +// Int stores v in a new int32 value and returns a pointer to it. +// +// Deprecated: Use Int32 instead. +func Int(v int) *int32 { return Int32(int32(v)) } + +// Int32 stores v in a new int32 value and returns a pointer to it. +func Int32(v int32) *int32 { return &v } + +// Int64 stores v in a new int64 value and returns a pointer to it. 
+func Int64(v int64) *int64 { return &v } + +// Uint32 stores v in a new uint32 value and returns a pointer to it. +func Uint32(v uint32) *uint32 { return &v } + +// Uint64 stores v in a new uint64 value and returns a pointer to it. +func Uint64(v uint64) *uint64 { return &v } + +// Float32 stores v in a new float32 value and returns a pointer to it. +func Float32(v float32) *float32 { return &v } + +// Float64 stores v in a new float64 value and returns a pointer to it. +func Float64(v float64) *float64 { return &v } + +// String stores v in a new string value and returns a pointer to it. +func String(v string) *string { return &v } diff --git a/vendor/github.com/golang/protobuf/ptypes/any.go b/vendor/github.com/golang/protobuf/ptypes/any.go index 70276e8f..85f9f573 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any.go +++ b/vendor/github.com/golang/protobuf/ptypes/any.go @@ -1,141 +1,179 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package ptypes -// This file implements functions to marshal proto.Message to/from -// google.protobuf.Any message. - import ( "fmt" - "reflect" "strings" "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes/any" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + + anypb "github.com/golang/protobuf/ptypes/any" ) -const googleApis = "type.googleapis.com/" +const urlPrefix = "type.googleapis.com/" -// AnyMessageName returns the name of the message contained in a google.protobuf.Any message. +// AnyMessageName returns the message name contained in an anypb.Any message. +// Most type assertions should use the Is function instead. // -// Note that regular type assertions should be done using the Is -// function. 
AnyMessageName is provided for less common use cases like filtering a -// sequence of Any messages based on a set of allowed message type names. -func AnyMessageName(any *any.Any) (string, error) { +// Deprecated: Call the any.MessageName method instead. +func AnyMessageName(any *anypb.Any) (string, error) { + name, err := anyMessageName(any) + return string(name), err +} +func anyMessageName(any *anypb.Any) (protoreflect.FullName, error) { if any == nil { return "", fmt.Errorf("message is nil") } - slash := strings.LastIndex(any.TypeUrl, "/") - if slash < 0 { + name := protoreflect.FullName(any.TypeUrl) + if i := strings.LastIndex(any.TypeUrl, "/"); i >= 0 { + name = name[i+len("/"):] + } + if !name.IsValid() { return "", fmt.Errorf("message type url %q is invalid", any.TypeUrl) } - return any.TypeUrl[slash+1:], nil + return name, nil } -// MarshalAny takes the protocol buffer and encodes it into google.protobuf.Any. -func MarshalAny(pb proto.Message) (*any.Any, error) { - value, err := proto.Marshal(pb) +// MarshalAny marshals the given message m into an anypb.Any message. +// +// Deprecated: Call the anypb.New function instead. +func MarshalAny(m proto.Message) (*anypb.Any, error) { + switch dm := m.(type) { + case DynamicAny: + m = dm.Message + case *DynamicAny: + if dm == nil { + return nil, proto.ErrNil + } + m = dm.Message + } + b, err := proto.Marshal(m) if err != nil { return nil, err } - return &any.Any{TypeUrl: googleApis + proto.MessageName(pb), Value: value}, nil + return &anypb.Any{TypeUrl: urlPrefix + proto.MessageName(m), Value: b}, nil } -// DynamicAny is a value that can be passed to UnmarshalAny to automatically -// allocate a proto.Message for the type specified in a google.protobuf.Any -// message. The allocated message is stored in the embedded proto.Message. -// -// Example: +// Empty returns a new message of the type specified in an anypb.Any message. +// It returns protoregistry.NotFound if the corresponding message type could not +// be resolved in the global registry. // -// var x ptypes.DynamicAny -// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... } -// fmt.Printf("unmarshaled message: %v", x.Message) -type DynamicAny struct { - proto.Message -} - -// Empty returns a new proto.Message of the type specified in a -// google.protobuf.Any message. It returns an error if corresponding message -// type isn't linked in. -func Empty(any *any.Any) (proto.Message, error) { - aname, err := AnyMessageName(any) +// Deprecated: Use protoregistry.GlobalTypes.FindMessageByName instead +// to resolve the message name and create a new instance of it. +func Empty(any *anypb.Any) (proto.Message, error) { + name, err := anyMessageName(any) if err != nil { return nil, err } - - t := proto.MessageType(aname) - if t == nil { - return nil, fmt.Errorf("any: message type %q isn't linked in", aname) + mt, err := protoregistry.GlobalTypes.FindMessageByName(name) + if err != nil { + return nil, err } - return reflect.New(t.Elem()).Interface().(proto.Message), nil + return proto.MessageV1(mt.New().Interface()), nil } -// UnmarshalAny parses the protocol buffer representation in a google.protobuf.Any -// message and places the decoded result in pb. It returns an error if type of -// contents of Any message does not match type of pb message. +// UnmarshalAny unmarshals the encoded value contained in the anypb.Any message +// into the provided message m. It returns an error if the target message +// does not match the type in the Any message or if an unmarshal error occurs. 
+// +// The target message m may be a *DynamicAny message. If the underlying message +// type could not be resolved, then this returns protoregistry.NotFound. // -// pb can be a proto.Message, or a *DynamicAny. -func UnmarshalAny(any *any.Any, pb proto.Message) error { - if d, ok := pb.(*DynamicAny); ok { - if d.Message == nil { +// Deprecated: Call the any.UnmarshalTo method instead. +func UnmarshalAny(any *anypb.Any, m proto.Message) error { + if dm, ok := m.(*DynamicAny); ok { + if dm.Message == nil { var err error - d.Message, err = Empty(any) + dm.Message, err = Empty(any) if err != nil { return err } } - return UnmarshalAny(any, d.Message) + m = dm.Message } - aname, err := AnyMessageName(any) + anyName, err := AnyMessageName(any) if err != nil { return err } - - mname := proto.MessageName(pb) - if aname != mname { - return fmt.Errorf("mismatched message type: got %q want %q", aname, mname) + msgName := proto.MessageName(m) + if anyName != msgName { + return fmt.Errorf("mismatched message type: got %q want %q", anyName, msgName) } - return proto.Unmarshal(any.Value, pb) + return proto.Unmarshal(any.Value, m) } -// Is returns true if any value contains a given message type. -func Is(any *any.Any, pb proto.Message) bool { - // The following is equivalent to AnyMessageName(any) == proto.MessageName(pb), - // but it avoids scanning TypeUrl for the slash. - if any == nil { +// Is reports whether the Any message contains a message of the specified type. +// +// Deprecated: Call the any.MessageIs method instead. +func Is(any *anypb.Any, m proto.Message) bool { + if any == nil || m == nil { return false } - name := proto.MessageName(pb) - prefix := len(any.TypeUrl) - len(name) - return prefix >= 1 && any.TypeUrl[prefix-1] == '/' && any.TypeUrl[prefix:] == name + name := proto.MessageName(m) + if !strings.HasSuffix(any.TypeUrl, name) { + return false + } + return len(any.TypeUrl) == len(name) || any.TypeUrl[len(any.TypeUrl)-len(name)-1] == '/' +} + +// DynamicAny is a value that can be passed to UnmarshalAny to automatically +// allocate a proto.Message for the type specified in an anypb.Any message. +// The allocated message is stored in the embedded proto.Message. +// +// Example: +// var x ptypes.DynamicAny +// if err := ptypes.UnmarshalAny(a, &x); err != nil { ... } +// fmt.Printf("unmarshaled message: %v", x.Message) +// +// Deprecated: Use the any.UnmarshalNew method instead to unmarshal +// the any message contents into a new instance of the underlying message. 
+type DynamicAny struct{ proto.Message } + +func (m DynamicAny) String() string { + if m.Message == nil { + return "" + } + return m.Message.String() +} +func (m DynamicAny) Reset() { + if m.Message == nil { + return + } + m.Message.Reset() +} +func (m DynamicAny) ProtoMessage() { + return +} +func (m DynamicAny) ProtoReflect() protoreflect.Message { + if m.Message == nil { + return nil + } + return dynamicAny{proto.MessageReflect(m.Message)} +} + +type dynamicAny struct{ protoreflect.Message } + +func (m dynamicAny) Type() protoreflect.MessageType { + return dynamicAnyType{m.Message.Type()} +} +func (m dynamicAny) New() protoreflect.Message { + return dynamicAnyType{m.Message.Type()}.New() +} +func (m dynamicAny) Interface() protoreflect.ProtoMessage { + return DynamicAny{proto.MessageV1(m.Message.Interface())} +} + +type dynamicAnyType struct{ protoreflect.MessageType } + +func (t dynamicAnyType) New() protoreflect.Message { + return dynamicAny{t.MessageType.New()} +} +func (t dynamicAnyType) Zero() protoreflect.Message { + return dynamicAny{t.MessageType.Zero()} } diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go index e3c56d3f..0ef27d33 100644 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/any/any.pb.go @@ -1,191 +1,62 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: google/protobuf/any.proto +// source: github.com/golang/protobuf/ptypes/any/any.proto -package any // import "github.com/golang/protobuf/ptypes/any" +package any -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + anypb "google.golang.org/protobuf/types/known/anypb" + reflect "reflect" +) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf +// Symbols defined in public import of google/protobuf/any.proto. -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +type Any = anypb.Any -// `Any` contains an arbitrary serialized protocol buffer message along with a -// URL that describes the type of the serialized message. -// -// Protobuf library provides support to pack/unpack Any values in the form -// of utility functions or additional generated methods of the Any type. -// -// Example 1: Pack and unpack a message in C++. -// -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } -// -// Example 2: Pack and unpack a message in Java. -// -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := ptypes.MarshalAny(foo) -// ... -// foo := &pb.Foo{} -// if err := ptypes.UnmarshalAny(any, foo); err != nil { -// ... 
-// } -// -// The pack methods provided by protobuf library will by default use -// 'type.googleapis.com/full.type.name' as the type URL and the unpack -// methods only use the fully qualified type name after the last '/' -// in the type URL, for example "foo.bar.com/x/y.z" will yield type -// name "y.z". -// -// -// JSON -// ==== -// The JSON representation of an `Any` value uses the regular -// representation of the deserialized, embedded message, with an -// additional field `@type` which contains the type URL. Example: -// -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } -// -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } -// -// If the embedded message type is well-known and has a custom JSON -// representation, that representation will be embedded adding a field -// `value` which holds the custom JSON in addition to the `@type` -// field. Example (for message [google.protobuf.Duration][]): -// -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } -// -type Any struct { - // A URL/resource name whose content describes the type of the - // serialized protocol buffer message. - // - // For URLs which use the scheme `http`, `https`, or no scheme, the - // following restrictions and interpretations apply: - // - // * If no scheme is provided, `https` is assumed. - // * The last segment of the URL's path must represent the fully - // qualified name of the type (as in `path/google.protobuf.Duration`). - // The name should be in a canonical form (e.g., leading "." is - // not accepted). - // * An HTTP GET on the URL must yield a [google.protobuf.Type][] - // value in binary format, or produce an error. - // * Applications are allowed to cache lookup results based on the - // URL, or have them precompiled into a binary to avoid any - // lookup. Therefore, binary compatibility needs to be preserved - // on changes to types. (Use versioned type names to manage - // breaking changes.) - // - // Schemes other than `http`, `https` (or the empty scheme) might be - // used with implementation specific semantics. - // - TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` - // Must be a valid serialized protocol buffer of the above specified type. 
- Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} +var File_github_com_golang_protobuf_ptypes_any_any_proto protoreflect.FileDescriptor -func (m *Any) Reset() { *m = Any{} } -func (m *Any) String() string { return proto.CompactTextString(m) } -func (*Any) ProtoMessage() {} -func (*Any) Descriptor() ([]byte, []int) { - return fileDescriptor_any_744b9ca530f228db, []int{0} -} -func (*Any) XXX_WellKnownType() string { return "Any" } -func (m *Any) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Any.Unmarshal(m, b) -} -func (m *Any) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Any.Marshal(b, m, deterministic) -} -func (dst *Any) XXX_Merge(src proto.Message) { - xxx_messageInfo_Any.Merge(dst, src) +var file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc = []byte{ + 0x0a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x61, 0x6e, 0x79, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x2b, 0x5a, 0x29, + 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, + 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, 0x70, 0x65, + 0x73, 0x2f, 0x61, 0x6e, 0x79, 0x3b, 0x61, 0x6e, 0x79, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, } -func (m *Any) XXX_Size() int { - return xxx_messageInfo_Any.Size(m) -} -func (m *Any) XXX_DiscardUnknown() { - xxx_messageInfo_Any.DiscardUnknown(m) -} - -var xxx_messageInfo_Any proto.InternalMessageInfo -func (m *Any) GetTypeUrl() string { - if m != nil { - return m.TypeUrl - } - return "" +var file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name } -func (m *Any) GetValue() []byte { - if m != nil { - return m.Value +func init() { file_github_com_golang_protobuf_ptypes_any_any_proto_init() } +func file_github_com_golang_protobuf_ptypes_any_any_proto_init() { + if File_github_com_golang_protobuf_ptypes_any_any_proto != nil { + return } - return nil -} - -func init() { - proto.RegisterType((*Any)(nil), "google.protobuf.Any") -} - -func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_any_744b9ca530f228db) } - -var fileDescriptor_any_744b9ca530f228db = []byte{ - // 185 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f, - 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4, - 0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a, - 0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46, - 0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 
0xb5, 0x2c, 0x31, 0xa7, - 0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xca, 0xe7, 0x12, 0x4e, 0xce, - 0xcf, 0xd5, 0x43, 0x33, 0xce, 0x89, 0xc3, 0x31, 0xaf, 0x32, 0x00, 0xc4, 0x09, 0x60, 0x8c, 0x52, - 0x4d, 0xcf, 0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0x4f, 0xcf, 0xcf, 0x49, 0xcc, - 0x4b, 0x47, 0xb8, 0xa8, 0x00, 0x64, 0x7a, 0x31, 0xc8, 0x61, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, - 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x8c, 0x0a, 0x80, 0x2a, 0xd1, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, - 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0x29, 0x4d, 0x62, 0x03, 0xeb, 0x35, 0x06, 0x04, 0x00, 0x00, 0xff, - 0xff, 0x13, 0xf8, 0xe8, 0x42, 0xdd, 0x00, 0x00, 0x00, + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes, + DependencyIndexes: file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_any_any_proto = out.File + file_github_com_golang_protobuf_ptypes_any_any_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_any_any_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_any_any_proto_depIdxs = nil } diff --git a/vendor/github.com/golang/protobuf/ptypes/any/any.proto b/vendor/github.com/golang/protobuf/ptypes/any/any.proto deleted file mode 100644 index c7486676..00000000 --- a/vendor/github.com/golang/protobuf/ptypes/any/any.proto +++ /dev/null @@ -1,149 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -syntax = "proto3"; - -package google.protobuf; - -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; -option go_package = "github.com/golang/protobuf/ptypes/any"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "AnyProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; - -// `Any` contains an arbitrary serialized protocol buffer message along with a -// URL that describes the type of the serialized message. -// -// Protobuf library provides support to pack/unpack Any values in the form -// of utility functions or additional generated methods of the Any type. -// -// Example 1: Pack and unpack a message in C++. -// -// Foo foo = ...; -// Any any; -// any.PackFrom(foo); -// ... -// if (any.UnpackTo(&foo)) { -// ... -// } -// -// Example 2: Pack and unpack a message in Java. -// -// Foo foo = ...; -// Any any = Any.pack(foo); -// ... -// if (any.is(Foo.class)) { -// foo = any.unpack(Foo.class); -// } -// -// Example 3: Pack and unpack a message in Python. -// -// foo = Foo(...) -// any = Any() -// any.Pack(foo) -// ... -// if any.Is(Foo.DESCRIPTOR): -// any.Unpack(foo) -// ... -// -// Example 4: Pack and unpack a message in Go -// -// foo := &pb.Foo{...} -// any, err := ptypes.MarshalAny(foo) -// ... -// foo := &pb.Foo{} -// if err := ptypes.UnmarshalAny(any, foo); err != nil { -// ... -// } -// -// The pack methods provided by protobuf library will by default use -// 'type.googleapis.com/full.type.name' as the type URL and the unpack -// methods only use the fully qualified type name after the last '/' -// in the type URL, for example "foo.bar.com/x/y.z" will yield type -// name "y.z". -// -// -// JSON -// ==== -// The JSON representation of an `Any` value uses the regular -// representation of the deserialized, embedded message, with an -// additional field `@type` which contains the type URL. Example: -// -// package google.profile; -// message Person { -// string first_name = 1; -// string last_name = 2; -// } -// -// { -// "@type": "type.googleapis.com/google.profile.Person", -// "firstName": , -// "lastName": -// } -// -// If the embedded message type is well-known and has a custom JSON -// representation, that representation will be embedded adding a field -// `value` which holds the custom JSON in addition to the `@type` -// field. Example (for message [google.protobuf.Duration][]): -// -// { -// "@type": "type.googleapis.com/google.protobuf.Duration", -// "value": "1.212s" -// } -// -message Any { - // A URL/resource name whose content describes the type of the - // serialized protocol buffer message. - // - // For URLs which use the scheme `http`, `https`, or no scheme, the - // following restrictions and interpretations apply: - // - // * If no scheme is provided, `https` is assumed. - // * The last segment of the URL's path must represent the fully - // qualified name of the type (as in `path/google.protobuf.Duration`). - // The name should be in a canonical form (e.g., leading "." is - // not accepted). - // * An HTTP GET on the URL must yield a [google.protobuf.Type][] - // value in binary format, or produce an error. - // * Applications are allowed to cache lookup results based on the - // URL, or have them precompiled into a binary to avoid any - // lookup. Therefore, binary compatibility needs to be preserved - // on changes to types. (Use versioned type names to manage - // breaking changes.) 
- // - // Schemes other than `http`, `https` (or the empty scheme) might be - // used with implementation specific semantics. - // - string type_url = 1; - - // Must be a valid serialized protocol buffer of the above specified type. - bytes value = 2; -} diff --git a/vendor/github.com/golang/protobuf/ptypes/doc.go b/vendor/github.com/golang/protobuf/ptypes/doc.go index c0d595da..d3c33259 100644 --- a/vendor/github.com/golang/protobuf/ptypes/doc.go +++ b/vendor/github.com/golang/protobuf/ptypes/doc.go @@ -1,35 +1,10 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. -/* -Package ptypes contains code for interacting with well-known types. -*/ +// Package ptypes provides functionality for interacting with well-known types. +// +// Deprecated: Well-known types have specialized functionality directly +// injected into the generated packages for each message type. +// See the deprecation notice for each function for the suggested alternative. package ptypes diff --git a/vendor/github.com/golang/protobuf/ptypes/duration.go b/vendor/github.com/golang/protobuf/ptypes/duration.go index 65cb0f8e..b2b55dd8 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration.go @@ -1,102 +1,76 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package ptypes -// This file implements conversions between google.protobuf.Duration -// and time.Duration. - import ( "errors" "fmt" "time" - durpb "github.com/golang/protobuf/ptypes/duration" + durationpb "github.com/golang/protobuf/ptypes/duration" ) +// Range of google.protobuf.Duration as specified in duration.proto. +// This is about 10,000 years in seconds. const ( - // Range of a durpb.Duration in seconds, as specified in - // google/protobuf/duration.proto. This is about 10,000 years in seconds. maxSeconds = int64(10000 * 365.25 * 24 * 60 * 60) minSeconds = -maxSeconds ) -// validateDuration determines whether the durpb.Duration is valid according to the -// definition in google/protobuf/duration.proto. A valid durpb.Duration -// may still be too large to fit into a time.Duration (the range of durpb.Duration -// is about 10,000 years, and the range of time.Duration is about 290). -func validateDuration(d *durpb.Duration) error { - if d == nil { - return errors.New("duration: nil Duration") - } - if d.Seconds < minSeconds || d.Seconds > maxSeconds { - return fmt.Errorf("duration: %v: seconds out of range", d) - } - if d.Nanos <= -1e9 || d.Nanos >= 1e9 { - return fmt.Errorf("duration: %v: nanos out of range", d) - } - // Seconds and Nanos must have the same sign, unless d.Nanos is zero. - if (d.Seconds < 0 && d.Nanos > 0) || (d.Seconds > 0 && d.Nanos < 0) { - return fmt.Errorf("duration: %v: seconds and nanos have different signs", d) - } - return nil -} - -// Duration converts a durpb.Duration to a time.Duration. Duration -// returns an error if the durpb.Duration is invalid or is too large to be -// represented in a time.Duration. -func Duration(p *durpb.Duration) (time.Duration, error) { - if err := validateDuration(p); err != nil { +// Duration converts a durationpb.Duration to a time.Duration. +// Duration returns an error if dur is invalid or overflows a time.Duration. +// +// Deprecated: Call the dur.AsDuration and dur.CheckValid methods instead. 
+func Duration(dur *durationpb.Duration) (time.Duration, error) { + if err := validateDuration(dur); err != nil { return 0, err } - d := time.Duration(p.Seconds) * time.Second - if int64(d/time.Second) != p.Seconds { - return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) + d := time.Duration(dur.Seconds) * time.Second + if int64(d/time.Second) != dur.Seconds { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", dur) } - if p.Nanos != 0 { - d += time.Duration(p.Nanos) - if (d < 0) != (p.Nanos < 0) { - return 0, fmt.Errorf("duration: %v is out of range for time.Duration", p) + if dur.Nanos != 0 { + d += time.Duration(dur.Nanos) * time.Nanosecond + if (d < 0) != (dur.Nanos < 0) { + return 0, fmt.Errorf("duration: %v is out of range for time.Duration", dur) } } return d, nil } -// DurationProto converts a time.Duration to a durpb.Duration. -func DurationProto(d time.Duration) *durpb.Duration { +// DurationProto converts a time.Duration to a durationpb.Duration. +// +// Deprecated: Call the durationpb.New function instead. +func DurationProto(d time.Duration) *durationpb.Duration { nanos := d.Nanoseconds() secs := nanos / 1e9 nanos -= secs * 1e9 - return &durpb.Duration{ - Seconds: secs, + return &durationpb.Duration{ + Seconds: int64(secs), Nanos: int32(nanos), } } + +// validateDuration determines whether the durationpb.Duration is valid +// according to the definition in google/protobuf/duration.proto. +// A valid durpb.Duration may still be too large to fit into a time.Duration +// Note that the range of durationpb.Duration is about 10,000 years, +// while the range of time.Duration is about 290 years. +func validateDuration(dur *durationpb.Duration) error { + if dur == nil { + return errors.New("duration: nil Duration") + } + if dur.Seconds < minSeconds || dur.Seconds > maxSeconds { + return fmt.Errorf("duration: %v: seconds out of range", dur) + } + if dur.Nanos <= -1e9 || dur.Nanos >= 1e9 { + return fmt.Errorf("duration: %v: nanos out of range", dur) + } + // Seconds and Nanos must have the same sign, unless d.Nanos is zero. + if (dur.Seconds < 0 && dur.Nanos > 0) || (dur.Seconds > 0 && dur.Nanos < 0) { + return fmt.Errorf("duration: %v: seconds and nanos have different signs", dur) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go index a7beb2c4..d0079ee3 100644 --- a/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/duration/duration.pb.go @@ -1,159 +1,63 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: google/protobuf/duration.proto +// source: github.com/golang/protobuf/ptypes/duration/duration.proto -package duration // import "github.com/golang/protobuf/ptypes/duration" +package duration -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + durationpb "google.golang.org/protobuf/types/known/durationpb" + reflect "reflect" +) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf +// Symbols defined in public import of google/protobuf/duration.proto. -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. 
-// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +type Duration = durationpb.Duration -// A Duration represents a signed, fixed-length span of time represented -// as a count of seconds and fractions of seconds at nanosecond -// resolution. It is independent of any calendar and concepts like "day" -// or "month". It is related to Timestamp in that the difference between -// two Timestamp values is a Duration and it can be added or subtracted -// from a Timestamp. Range is approximately +-10,000 years. -// -// # Examples -// -// Example 1: Compute Duration from two Timestamps in pseudo code. -// -// Timestamp start = ...; -// Timestamp end = ...; -// Duration duration = ...; -// -// duration.seconds = end.seconds - start.seconds; -// duration.nanos = end.nanos - start.nanos; -// -// if (duration.seconds < 0 && duration.nanos > 0) { -// duration.seconds += 1; -// duration.nanos -= 1000000000; -// } else if (durations.seconds > 0 && duration.nanos < 0) { -// duration.seconds -= 1; -// duration.nanos += 1000000000; -// } -// -// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. -// -// Timestamp start = ...; -// Duration duration = ...; -// Timestamp end = ...; -// -// end.seconds = start.seconds + duration.seconds; -// end.nanos = start.nanos + duration.nanos; -// -// if (end.nanos < 0) { -// end.seconds -= 1; -// end.nanos += 1000000000; -// } else if (end.nanos >= 1000000000) { -// end.seconds += 1; -// end.nanos -= 1000000000; -// } -// -// Example 3: Compute Duration from datetime.timedelta in Python. -// -// td = datetime.timedelta(days=3, minutes=10) -// duration = Duration() -// duration.FromTimedelta(td) -// -// # JSON Mapping -// -// In JSON format, the Duration type is encoded as a string rather than an -// object, where the string ends in the suffix "s" (indicating seconds) and -// is preceded by the number of seconds, with nanoseconds expressed as -// fractional seconds. For example, 3 seconds with 0 nanoseconds should be -// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -// microsecond should be expressed in JSON format as "3.000001s". -// -// -type Duration struct { - // Signed seconds of the span of time. Must be from -315,576,000,000 - // to +315,576,000,000 inclusive. Note: these bounds are computed from: - // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - // Signed fractions of a second at nanosecond resolution of the span - // of time. Durations less than one second are represented with a 0 - // `seconds` field and a positive or negative `nanos` field. For durations - // of one second or more, a non-zero value for the `nanos` field must be - // of the same sign as the `seconds` field. Must be from -999,999,999 - // to +999,999,999 inclusive. 
- Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} +var File_github_com_golang_protobuf_ptypes_duration_duration_proto protoreflect.FileDescriptor -func (m *Duration) Reset() { *m = Duration{} } -func (m *Duration) String() string { return proto.CompactTextString(m) } -func (*Duration) ProtoMessage() {} -func (*Duration) Descriptor() ([]byte, []int) { - return fileDescriptor_duration_e7d612259e3f0613, []int{0} -} -func (*Duration) XXX_WellKnownType() string { return "Duration" } -func (m *Duration) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Duration.Unmarshal(m, b) -} -func (m *Duration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Duration.Marshal(b, m, deterministic) -} -func (dst *Duration) XXX_Merge(src proto.Message) { - xxx_messageInfo_Duration.Merge(dst, src) +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc = []byte{ + 0x0a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x64, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x35, 0x5a, 0x33, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, 0x70, 0x65, 0x73, + 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3b, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } -func (m *Duration) XXX_Size() int { - return xxx_messageInfo_Duration.Size(m) -} -func (m *Duration) XXX_DiscardUnknown() { - xxx_messageInfo_Duration.DiscardUnknown(m) -} - -var xxx_messageInfo_Duration proto.InternalMessageInfo -func (m *Duration) GetSeconds() int64 { - if m != nil { - return m.Seconds - } - return 0 +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name } -func (m *Duration) GetNanos() int32 { - if m != nil { - return m.Nanos +func init() { file_github_com_golang_protobuf_ptypes_duration_duration_proto_init() } +func file_github_com_golang_protobuf_ptypes_duration_duration_proto_init() { + if File_github_com_golang_protobuf_ptypes_duration_duration_proto != nil { + return } - return 0 -} - -func init() { - proto.RegisterType((*Duration)(nil), "google.protobuf.Duration") -} - -func init() { - proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_duration_e7d612259e3f0613) -} - -var fileDescriptor_duration_e7d612259e3f0613 = []byte{ - // 190 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, - 0xcf, 0x49, 0xd5, 
0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a, - 0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56, - 0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5, - 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e, - 0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0xc3, 0x25, 0x9c, 0x9c, - 0x9f, 0xab, 0x87, 0x66, 0xa4, 0x13, 0x2f, 0xcc, 0xc0, 0x00, 0x90, 0x48, 0x00, 0x63, 0x94, 0x56, - 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, 0x5e, 0x72, 0x7e, 0xae, 0x7e, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, - 0x3a, 0xc2, 0x7d, 0x05, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x70, 0x67, 0xfe, 0x60, 0x64, 0x5c, 0xc4, - 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x6e, 0x00, 0x54, 0xa9, 0x5e, 0x78, - 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x4b, 0x12, 0x1b, 0xd8, 0x0c, 0x63, - 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x84, 0x30, 0xff, 0xf3, 0x00, 0x00, 0x00, + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes, + DependencyIndexes: file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_duration_duration_proto = out.File + file_github_com_golang_protobuf_ptypes_duration_duration_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_duration_duration_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_duration_duration_proto_depIdxs = nil } diff --git a/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto b/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto deleted file mode 100644 index 975fce41..00000000 --- a/vendor/github.com/golang/protobuf/ptypes/duration/duration.proto +++ /dev/null @@ -1,117 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto3"; - -package google.protobuf; - -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; -option cc_enable_arenas = true; -option go_package = "github.com/golang/protobuf/ptypes/duration"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "DurationProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; - -// A Duration represents a signed, fixed-length span of time represented -// as a count of seconds and fractions of seconds at nanosecond -// resolution. It is independent of any calendar and concepts like "day" -// or "month". It is related to Timestamp in that the difference between -// two Timestamp values is a Duration and it can be added or subtracted -// from a Timestamp. Range is approximately +-10,000 years. -// -// # Examples -// -// Example 1: Compute Duration from two Timestamps in pseudo code. -// -// Timestamp start = ...; -// Timestamp end = ...; -// Duration duration = ...; -// -// duration.seconds = end.seconds - start.seconds; -// duration.nanos = end.nanos - start.nanos; -// -// if (duration.seconds < 0 && duration.nanos > 0) { -// duration.seconds += 1; -// duration.nanos -= 1000000000; -// } else if (durations.seconds > 0 && duration.nanos < 0) { -// duration.seconds -= 1; -// duration.nanos += 1000000000; -// } -// -// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. -// -// Timestamp start = ...; -// Duration duration = ...; -// Timestamp end = ...; -// -// end.seconds = start.seconds + duration.seconds; -// end.nanos = start.nanos + duration.nanos; -// -// if (end.nanos < 0) { -// end.seconds -= 1; -// end.nanos += 1000000000; -// } else if (end.nanos >= 1000000000) { -// end.seconds += 1; -// end.nanos -= 1000000000; -// } -// -// Example 3: Compute Duration from datetime.timedelta in Python. -// -// td = datetime.timedelta(days=3, minutes=10) -// duration = Duration() -// duration.FromTimedelta(td) -// -// # JSON Mapping -// -// In JSON format, the Duration type is encoded as a string rather than an -// object, where the string ends in the suffix "s" (indicating seconds) and -// is preceded by the number of seconds, with nanoseconds expressed as -// fractional seconds. For example, 3 seconds with 0 nanoseconds should be -// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -// microsecond should be expressed in JSON format as "3.000001s". -// -// -message Duration { - - // Signed seconds of the span of time. Must be from -315,576,000,000 - // to +315,576,000,000 inclusive. Note: these bounds are computed from: - // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - int64 seconds = 1; - - // Signed fractions of a second at nanosecond resolution of the span - // of time. Durations less than one second are represented with a 0 - // `seconds` field and a positive or negative `nanos` field. 
For durations - // of one second or more, a non-zero value for the `nanos` field must be - // of the same sign as the `seconds` field. Must be from -999,999,999 - // to +999,999,999 inclusive. - int32 nanos = 2; -} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp.go b/vendor/github.com/golang/protobuf/ptypes/timestamp.go index 47f10dbc..8368a3f7 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp.go @@ -1,46 +1,18 @@ -// Go support for Protocol Buffers - Google's data interchange format -// -// Copyright 2016 The Go Authors. All rights reserved. -// https://github.com/golang/protobuf -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. package ptypes -// This file implements operations on google.protobuf.Timestamp. - import ( "errors" "fmt" "time" - tspb "github.com/golang/protobuf/ptypes/timestamp" + timestamppb "github.com/golang/protobuf/ptypes/timestamp" ) +// Range of google.protobuf.Duration as specified in timestamp.proto. const ( // Seconds field of the earliest valid Timestamp. // This is time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC).Unix(). @@ -50,44 +22,20 @@ const ( maxValidSeconds = 253402300800 ) -// validateTimestamp determines whether a Timestamp is valid. -// A valid timestamp represents a time in the range -// [0001-01-01, 10000-01-01) and has a Nanos field -// in the range [0, 1e9). -// -// If the Timestamp is valid, validateTimestamp returns nil. -// Otherwise, it returns an error that describes -// the problem. -// -// Every valid Timestamp can be represented by a time.Time, but the converse is not true. 
-func validateTimestamp(ts *tspb.Timestamp) error { - if ts == nil { - return errors.New("timestamp: nil Timestamp") - } - if ts.Seconds < minValidSeconds { - return fmt.Errorf("timestamp: %v before 0001-01-01", ts) - } - if ts.Seconds >= maxValidSeconds { - return fmt.Errorf("timestamp: %v after 10000-01-01", ts) - } - if ts.Nanos < 0 || ts.Nanos >= 1e9 { - return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts) - } - return nil -} - -// Timestamp converts a google.protobuf.Timestamp proto to a time.Time. +// Timestamp converts a timestamppb.Timestamp to a time.Time. // It returns an error if the argument is invalid. // -// Unlike most Go functions, if Timestamp returns an error, the first return value -// is not the zero time.Time. Instead, it is the value obtained from the +// Unlike most Go functions, if Timestamp returns an error, the first return +// value is not the zero time.Time. Instead, it is the value obtained from the // time.Unix function when passed the contents of the Timestamp, in the UTC // locale. This may or may not be a meaningful time; many invalid Timestamps // do map to valid time.Times. // // A nil Timestamp returns an error. The first return value in that case is // undefined. -func Timestamp(ts *tspb.Timestamp) (time.Time, error) { +// +// Deprecated: Call the ts.AsTime and ts.CheckValid methods instead. +func Timestamp(ts *timestamppb.Timestamp) (time.Time, error) { // Don't return the zero value on error, because corresponds to a valid // timestamp. Instead return whatever time.Unix gives us. var t time.Time @@ -100,7 +48,9 @@ func Timestamp(ts *tspb.Timestamp) (time.Time, error) { } // TimestampNow returns a google.protobuf.Timestamp for the current time. -func TimestampNow() *tspb.Timestamp { +// +// Deprecated: Call the timestamppb.Now function instead. +func TimestampNow() *timestamppb.Timestamp { ts, err := TimestampProto(time.Now()) if err != nil { panic("ptypes: time.Now() out of Timestamp range") @@ -110,12 +60,12 @@ func TimestampNow() *tspb.Timestamp { // TimestampProto converts the time.Time to a google.protobuf.Timestamp proto. // It returns an error if the resulting Timestamp is invalid. -func TimestampProto(t time.Time) (*tspb.Timestamp, error) { - seconds := t.Unix() - nanos := int32(t.Sub(time.Unix(seconds, 0))) - ts := &tspb.Timestamp{ - Seconds: seconds, - Nanos: nanos, +// +// Deprecated: Call the timestamppb.New function instead. +func TimestampProto(t time.Time) (*timestamppb.Timestamp, error) { + ts := &timestamppb.Timestamp{ + Seconds: t.Unix(), + Nanos: int32(t.Nanosecond()), } if err := validateTimestamp(ts); err != nil { return nil, err @@ -123,12 +73,40 @@ func TimestampProto(t time.Time) (*tspb.Timestamp, error) { return ts, nil } -// TimestampString returns the RFC 3339 string for valid Timestamps. For invalid -// Timestamps, it returns an error message in parentheses. -func TimestampString(ts *tspb.Timestamp) string { +// TimestampString returns the RFC 3339 string for valid Timestamps. +// For invalid Timestamps, it returns an error message in parentheses. +// +// Deprecated: Call the ts.AsTime method instead, +// followed by a call to the Format method on the time.Time value. +func TimestampString(ts *timestamppb.Timestamp) string { t, err := Timestamp(ts) if err != nil { return fmt.Sprintf("(%v)", err) } return t.Format(time.RFC3339Nano) } + +// validateTimestamp determines whether a Timestamp is valid. 
+// A valid timestamp represents a time in the range [0001-01-01, 10000-01-01) +// and has a Nanos field in the range [0, 1e9). +// +// If the Timestamp is valid, validateTimestamp returns nil. +// Otherwise, it returns an error that describes the problem. +// +// Every valid Timestamp can be represented by a time.Time, +// but the converse is not true. +func validateTimestamp(ts *timestamppb.Timestamp) error { + if ts == nil { + return errors.New("timestamp: nil Timestamp") + } + if ts.Seconds < minValidSeconds { + return fmt.Errorf("timestamp: %v before 0001-01-01", ts) + } + if ts.Seconds >= maxValidSeconds { + return fmt.Errorf("timestamp: %v after 10000-01-01", ts) + } + if ts.Nanos < 0 || ts.Nanos >= 1e9 { + return fmt.Errorf("timestamp: %v: nanos not in range [0, 1e9)", ts) + } + return nil +} diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go index 8e76ae97..a76f8076 100644 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go +++ b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.pb.go @@ -1,175 +1,64 @@ // Code generated by protoc-gen-go. DO NOT EDIT. -// source: google/protobuf/timestamp.proto +// source: github.com/golang/protobuf/ptypes/timestamp/timestamp.proto -package timestamp // import "github.com/golang/protobuf/ptypes/timestamp" +package timestamp -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" +) -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf +// Symbols defined in public import of google/protobuf/timestamp.proto. -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package +type Timestamp = timestamppb.Timestamp -// A Timestamp represents a point in time independent of any time zone -// or calendar, represented as seconds and fractions of seconds at -// nanosecond resolution in UTC Epoch time. It is encoded using the -// Proleptic Gregorian Calendar which extends the Gregorian calendar -// backwards to year one. It is encoded assuming all minutes are 60 -// seconds long, i.e. leap seconds are "smeared" so that no leap second -// table is needed for interpretation. Range is from -// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. -// By restricting to that range, we ensure that we can convert to -// and from RFC 3339 date strings. -// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). -// -// # Examples -// -// Example 1: Compute Timestamp from POSIX `time()`. -// -// Timestamp timestamp; -// timestamp.set_seconds(time(NULL)); -// timestamp.set_nanos(0); -// -// Example 2: Compute Timestamp from POSIX `gettimeofday()`. -// -// struct timeval tv; -// gettimeofday(&tv, NULL); -// -// Timestamp timestamp; -// timestamp.set_seconds(tv.tv_sec); -// timestamp.set_nanos(tv.tv_usec * 1000); -// -// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. 
-// -// FILETIME ft; -// GetSystemTimeAsFileTime(&ft); -// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; -// -// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z -// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. -// Timestamp timestamp; -// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); -// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); -// -// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. -// -// long millis = System.currentTimeMillis(); -// -// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) -// .setNanos((int) ((millis % 1000) * 1000000)).build(); -// -// -// Example 5: Compute Timestamp from current time in Python. -// -// timestamp = Timestamp() -// timestamp.GetCurrentTime() -// -// # JSON Mapping -// -// In JSON format, the Timestamp type is encoded as a string in the -// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -// where {year} is always expressed using four digits while {month}, {day}, -// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -// is required, though only UTC (as indicated by "Z") is presently supported. -// -// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -// 01:30 UTC on January 15, 2017. -// -// In JavaScript, one can convert a Date object to this format using the -// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] -// method. In Python, a standard `datetime.datetime` object can be converted -// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) -// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one -// can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--) -// to obtain a formatter capable of generating timestamps in this format. -// -// -type Timestamp struct { - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. 
- Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} +var File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto protoreflect.FileDescriptor -func (m *Timestamp) Reset() { *m = Timestamp{} } -func (m *Timestamp) String() string { return proto.CompactTextString(m) } -func (*Timestamp) ProtoMessage() {} -func (*Timestamp) Descriptor() ([]byte, []int) { - return fileDescriptor_timestamp_b826e8e5fba671a8, []int{0} -} -func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" } -func (m *Timestamp) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_Timestamp.Unmarshal(m, b) -} -func (m *Timestamp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_Timestamp.Marshal(b, m, deterministic) -} -func (dst *Timestamp) XXX_Merge(src proto.Message) { - xxx_messageInfo_Timestamp.Merge(dst, src) +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc = []byte{ + 0x0a, 0x3b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2f, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x42, 0x37, + 0x5a, 0x35, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x6c, + 0x61, 0x6e, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x70, 0x74, 0x79, + 0x70, 0x65, 0x73, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x3b, 0x74, 0x69, + 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x50, 0x00, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, } -func (m *Timestamp) XXX_Size() int { - return xxx_messageInfo_Timestamp.Size(m) -} -func (m *Timestamp) XXX_DiscardUnknown() { - xxx_messageInfo_Timestamp.DiscardUnknown(m) -} - -var xxx_messageInfo_Timestamp proto.InternalMessageInfo -func (m *Timestamp) GetSeconds() int64 { - if m != nil { - return m.Seconds - } - return 0 +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes = []interface{}{} +var file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name } -func (m *Timestamp) GetNanos() int32 { - if m != nil { - return m.Nanos +func init() { file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_init() } +func file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_init() { + if File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto != nil { + return } - return 0 -} - -func init() { - proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp") -} - -func init() { - proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_timestamp_b826e8e5fba671a8) -} - -var fileDescriptor_timestamp_b826e8e5fba671a8 = []byte{ - // 191 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f, - 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d, - 0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28, - 0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5, - 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89, - 0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x1d, 0x97, 0x70, - 0x72, 0x7e, 0xae, 0x1e, 0x9a, 0x99, 0x4e, 0x7c, 0x70, 0x13, 0x03, 0x40, 0x42, 0x01, 0x8c, 0x51, - 0xda, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, 0xb9, 0xfa, 0xe9, 0xf9, 0x39, 0x89, - 0x79, 0xe9, 0x08, 0x27, 0x16, 0x94, 0x54, 0x16, 0xa4, 0x16, 0x23, 0x5c, 0xfa, 0x83, 0x91, 0x71, - 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88, 0xc9, 0x01, 0x50, 0xb5, 0x7a, - 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9, 0xe5, 0x79, 0x21, 0x20, 0x3d, 0x49, 0x6c, 0x60, 0x43, - 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x77, 0x4a, 0x07, 0xf7, 0x00, 0x00, 0x00, + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes, + DependencyIndexes: file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs, + }.Build() + File_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto = out.File + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_rawDesc = nil + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_goTypes = nil + file_github_com_golang_protobuf_ptypes_timestamp_timestamp_proto_depIdxs = nil } diff --git a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto b/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto deleted file mode 100644 index 06750ab1..00000000 --- a/vendor/github.com/golang/protobuf/ptypes/timestamp/timestamp.proto +++ /dev/null @@ -1,133 +0,0 @@ -// Protocol Buffers - Google's data interchange format -// Copyright 2008 Google Inc. All rights reserved. -// https://developers.google.com/protocol-buffers/ -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -syntax = "proto3"; - -package google.protobuf; - -option csharp_namespace = "Google.Protobuf.WellKnownTypes"; -option cc_enable_arenas = true; -option go_package = "github.com/golang/protobuf/ptypes/timestamp"; -option java_package = "com.google.protobuf"; -option java_outer_classname = "TimestampProto"; -option java_multiple_files = true; -option objc_class_prefix = "GPB"; - -// A Timestamp represents a point in time independent of any time zone -// or calendar, represented as seconds and fractions of seconds at -// nanosecond resolution in UTC Epoch time. It is encoded using the -// Proleptic Gregorian Calendar which extends the Gregorian calendar -// backwards to year one. It is encoded assuming all minutes are 60 -// seconds long, i.e. leap seconds are "smeared" so that no leap second -// table is needed for interpretation. Range is from -// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. -// By restricting to that range, we ensure that we can convert to -// and from RFC 3339 date strings. -// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt). -// -// # Examples -// -// Example 1: Compute Timestamp from POSIX `time()`. -// -// Timestamp timestamp; -// timestamp.set_seconds(time(NULL)); -// timestamp.set_nanos(0); -// -// Example 2: Compute Timestamp from POSIX `gettimeofday()`. -// -// struct timeval tv; -// gettimeofday(&tv, NULL); -// -// Timestamp timestamp; -// timestamp.set_seconds(tv.tv_sec); -// timestamp.set_nanos(tv.tv_usec * 1000); -// -// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. -// -// FILETIME ft; -// GetSystemTimeAsFileTime(&ft); -// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; -// -// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z -// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. -// Timestamp timestamp; -// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); -// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); -// -// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. -// -// long millis = System.currentTimeMillis(); -// -// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) -// .setNanos((int) ((millis % 1000) * 1000000)).build(); -// -// -// Example 5: Compute Timestamp from current time in Python. -// -// timestamp = Timestamp() -// timestamp.GetCurrentTime() -// -// # JSON Mapping -// -// In JSON format, the Timestamp type is encoded as a string in the -// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -// where {year} is always expressed using four digits while {month}, {day}, -// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -// are optional. 
The "Z" suffix indicates the timezone ("UTC"); the timezone -// is required, though only UTC (as indicated by "Z") is presently supported. -// -// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -// 01:30 UTC on January 15, 2017. -// -// In JavaScript, one can convert a Date object to this format using the -// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString] -// method. In Python, a standard `datetime.datetime` object can be converted -// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) -// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one -// can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( -// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--) -// to obtain a formatter capable of generating timestamps in this format. -// -// -message Timestamp { - - // Represents seconds of UTC time since Unix epoch - // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - // 9999-12-31T23:59:59Z inclusive. - int64 seconds = 1; - - // Non-negative fractions of a second at nanosecond resolution. Negative - // second values with fractions must still have non-negative nanos values - // that count forward in time. Must be from 0 to 999,999,999 - // inclusive. - int32 nanos = 2; -} diff --git a/vendor/github.com/gookit/color/color_16.go b/vendor/github.com/gookit/color/color_16.go index 0b70efe4..eda226a1 100644 --- a/vendor/github.com/gookit/color/color_16.go +++ b/vendor/github.com/gookit/color/color_16.go @@ -41,15 +41,27 @@ func (o Opts) String() string { * Basic 16 color definition *************************************************************/ -// Base value for foreground/background color -// base: fg 30~37, bg 40~47 -// light: fg 90~97, bg 100~107 +const ( + // OptMax max option value. range: 0 - 9 + OptMax = 10 + // DiffFgBg diff foreground and background color + DiffFgBg = 10 +) + +// Boundary value for foreground/background color 16 +// +// - base: fg 30~37, bg 40~47 +// - light: fg 90~97, bg 100~107 const ( FgBase uint8 = 30 + FgMax uint8 = 37 BgBase uint8 = 40 + BgMax uint8 = 47 HiFgBase uint8 = 90 + HiFgMax uint8 = 97 HiBgBase uint8 = 100 + HiBgMax uint8 = 107 ) // Foreground colors. basic foreground colors 30 - 37 @@ -94,7 +106,7 @@ const ( BgDefault Color = 49 ) -// Extra background color 100 - 107(非标准) +// Extra background color 100 - 107 (non-standard) const ( BgDarkGray Color = iota + 100 BgLightRed @@ -108,7 +120,7 @@ const ( BgGray Color = 100 ) -// Option settings +// Option settings. range: 0 - 9 const ( OpReset Color = iota // 0 重置所有设置 OpBold // 1 加粗 @@ -248,9 +260,9 @@ func (c Color) Println(a ...any) { doPrintlnV2(c.String(), a) } // lightCyan := Cyan.Light() // lightCyan.Print("message") func (c Color) Light() Color { - val := int(c) + val := uint8(c) if val >= 30 && val <= 47 { - return Color(uint8(c) + 60) + return Color(val + 60) } // don't change @@ -264,9 +276,9 @@ func (c Color) Light() Color { // cyan := LightCyan.Darken() // cyan.Print("message") func (c Color) Darken() Color { - val := int(c) + val := uint8(c) if val >= 90 && val <= 107 { - return Color(uint8(c) - 60) + return Color(val - 60) } // don't change @@ -324,7 +336,7 @@ func (c Color) RGB() RGBColor { return emptyRGBColor } - return HEX(Basic2hex(val)) + return HEX(Basic2hex(val), c.IsBg()) } // Code convert to code string. 
eg "35" @@ -337,8 +349,23 @@ func (c Color) String() string { return strconv.FormatInt(int64(c), 10) } +// IsBg check is background color +func (c Color) IsBg() bool { + val := uint8(c) + return val >= BgBase && val <= BgMax || val >= HiBgBase && val <= HiBgMax +} + +// IsFg check is foreground color +func (c Color) IsFg() bool { + val := uint8(c) + return val >= FgBase && val <= FgMax || val >= HiFgBase && val <= HiFgMax +} + +// IsOption check is option code: 0-9 +func (c Color) IsOption() bool { return uint8(c) < OptMax } + // IsValid color value -func (c Color) IsValid() bool { return c < 107 } +func (c Color) IsValid() bool { return uint8(c) < HiBgMax } /************************************************************* * basic color maps diff --git a/vendor/github.com/gookit/color/color_256.go b/vendor/github.com/gookit/color/color_256.go index 991e604c..79ae5f8d 100644 --- a/vendor/github.com/gookit/color/color_256.go +++ b/vendor/github.com/gookit/color/color_256.go @@ -43,7 +43,8 @@ const ( * 8bit(256) Color: Bit8Color Color256 *************************************************************/ -// Color256 256 color (8 bit), uint8 range at 0 - 255 +// Color256 256 color (8 bit), uint8 range at 0 - 255. +// Support 256 color on windows CMD, PowerShell // // 颜色值使用10进制和16进制都可 0x98 = 152 // @@ -54,10 +55,9 @@ const ( // // example: // -// fg color: [152, 0] -// bg color: [152, 1] +// fg color: [152, 0] +// bg color: [152, 1] // -// NOTICE: now support 256 color on windows CMD, PowerShell // lint warn - Name starts with package name type Color256 [2]uint8 type Bit8Color = Color256 // alias @@ -164,9 +164,7 @@ func (c Color256) String() string { } // IsFg color -func (c Color256) IsFg() bool { - return c[1] == AsFg -} +func (c Color256) IsFg() bool { return c[1] == AsFg } // ToFg 256 color func (c Color256) ToFg() Color256 { @@ -175,9 +173,7 @@ func (c Color256) ToFg() Color256 { } // IsBg color -func (c Color256) IsBg() bool { - return c[1] == AsBg -} +func (c Color256) IsBg() bool { return c[1] == AsBg } // ToBg 256 color func (c Color256) ToBg() Color256 { @@ -186,9 +182,7 @@ func (c Color256) ToBg() Color256 { } // IsEmpty value -func (c Color256) IsEmpty() bool { - return c[1] > 1 -} +func (c Color256) IsEmpty() bool { return c[1] > 1 } /************************************************************* * 8bit(256) Style diff --git a/vendor/github.com/gookit/color/color_rgb.go b/vendor/github.com/gookit/color/color_rgb.go index 724cf665..bc129b71 100644 --- a/vendor/github.com/gookit/color/color_rgb.go +++ b/vendor/github.com/gookit/color/color_rgb.go @@ -44,6 +44,7 @@ const ( *************************************************************/ // RGBColor definition. +// Support RGB color on Windows CMD, PowerShell // // The first to third digits represent the color value. 
// The last digit represents the foreground(0), background(1), >1 is unset value @@ -54,8 +55,6 @@ const ( // // 3rd: Fg=0, Bg=1, >1: unset value // RGBColor{30,144,255, 0} // RGBColor{30,144,255, 1} -// -// NOTICE: now support RGB color on Windows CMD, PowerShell type RGBColor [4]uint8 // create an empty RGBColor @@ -251,6 +250,18 @@ func (c RGBColor) String() string { return "" } +// ToBg convert to background color +func (c RGBColor) ToBg() RGBColor { + c[3] = AsBg + return c +} + +// ToFg convert to foreground color +func (c RGBColor) ToFg() RGBColor { + c[3] = AsFg + return c +} + // IsEmpty value func (c RGBColor) IsEmpty() bool { return c[3] > AsBg diff --git a/vendor/github.com/gookit/color/convert.go b/vendor/github.com/gookit/color/convert.go index 39aac7d2..c7103536 100644 --- a/vendor/github.com/gookit/color/convert.go +++ b/vendor/github.com/gookit/color/convert.go @@ -52,6 +52,7 @@ var ( // ---------- basic(16) <=> RGB color convert ---------- // refer from Hyper app + // Tip: only keep foreground color, background color need convert to foreground color for convert to RGB basic2hexMap = map[uint8]string{ 30: "000000", // black 31: "c51e14", // red @@ -61,7 +62,7 @@ var ( 35: "c839c5", // magenta 36: "20c5c6", // cyan 37: "c7c7c7", // white - // - don't add bg color + // - don't add bg color, convert to fg color for convert to RGB // 40: "000000", // black // 41: "c51e14", // red // 42: "1dc121", // green @@ -428,10 +429,11 @@ func HexToRGB(hex string) []int { return HexToRgb(hex) } // HexToRgb convert hex color string to RGB numbers // // Usage: -// rgb := HexToRgb("ccc") // rgb: [204 204 204] -// rgb := HexToRgb("aabbcc") // rgb: [170 187 204] -// rgb := HexToRgb("#aabbcc") // rgb: [170 187 204] -// rgb := HexToRgb("0xad99c0") // rgb: [170 187 204] +// +// rgb := HexToRgb("ccc") // rgb: [204 204 204] +// rgb := HexToRgb("aabbcc") // rgb: [170 187 204] +// rgb := HexToRgb("#aabbcc") // rgb: [170 187 204] +// rgb := HexToRgb("0xad99c0") // rgb: [170 187 204] func HexToRgb(hex string) (rgb []int) { hex = strings.TrimSpace(hex) if hex == "" { @@ -474,6 +476,7 @@ func Rgb2hex(rgb []int) string { return RgbToHex(rgb) } // RgbToHex convert RGB-code to hex-code // // Usage: +// // hex := RgbToHex([]int{170, 187, 204}) // hex: "aabbcc" func RgbToHex(rgb []int) string { hexNodes := make([]string, len(rgb)) @@ -488,10 +491,15 @@ func RgbToHex(rgb []int) string { * 4bit(16) color <=> RGB/True color *************************************************************/ +// BasicToHex convert basic color to hex string. +func BasicToHex(val uint8) string { + val = Bg2Fg(val) + return basic2hexMap[val] +} + // Basic2hex convert basic color to hex string. func Basic2hex(val uint8) string { - val = Fg2Bg(val) - return basic2hexMap[val] + return BasicToHex(val) } // Hex2basic convert hex string to basic color code. @@ -663,6 +671,7 @@ func C256ToRgbV1(val uint8) (rgb []uint8) { // returns r, g, and b in the set [0, 255]. // // Usage: +// // HslIntToRgb(0, 100, 50) // red // HslIntToRgb(120, 100, 50) // lime // HslIntToRgb(120, 100, 25) // dark green @@ -677,6 +686,7 @@ func HslIntToRgb(h, s, l int) (rgb []uint8) { // returns r, g, and b in the set [0, 255]. 
// // Usage: +// // rgbVals := HslToRgb(0, 1, 0.5) // red func HslToRgb(h, s, l float64) (rgb []uint8) { var r, g, b float64 diff --git a/vendor/github.com/gookit/color/style.go b/vendor/github.com/gookit/color/style.go index a009d1d6..353d39f1 100644 --- a/vendor/github.com/gookit/color/style.go +++ b/vendor/github.com/gookit/color/style.go @@ -37,7 +37,8 @@ func (s *Style) Add(cs ...Color) { *s = append(*s, cs...) } -// Render render text +// Render colored text +// // Usage: // // color.New(color.FgGreen).Render("text") @@ -46,8 +47,9 @@ func (s Style) Render(a ...any) string { return RenderCode(s.String(), a...) } -// Renderln render text line. +// Renderln render text with newline. // like Println, will add spaces for each argument +// // Usage: // // color.New(color.FgGreen).Renderln("text", "more") diff --git a/vendor/github.com/gorilla/mux/.editorconfig b/vendor/github.com/gorilla/mux/.editorconfig new file mode 100644 index 00000000..c6b74c3e --- /dev/null +++ b/vendor/github.com/gorilla/mux/.editorconfig @@ -0,0 +1,20 @@ +; https://editorconfig.org/ + +root = true + +[*] +insert_final_newline = true +charset = utf-8 +trim_trailing_whitespace = true +indent_style = space +indent_size = 2 + +[{Makefile,go.mod,go.sum,*.go,.gitmodules}] +indent_style = tab +indent_size = 4 + +[*.md] +indent_size = 4 +trim_trailing_whitespace = false + +eclint_indent_style = unset \ No newline at end of file diff --git a/vendor/github.com/gorilla/mux/.gitignore b/vendor/github.com/gorilla/mux/.gitignore new file mode 100644 index 00000000..84039fec --- /dev/null +++ b/vendor/github.com/gorilla/mux/.gitignore @@ -0,0 +1 @@ +coverage.coverprofile diff --git a/vendor/github.com/gorilla/mux/AUTHORS b/vendor/github.com/gorilla/mux/AUTHORS deleted file mode 100644 index b722392e..00000000 --- a/vendor/github.com/gorilla/mux/AUTHORS +++ /dev/null @@ -1,8 +0,0 @@ -# This is the official list of gorilla/mux authors for copyright purposes. -# -# Please keep the list sorted. - -Google LLC (https://opensource.google.com/) -Kamil Kisielk -Matt Silverlock -Rodrigo Moraes (https://github.com/moraes) diff --git a/vendor/github.com/gorilla/mux/LICENSE b/vendor/github.com/gorilla/mux/LICENSE index 6903df63..bb9d80bc 100644 --- a/vendor/github.com/gorilla/mux/LICENSE +++ b/vendor/github.com/gorilla/mux/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2012-2018 The Gorilla Authors. All rights reserved. +Copyright (c) 2023 The Gorilla Authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/vendor/github.com/gorilla/mux/Makefile b/vendor/github.com/gorilla/mux/Makefile new file mode 100644 index 00000000..98f5ab75 --- /dev/null +++ b/vendor/github.com/gorilla/mux/Makefile @@ -0,0 +1,34 @@ +GO_LINT=$(shell which golangci-lint 2> /dev/null || echo '') +GO_LINT_URI=github.com/golangci/golangci-lint/cmd/golangci-lint@latest + +GO_SEC=$(shell which gosec 2> /dev/null || echo '') +GO_SEC_URI=github.com/securego/gosec/v2/cmd/gosec@latest + +GO_VULNCHECK=$(shell which govulncheck 2> /dev/null || echo '') +GO_VULNCHECK_URI=golang.org/x/vuln/cmd/govulncheck@latest + +.PHONY: golangci-lint +golangci-lint: + $(if $(GO_LINT), ,go install $(GO_LINT_URI)) + @echo "##### Running golangci-lint" + golangci-lint run -v + +.PHONY: gosec +gosec: + $(if $(GO_SEC), ,go install $(GO_SEC_URI)) + @echo "##### Running gosec" + gosec ./... 
+ +.PHONY: govulncheck +govulncheck: + $(if $(GO_VULNCHECK), ,go install $(GO_VULNCHECK_URI)) + @echo "##### Running govulncheck" + govulncheck ./... + +.PHONY: verify +verify: golangci-lint gosec govulncheck + +.PHONY: test +test: + @echo "##### Running tests" + go test -race -cover -coverprofile=coverage.coverprofile -covermode=atomic -v ./... \ No newline at end of file diff --git a/vendor/github.com/gorilla/mux/README.md b/vendor/github.com/gorilla/mux/README.md index 35eea9f1..382513d5 100644 --- a/vendor/github.com/gorilla/mux/README.md +++ b/vendor/github.com/gorilla/mux/README.md @@ -1,12 +1,12 @@ # gorilla/mux -[![GoDoc](https://godoc.org/github.com/gorilla/mux?status.svg)](https://godoc.org/github.com/gorilla/mux) -[![CircleCI](https://circleci.com/gh/gorilla/mux.svg?style=svg)](https://circleci.com/gh/gorilla/mux) -[![Sourcegraph](https://sourcegraph.com/github.com/gorilla/mux/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/mux?badge) +![testing](https://github.com/gorilla/mux/actions/workflows/test.yml/badge.svg) +[![codecov](https://codecov.io/github/gorilla/mux/branch/main/graph/badge.svg)](https://codecov.io/github/gorilla/mux) +[![godoc](https://godoc.org/github.com/gorilla/mux?status.svg)](https://godoc.org/github.com/gorilla/mux) +[![sourcegraph](https://sourcegraph.com/github.com/gorilla/mux/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/mux?badge) -![Gorilla Logo](https://cloud-cdn.questionable.services/gorilla-icon-64.png) -https://www.gorillatoolkit.org/pkg/mux +![Gorilla Logo](https://github.com/gorilla/.github/assets/53367916/d92caabf-98e0-473e-bfbf-ab554ba435e5) Package `gorilla/mux` implements a request router and dispatcher for matching incoming requests to their respective handler. @@ -247,32 +247,25 @@ type spaHandler struct { // file located at the index path on the SPA handler will be served. This // is suitable behavior for serving an SPA (single page application). 
func (h spaHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { - // get the absolute path to prevent directory traversal - path, err := filepath.Abs(r.URL.Path) - if err != nil { - // if we failed to get the absolute path respond with a 400 bad request - // and stop - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - - // prepend the path with the path to the static directory - path = filepath.Join(h.staticPath, path) + // Join internally call path.Clean to prevent directory traversal + path := filepath.Join(h.staticPath, r.URL.Path) - // check whether a file exists at the given path - _, err = os.Stat(path) - if os.IsNotExist(err) { - // file does not exist, serve index.html + // check whether a file exists or is a directory at the given path + fi, err := os.Stat(path) + if os.IsNotExist(err) || fi.IsDir() { + // file does not exist or path is a directory, serve index.html http.ServeFile(w, r, filepath.Join(h.staticPath, h.indexPath)) return - } else if err != nil { - // if we got an error (that wasn't that the file doesn't exist) stating the - // file, return a 500 internal server error and stop + } + + if err != nil { + // if we got an error (that wasn't that the file doesn't exist) stating the + // file, return a 500 internal server error and stop http.Error(w, err.Error(), http.StatusInternalServerError) - return + return } - // otherwise, use http.FileServer to serve the static dir + // otherwise, use http.FileServer to serve the static file http.FileServer(http.Dir(h.staticPath)).ServeHTTP(w, r) } @@ -375,6 +368,19 @@ url, err := r.Get("article").URL("subdomain", "news", "id", "42") ``` +To find all the required variables for a given route when calling `URL()`, the method `GetVarNames()` is available: +```go +r := mux.NewRouter() +r.Host("{domain}"). + Path("/{group}/{item_id}"). + Queries("some_data1", "{some_data1}"). + Queries("some_data2", "{some_data2}"). + Name("article") + +// Will print [domain group item_id some_data1 some_data2] +fmt.Println(r.Get("article").GetVarNames()) + +``` ### Walking Routes The `Walk` function on `mux.Router` can be used to visit all of the routes that are registered on a router. For example, @@ -572,7 +578,7 @@ func (amw *authenticationMiddleware) Middleware(next http.Handler) http.Handler r := mux.NewRouter() r.HandleFunc("/", handler) -amw := authenticationMiddleware{} +amw := authenticationMiddleware{tokenUsers: make(map[string]string)} amw.Populate() r.Use(amw.Middleware) @@ -758,7 +764,8 @@ func TestMetricsHandler(t *testing.T) { rr := httptest.NewRecorder() - // Need to create a router that we can pass the request through so that the vars will be added to the context + // To add the vars to the context, + // we need to create a router through which we can pass the request. router := mux.NewRouter() router.HandleFunc("/metrics/{type}", MetricsHandler) router.ServeHTTP(rr, req) diff --git a/vendor/github.com/gorilla/mux/doc.go b/vendor/github.com/gorilla/mux/doc.go index bd5a38b5..80601351 100644 --- a/vendor/github.com/gorilla/mux/doc.go +++ b/vendor/github.com/gorilla/mux/doc.go @@ -10,18 +10,18 @@ http.ServeMux, mux.Router matches incoming requests against a list of registered routes and calls a handler for the route that matches the URL or other conditions. The main features are: - * Requests can be matched based on URL host, path, path prefix, schemes, - header and query values, HTTP methods or using custom matchers. - * URL hosts, paths and query values can have variables with an optional - regular expression. 
- * Registered URLs can be built, or "reversed", which helps maintaining - references to resources. - * Routes can be used as subrouters: nested routes are only tested if the - parent route matches. This is useful to define groups of routes that - share common conditions like a host, a path prefix or other repeated - attributes. As a bonus, this optimizes request matching. - * It implements the http.Handler interface so it is compatible with the - standard http.ServeMux. + - Requests can be matched based on URL host, path, path prefix, schemes, + header and query values, HTTP methods or using custom matchers. + - URL hosts, paths and query values can have variables with an optional + regular expression. + - Registered URLs can be built, or "reversed", which helps maintaining + references to resources. + - Routes can be used as subrouters: nested routes are only tested if the + parent route matches. This is useful to define groups of routes that + share common conditions like a host, a path prefix or other repeated + attributes. As a bonus, this optimizes request matching. + - It implements the http.Handler interface so it is compatible with the + standard http.ServeMux. Let's start registering a couple of URL paths and handlers: @@ -301,6 +301,5 @@ A more complex authentication middleware, which maps session token to users, cou r.Use(amw.Middleware) Note: The handler chain will be stopped if your middleware doesn't call `next.ServeHTTP()` with the corresponding parameters. This can be used to abort a request if the middleware writer wants to. - */ package mux diff --git a/vendor/github.com/gorilla/mux/mux.go b/vendor/github.com/gorilla/mux/mux.go index 782a34b2..1e089906 100644 --- a/vendor/github.com/gorilla/mux/mux.go +++ b/vendor/github.com/gorilla/mux/mux.go @@ -31,24 +31,26 @@ func NewRouter() *Router { // It implements the http.Handler interface, so it can be registered to serve // requests: // -// var router = mux.NewRouter() +// var router = mux.NewRouter() // -// func main() { -// http.Handle("/", router) -// } +// func main() { +// http.Handle("/", router) +// } // // Or, for Google App Engine, register it in a init() function: // -// func init() { -// http.Handle("/", router) -// } +// func init() { +// http.Handle("/", router) +// } // // This will send all incoming requests to the router. type Router struct { // Configurable Handler to be used when no route matches. + // This can be used to render your own 404 Not Found errors. NotFoundHandler http.Handler // Configurable Handler to be used when the request method does not match the route. + // This can be used to render your own 405 Method Not Allowed errors. MethodNotAllowedHandler http.Handler // Routes to be matched, in order. diff --git a/vendor/github.com/gorilla/mux/regexp.go b/vendor/github.com/gorilla/mux/regexp.go index 0144842b..5d05cfa0 100644 --- a/vendor/github.com/gorilla/mux/regexp.go +++ b/vendor/github.com/gorilla/mux/regexp.go @@ -22,10 +22,10 @@ type routeRegexpOptions struct { type regexpType int const ( - regexpTypePath regexpType = 0 - regexpTypeHost regexpType = 1 - regexpTypePrefix regexpType = 2 - regexpTypeQuery regexpType = 3 + regexpTypePath regexpType = iota + regexpTypeHost + regexpTypePrefix + regexpTypeQuery ) // newRouteRegexp parses a route template and returns a routeRegexp, @@ -195,7 +195,7 @@ func (r *routeRegexp) Match(req *http.Request, match *RouteMatch) bool { // url builds a URL part using the given values. 
func (r *routeRegexp) url(values map[string]string) (string, error) { - urlValues := make([]interface{}, len(r.varsN), len(r.varsN)) + urlValues := make([]interface{}, len(r.varsN)) for k, v := range r.varsN { value, ok := values[v] if !ok { diff --git a/vendor/github.com/gorilla/mux/route.go b/vendor/github.com/gorilla/mux/route.go index 750afe57..e8f11df2 100644 --- a/vendor/github.com/gorilla/mux/route.go +++ b/vendor/github.com/gorilla/mux/route.go @@ -64,8 +64,18 @@ func (r *Route) Match(req *http.Request, match *RouteMatch) bool { match.MatchErr = nil } - matchErr = nil + matchErr = nil // nolint:ineffassign return false + } else { + // Multiple routes may share the same path but use different HTTP methods. For instance: + // Route 1: POST "/users/{id}". + // Route 2: GET "/users/{id}", parameters: "id": "[0-9]+". + // + // The router must handle these cases correctly. For a GET request to "/users/abc" with "id" as "-2", + // The router should return a "Not Found" error as no route fully matches this request. + if match.MatchErr == ErrMethodMismatch { + match.MatchErr = nil + } } } @@ -230,9 +240,9 @@ func (m headerMatcher) Match(r *http.Request, match *RouteMatch) bool { // Headers adds a matcher for request header values. // It accepts a sequence of key/value pairs to be matched. For example: // -// r := mux.NewRouter() -// r.Headers("Content-Type", "application/json", -// "X-Requested-With", "XMLHttpRequest") +// r := mux.NewRouter().NewRoute() +// r.Headers("Content-Type", "application/json", +// "X-Requested-With", "XMLHttpRequest") // // The above route will only match if both request header values match. // If the value is an empty string, it will match any value if the key is set. @@ -255,9 +265,9 @@ func (m headerRegexMatcher) Match(r *http.Request, match *RouteMatch) bool { // HeadersRegexp accepts a sequence of key/value pairs, where the value has regex // support. For example: // -// r := mux.NewRouter() -// r.HeadersRegexp("Content-Type", "application/(text|json)", -// "X-Requested-With", "XMLHttpRequest") +// r := mux.NewRouter().NewRoute() +// r.HeadersRegexp("Content-Type", "application/(text|json)", +// "X-Requested-With", "XMLHttpRequest") // // The above route will only match if both the request header matches both regular expressions. // If the value is an empty string, it will match any value if the key is set. @@ -283,10 +293,10 @@ func (r *Route) HeadersRegexp(pairs ...string) *Route { // // For example: // -// r := mux.NewRouter() -// r.Host("www.example.com") -// r.Host("{subdomain}.domain.com") -// r.Host("{subdomain:[a-z]+}.domain.com") +// r := mux.NewRouter().NewRoute() +// r.Host("www.example.com") +// r.Host("{subdomain}.domain.com") +// r.Host("{subdomain:[a-z]+}.domain.com") // // Variable names must be unique in a given route. They can be retrieved // calling mux.Vars(request). @@ -342,11 +352,11 @@ func (r *Route) Methods(methods ...string) *Route { // // For example: // -// r := mux.NewRouter() -// r.Path("/products/").Handler(ProductsHandler) -// r.Path("/products/{key}").Handler(ProductsHandler) -// r.Path("/articles/{category}/{id:[0-9]+}"). -// Handler(ArticleHandler) +// r := mux.NewRouter().NewRoute() +// r.Path("/products/").Handler(ProductsHandler) +// r.Path("/products/{key}").Handler(ProductsHandler) +// r.Path("/articles/{category}/{id:[0-9]+}"). +// Handler(ArticleHandler) // // Variable names must be unique in a given route. They can be retrieved // calling mux.Vars(request). 
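The reflowed `Route.Path`/`Headers`/`Host` doc comments above keep pointing readers at `mux.Vars(request)` for reading path variables. As a quick illustration (not part of the vendored change; the handler name and address below are hypothetical), a handler registered against one of the documented templates could read its variables like this:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/gorilla/mux"
)

// articleHandler reads the {category} and {id} variables declared by the
// route template registered in main. mux.Vars returns the map[string]string
// that the router populated for the matched request.
func articleHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	fmt.Fprintf(w, "category=%s id=%s\n", vars["category"], vars["id"])
}

func main() {
	r := mux.NewRouter()
	// {id:[0-9]+} constrains the variable with a regular expression, as in
	// the Path examples in the doc comments above.
	r.HandleFunc("/articles/{category}/{id:[0-9]+}", articleHandler)
	http.ListenAndServe(":8080", r)
}
```

Reads from the returned map are safe even when a variable is absent; a missing key simply yields the empty string.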
@@ -377,8 +387,8 @@ func (r *Route) PathPrefix(tpl string) *Route { // It accepts a sequence of key/value pairs. Values may define variables. // For example: // -// r := mux.NewRouter() -// r.Queries("foo", "bar", "id", "{id:[0-9]+}") +// r := mux.NewRouter().NewRoute() +// r.Queries("foo", "bar", "id", "{id:[0-9]+}") // // The above route will only match if the URL contains the defined queries // values, e.g.: ?foo=bar&id=42. @@ -473,11 +483,11 @@ func (r *Route) BuildVarsFunc(f BuildVarsFunc) *Route { // // It will test the inner routes only if the parent route matched. For example: // -// r := mux.NewRouter() -// s := r.Host("www.example.com").Subrouter() -// s.HandleFunc("/products/", ProductsHandler) -// s.HandleFunc("/products/{key}", ProductHandler) -// s.HandleFunc("/articles/{category}/{id:[0-9]+}"), ArticleHandler) +// r := mux.NewRouter().NewRoute() +// s := r.Host("www.example.com").Subrouter() +// s.HandleFunc("/products/", ProductsHandler) +// s.HandleFunc("/products/{key}", ProductHandler) +// s.HandleFunc("/articles/{category}/{id:[0-9]+}"), ArticleHandler) // // Here, the routes registered in the subrouter won't be tested if the host // doesn't match. @@ -497,36 +507,36 @@ func (r *Route) Subrouter() *Router { // It accepts a sequence of key/value pairs for the route variables. For // example, given this route: // -// r := mux.NewRouter() -// r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). -// Name("article") +// r := mux.NewRouter() +// r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). +// Name("article") // // ...a URL for it can be built using: // -// url, err := r.Get("article").URL("category", "technology", "id", "42") +// url, err := r.Get("article").URL("category", "technology", "id", "42") // // ...which will return an url.URL with the following path: // -// "/articles/technology/42" +// "/articles/technology/42" // // This also works for host variables: // -// r := mux.NewRouter() -// r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). -// Host("{subdomain}.domain.com"). -// Name("article") +// r := mux.NewRouter() +// r.HandleFunc("/articles/{category}/{id:[0-9]+}", ArticleHandler). +// Host("{subdomain}.domain.com"). +// Name("article") // -// // url.String() will be "http://news.domain.com/articles/technology/42" -// url, err := r.Get("article").URL("subdomain", "news", -// "category", "technology", -// "id", "42") +// // url.String() will be "http://news.domain.com/articles/technology/42" +// url, err := r.Get("article").URL("subdomain", "news", +// "category", "technology", +// "id", "42") // // The scheme of the resulting url will be the first argument that was passed to Schemes: // -// // url.String() will be "https://example.com" -// r := mux.NewRouter() -// url, err := r.Host("example.com") -// .Schemes("https", "http").URL() +// // url.String() will be "https://example.com" +// r := mux.NewRouter().NewRoute() +// url, err := r.Host("example.com") +// .Schemes("https", "http").URL() // // All variables defined in the route are required, and their values must // conform to the corresponding patterns. 
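The reverse-routing docs above, together with the `GetVarNames` helper added in the next hunk, can be exercised roughly as follows (a sketch only; the route name and values are illustrative, and the printed output assumes the ordering host vars, then path vars, then query vars, as implemented below):

```go
package main

import (
	"fmt"

	"github.com/gorilla/mux"
)

func main() {
	r := mux.NewRouter()
	r.Host("{subdomain}.domain.com").
		Path("/articles/{category}/{id:[0-9]+}").
		Name("article")

	// GetVarNames lists every variable that URL() will expect for this route.
	names, err := r.Get("article").GetVarNames()
	if err != nil {
		panic(err)
	}
	fmt.Println(names) // [subdomain category id]

	// Reverse routing: build a URL from the named route and variable values.
	u, err := r.Get("article").URL(
		"subdomain", "news",
		"category", "technology",
		"id", "42",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(u.String()) // http://news.domain.com/articles/technology/42
}
```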
@@ -718,6 +728,25 @@ func (r *Route) GetHostTemplate() (string, error) { return r.regexp.host.template, nil } +// GetVarNames returns the names of all variables added by regexp matchers +// These can be used to know which route variables should be passed into r.URL() +func (r *Route) GetVarNames() ([]string, error) { + if r.err != nil { + return nil, r.err + } + var varNames []string + if r.regexp.host != nil { + varNames = append(varNames, r.regexp.host.varsN...) + } + if r.regexp.path != nil { + varNames = append(varNames, r.regexp.path.varsN...) + } + for _, regx := range r.regexp.queries { + varNames = append(varNames, regx.varsN...) + } + return varNames, nil +} + // prepareVars converts the route variable pairs into a map. If the route has a // BuildVarsFunc, it is invoked. func (r *Route) prepareVars(pairs ...string) (map[string]string, error) { diff --git a/vendor/github.com/itchyny/gojq/CHANGELOG.md b/vendor/github.com/itchyny/gojq/CHANGELOG.md index cee2ce33..65e605fb 100644 --- a/vendor/github.com/itchyny/gojq/CHANGELOG.md +++ b/vendor/github.com/itchyny/gojq/CHANGELOG.md @@ -1,4 +1,51 @@ # Changelog +## [v0.12.12](https://github.com/itchyny/gojq/compare/v0.12.11..v0.12.12) (2023-03-01) +* fix assignment operator (`=`) with overlapping paths and multiple values (`[[]] | .. = ..`) +* fix crash on multiplying large numbers to an empty string (`9223372036854775807 * ""`) +* improve zsh completion file + +## [v0.12.11](https://github.com/itchyny/gojq/compare/v0.12.10..v0.12.11) (2022-12-24) +* fix crash on assignment operator (`=`) with multiple values (`. = (0,0)`) +* fix `isnormal` and `normals` functions against subnormal numbers + +## [v0.12.10](https://github.com/itchyny/gojq/compare/v0.12.9..v0.12.10) (2022-12-01) +* fix `break` in `try`-`catch` query (`label $x | try break $x catch .`) +* fix path value validation for `getpath` function (`path(getpath([[0]][0]))`) +* fix path value validation for custom iterator functions +* fix `walk` function with argument emitting multiple values (`[1],{x:1} | walk(.,0)`) +* fix `@csv`, `@tsv`, `@sh` to escape the null character (`["\u0000"] | @csv,@tsv,@sh`) +* improve performance of assignment operator (`=`), update-assignment operator (`|=`), + `map_values`, `del`, `delpaths`, `walk`, `ascii_downcase`, and `ascii_upcase` functions + +## [v0.12.9](https://github.com/itchyny/gojq/compare/v0.12.8..v0.12.9) (2022-09-01) +* fix `fromjson` to emit error on unexpected trailing string +* fix path analyzer on variable argument evaluation (`def f($x): .y; path(f(.x))`) +* fix raw input option `--raw-input` (`-R`) to keep carriage returns and support 64KiB+ lines + +## [v0.12.8](https://github.com/itchyny/gojq/compare/v0.12.7..v0.12.8) (2022-06-01) +* implement `gojq.Compare` for comparing values in custom internal functions +* implement `gojq.TypeOf` for obtaining type name of values in custom internal functions +* implement `gojq.Preview` for previewing values for error messages of custom internal functions +* fix query lexer to parse string literals as JSON to support surrogate pairs (`"\ud83d\ude04"`) +* fix priority bug of declared and builtin functions (`def empty: .; null | select(.)`) +* fix string indexing by index out of bounds to emit `null` (`"abc" | .[3]`) +* fix array binding pattern not to match against strings (`"abc" as [$a] ?// $a | $a`) +* fix `sub` and `gsub` functions to emit results in the same order of jq +* fix `fromjson` to keep integer precision (`"10000000000000000" | fromjson + 1`) +* fix stream option to raise error 
against incomplete JSON input +* improve array updating index and string repetition to increase limitations +* improve `mktime` to support nanoseconds, just like `gmtime` and `now` +* improve query lexer to report unterminated string literals +* improve performance of string indexing and slicing by reducing allocations +* improve performance of object and array indexing, slicing, and iteration, + by validating path values by comparing data addresses. This change improves jq + compatibility of path value validation (`{} | {}.x = 0`, `[0] | [.[]][] = 1`). + Also optimize constant indexing and slicing by specialized instruction +* improve performance of `add` (on array of strings), `flatten`, `min`, `max`, + `sort`, `unique`, `join`, `to_entries`, `from_entries`, `indices`, `index`, + `rindex`, `startswith`, `endswith`, `ltrimstr`, `rtrimstr`, `explode`, + `capture`, `sub`, and `gsub` functions + ## [v0.12.7](https://github.com/itchyny/gojq/compare/v0.12.6..v0.12.7) (2022-03-01) * fix precedence of try expression against operators (`try 0 * error(0)`) * fix iterator suffix with optional operator (`0 | .x[]?`) @@ -187,7 +234,7 @@ ## [v0.7.0](https://github.com/itchyny/gojq/compare/v0.6.0..v0.7.0) (2019-12-22) * implement YAML input (`--yaml-input`) and output (`--yaml-output`) * fix pipe in object value -* fix precedence of if, try, reduce and foreach expressions +* fix precedence of `if`, `try`, `reduce` and `foreach` expressions * release from GitHub Actions ## [v0.6.0](https://github.com/itchyny/gojq/compare/v0.5.0..v0.6.0) (2019-08-26) diff --git a/vendor/github.com/itchyny/gojq/Dockerfile b/vendor/github.com/itchyny/gojq/Dockerfile index 7beaf4ed..284ece77 100644 --- a/vendor/github.com/itchyny/gojq/Dockerfile +++ b/vendor/github.com/itchyny/gojq/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.17 AS builder +FROM golang:1.19 AS builder WORKDIR /app COPY . . diff --git a/vendor/github.com/itchyny/gojq/LICENSE b/vendor/github.com/itchyny/gojq/LICENSE index e3fc027c..3f4fcb26 100644 --- a/vendor/github.com/itchyny/gojq/LICENSE +++ b/vendor/github.com/itchyny/gojq/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2019-2022 itchyny +Copyright (c) 2019-2023 itchyny Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/github.com/itchyny/gojq/Makefile b/vendor/github.com/itchyny/gojq/Makefile index b5de9126..b7cdb400 100644 --- a/vendor/github.com/itchyny/gojq/Makefile +++ b/vendor/github.com/itchyny/gojq/Makefile @@ -1,8 +1,8 @@ BIN := gojq VERSION := $$(make -s show-version) VERSION_PATH := cli -CURRENT_REVISION := $(shell git rev-parse --short HEAD) -BUILD_LDFLAGS := "-s -w -X github.com/itchyny/$(BIN)/cli.revision=$(CURRENT_REVISION)" +CURRENT_REVISION = $(shell git rev-parse --short HEAD) +BUILD_LDFLAGS = "-s -w -X github.com/itchyny/$(BIN)/cli.revision=$(CURRENT_REVISION)" GOBIN ?= $(shell go env GOPATH)/bin SHELL := /bin/bash @@ -19,7 +19,7 @@ build-dev: parser.go builtin.go .PHONY: build-debug build-debug: parser.go builtin.go - go build -tags debug -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN) + go build -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN) builtin.go: builtin.jq parser.go.y parser.go query.go operator.go _tools/* GOOS= GOARCH= go generate @@ -33,26 +33,26 @@ $(GOBIN)/goyacc: .PHONY: install install: - go install -ldflags=$(BUILD_LDFLAGS) ./... 
+ go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN) .PHONY: install-dev install-dev: parser.go builtin.go - go install -ldflags=$(BUILD_LDFLAGS) ./... + go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN) .PHONY: install-debug install-debug: parser.go builtin.go - go install -tags debug -ldflags=$(BUILD_LDFLAGS) ./... + go install -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN) .PHONY: show-version show-version: $(GOBIN)/gobump - @gobump show -r $(VERSION_PATH) + @gobump show -r "$(VERSION_PATH)" $(GOBIN)/gobump: @go install github.com/x-motemen/gobump/cmd/gobump@latest .PHONY: cross cross: $(GOBIN)/goxz CREDITS - goxz -n $(BIN) -pv=v$(VERSION) -include _$(BIN) -arch=amd64,arm64 \ + goxz -n $(BIN) -pv=v$(VERSION) -include _$(BIN) \ -build-ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN) $(GOBIN)/goxz: @@ -72,7 +72,7 @@ test: build .PHONY: lint lint: $(GOBIN)/staticcheck go vet ./... - staticcheck -checks all,-ST1000 -tags debug ./... + staticcheck -checks all -tags gojq_debug ./... $(GOBIN)/staticcheck: go install honnef.co/go/tools/cmd/staticcheck@latest @@ -89,27 +89,15 @@ clean: .PHONY: update update: export GOPROXY=direct update: - rm -f go.sum && go get -u -d ./... && go get -d github.com/mattn/go-runewidth@v0.0.9 && go mod tidy - sed -i.bak '/require (/,/)/d' go.dev.mod && rm -f go.dev.{sum,mod.bak} + go get -u -d ./... && go mod tidy + go mod edit -modfile=go.dev.mod -droprequire=github.com/itchyny/{astgen,timefmt}-go go get -u -d -modfile=go.dev.mod github.com/itchyny/{astgen,timefmt}-go && go generate .PHONY: bump bump: $(GOBIN)/gobump -ifneq ($(shell git status --porcelain),) - $(error git workspace is dirty) -endif -ifneq ($(shell git rev-parse --abbrev-ref HEAD),main) - $(error current branch is not main) -endif + test -z "$$(git status --porcelain || echo .)" + test "$$(git branch --show-current)" = "main" @gobump up -w "$(VERSION_PATH)" git commit -am "bump up version to $(VERSION)" git tag "v$(VERSION)" - git push origin main - git push origin "refs/tags/v$(VERSION)" - -.PHONY: upload -upload: $(GOBIN)/ghr - ghr "v$(VERSION)" goxz - -$(GOBIN)/ghr: - go install github.com/tcnksm/ghr@latest + git push --atomic origin main tag "v$(VERSION)" diff --git a/vendor/github.com/itchyny/gojq/README.md b/vendor/github.com/itchyny/gojq/README.md index baba061f..6370e440 100644 --- a/vendor/github.com/itchyny/gojq/README.md +++ b/vendor/github.com/itchyny/gojq/README.md @@ -75,11 +75,11 @@ docker run -i --rm ghcr.io/itchyny/gojq ## Difference to jq - gojq is purely implemented with Go language and is completely portable. jq depends on the C standard library so the availability of math functions depends on the library. jq also depends on the regular expression library and it makes build scripts complex. - gojq implements nice error messages for invalid query and JSON input. The error message of jq is sometimes difficult to tell where to fix the query. -- gojq does not keep the order of object keys. I understand this might cause problems for some scripts but basically, we should not rely on the order of object keys. Due to this limitation, gojq does not have `keys_unsorted` function and `--sort-keys` (`-S`) option. I would implement when ordered map is implemented in the standard library of Go but I'm less motivated. Also, gojq assumes only valid JSON input while jq deals with some JSON extensions; `NaN`, `Infinity` and `[000]`. -- gojq supports arbitrary-precision integer calculation while jq does not. This is important to keep the precision of numeric IDs or nanosecond values. 
You can also use gojq to solve some mathematical problems which require big integers. Note that mathematical functions convert integers to floating-point numbers; only addition, subtraction, multiplication, modulo operation, and division (when divisible) keep integer precisions. When you want to calculate floor division of big integers, use `def intdiv($x; $y): ($x - $x % $y) / $y;`, instead of `$x / $y`. -- gojq fixes various bugs of jq. gojq correctly deletes elements of arrays by `|= empty` ([jq#2051](https://github.com/stedolan/jq/issues/2051)). gojq fixes `try`/`catch` handling ([jq#1859](https://github.com/stedolan/jq/issues/1859), [jq#1885](https://github.com/stedolan/jq/issues/1885), [jq#2140](https://github.com/stedolan/jq/issues/2140)). gojq fixes `nth/2` to output nothing when the count is equal to or larger than the stream size ([jq#1867](https://github.com/stedolan/jq/issues/1867)). gojq consistently counts by characters (not by bytes) in `index`, `rindex`, and `indices` functions; `"12345" | .[index("3"):]` results in `"345"` ([jq#1430](https://github.com/stedolan/jq/issues/1430), [jq#1624](https://github.com/stedolan/jq/issues/1624)), and supports string indexing; `"abcde"[2]` ([jq#1520](https://github.com/stedolan/jq/issues/1520)). gojq accepts indexing query `.e0` ([jq#1526](https://github.com/stedolan/jq/issues/1526), [jq#1651](https://github.com/stedolan/jq/issues/1651)), and allows `gsub` to handle patterns including `"^"` ([jq#2148](https://github.com/stedolan/jq/issues/2148)). gojq improves variable lexer to allow using keywords for variable names, especially in binding patterns, also disallows spaces after `$` ([jq#526](https://github.com/stedolan/jq/issues/526)). gojq fixes handling files with no newline characters at the end ([jq#2374](https://github.com/stedolan/jq/issues/2374)). -- gojq implements `@uri` to escape all the reserved characters defined in RFC 3986, Sec. 2.2 ([jq#1506](https://github.com/stedolan/jq/issues/1506)), and fixes `@base64d` to allow binary string as the decoded string ([jq#1931](https://github.com/stedolan/jq/issues/1931)). gojq improves time formatting and parsing, deals with `%f` in `strftime` and `strptime` ([jq#1409](https://github.com/stedolan/jq/issues/1409)), parses timezone offsets with `fromdate` and `fromdateiso8601` ([jq#1053](https://github.com/stedolan/jq/issues/1053)), supports timezone name/offset with `%Z`/`%z` in `strptime` ([jq#929](https://github.com/stedolan/jq/issues/929), [jq#2195](https://github.com/stedolan/jq/issues/2195)), and looks up correct timezone during daylight saving time on formatting with `%Z` ([jq#1912](https://github.com/stedolan/jq/issues/1912)). -- gojq does not support some functions intentionally; `get_jq_origin`, `get_prog_origin`, `get_search_list` (unstable, not listed in jq document), `input_line_number`, `$__loc__` (performance issue), `recurse_down` (deprecated in jq). gojq does not support some flags; `--ascii-output, -a` (performance issue), `--seq` (not used commonly), `--sort-keys, -S` (sorts by default because `map[string]interface{}` does not keep the order), `--unbuffered` (unbuffered by default). gojq normalizes floating-point numbers to fit to double-precision floating-point numbers. gojq does not support some regular expression flags (regular expression engine differences). gojq does not support BOM (`encoding/json` does not support this). gojq disallows using keywords for function names (declaration of `def true: .;` is a confusing query). 
+- gojq does not keep the order of object keys. I understand this might cause problems for some scripts but basically, we should not rely on the order of object keys. Due to this limitation, gojq does not have `keys_unsorted` function and `--sort-keys` (`-S`) option. I would implement when ordered map is implemented in the standard library of Go but I'm less motivated. +- gojq supports arbitrary-precision integer calculation while jq does not; jq loses the precision of large integers when calculation is involved. Note that even with gojq, all mathematical functions, including `floor` and `round`, convert integers to floating-point numbers; only addition, subtraction, multiplication, modulo, and division operators (when divisible) keep the integer precision. To calculate floor division of integers without losing the precision, use `def idivide($n): (. - . % $n) / $n;`. To round down floating-point numbers to integers, use `def ifloor: floor | tostring | tonumber;`, but note that this function does not work with large floating-point numbers and also loses the precision of large integers. +- gojq fixes various bugs of jq. gojq correctly deletes elements of arrays by `|= empty` ([jq#2051](https://github.com/stedolan/jq/issues/2051)). gojq fixes `try`/`catch` handling ([jq#1859](https://github.com/stedolan/jq/issues/1859), [jq#1885](https://github.com/stedolan/jq/issues/1885), [jq#2140](https://github.com/stedolan/jq/issues/2140)). gojq fixes `nth/2` to output nothing when the count is equal to or larger than the stream size ([jq#1867](https://github.com/stedolan/jq/issues/1867)). gojq consistently counts by characters (not by bytes) in `index`, `rindex`, and `indices` functions; `"12345" | .[index("3"):]` results in `"345"` ([jq#1430](https://github.com/stedolan/jq/issues/1430), [jq#1624](https://github.com/stedolan/jq/issues/1624)). gojq handles overlapping occurrence differently in `rindex` and `indices`; `"ababa" | [rindex("aba"), indices("aba")]` results in `[2,[0,2]]` ([jq#2433](https://github.com/stedolan/jq/issues/2433)). gojq supports string indexing; `"abcde"[2]` ([jq#1520](https://github.com/stedolan/jq/issues/1520)). gojq accepts indexing query `.e0` ([jq#1526](https://github.com/stedolan/jq/issues/1526), [jq#1651](https://github.com/stedolan/jq/issues/1651)), and allows `gsub` to handle patterns including `"^"` ([jq#2148](https://github.com/stedolan/jq/issues/2148)). gojq improves variable lexer to allow using keywords for variable names, especially in binding patterns, also disallows spaces after `$` ([jq#526](https://github.com/stedolan/jq/issues/526)). gojq fixes handling files with no newline characters at the end ([jq#2374](https://github.com/stedolan/jq/issues/2374)). +- gojq truncates down floating-point numbers on indexing (`[0] | .[0.5]` results in `0` not `null`), and slicing (`[0,1,2] | .[0.5:1.5]` results in `[0]` not `[0,1]`). gojq parses unary operators with higher precedence than variable binding (`[-1 as $x | 1,$x]` results in `[1,-1]` not `[-1,-1]`). gojq implements `@uri` to escape all the reserved characters defined in RFC 3986, Sec. 2.2 ([jq#1506](https://github.com/stedolan/jq/issues/1506)), and fixes `@base64d` to allow binary string as the decoded string ([jq#1931](https://github.com/stedolan/jq/issues/1931)). 
gojq improves time formatting and parsing; deals with `%f` in `strftime` and `strptime` ([jq#1409](https://github.com/stedolan/jq/issues/1409)), parses timezone offsets with `fromdate` and `fromdateiso8601` ([jq#1053](https://github.com/stedolan/jq/issues/1053)), supports timezone name/offset with `%Z`/`%z` in `strptime` ([jq#929](https://github.com/stedolan/jq/issues/929), [jq#2195](https://github.com/stedolan/jq/issues/2195)), and looks up correct timezone during daylight saving time on formatting with `%Z` ([jq#1912](https://github.com/stedolan/jq/issues/1912)). gojq supports nanoseconds in date and time functions. +- gojq does not support some functions intentionally; `get_jq_origin`, `get_prog_origin`, `get_search_list` (unstable, not listed in jq document), `input_line_number`, `$__loc__` (performance issue), `recurse_down` (deprecated in jq). gojq does not support some flags; `--ascii-output, -a` (performance issue), `--seq` (not used commonly), `--sort-keys, -S` (sorts by default because `map[string]any` does not keep the order), `--unbuffered` (unbuffered by default). gojq does not parse JSON extensions supported by jq; `NaN`, `Infinity`, and `[000]`. gojq normalizes floating-point numbers to fit to double-precision floating-point numbers. gojq does not support or behaves differently with some regular expression metacharacters and flags (regular expression engine differences). gojq does not support BOM (`encoding/json` does not support this). gojq disallows using keywords for function names (`def true: .; true` is a confusing query), and module name prefixes in function declarations (using module prefixes like `def m::f: .;` is undocumented). - gojq supports reading from YAML input (`--yaml-input`) while jq does not. gojq also supports YAML output (`--yaml-output`). ### Color configuration @@ -109,7 +109,7 @@ func main() { if err != nil { log.Fatalln(err) } - input := map[string]interface{}{"foo": []interface{}{1, 2, 3}} + input := map[string]any{"foo": []any{1, 2, 3}} iter := query.Run(input) // or query.RunWithContext for { v, ok := iter.Next() @@ -127,10 +127,10 @@ func main() { - Firstly, use [`gojq.Parse(string) (*Query, error)`](https://pkg.go.dev/github.com/itchyny/gojq#Parse) to get the query from a string. - Secondly, get the result iterator - using [`query.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Query.Run) or [`query.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Query.RunWithContext) - - or alternatively, compile the query using [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) and then [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) or [`code.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Code.RunWithContext). You can reuse the `*Code` against multiple inputs to avoid compilation of the same query. - - In either case, you cannot use custom type values as the query input. The type should be `[]interface{}` for an array and `map[string]interface{}` for a map (just like decoded to an `interface{}` using the [encoding/json](https://golang.org/pkg/encoding/json/) package). You can't use `[]int` or `map[string]string`, for example. If you want to query your custom struct, marshal to JSON, unmarshal to `interface{}` and use it as the query input. -- Thirdly, iterate through the results using [`iter.Next() (interface{}, bool)`](https://pkg.go.dev/github.com/itchyny/gojq#Iter). The iterator can emit an error so make sure to handle it. 
The method returns `true` with results, and `false` when the iterator terminates. - - The return type is not `(interface{}, error)` because iterators can emit multiple errors and you can continue after an error. It is difficult for the iterator to tell the termination in this situation. + - or alternatively, compile the query using [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) and then [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) or [`code.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Code.RunWithContext). You can reuse the `*Code` against multiple inputs to avoid compilation of the same query. But for arguments of `code.Run`, do not give values sharing same data between multiple calls. + - In either case, you cannot use custom type values as the query input. The type should be `[]any` for an array and `map[string]any` for a map (just like decoded to an `any` using the [encoding/json](https://golang.org/pkg/encoding/json/) package). You can't use `[]int` or `map[string]string`, for example. If you want to query your custom struct, marshal to JSON, unmarshal to `any` and use it as the query input. +- Thirdly, iterate through the results using [`iter.Next() (any, bool)`](https://pkg.go.dev/github.com/itchyny/gojq#Iter). The iterator can emit an error so make sure to handle it. The method returns `true` with results, and `false` when the iterator terminates. + - The return type is not `(any, error)` because iterators can emit multiple errors and you can continue after an error. It is difficult for the iterator to tell the termination in this situation. - Note that the result iterator may emit infinite number of values; `repeat(0)` and `range(infinite)`. It may stuck with no output value; `def f: f; f`. Use `RunWithContext` when you want to limit the execution time. [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) allows to configure the following compiler options. 
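The gojq README changes above switch the documented input types to `any`/`map[string]any` and restate the Parse → Run → iterate flow. A minimal sketch of that flow under the updated conventions (the query string and input are illustrative, not taken from this diff):

```go
package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	// Parse the query once; reuse it (or a compiled *Code) across many inputs.
	query, err := gojq.Parse(".foo[] | select(. > 1)")
	if err != nil {
		log.Fatalln(err)
	}

	// Inputs must use decoded-JSON shapes: []any for arrays, map[string]any for objects.
	input := map[string]any{"foo": []any{1, 2, 3}}

	iter := query.Run(input)
	for {
		v, ok := iter.Next()
		if !ok {
			break // iterator exhausted
		}
		if err, isErr := v.(error); isErr {
			// Errors are emitted as values; iteration may continue afterwards.
			log.Println(err)
			continue
		}
		fmt.Println(v) // prints 2, then 3
	}
}
```

Errors arrive as ordinary values from `Next`, which matches the README's point that the iterator cannot use an `(any, error)` return because evaluation can continue past an error.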
diff --git a/vendor/github.com/itchyny/gojq/_gojq b/vendor/github.com/itchyny/gojq/_gojq index 4c94718a..d403a314 100644 --- a/vendor/github.com/itchyny/gojq/_gojq +++ b/vendor/github.com/itchyny/gojq/_gojq @@ -2,31 +2,42 @@ _gojq() { - _arguments -C \ - '(-c --compact-output)'{-c,--compact-output}'[compact output]' \ - '(-r --raw-output)'{-r,--raw-output}'[output raw strings]' \ - '(-j --join-output)'{-j,--join-output}'[stop printing a newline after each output]' \ - '(-0 --nul-output)'{-0,--nul-output}'[print NUL after each output]' \ - '(-C --color-output)'{-C,--color-output}'[colorize output even if piped]' \ - '(-M --monochrome-output)'{-M,--monochrome-output}'[stop colorizing output]' \ - '(--yaml-output)'--yaml-output'[output by YAML]' \ - '(--indent)'--indent'[number of spaces for indentation]:indentation count' \ - '(--tab)'--tab'[use tabs for indentation]' \ + _arguments -s -S \ + '(-r --raw-output -j --join-output -0 --nul-output)'{-r,--raw-output}'[output raw strings]' \ + '(-r --raw-output -j --join-output -0 --nul-output)'{-j,--join-output}'[output without newlines]' \ + '(-r --raw-output -j --join-output -0 --nul-output)'{-0,--nul-output}'[output with NUL character]' \ + '(-c --compact-output --indent --tab --yaml-output)'{-c,--compact-output}'[output without pretty-printing]' \ + '(-c --compact-output --tab --yaml-output)--indent=[number of spaces for indentation]:indentation count:(2 4 8)' \ + '(-c --compact-output --indent --yaml-output)--tab[use tabs for indentation]' \ + '(-c --compact-output --indent --tab )--yaml-output[output in YAML format]' \ + '(-C --color-output -M --monochrome-output)'{-C,--color-output}'[output with colors even if piped]' \ + '(-C --color-output -M --monochrome-output)'{-M,--monochrome-output}'[output without colors]' \ '(-n --null-input)'{-n,--null-input}'[use null as input value]' \ - '(-R --raw-input)'{-R,--raw-input}'[read input as raw strings]' \ + '(-R --raw-input --stream --yaml-input)'{-R,--raw-input}'[read input as raw strings]' \ + '(-R --raw-input --yaml-input)--stream[parse input in stream fashion]' \ + '(-R --raw-input --stream )--yaml-input[read input as YAML format]' \ '(-s --slurp)'{-s,--slurp}'[read all inputs into an array]' \ - '(--stream)'--stream'[parse input in stream fashion]' \ - '(--yaml-input)'--yaml-input'[read input as YAML]' \ - '(-f --from-file)'{-f,--from-file}'[load query from file]:filename of jq query:_files' \ - '(-L)'-L'[directory to search modules from]:module directory:_directories' \ - '(--arg)'--arg'[set variable to string value]:variable name:' \ - '(--argjson)'--argjson'[set variable to JSON value]:variable name:' \ - '(--slurpfile)'--slurpfile'[set variable to the JSON contents of the file]:variable name:' \ - '(--rawfile)'--rawfile'[set variable to the contents of the file]:variable name:' \ - '(--args)'--args'[consume remaining arguments as positional string values]' \ - '(--jsonargs)'--jsonargs'[consume remaining arguments as positional JSON values]' \ + '(-f --from-file 1)'{-f,--from-file}='[load query from file]:filename of jq query:_files' \ + '*-L=[directory to search modules from]:module directory:_directories' \ + '*--arg[set a string value to a variable]:variable name: :string value' \ + '*--argjson[set a JSON value to a variable]:variable name: :JSON value' \ + '*--slurpfile[set the JSON contents of a file to a variable]:variable name: :JSON file:_files' \ + '*--rawfile[set the contents of a file to a variable]:variable name: :file:_files' \ + '*--args[consume remaining arguments as 
positional string values]' \ + '*--jsonargs[consume remaining arguments as positional JSON values]' \ '(-e --exit-status)'{-e,--exit-status}'[exit 1 when the last value is false or null]' \ - '(-v --version)'{-v,--version}'[print version]' \ - '(-h --help)'{-h,--help}'[print help]' \ - && ret=0 + '(- 1 *)'{-v,--version}'[display version information]' \ + '(- 1 *)'{-h,--help}'[display help information]' \ + '1: :_guard "^-([[:alpha:]0]#|-*)" "jq query"' \ + '*: :_gojq_args' +} + +_gojq_args() { + if (($words[(I)--args] > $words[(I)--jsonargs])); then + _message 'string value' + elif (($words[(I)--args] < $words[(I)--jsonargs])); then + _message 'JSON value' + else + _arguments '*:input file:_files' + fi } diff --git a/vendor/github.com/itchyny/gojq/builtin.go b/vendor/github.com/itchyny/gojq/builtin.go index 65f9e234..ccf31358 100644 --- a/vendor/github.com/itchyny/gojq/builtin.go +++ b/vendor/github.com/itchyny/gojq/builtin.go @@ -7,78 +7,61 @@ func init() { "IN": []*FuncDef{&FuncDef{Name: "IN", Args: []string{"s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "s"}, Op: OpEq, Right: &Query{Func: "."}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "IN", Args: []string{"src", "s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "src"}, Op: OpEq, Right: &Query{Func: "s"}}, &Query{Func: "."}}}}}}}, "INDEX": []*FuncDef{&FuncDef{Name: "INDEX", Args: []string{"stream", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "stream"}}, Pattern: &Pattern{Name: "$row"}, Start: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$row"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "idx_expr"}, Op: OpPipe, Right: &Query{Func: "tostring"}}}}}}, Op: OpAssign, Right: &Query{Func: "$row"}}}}}}, &FuncDef{Name: "INDEX", Args: []string{"idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "INDEX", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "idx_expr"}}}}}}}, "JOIN": []*FuncDef{&FuncDef{Name: "JOIN", Args: []string{"$idx", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr", "join_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, 
SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "join_expr"}}}}}, - "all": []*FuncDef{&FuncDef{Name: "all", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "all", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "all", Args: []string{"g", "y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "y"}, Op: OpAnd, Right: &Query{Func: "empty"}}}}}}}}}, - "any": []*FuncDef{&FuncDef{Name: "any", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "any", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "any", Args: []string{"g", "y"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "y"}, Op: OpOr, Right: &Query{Func: "empty"}}}}}}}, Op: OpPipe, Right: &Query{Func: "not"}}}}, + "_assign": []*FuncDef{}, + "_modify": []*FuncDef{}, + "all": []*FuncDef{&FuncDef{Name: "all", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Func: "."}}}}}}, &FuncDef{Name: "all", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "all", Args: []string{"g", "y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "y"}, Op: OpPipe, Right: &Query{Func: "not"}}}}}}}}}}}}}, + "any": []*FuncDef{&FuncDef{Name: "any", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Func: "."}}}}}}, &FuncDef{Name: "any", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "any", Args: []string{"g", "y"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "y"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "not"}}}}, "arrays": []*FuncDef{&FuncDef{Name: "arrays", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}}}}}}}, - "ascii_downcase": []*FuncDef{&FuncDef{Name: 
"ascii_downcase", Body: &Query{Left: &Query{Func: "explode"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeNumber, Number: "65"}}, Op: OpLe, Right: &Query{Func: "."}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "90"}}}}, Then: &Query{Left: &Query{Func: "."}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "32"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "implode"}}}}}, - "ascii_upcase": []*FuncDef{&FuncDef{Name: "ascii_upcase", Body: &Query{Left: &Query{Func: "explode"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeNumber, Number: "97"}}, Op: OpLe, Right: &Query{Func: "."}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "122"}}}}, Then: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "32"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "implode"}}}}}, - "assign": []*FuncDef{&FuncDef{Name: "_assign", Args: []string{"ps", "$v"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "ps"}}}}, Pattern: &Pattern{Name: "$p"}, Start: &Query{Func: "."}, Update: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Func: "$p"}, &Query{Func: "$v"}}}}}}}}}}, "booleans": []*FuncDef{&FuncDef{Name: "booleans", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "boolean"}}}}}}}}}}, - "capture": []*FuncDef{&FuncDef{Name: "capture", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "capture", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "capture", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpNe, Right: &Query{Func: "null"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "add"}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}}}}}, - "combinations": []*FuncDef{&FuncDef{Name: "combinations", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: 
&Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "combinations"}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$y"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}, Op: OpAdd, Right: &Query{Func: "$y"}}}}}}}}}}}}}}}}}, &FuncDef{Name: "combinations", Args: []string{"n"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$dot"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "range", Args: []*Query{&Query{Func: "n"}}}}}, Op: OpPipe, Right: &Query{Func: "$dot"}}}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}}}}}}, + "capture": []*FuncDef{&FuncDef{Name: "capture", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "capture", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "capture", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Func: "_capture"}}}}, + "combinations": []*FuncDef{&FuncDef{Name: "combinations", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}}}}}}}}}}}, &FuncDef{Name: "combinations", Args: []string{"n"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "limit", Args: []*Query{&Query{Func: "n"}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{&Query{Func: "."}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}, "del": []*FuncDef{&FuncDef{Name: "del", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "f"}}}}}}}}}}}}}}, - "endswith": []*FuncDef{&FuncDef{Name: "endswith", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: 
&Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Then: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}}}}}, IsSlice: true}}}, Op: OpEq, Right: &Query{Func: "$x"}}, Else: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "endswith"}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "endswith"}}}}}}}}}}}}, "finites": []*FuncDef{&FuncDef{Name: "finites", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isfinite"}}}}}}}, "first": []*FuncDef{&FuncDef{Name: "first", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, &FuncDef{Name: "first", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}}, - "flatten": []*FuncDef{&FuncDef{Name: "_flatten", Args: []string{"$x"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "$x"}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Left: &Query{Func: "$x"}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "."}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "add"}}}, &FuncDef{Name: "flatten", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "flatten depth must not be negative"}}}}}}}, Else: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Func: "$x"}}}}}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}}, &FuncDef{Name: "flatten", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_flatten", Args: []*Query{&Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}, - "from_entries": []*FuncDef{&FuncDef{Name: "from_entries", Body: &Query{Left: 
&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "key"}}}, Op: OpAlt, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Key"}}}, Op: OpAlt, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Name"}}}}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "value"}}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "value"}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "Value"}}}}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "add"}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}}}}, "fromdate": []*FuncDef{&FuncDef{Name: "fromdate", Body: &Query{Func: "fromdateiso8601"}}}, "fromdateiso8601": []*FuncDef{&FuncDef{Name: "fromdateiso8601", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strptime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%S%z"}}}}}}}, Op: OpPipe, Right: &Query{Func: "mktime"}}}}, - "fromstream": []*FuncDef{&FuncDef{Name: "fromstream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "x", Val: &ObjectVal{Queries: []*Query{&Query{Func: "null"}}}}, &ObjectKeyVal{Key: "e", Val: &ObjectVal{Queries: []*Query{&Query{Func: "false"}}}}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$init"}}, Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "f"}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "$init"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Func: "$init"}, Else: &Query{Func: "."}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "2"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "x"}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: 
TermTypeNumber, Number: "0"}}}}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "x"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}, + "fromstream": []*FuncDef{&FuncDef{Name: "fromstream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "x", Val: &ObjectVal{Queries: []*Query{&Query{Func: "null"}}}}, &ObjectKeyVal{Key: "e", Val: &ObjectVal{Queries: []*Query{&Query{Func: "false"}}}}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$init"}}, Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "f"}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "$init"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Func: "$init"}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "2"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "x"}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: 
&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "x"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}, "group_by": []*FuncDef{&FuncDef{Name: "group_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_group_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}}, "gsub": []*FuncDef{&FuncDef{Name: "gsub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, &FuncDef{Name: "gsub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}}}, "in": []*FuncDef{&FuncDef{Name: "in", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}}, - "index": []*FuncDef{&FuncDef{Name: "index", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_lindex", Args: []*Query{&Query{Func: "$x"}}}}}}}, - "indices": []*FuncDef{&FuncDef{Name: "indices", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_indices", Args: []*Query{&Query{Func: "$x"}}}}}}}, "inputs": []*FuncDef{&FuncDef{Name: "inputs", Body: &Query{Term: &Term{Type: TermTypeTry, Try: &Try{Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{&Query{Func: "input"}}}}}, Catch: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "break"}}}}, Then: &Query{Func: "empty"}, Else: &Query{Func: "error"}}}}}}}}}, "inside": []*FuncDef{&FuncDef{Name: "inside", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "contains", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}}, "isempty": []*FuncDef{&FuncDef{Name: "isempty", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "false"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}, Op: OpComma, Right: &Query{Func: "true"}}}}}}}, "iterables": []*FuncDef{&FuncDef{Name: "iterables", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, 
Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}}, - "join": []*FuncDef{&FuncDef{Name: "join", Args: []string{"$x"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_join", Args: []*Query{&Query{Func: "$x"}}}}}}}}, "last": []*FuncDef{&FuncDef{Name: "last", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}, &FuncDef{Name: "last", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "null"}, Update: &Query{Func: "$item"}}}}}}, "leaf_paths": []*FuncDef{&FuncDef{Name: "leaf_paths", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "paths", Args: []*Query{&Query{Func: "scalars"}}}}}}}, "limit": []*FuncDef{&FuncDef{Name: "limit", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}, Else: &Query{Func: "empty"}}}}}}}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Func: "empty"}}}, Else: &Query{Func: "g"}}}}}}, - "ltrimstr": []*FuncDef{&FuncDef{Name: "ltrimstr", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "startswith", Args: []*Query{&Query{Func: "$x"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}, IsSlice: true}}}}}}}}, "map": []*FuncDef{&FuncDef{Name: "map", Args: []string{"f"}, Body: &Query{Term: &Term{Type: 
TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}}, "map_values": []*FuncDef{&FuncDef{Name: "map_values", Args: []string{"f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpModify, Right: &Query{Func: "f"}}}}, - "match": []*FuncDef{&FuncDef{Name: "match", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "match", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "false"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}}, - "max": []*FuncDef{&FuncDef{Name: "max", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "max_by", Args: []*Query{&Query{Func: "."}}}}}}}, + "match": []*FuncDef{&FuncDef{Name: "match", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "match", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "false"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}, "max_by": []*FuncDef{&FuncDef{Name: "max_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_max_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}}, - "min": []*FuncDef{&FuncDef{Name: "min", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "min_by", Args: []*Query{&Query{Func: "."}}}}}}}, "min_by": []*FuncDef{&FuncDef{Name: "min_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_min_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}}, - "modify": []*FuncDef{&FuncDef{Name: "_modify", Args: []string{"ps", "f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "ps"}}}}, Pattern: &Pattern{Name: "$p"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}, Update: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpAdd, Right: &Query{Func: "$p"}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$q"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Func: "$q"}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$q"}}}}}, Op: OpPipe, 
Right: &Query{Func: "f"}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}}}}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$p"}}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}}}}}}}}}, "normals": []*FuncDef{&FuncDef{Name: "normals", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isnormal"}}}}}}}, "not": []*FuncDef{&FuncDef{Name: "not", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "."}, Then: &Query{Func: "false"}, Else: &Query{Func: "true"}}}}}}, - "nth": []*FuncDef{&FuncDef{Name: "nth", Args: []string{"$n"}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}}}}}, &FuncDef{Name: "nth", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "nth doesn't support negative indices"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Op: OpOr, Right: &Query{Func: "empty"}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}}}}}}}}, + "nth": []*FuncDef{&FuncDef{Name: "nth", Args: []string{"$n"}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}}}}}, &FuncDef{Name: "nth", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "nth doesn't support negative indices"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: 
TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Left: &Query{Func: "$n"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}}, "nulls": []*FuncDef{&FuncDef{Name: "nulls", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Func: "null"}}}}}}}}, "numbers": []*FuncDef{&FuncDef{Name: "numbers", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "number"}}}}}}}}}}, "objects": []*FuncDef{&FuncDef{Name: "objects", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}, - "paths": []*FuncDef{&FuncDef{Name: "paths", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{&Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}}, &FuncDef{Name: "paths", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "paths"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}}}}}}}}}}}}}}}, + "paths": []*FuncDef{&FuncDef{Name: "paths", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: ".."}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}}}}, &FuncDef{Name: "paths", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, 
Func: &Func{Name: "paths"}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}, Op: OpPipe, Right: &Query{Func: "$p"}}}}}}}}}, "range": []*FuncDef{&FuncDef{Name: "range", Args: []string{"$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}, &Query{Func: "$end"}, &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Func: "$start"}, &Query{Func: "$end"}, &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end", "$step"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Func: "$start"}, &Query{Func: "$end"}, &Query{Func: "$step"}}}}}}}, "recurse": []*FuncDef{&FuncDef{Name: "recurse", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}}}, &FuncDef{Name: "recurse", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}, Func: "r"}}, &FuncDef{Name: "recurse", Args: []string{"f", "cond"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "cond"}}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}}, Func: "r"}}}, "repeat": []*FuncDef{&FuncDef{Name: "repeat", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_repeat", Body: &Query{Left: &Query{Func: "f"}, Op: OpComma, Right: &Query{Func: "_repeat"}}}}, Func: "_repeat"}}}, - "rindex": []*FuncDef{&FuncDef{Name: "rindex", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_rindex", Args: []*Query{&Query{Func: "$x"}}}}}}}, - "rtrimstr": []*FuncDef{&FuncDef{Name: "rtrimstr", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Left: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}, Op: OpAnd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "endswith", Args: []*Query{&Query{Func: "$x"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{End: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}}}}}, IsSlice: true}}}}}}}}, 
"scalars": []*FuncDef{&FuncDef{Name: "scalars", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}}, - "scan": []*FuncDef{&FuncDef{Name: "scan", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "scan", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "scan", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Left: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}, Op: OpAdd, Right: &Query{Func: "$flags"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Index: &Index{Name: "string"}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}, + "scan": []*FuncDef{&FuncDef{Name: "scan", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "scan", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "scan", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}, Else: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Index: &Index{Name: "string"}}}}}}}}}}}}}}, "select": []*FuncDef{&FuncDef{Name: "select", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "f"}, Then: &Query{Func: "."}, Else: &Query{Func: "empty"}}}}}}, - "sort": []*FuncDef{&FuncDef{Name: "sort", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sort_by", Args: []*Query{&Query{Func: "."}}}}}}}, "sort_by": []*FuncDef{&FuncDef{Name: "sort_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_sort_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}}, - "splits": []*FuncDef{&FuncDef{Name: "splits", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "splits", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: 
"splits", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "split", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}}, - "startswith": []*FuncDef{&FuncDef{Name: "startswith", Args: []string{"$x"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}, Then: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{End: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Func: "length"}}, IsSlice: true}}}, Op: OpEq, Right: &Query{Func: "$x"}}, Else: &Query{Left: &Query{Func: "$x"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "startswith"}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_type_error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "startswith"}}}}}}}}}}}}, + "splits": []*FuncDef{&FuncDef{Name: "splits", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "splits", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "splits", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "split", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}}, "strings": []*FuncDef{&FuncDef{Name: "strings", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}}}, - "sub": []*FuncDef{&FuncDef{Name: "sub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "sub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$in"}}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_sub", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, Then: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$r"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "captures"}}, &Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Left: 
&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Op: OpNe, Right: &Query{Func: "null"}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{KeyQuery: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "name"}}}, Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "add"}, Op: OpAlt, Right: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "string"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, End: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, IsSlice: true}}}}}}, Op: OpAdd, Right: &Query{Func: "str"}}}}}}}, &ObjectKeyVal{Key: "offset", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "length"}}}}}}}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$x"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "matches"}}, &Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}}}}}}, Else: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$in"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}}}}}}}, Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}}}, &ObjectKeyVal{Key: "offset", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}, + "sub": []*FuncDef{&FuncDef{Name: "sub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "sub", Args: []string{"$re", "str", "$flags"}, Body: 
&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$str"}}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_sub", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{&Suffix{Index: &Index{End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$r"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$r"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "_capture"}, Op: OpPipe, Right: &Query{Func: "str"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "length"}}}}}}, End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}, &ObjectKeyVal{Key: "offset", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{&Suffix{Index: &Index{End: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}, IsSlice: true}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}}}}, Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}, "test": []*FuncDef{&FuncDef{Name: "test", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "test", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "test", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, 
&Query{Func: "$flags"}, &Query{Func: "true"}}}}}}}, - "to_entries": []*FuncDef{&FuncDef{Name: "to_entries", Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "keys"}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$k"}}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "key", Val: &ObjectVal{Queries: []*Query{&Query{Func: "$k"}}}}, &ObjectKeyVal{Key: "value", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$k"}}}}}}}}}}}}}}}}}}}}}, "todate": []*FuncDef{&FuncDef{Name: "todate", Body: &Query{Func: "todateiso8601"}}}, "todateiso8601": []*FuncDef{&FuncDef{Name: "todateiso8601", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strftime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%SZ"}}}}}}}}}, "tostream": []*FuncDef{&FuncDef{Name: "tostream", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}, Op: OpComma, Right: &Query{Func: "."}}}}, Func: "r"}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}, Pattern: &Pattern{Name: "$q"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpComma, Right: &Query{Func: "."}}}}}, Update: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpAdd, Right: &Query{Func: "$q"}}}}}}}}}}}}}}}}, - "truncate_stream": []*FuncDef{&FuncDef{Name: "truncate_stream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$n"}}, Body: &Query{Left: &Query{Func: "null"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$input"}}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpPipe, Right: &Query{Func: "length"}}}}, Op: OpGt, Right: &Query{Func: "$n"}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$input"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}, &Suffix{Index: &Index{Start: &Query{Func: "$n"}, IsSlice: true}}}}}}}}}, Else: 
&Query{Func: "empty"}}}}}}}}}}}}}}}}}}, - "unique": []*FuncDef{&FuncDef{Name: "unique", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "unique_by", Args: []*Query{&Query{Func: "."}}}}}}}, + "truncate_stream": []*FuncDef{&FuncDef{Name: "truncate_stream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$n"}}, Body: &Query{Left: &Query{Func: "null"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Func: "$n"}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpModify, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}, IsSlice: true}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}, "unique_by": []*FuncDef{&FuncDef{Name: "unique_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_unique_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}}, "until": []*FuncDef{&FuncDef{Name: "until", Args: []string{"cond", "next"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_until", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Func: "."}, Else: &Query{Left: &Query{Func: "next"}, Op: OpPipe, Right: &Query{Func: "_until"}}}}}}}, Func: "_until"}}}, "values": []*FuncDef{&FuncDef{Name: "values", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Func: "null"}}}}}}}}, - "walk": []*FuncDef{&FuncDef{Name: "walk", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_walk", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map_values", Args: []*Query{&Query{Func: "_walk"}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}, Func: "_walk"}}}, + "walk": []*FuncDef{&FuncDef{Name: "walk", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_walk", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "_walk"}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map_values", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "last", Args: []*Query{&Query{Func: "_walk"}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}, 
Func: "_walk"}}}, "while": []*FuncDef{&FuncDef{Name: "while", Args: []string{"cond", "update"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_while", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "update"}, Op: OpPipe, Right: &Query{Func: "_while"}}}}}, Else: &Query{Func: "empty"}}}}}}, Func: "_while"}}}, "with_entries": []*FuncDef{&FuncDef{Name: "with_entries", Args: []string{"f"}, Body: &Query{Left: &Query{Func: "to_entries"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "f"}}}}}, Op: OpPipe, Right: &Query{Func: "from_entries"}}}}}, } diff --git a/vendor/github.com/itchyny/gojq/builtin.jq b/vendor/github.com/itchyny/gojq/builtin.jq index b6a9e1d8..66d63073 100644 --- a/vendor/github.com/itchyny/gojq/builtin.jq +++ b/vendor/github.com/itchyny/gojq/builtin.jq @@ -1,10 +1,6 @@ def not: if . then false else true end; def in(xs): . as $x | xs | has($x); def map(f): [.[] | f]; -def to_entries: [keys[] as $k | {key: $k, value: .[$k]}]; -def from_entries: - map({ (.key // .Key // .name // .Name): (if has("value") then .value else .Value end) }) - | add // {}; def with_entries(f): to_entries | map(f) | from_entries; def select(f): if f then . else empty end; def recurse: recurse(.[]?); @@ -24,21 +20,10 @@ def range($end): _range(0; $end; 1); def range($start; $end): _range($start; $end; 1); def range($start; $end; $step): _range($start; $end; $step); -def _flatten($x): - map(if type == "array" and $x != 0 then _flatten($x - 1) else [.] end) | add; -def flatten($x): - if $x < 0 - then error("flatten depth must not be negative") - else _flatten($x) // [] end; -def flatten: _flatten(-1) // []; -def min: min_by(.); def min_by(f): _min_by(map([f])); -def max: max_by(.); def max_by(f): _max_by(map([f])); -def sort: sort_by(.); def sort_by(f): _sort_by(map([f])); def group_by(f): _group_by(map([f])); -def unique: unique_by(.); def unique_by(f): _unique_by(map([f])); def arrays: select(type == "array"); @@ -54,55 +39,21 @@ def values: select(. != null); def scalars: select(type | . != "array" and . != "object"); def leaf_paths: paths(scalars); -def indices($x): _indices($x); -def index($x): _lindex($x); -def rindex($x): _rindex($x); def inside(xs): . as $x | xs | contains($x); -def startswith($x): - if type == "string" then - if $x|type == "string" then - .[:$x | length] == $x - else - $x | _type_error("startswith") - end - else - _type_error("startswith") - end; -def endswith($x): - if type == "string" then - if $x|type == "string" then - .[- ($x | length):] == $x - else - $x | _type_error("endswith") - end - else - _type_error("endswith") - end; -def ltrimstr($x): - if type == "string" and ($x|type == "string") and startswith($x) then - .[$x | length:] - end; -def rtrimstr($x): - if type == "string" and ($x|type == "string") and endswith($x) then - .[:- ($x | length)] - end; - def combinations: if length == 0 then [] else - .[0][] as $x | .[1:] | combinations as $y | [$x] + $y + .[0][] as $x | [$x] + (.[1:] | combinations) end; -def combinations(n): - . as $dot | [range(n) | $dot] | combinations; -def join($x): - if type != "array" then [.[]] end | _join($x); -def ascii_downcase: - explode | map(if 65 <= . and . <= 90 then . + 32 end) | implode; -def ascii_upcase: - explode | map(if 97 <= . and . <= 122 then . 
- 32 end) | implode; +def combinations(n): [limit(n; repeat(.))] | combinations; def walk(f): - def _walk: if type | . == "array" or . == "object" then map_values(_walk) end | f; + def _walk: + if type == "array" then + map(_walk) + elif type == "object" then + map_values(last(_walk)) + end | f; _walk; def first: .[0]; @@ -110,17 +61,20 @@ def first(g): label $out | g | ., break $out; def last: .[-1]; def last(g): reduce g as $item (null; $item); def isempty(g): label $out | (g | false, break $out), true; -def all: all(.[]; .); +def all: all(.); def all(y): all(.[]; y); -def all(g; y): isempty(g|y and empty); -def any: any(.[]; .); +def all(g; y): isempty(g | select(y | not)); +def any: any(.); def any(y): any(.[]; y); -def any(g; y): isempty(g|y or empty) | not; +def any(g; y): isempty(g | select(y)) | not; def limit($n; g): if $n > 0 then - label $out - | foreach g as $item - ($n; .-1; $item, if . <= 0 then break $out else empty end) + label $out | + foreach g as $item ( + $n; + . - 1; + $item, if . <= 0 then break $out else empty end + ) elif $n == 0 then empty else @@ -131,41 +85,39 @@ def nth($n; g): if $n < 0 then error("nth doesn't support negative indices") else - label $out - | foreach g as $item - ($n; .-1; . < 0 or empty | $item, break $out) + label $out | + foreach g as $item ( + $n + 1; + . - 1; + if . <= 0 then $item, break $out else empty end + ) end; def truncate_stream(f): - . as $n | null | f | . as $input - | if (.[0] | length) > $n then setpath([0]; $input[0][$n:]) else empty end; + . as $n | null | f | + if .[0] | length > $n then .[0] |= .[$n:] else empty end; def fromstream(f): - { x: null, e: false } as $init - | foreach f as $i - ( $init; - if .e then $init else . end - | if $i | length == 2 - then setpath(["e"]; $i[0] | length==0) | setpath(["x"] + $i[0]; $i[1]) - else setpath(["e"]; $i[0] | length==1) end; - if .e then .x else empty end); + { x: null, e: false } as $init | + foreach f as $i ( + $init; + if .e then $init end | + if $i | length == 2 then + setpath(["e"]; $i[0] | length == 0) | + setpath(["x"] + $i[0]; $i[1]) + else + setpath(["e"]; $i[0] | length == 1) + end; + if .e then .x else empty end + ); def tostream: - path(def r: (.[]? | r), .; r) as $p - | getpath($p) - | reduce path(.[]?) as $q ([$p, .]; [$p + $q]); + path(def r: (.[]? | r), .; r) as $p | + getpath($p) | + reduce path(.[]?) as $q ([$p, .]; [$p + $q]); -def _assign(ps; $v): - reduce path(ps) as $p (.; setpath($p; $v)); -def _modify(ps; f): - reduce path(ps) as $p - ([., []]; label $out | (([0] + $p) as $q | setpath($q; getpath($q) | f) | ., break $out), setpath([1]; .[1] + [$p])) - | . as $x | $x[0] | delpaths($x[1]); def map_values(f): .[] |= f; def del(f): delpaths([path(f)]); -def paths: - path(recurse(if type | . == "array" or . == "object" then .[] else empty end)) - | select(length > 0); -def paths(f): - . as $x | paths | select(. as $p | $x | getpath($p) | f); +def paths: path(..) | select(. 
!= []); +def paths(f): paths as $p | select(getpath($p) | f) | $p; def fromdateiso8601: strptime("%Y-%m-%dT%H:%M:%S%z") | mktime; def todateiso8601: strftime("%Y-%m-%dT%H:%M:%SZ"); @@ -173,39 +125,37 @@ def fromdate: fromdateiso8601; def todate: todateiso8601; def match($re): match($re; null); -def match($re; $flags): _match($re; $flags; false) | .[]; +def match($re; $flags): _match($re; $flags; false)[]; def test($re): test($re; null); def test($re; $flags): _match($re; $flags; true); def capture($re): capture($re; null); -def capture($re; $flags): - match($re; $flags) - | [.captures[] | select(.name != null) | { (.name): .string }] - | add // {}; +def capture($re; $flags): match($re; $flags) | _capture; def scan($re): scan($re; null); def scan($re; $flags): - match($re; "g" + $flags) - | if .captures|length > 0 then [.captures[].string] else .string end; + match($re; $flags + "g") | + if .captures == [] then + .string + else + [.captures[].string] + end; def splits($re): splits($re; null); -def splits($re; $flags): split($re; $flags) | .[]; +def splits($re; $flags): split($re; $flags)[]; def sub($re; str): sub($re; str; null); def sub($re; str; $flags): - . as $in - | def _sub: - if .matches|length > 0 - then - . as $x | .matches[0] as $r - | [$r.captures[] | select(.name != null) | { (.name): .string }] - | add // {} - | { - string: ($x.string + $in[$x.offset:$r.offset] + str), - offset: ($r.offset + $r.length), - matches: $x.matches[1:] - } - | _sub - else - .string + $in[.offset:] - end; - { string: "", offset: 0, matches: [match($re; $flags)] } | _sub; + . as $str | + def _sub: + if .matches == [] then + $str[:.offset] + .string + else + .matches[-1] as $r | + { + string: (($r | _capture | str) + $str[$r.offset+$r.length:.offset] + .string), + offset: $r.offset, + matches: .matches[:-1], + } | + _sub + end; + { string: "", matches: [match($re; $flags)] } | _sub; def gsub($re; str): sub($re; str; "g"); def gsub($re; str; $flags): sub($re; str; $flags + "g"); @@ -216,8 +166,9 @@ def inputs: if . 
== "break" then empty else error end; def INDEX(stream; idx_expr): - reduce stream as $row ({}; .[$row|idx_expr|tostring] = $row); -def INDEX(idx_expr): INDEX(.[]; idx_expr); + reduce stream as $row ({}; .[$row | idx_expr | tostring] = $row); +def INDEX(idx_expr): + INDEX(.[]; idx_expr); def JOIN($idx; idx_expr): [.[] | [., $idx[idx_expr]]]; def JOIN($idx; stream; idx_expr): diff --git a/vendor/github.com/itchyny/gojq/code.go b/vendor/github.com/itchyny/gojq/code.go index f1935d84..33505bde 100644 --- a/vendor/github.com/itchyny/gojq/code.go +++ b/vendor/github.com/itchyny/gojq/code.go @@ -1,7 +1,7 @@ package gojq type code struct { - v interface{} + v any op opcode } @@ -25,13 +25,15 @@ const ( opbacktrack opjump opjumpifnot + opindex + opindexarray opcall opcallrec oppushpc opcallpc opscope opret - opeach + opiter opexpbegin opexpend oppathbegin @@ -74,6 +76,10 @@ func (op opcode) String() string { return "jump" case opjumpifnot: return "jumpifnot" + case opindex: + return "index" + case opindexarray: + return "indexarray" case opcall: return "call" case opcallrec: @@ -86,8 +92,8 @@ func (op opcode) String() string { return "scope" case opret: return "ret" - case opeach: - return "each" + case opiter: + return "iter" case opexpbegin: return "expbegin" case opexpend: diff --git a/vendor/github.com/itchyny/gojq/compare.go b/vendor/github.com/itchyny/gojq/compare.go index 9f0d5338..e70c1fbb 100644 --- a/vendor/github.com/itchyny/gojq/compare.go +++ b/vendor/github.com/itchyny/gojq/compare.go @@ -5,19 +5,17 @@ import ( "math/big" ) -func compare(l, r interface{}) int { +// Compare l and r, and returns jq-flavored comparison value. +// The result will be 0 if l == r, -1 if l < r, and +1 if l > r. +// This comparison is used by built-in operators and functions. 
+func Compare(l, r any) int { + return compare(l, r) +} + +func compare(l, r any) int { return binopTypeSwitch(l, r, - func(l, r int) interface{} { - switch { - case l < r: - return -1 - case l == r: - return 0 - default: - return 1 - } - }, - func(l, r float64) interface{} { + compareInt, + func(l, r float64) any { switch { case l < r || math.IsNaN(l): return -1 @@ -27,10 +25,10 @@ func compare(l, r interface{}) int { return 1 } }, - func(l, r *big.Int) interface{} { + func(l, r *big.Int) any { return l.Cmp(r) }, - func(l, r string) interface{} { + func(l, r string) any { switch { case l < r: return -1 @@ -40,66 +38,63 @@ func compare(l, r interface{}) int { return 1 } }, - func(l, r []interface{}) interface{} { - for i := 0; ; i++ { - if i >= len(l) { - if i >= len(r) { - return 0 - } - return -1 - } - if i >= len(r) { - return 1 - } + func(l, r []any) any { + n := len(l) + if len(r) < n { + n = len(r) + } + for i := 0; i < n; i++ { if cmp := compare(l[i], r[i]); cmp != 0 { return cmp } } + return compareInt(len(l), len(r)) }, - func(l, r map[string]interface{}) interface{} { + func(l, r map[string]any) any { lk, rk := funcKeys(l), funcKeys(r) if cmp := compare(lk, rk); cmp != 0 { return cmp } - for _, k := range lk.([]interface{}) { + for _, k := range lk.([]any) { if cmp := compare(l[k.(string)], r[k.(string)]); cmp != 0 { return cmp } } return 0 }, - func(l, r interface{}) interface{} { - ln, rn := getTypeOrdNum(l), getTypeOrdNum(r) - switch { - case ln < rn: - return -1 - case ln == rn: - return 0 - default: - return 1 - } + func(l, r any) any { + return compareInt(typeIndex(l), typeIndex(r)) }, ).(int) } -func getTypeOrdNum(v interface{}) int { +func compareInt(l, r int) any { + switch { + case l < r: + return -1 + case l == r: + return 0 + default: + return 1 + } +} + +func typeIndex(v any) int { switch v := v.(type) { - case nil: + default: return 0 case bool: - if v { - return 2 + if !v { + return 1 } - return 1 + return 2 case int, float64, *big.Int: return 3 case string: return 4 - case []interface{}: + case []any: return 5 - case map[string]interface{}: + case map[string]any: return 6 - default: - return -1 } } diff --git a/vendor/github.com/itchyny/gojq/compiler.go b/vendor/github.com/itchyny/gojq/compiler.go index 00daa04c..135387fa 100644 --- a/vendor/github.com/itchyny/gojq/compiler.go +++ b/vendor/github.com/itchyny/gojq/compiler.go @@ -2,7 +2,6 @@ package gojq import ( "context" - "encoding/json" "errors" "fmt" "sort" @@ -18,6 +17,7 @@ type compiler struct { inputIter Iter codes []*code codeinfos []codeinfo + builtinScope *scopeinfo scopes []*scopeinfo scopecnt int } @@ -30,16 +30,17 @@ type Code struct { } // Run runs the code with the variable values (which should be in the -// same order as the given variables using WithVariables) and returns +// same order as the given variables using [WithVariables]) and returns // a result iterator. // -// It is safe to call this method of a *Code in multiple goroutines. -func (c *Code) Run(v interface{}, values ...interface{}) Iter { +// It is safe to call this method in goroutines, to reuse a compiled [*Code]. +// But for arguments, do not give values sharing same data between goroutines. +func (c *Code) Run(v any, values ...any) Iter { return c.RunWithContext(context.Background(), v, values...) } // RunWithContext runs the code with context. 
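For orientation on the compare.go and compiler.go hunks here: this gojq version exports Compare as a public jq-flavored three-way comparison, and the Run doc comment now states that a compiled *Code may be reused across goroutines as long as argument values do not share data. A minimal consumer-side sketch, illustrative only and not part of this vendor diff — the ".name" query and the input map are made-up values, and gojq.Parse comes from the library's public API rather than from these hunks:

package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	// Newly exported: jq-flavored ordering (null < booleans < numbers < strings < arrays < objects).
	fmt.Println(gojq.Compare(nil, false)) // -1
	fmt.Println(gojq.Compare(2, 2.0))     // 0 (ints and floats compare numerically)

	// A compiled *Code is reusable, including from multiple goroutines,
	// per the updated Run documentation.
	q, err := gojq.Parse(".name")
	if err != nil {
		log.Fatal(err)
	}
	code, err := gojq.Compile(q)
	if err != nil {
		log.Fatal(err)
	}
	iter := code.Run(map[string]any{"name": "dsv-k8s-sidecar"})
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, ok := v.(error); ok {
			log.Fatal(err)
		}
		fmt.Println(v) // dsv-k8s-sidecar
	}
}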
-func (c *Code) RunWithContext(ctx context.Context, v interface{}, values ...interface{}) Iter { +func (c *Code) RunWithContext(ctx context.Context, v any, values ...any) Iter { if len(values) > len(c.variables) { return NewIter(&tooManyVariableValuesError{}) } else if len(values) < len(c.variables) { @@ -51,16 +52,6 @@ func (c *Code) RunWithContext(ctx context.Context, v interface{}, values ...inte return newEnv(ctx).execute(c, normalizeNumbers(v), values...) } -// ModuleLoader is an interface for loading modules. -// -// Implement following optional methods. Use NewModuleLoader to load local modules. -// LoadModule(string) (*Query, error) -// LoadModuleWithMeta(string, map[string]interface{}) (*Query, error) -// LoadInitModules() ([]*Query, error) -// LoadJSON(string) (interface{}, error) -// LoadJSONWithMeta(string, map[string]interface{}) (interface{}, error) -type ModuleLoader interface{} - type scopeinfo struct { variables []*varinfo funcs []*funcinfo @@ -87,6 +78,7 @@ func Compile(q *Query, options ...CompilerOption) (*Code, error) { for _, opt := range options { opt(c) } + c.builtinScope = c.newScope() scope := c.newScope() c.scopes = []*scopeinfo{scope} setscope := c.lazy(func() *code { @@ -152,15 +144,15 @@ func (c *compiler) compileImport(i *Import) error { return fmt.Errorf("cannot load module: %q", path) } if strings.HasPrefix(alias, "$") { - var vals interface{} + var vals any if moduleLoader, ok := c.moduleLoader.(interface { - LoadJSONWithMeta(string, map[string]interface{}) (interface{}, error) + LoadJSONWithMeta(string, map[string]any) (any, error) }); ok { if vals, err = moduleLoader.LoadJSONWithMeta(path, i.Meta.ToValue()); err != nil { return err } } else if moduleLoader, ok := c.moduleLoader.(interface { - LoadJSON(string) (interface{}, error) + LoadJSON(string) (any, error) }); ok { if vals, err = moduleLoader.LoadJSON(path); err != nil { return err @@ -177,7 +169,7 @@ func (c *compiler) compileImport(i *Import) error { } var q *Query if moduleLoader, ok := c.moduleLoader.(interface { - LoadModuleWithMeta(string, map[string]interface{}) (*Query, error) + LoadModuleWithMeta(string, map[string]any) (*Query, error) }); ok { if q, err = moduleLoader.LoadModuleWithMeta(path, i.Meta.ToValue()); err != nil { return err @@ -190,8 +182,11 @@ func (c *compiler) compileImport(i *Import) error { } } c.appendCodeInfo("module " + path) - defer c.appendCodeInfo("end of module " + path) - return c.compileModule(q, alias) + if err = c.compileModule(q, alias); err != nil { + return err + } + c.appendCodeInfo("end of module " + path) + return nil } func (c *compiler) compileModule(q *Query, alias string) error { @@ -274,6 +269,31 @@ func (c *compiler) lookupFuncOrVariable(name string) (*funcinfo, *varinfo) { return nil, nil } +func (c *compiler) lookupBuiltin(name string, argcnt int) *funcinfo { + s := c.builtinScope + for i := len(s.funcs) - 1; i >= 0; i-- { + if f := s.funcs[i]; f.name == name && f.argcnt == argcnt { + return f + } + } + return nil +} + +func (c *compiler) appendBuiltin(name string, argcnt int) func() { + setjump := c.lazy(func() *code { + return &code{op: opjump, v: len(c.codes)} + }) + c.appendCodeInfo(name) + c.builtinScope.funcs = append( + c.builtinScope.funcs, + &funcinfo{name, len(c.codes), argcnt}, + ) + return func() { + setjump() + c.appendCodeInfo("end of " + name) + } +} + func (c *compiler) newScope() *scopeinfo { i := c.scopecnt // do not use len(c.scopes) because it pops c.scopecnt++ @@ -294,29 +314,22 @@ func (c *compiler) newScopeDepth() func() { 
func (c *compiler) compileFuncDef(e *FuncDef, builtin bool) error { var scope *scopeinfo if builtin { - scope = c.scopes[0] - for i := len(scope.funcs) - 1; i >= 0; i-- { - if f := scope.funcs[i]; f.name == e.Name && f.argcnt == len(e.Args) { - return nil - } - } + scope = c.builtinScope } else { scope = c.scopes[len(c.scopes)-1] } defer c.lazy(func() *code { - return &code{op: opjump, v: c.pc()} + return &code{op: opjump, v: len(c.codes)} })() c.appendCodeInfo(e.Name) - defer c.appendCodeInfo("end of " + e.Name) - pc := c.pc() - scope.funcs = append(scope.funcs, &funcinfo{e.Name, pc, len(e.Args)}) + scope.funcs = append(scope.funcs, &funcinfo{e.Name, len(c.codes), len(e.Args)}) defer func(scopes []*scopeinfo, variables []string) { c.scopes, c.variables = scopes, variables }(c.scopes, c.variables) c.variables = c.variables[len(c.variables):] scope = c.newScope() if builtin { - c.scopes = []*scopeinfo{c.scopes[0], scope} + c.scopes = []*scopeinfo{c.builtinScope, scope} } else { c.scopes = append(c.scopes, scope) } @@ -344,14 +357,20 @@ func (c *compiler) compileFuncDef(e *FuncDef, builtin bool) error { } for _, w := range vis { c.append(&code{op: opload, v: v}) + c.append(&code{op: opexpbegin}) c.append(&code{op: opload, v: w.index}) c.append(&code{op: opcallpc}) c.appendCodeInfo(w.name) c.append(&code{op: opstore, v: c.pushVariable(w.name)}) + c.append(&code{op: opexpend}) } c.append(&code{op: opload, v: v}) } - return c.compile(e.Body) + if err := c.compile(e.Body); err != nil { + return err + } + c.appendCodeInfo("end of " + e.Name) + return nil } func (c *compiler) compileQuery(e *Query) error { @@ -425,15 +444,15 @@ func (c *compiler) compileQuery(e *Query) error { func (c *compiler) compileComma(l, r *Query) error { setfork := c.lazy(func() *code { - return &code{op: opfork, v: c.pc() + 1} + return &code{op: opfork, v: len(c.codes)} }) if err := c.compileQuery(l); err != nil { return err } - setfork() defer c.lazy(func() *code { - return &code{op: opjump, v: c.pc()} + return &code{op: opjump, v: len(c.codes)} })() + setfork() return c.compileQuery(r) } @@ -442,23 +461,23 @@ func (c *compiler) compileAlt(l, r *Query) error { found := c.newVariable() c.append(&code{op: opstore, v: found}) setfork := c.lazy(func() *code { - return &code{op: opfork, v: c.pc()} // opload found + return &code{op: opfork, v: len(c.codes)} // opload found }) if err := c.compileQuery(l); err != nil { return err } c.append(&code{op: opdup}) - c.append(&code{op: opjumpifnot, v: c.pc() + 4}) // oppop - c.append(&code{op: oppush, v: true}) // found some value + c.append(&code{op: opjumpifnot, v: len(c.codes) + 4}) // oppop + c.append(&code{op: oppush, v: true}) // found some value c.append(&code{op: opstore, v: found}) defer c.lazy(func() *code { - return &code{op: opjump, v: c.pc()} // ret + return &code{op: opjump, v: len(c.codes)} })() c.append(&code{op: oppop}) c.append(&code{op: opbacktrack}) setfork() c.append(&code{op: opload, v: found}) - c.append(&code{op: opjumpifnot, v: c.pc() + 3}) + c.append(&code{op: opjumpifnot, v: len(c.codes) + 3}) c.append(&code{op: opbacktrack}) // if found, backtrack c.append(&code{op: oppop}) return c.compileQuery(r) @@ -467,8 +486,9 @@ func (c *compiler) compileAlt(l, r *Query) error { func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) error { switch op { case OpAssign: - // .foo.bar = f => setpath(["foo", "bar"]; f) - if xs := l.toIndices(); xs != nil { + // optimize assignment operator with constant indexing and slicing + // .foo.[0].[1:2] = f => 
setpath(["foo",0,{"start":1,"end":2}]; f) + if xs := l.toIndices(nil); xs != nil { // ref: compileCall v := c.newVariable() c.append(&code{op: opstore, v: v}) @@ -478,7 +498,7 @@ func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) error { } c.append(&code{op: oppush, v: xs}) c.append(&code{op: opload, v: v}) - c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["setpath"].callback, 2, "setpath"}}) + c.append(&code{op: opcall, v: [3]any{internalFuncs["setpath"].callback, 2, "setpath"}}) return nil } fallthrough @@ -517,7 +537,12 @@ func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) error { } } -func (c *compiler) compileBind(b *Bind) error { +func (c *compiler) compileBind(e *Term, b *Bind) error { + c.append(&code{op: opdup}) + c.append(&code{op: opexpbegin}) + if err := c.compileTerm(e); err != nil { + return err + } var pc int var vs [][2]int for i, p := range b.Patterns { @@ -534,97 +559,86 @@ func (c *compiler) compileBind(b *Bind) error { c.append(&code{op: opstore, v: v}) } } - vs, err = c.compilePattern(p) - if err != nil { + if vs, err = c.compilePattern(vs[:0], p); err != nil { return err } if i < len(b.Patterns)-1 { defer c.lazy(func() *code { return &code{op: opjump, v: pc} })() - pcc = c.pc() + pcc = len(c.codes) } } if len(b.Patterns) > 1 { - pc = c.pc() + pc = len(c.codes) } if len(b.Patterns) == 1 && c.codes[len(c.codes)-2].op == opexpbegin { c.codes[len(c.codes)-2].op = opnop } else { - c.append(&code{op: opexpend}) // ref: compileTermSuffix + c.append(&code{op: opexpend}) } return c.compileQuery(b.Body) } -func (c *compiler) compilePattern(p *Pattern) ([][2]int, error) { +func (c *compiler) compilePattern(vs [][2]int, p *Pattern) ([][2]int, error) { + var err error c.appendCodeInfo(p) if p.Name != "" { v := c.pushVariable(p.Name) c.append(&code{op: opstore, v: v}) - return [][2]int{v}, nil + return append(vs, v), nil } else if len(p.Array) > 0 { - var vs [][2]int v := c.newVariable() c.append(&code{op: opstore, v: v}) for i, p := range p.Array { - c.append(&code{op: oppush, v: i}) - c.append(&code{op: opload, v: v}) c.append(&code{op: opload, v: v}) - // ref: compileCall - c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}}) - ns, err := c.compilePattern(p) - if err != nil { + c.append(&code{op: opindexarray, v: i}) + if vs, err = c.compilePattern(vs, p); err != nil { return nil, err } - vs = append(vs, ns...) 
} return vs, nil } else if len(p.Object) > 0 { - var vs [][2]int v := c.newVariable() c.append(&code{op: opstore, v: v}) for _, kv := range p.Object { var key, name string - if kv.KeyOnly != "" { - key, name = kv.KeyOnly[1:], kv.KeyOnly - c.append(&code{op: oppush, v: key}) - } else if kv.Key != "" { - key = kv.Key - if key != "" && key[0] == '$' { + c.append(&code{op: opload, v: v}) + if key = kv.Key; key != "" { + if key[0] == '$' { key, name = key[1:], key } - c.append(&code{op: oppush, v: key}) } else if kv.KeyString != nil { - c.append(&code{op: opload, v: v}) - if err := c.compileString(kv.KeyString, nil); err != nil { - return nil, err + if key = kv.KeyString.Str; key == "" { + if err := c.compileString(kv.KeyString, nil); err != nil { + return nil, err + } } } else if kv.KeyQuery != nil { - c.append(&code{op: opload, v: v}) if err := c.compileQuery(kv.KeyQuery); err != nil { return nil, err } } - c.append(&code{op: opload, v: v}) - c.append(&code{op: opload, v: v}) - // ref: compileCall - c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}}) + if key != "" { + c.append(&code{op: opindex, v: key}) + } else { + c.append(&code{op: opload, v: v}) + c.append(&code{op: oppush, v: nil}) + // ref: compileCall + c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}}) + } if name != "" { if kv.Val != nil { c.append(&code{op: opdup}) } - ns, err := c.compilePattern(&Pattern{Name: name}) - if err != nil { + if vs, err = c.compilePattern(vs, &Pattern{Name: name}); err != nil { return nil, err } - vs = append(vs, ns...) } if kv.Val != nil { - ns, err := c.compilePattern(kv.Val) - if err != nil { + if vs, err = c.compilePattern(vs, kv.Val); err != nil { return nil, err } - vs = append(vs, ns...) 
} } return vs, nil @@ -650,17 +664,17 @@ func (c *compiler) compileIf(e *If) error { } pcc := len(c.codes) setjumpifnot := c.lazy(func() *code { - return &code{op: opjumpifnot, v: c.pc() + 1} // if falsy, skip then clause + return &code{op: opjumpifnot, v: len(c.codes)} // skip then clause }) f = c.newScopeDepth() if err := c.compileQuery(e.Then); err != nil { return err } f() - setjumpifnot() defer c.lazy(func() *code { - return &code{op: opjump, v: c.pc()} // jump to ret after else clause + return &code{op: opjump, v: len(c.codes)} })() + setjumpifnot() if len(e.Elif) > 0 { return c.compileIf(&If{e.Elif[0].Cond, e.Elif[0].Then, e.Elif[1:], e.Else}) } @@ -686,7 +700,7 @@ func (c *compiler) compileIf(e *If) error { func (c *compiler) compileTry(e *Try) error { c.appendCodeInfo(e) setforktrybegin := c.lazy(func() *code { - return &code{op: opforktrybegin, v: c.pc()} + return &code{op: opforktrybegin, v: len(c.codes)} }) f := c.newScopeDepth() if err := c.compileQuery(e.Body); err != nil { @@ -695,7 +709,7 @@ func (c *compiler) compileTry(e *Try) error { f() c.append(&code{op: opforktryend}) defer c.lazy(func() *code { - return &code{op: opjump, v: c.pc()} + return &code{op: opjump, v: len(c.codes)} })() setforktrybegin() if e.Catch != nil { @@ -709,9 +723,9 @@ func (c *compiler) compileTry(e *Try) error { func (c *compiler) compileReduce(e *Reduce) error { c.appendCodeInfo(e) defer c.newScopeDepth()() - defer c.lazy(func() *code { - return &code{op: opfork, v: c.pc() - 2} - })() + setfork := c.lazy(func() *code { + return &code{op: opfork, v: len(c.codes)} + }) c.append(&code{op: opdup}) v := c.newVariable() f := c.newScopeDepth() @@ -723,7 +737,7 @@ func (c *compiler) compileReduce(e *Reduce) error { if err := c.compileTerm(e.Term); err != nil { return err } - if _, err := c.compilePattern(e.Pattern); err != nil { + if _, err := c.compilePattern(nil, e.Pattern); err != nil { return err } c.append(&code{op: opload, v: v}) @@ -734,6 +748,7 @@ func (c *compiler) compileReduce(e *Reduce) error { f() c.append(&code{op: opstore, v: v}) c.append(&code{op: opbacktrack}) + setfork() c.append(&code{op: oppop}) c.append(&code{op: opload, v: v}) return nil @@ -753,7 +768,7 @@ func (c *compiler) compileForeach(e *Foreach) error { if err := c.compileTerm(e.Term); err != nil { return err } - if _, err := c.compilePattern(e.Pattern); err != nil { + if _, err := c.compilePattern(nil, e.Pattern); err != nil { return err } c.append(&code{op: opload, v: v}) @@ -774,9 +789,7 @@ func (c *compiler) compileForeach(e *Foreach) error { func (c *compiler) compileLabel(e *Label) error { c.appendCodeInfo(e) v := c.pushVariable("$%" + e.Ident[1:]) - defer c.lazy(func() *code { - return &code{op: opforklabel, v: v} - })() + c.append(&code{op: opforklabel, v: v}) return c.compileQuery(e.Body) } @@ -787,21 +800,21 @@ func (c *compiler) compileBreak(label string) error { } c.append(&code{op: oppop}) c.append(&code{op: opload, v: v}) - c.append(&code{op: opcall, v: [3]interface{}{ - func(v interface{}, _ []interface{}) interface{} { - return &breakError{label, v} - }, - 0, - "_break", - }}) + c.append(&code{op: opcall, v: [3]any{funcBreak(label), 0, "_break"}}) return nil } +func funcBreak(label string) func(any, []any) any { + return func(v any, _ []any) any { + return &breakError{label, v} + } +} + func (c *compiler) compileTerm(e *Term) error { if len(e.SuffixList) > 0 { s := e.SuffixList[len(e.SuffixList)-1] t := *e // clone without changing e - (&t).SuffixList = t.SuffixList[:len(e.SuffixList)-1] + t.SuffixList = 
t.SuffixList[:len(e.SuffixList)-1] return c.compileTermSuffix(&t, s) } switch e.Type { @@ -827,11 +840,7 @@ func (c *compiler) compileTerm(e *Term) error { case TermTypeArray: return c.compileArray(e.Array) case TermTypeNumber: - v := normalizeNumber(json.Number(e.Number)) - if err, ok := v.(error); ok { - return err - } - c.append(&code{op: opconst, v: v}) + c.append(&code{op: opconst, v: toNumber(e.Number)}) return nil case TermTypeUnary: return c.compileUnary(e.Unary) @@ -860,10 +869,15 @@ func (c *compiler) compileTerm(e *Term) error { } func (c *compiler) compileIndex(e *Term, x *Index) error { - c.appendCodeInfo(x) - if x.Name != "" { - return c.compileCall("_index", []*Query{{Term: e}, {Term: &Term{Type: TermTypeString, Str: &String{Str: x.Name}}}}) + if k := x.toIndexKey(); k != nil { + if err := c.compileTerm(e); err != nil { + return err + } + c.appendCodeInfo(x) + c.append(&code{op: opindex, v: k}) + return nil } + c.appendCodeInfo(x) if x.Str != nil { return c.compileCall("_index", []*Query{{Term: e}, {Term: &Term{Type: TermTypeString, Str: x.Str}}}) } @@ -880,12 +894,11 @@ func (c *compiler) compileIndex(e *Term, x *Index) error { } func (c *compiler) compileFunc(e *Func) error { - name := e.Name if len(e.Args) == 0 { - if f, v := c.lookupFuncOrVariable(name); f != nil { + if f, v := c.lookupFuncOrVariable(e.Name); f != nil { return c.compileCallPc(f, e.Args) } else if v != nil { - if name[0] == '$' { + if e.Name[0] == '$' { c.append(&code{op: oppop}) c.append(&code{op: opload, v: v.index}) } else { @@ -893,8 +906,8 @@ func (c *compiler) compileFunc(e *Func) error { c.append(&code{op: opcallpc}) } return nil - } else if name == "$ENV" || name == "env" { - env := make(map[string]interface{}) + } else if e.Name == "$ENV" || e.Name == "env" { + env := make(map[string]any) if c.environLoader != nil { for _, kv := range c.environLoader() { if i := strings.IndexByte(kv, '='); i > 0 { @@ -904,36 +917,42 @@ func (c *compiler) compileFunc(e *Func) error { } c.append(&code{op: opconst, v: env}) return nil - } else if name[0] == '$' { - return &variableNotFoundError{name} + } else if e.Name[0] == '$' { + return &variableNotFoundError{e.Name} } } else { for i := len(c.scopes) - 1; i >= 0; i-- { s := c.scopes[i] for j := len(s.funcs) - 1; j >= 0; j-- { - if f := s.funcs[j]; f.name == name && f.argcnt == len(e.Args) { + if f := s.funcs[j]; f.name == e.Name && f.argcnt == len(e.Args) { return c.compileCallPc(f, e.Args) } } } } - if name[0] == '_' { - name = name[1:] + if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil { + return c.compileCallPc(f, e.Args) } - if fds, ok := builtinFuncDefs[name]; ok { + if fds, ok := builtinFuncDefs[e.Name]; ok { for _, fd := range fds { if len(fd.Args) == len(e.Args) { if err := c.compileFuncDef(fd, true); err != nil { return err } + break } } - s := c.scopes[0] - for i := len(s.funcs) - 1; i >= 0; i-- { - if f := s.funcs[i]; f.name == e.Name && f.argcnt == len(e.Args) { - return c.compileCallPc(f, e.Args) + if len(fds) == 0 { + switch e.Name { + case "_assign": + c.compileAssign() + case "_modify": + c.compileModify() } } + if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil { + return c.compileCallPc(f, e.Args) + } } if fn, ok := internalFuncs[e.Name]; ok && fn.accept(len(e.Args)) { switch e.Name { @@ -949,27 +968,27 @@ func (c *compiler) compileFunc(e *Func) error { return nil case "builtins": return c.compileCallInternal( - [3]interface{}{c.funcBuiltins, 0, e.Name}, + [3]any{c.funcBuiltins, 0, e.Name}, e.Args, true, - false, + -1, ) case "input": 
if c.inputIter == nil { return &inputNotAllowedError{} } return c.compileCallInternal( - [3]interface{}{c.funcInput, 0, e.Name}, + [3]any{c.funcInput, 0, e.Name}, e.Args, true, - false, + -1, ) case "modulemeta": return c.compileCallInternal( - [3]interface{}{c.funcModulemeta, 0, e.Name}, + [3]any{c.funcModulemeta, 0, e.Name}, e.Args, true, - false, + -1, ) default: return c.compileCall(e.Name, e.Args) @@ -977,22 +996,130 @@ func (c *compiler) compileFunc(e *Func) error { } if fn, ok := c.customFuncs[e.Name]; ok && fn.accept(len(e.Args)) { if err := c.compileCallInternal( - [3]interface{}{fn.callback, len(e.Args), e.Name}, + [3]any{fn.callback, len(e.Args), e.Name}, e.Args, true, - false, + -1, ); err != nil { return err } if fn.iter { - c.append(&code{op: opeach}) + c.append(&code{op: opiter}) } return nil } return &funcNotFoundError{e} } -func (c *compiler) funcBuiltins(interface{}, []interface{}) interface{} { +// Appends the compiled code for the assignment operator (`=`) to maximize +// performance. Originally the operator was implemented as follows. +// +// def _assign(p; $x): reduce path(p) as $q (.; setpath($q; $x)); +// +// To overcome the difficulty of reducing allocations on `setpath`, we use the +// `allocator` type and track the allocated addresses during the reduction. +func (c *compiler) compileAssign() { + defer c.appendBuiltin("_assign", 2)() + scope := c.newScope() + v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1} + x, a := [2]int{scope.id, 2}, [2]int{scope.id, 3} + // Cannot reuse v, p due to backtracking in x. + w, q := [2]int{scope.id, 4}, [2]int{scope.id, 5} + c.appends( + &code{op: opscope, v: [3]int{scope.id, 6, 2}}, + &code{op: opstore, v: v}, // def _assign(p; $x): + &code{op: opstore, v: p}, + &code{op: opstore, v: x}, + &code{op: opload, v: v}, + &code{op: opexpbegin}, + &code{op: opload, v: x}, + &code{op: opcallpc}, + &code{op: opstore, v: x}, + &code{op: opexpend}, + &code{op: oppush, v: nil}, + &code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}}, + &code{op: opstore, v: a}, + &code{op: opload, v: v}, + &code{op: opfork, v: len(c.codes) + 30}, // reduce [L1] + &code{op: opdup}, + &code{op: opstore, v: w}, + &code{op: oppathbegin}, // path(p) + &code{op: opload, v: p}, + &code{op: opcallpc}, + &code{op: opload, v: w}, + &code{op: oppathend}, + &code{op: opstore, v: q}, // as $q (.; + &code{op: opload, v: a}, // setpath($q; $x) + &code{op: opload, v: x}, + &code{op: opload, v: q}, + &code{op: opload, v: w}, + &code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}}, + &code{op: opstore, v: w}, + &code{op: opbacktrack}, // ); + &code{op: oppop}, // [L1] + &code{op: opload, v: w}, + &code{op: opret}, + ) +} + +// Appends the compiled code for the update-assignment operator (`|=`) to +// maximize performance. We use the `allocator` type, just like `_assign/2`. 
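The comments introduced for compileAssign and compileModify above explain that _assign/2 and _modify/2 used to be plain jq definitions (for example `def _assign(p; $x): reduce path(p) as $q (.; setpath($q; $x));`) and are now emitted as hand-written bytecode with an allocator so that repeated setpath calls allocate less. A small sketch of the jq-level operators those two builtins implement, run through the public gojq API — illustrative only, not part of this vendor diff; the queries and the input map are invented:

package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	input := map[string]any{
		"labels":  map[string]any{"app": "sidecar"},
		"retries": 2,
	}
	for _, src := range []string{
		`(.labels.app, .labels.tier) = "dsv"`, // "=" (assignment)
		`.retries |= . + 1`,                   // "|=" (update-assignment)
	} {
		q, err := gojq.Parse(src)
		if err != nil {
			log.Fatal(err)
		}
		code, err := gojq.Compile(q)
		if err != nil {
			log.Fatal(err)
		}
		iter := code.Run(input)
		for {
			v, ok := iter.Next()
			if !ok {
				break
			}
			if err, ok := v.(error); ok {
				log.Fatal(err)
			}
			fmt.Println(src, "=>", v)
		}
	}
}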
+func (c *compiler) compileModify() { + defer c.appendBuiltin("_modify", 2)() + scope := c.newScope() + v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1} + f, d := [2]int{scope.id, 2}, [2]int{scope.id, 3} + a, l := [2]int{scope.id, 4}, [2]int{scope.id, 5} + c.appends( + &code{op: opscope, v: [3]int{scope.id, 6, 2}}, + &code{op: opstore, v: v}, // def _modify(p; f): + &code{op: opstore, v: p}, + &code{op: opstore, v: f}, + &code{op: oppush, v: []any{}}, + &code{op: opstore, v: d}, + &code{op: oppush, v: nil}, + &code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}}, + &code{op: opstore, v: a}, + &code{op: opload, v: v}, + &code{op: opfork, v: len(c.codes) + 39}, // reduce [L1] + &code{op: oppathbegin}, // path(p) + &code{op: opload, v: p}, + &code{op: opcallpc}, + &code{op: opload, v: v}, + &code{op: oppathend}, + &code{op: opstore, v: p}, // as $p (.; + &code{op: opforklabel, v: l}, // label $l | + &code{op: opload, v: v}, // + &code{op: opfork, v: len(c.codes) + 36}, // [L2] + &code{op: oppop}, // (getpath($p) | + &code{op: opload, v: a}, + &code{op: opload, v: p}, + &code{op: opload, v: v}, + &code{op: opcall, v: [3]any{internalFuncs["getpath"].callback, 1, "getpath"}}, + &code{op: opload, v: f}, // f) + &code{op: opcallpc}, + &code{op: opload, v: p}, // setpath($p; ...) + &code{op: opload, v: v}, + &code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}}, + &code{op: opstore, v: v}, + &code{op: opload, v: v}, // ., break $l + &code{op: opfork, v: len(c.codes) + 34}, // [L4] + &code{op: opjump, v: len(c.codes) + 38}, // [L3] + &code{op: opload, v: l}, // [L4] + &code{op: opcall, v: [3]any{funcBreak(""), 0, "_break"}}, + &code{op: opload, v: p}, // append $p to $d [L2] + &code{op: opappend, v: d}, // + &code{op: opbacktrack}, // ) | [L3] + &code{op: oppop}, // delpaths($d); [L1] + &code{op: opload, v: a}, + &code{op: opload, v: d}, + &code{op: opload, v: v}, + &code{op: opcall, v: [3]any{funcDelpathsWithAllocator, 2, "_delpaths"}}, + &code{op: opret}, + ) +} + +func (c *compiler) funcBuiltins(any, []any) any { type funcNameArity struct { name string arity int @@ -1027,14 +1154,14 @@ func (c *compiler) funcBuiltins(interface{}, []interface{}) interface{} { return xs[i].name < xs[j].name || xs[i].name == xs[j].name && xs[i].arity < xs[j].arity }) - ys := make([]interface{}, len(xs)) + ys := make([]any, len(xs)) for i, x := range xs { ys[i] = x.name + "/" + strconv.Itoa(x.arity) } return ys } -func (c *compiler) funcInput(interface{}, []interface{}) interface{} { +func (c *compiler) funcInput(any, []any) any { v, ok := c.inputIter.Next() if !ok { return errors.New("break") @@ -1042,7 +1169,7 @@ func (c *compiler) funcInput(interface{}, []interface{}) interface{} { return normalizeNumbers(v) } -func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} { +func (c *compiler) funcModulemeta(v any, _ []any) any { s, ok := v.(string) if !ok { return &funcTypeError{"modulemeta", v} @@ -1053,7 +1180,7 @@ func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} { var q *Query var err error if moduleLoader, ok := c.moduleLoader.(interface { - LoadModuleWithMeta(string, map[string]interface{}) (*Query, error) + LoadModuleWithMeta(string, map[string]any) (*Query, error) }); ok { if q, err = moduleLoader.LoadModuleWithMeta(s, nil); err != nil { return err @@ -1067,13 +1194,13 @@ func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} { } meta := q.Meta.ToValue() if meta == nil { - meta = make(map[string]interface{}) + meta = 
make(map[string]any) } - var deps []interface{} + var deps []any for _, i := range q.Imports { v := i.Meta.ToValue() if v == nil { - v = make(map[string]interface{}) + v = make(map[string]any) } else { for k := range v { // dirty hack to remove the internal fields @@ -1103,7 +1230,7 @@ func (c *compiler) funcModulemeta(v interface{}, _ []interface{}) interface{} { func (c *compiler) compileObject(e *Object) error { c.appendCodeInfo(e) if len(e.KeyVals) == 0 { - c.append(&code{op: opconst, v: map[string]interface{}{}}) + c.append(&code{op: opconst, v: map[string]any{}}) return nil } defer c.newScopeDepth()() @@ -1128,7 +1255,7 @@ func (c *compiler) compileObject(e *Object) error { return nil } } - w := make(map[string]interface{}, l) + w := make(map[string]any, l) for i := 0; i < l; i++ { w[c.codes[pc+i*3].v.(string)] = c.codes[pc+i*3+2].v } @@ -1138,61 +1265,57 @@ func (c *compiler) compileObject(e *Object) error { } func (c *compiler) compileObjectKeyVal(v [2]int, kv *ObjectKeyVal) error { - if kv.KeyOnly != "" { - if kv.KeyOnly[0] == '$' { - c.append(&code{op: oppush, v: kv.KeyOnly[1:]}) - c.append(&code{op: opload, v: v}) - return c.compileFunc(&Func{Name: kv.KeyOnly}) - } - c.append(&code{op: oppush, v: kv.KeyOnly}) - c.append(&code{op: opload, v: v}) - return c.compileIndex(&Term{Type: TermTypeIdentity}, &Index{Name: kv.KeyOnly}) - } else if kv.KeyOnlyString != nil { - c.append(&code{op: opload, v: v}) - if err := c.compileString(kv.KeyOnlyString, nil); err != nil { - return err - } - c.append(&code{op: opdup}) - c.append(&code{op: opload, v: v}) - c.append(&code{op: opload, v: v}) - // ref: compileCall - c.append(&code{op: opcall, v: [3]interface{}{internalFuncs["_index"].callback, 2, "_index"}}) - return nil - } else { - if kv.KeyQuery != nil { - c.append(&code{op: opload, v: v}) - f := c.newScopeDepth() - if err := c.compileQuery(kv.KeyQuery); err != nil { - return err + if key := kv.Key; key != "" { + if key[0] == '$' { + if kv.Val == nil { // {$foo} == {foo:$foo} + c.append(&code{op: oppush, v: key[1:]}) } - f() - } else if kv.KeyString != nil { c.append(&code{op: opload, v: v}) - if err := c.compileString(kv.KeyString, nil); err != nil { + if err := c.compileFunc(&Func{Name: key}); err != nil { return err } - if d := c.codes[len(c.codes)-1]; d.op == opconst { - c.codes[len(c.codes)-2] = &code{op: oppush, v: d.v} - c.codes = c.codes[:len(c.codes)-1] + } else { + c.append(&code{op: oppush, v: key}) + if kv.Val == nil { // {foo} == {foo:.foo} + c.append(&code{op: opload, v: v}) + c.append(&code{op: opindex, v: key}) } - } else if kv.Key[0] == '$' { + } + } else if key := kv.KeyString; key != nil { + if key.Queries == nil { + c.append(&code{op: oppush, v: key.Str}) + if kv.Val == nil { // {"foo"} == {"foo":.["foo"]} + c.append(&code{op: opload, v: v}) + c.append(&code{op: opindex, v: key.Str}) + } + } else { c.append(&code{op: opload, v: v}) - if err := c.compileFunc(&Func{Name: kv.Key}); err != nil { + if err := c.compileString(key, nil); err != nil { return err } - } else { - c.append(&code{op: oppush, v: kv.Key}) + if kv.Val == nil { + c.append(&code{op: opdup}) + c.append(&code{op: opload, v: v}) + c.append(&code{op: oppush, v: nil}) + // ref: compileCall + c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}}) + } } + } else if kv.KeyQuery != nil { c.append(&code{op: opload, v: v}) - return c.compileObjectVal(kv.Val) - } -} - -func (c *compiler) compileObjectVal(e *ObjectVal) error { - for _, e := range e.Queries { - if err := c.compileQuery(e); 
err != nil { + f := c.newScopeDepth() + if err := c.compileQuery(kv.KeyQuery); err != nil { return err } + f() + } + if kv.Val != nil { + c.append(&code{op: opload, v: v}) + for _, e := range kv.Val.Queries { + if err := c.compileQuery(e); err != nil { + return err + } + } } return nil } @@ -1200,25 +1323,23 @@ func (c *compiler) compileObjectVal(e *ObjectVal) error { func (c *compiler) compileArray(e *Array) error { c.appendCodeInfo(e) if e.Query == nil { - c.append(&code{op: opconst, v: []interface{}{}}) + c.append(&code{op: opconst, v: []any{}}) return nil } - c.append(&code{op: oppush, v: []interface{}{}}) + c.append(&code{op: oppush, v: []any{}}) arr := c.newVariable() c.append(&code{op: opstore, v: arr}) pc := len(c.codes) - c.append(&code{op: opfork}) - defer func() { - if pc < len(c.codes) { - c.codes[pc].v = c.pc() - 2 - } - }() + setfork := c.lazy(func() *code { + return &code{op: opfork, v: len(c.codes)} + }) defer c.newScopeDepth()() if err := c.compileQuery(e.Query); err != nil { return err } c.append(&code{op: opappend, v: arr}) c.append(&code{op: opbacktrack}) + setfork() c.append(&code{op: oppop}) c.append(&code{op: opload, v: arr}) if e.Query.Op == OpPipe { @@ -1236,7 +1357,7 @@ func (c *compiler) compileArray(e *Array) error { return nil } } - v := make([]interface{}, l) + v := make([]any, l) for i := 0; i < l; i++ { v[i] = c.codes[pc+i*2+l].v } @@ -1247,6 +1368,10 @@ func (c *compiler) compileArray(e *Array) error { func (c *compiler) compileUnary(e *Unary) error { c.appendCodeInfo(e) + if v := e.toNumber(); v != nil { + c.append(&code{op: opconst, v: v}) + return nil + } if err := c.compileTerm(e.Term); err != nil { return err } @@ -1260,12 +1385,12 @@ func (c *compiler) compileUnary(e *Unary) error { } } -func (c *compiler) compileFormat(fmt string, str *String) error { - f := formatToFunc(fmt) +func (c *compiler) compileFormat(format string, str *String) error { + f := formatToFunc(format) if f == nil { f = &Func{ Name: "format", - Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: fmt[1:]}}}}, + Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: format[1:]}}}}, } } if str == nil { @@ -1274,8 +1399,8 @@ func (c *compiler) compileFormat(fmt string, str *String) error { return c.compileString(str, f) } -func formatToFunc(fmt string) *Func { - switch fmt { +func formatToFunc(format string) *Func { + switch format { case "@text": return &Func{Name: "tostring"} case "@json": @@ -1328,14 +1453,14 @@ func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error { if err := c.compileTerm(e); err != nil { return err } - c.append(&code{op: opeach}) + c.append(&code{op: opiter}) return nil } else if s.Optional { if len(e.SuffixList) > 0 { - if u, ok := e.SuffixList[len(e.SuffixList)-1].toTerm(); ok { - t := *e // clone without changing e - (&t).SuffixList = t.SuffixList[:len(e.SuffixList)-1] - if err := c.compileTerm(&t); err != nil { + if u := e.SuffixList[len(e.SuffixList)-1].toTerm(); u != nil { + // no need to clone (ref: compileTerm) + e.SuffixList = e.SuffixList[:len(e.SuffixList)-1] + if err := c.compileTerm(e); err != nil { return err } e = u @@ -1343,12 +1468,7 @@ func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error { } return c.compileTry(&Try{Body: &Query{Term: e}}) } else if s.Bind != nil { - c.append(&code{op: opdup}) - c.append(&code{op: opexpbegin}) - if err := c.compileTerm(e); err != nil { - return err - } - return c.compileBind(s.Bind) + return c.compileBind(e, s.Bind) } else { return fmt.Errorf("invalid suffix: %s", 
s) } @@ -1356,46 +1476,56 @@ func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error { func (c *compiler) compileCall(name string, args []*Query) error { fn := internalFuncs[name] + var indexing int + switch name { + case "_index", "_slice": + indexing = 1 + case "getpath": + indexing = 0 + default: + indexing = -1 + } if err := c.compileCallInternal( - [3]interface{}{fn.callback, len(args), name}, + [3]any{fn.callback, len(args), name}, args, true, - name == "_index" || name == "_slice", + indexing, ); err != nil { return err } if fn.iter { - c.append(&code{op: opeach}) + c.append(&code{op: opiter}) } return nil } func (c *compiler) compileCallPc(fn *funcinfo, args []*Query) error { - return c.compileCallInternal(fn.pc, args, false, false) + return c.compileCallInternal(fn.pc, args, false, -1) } func (c *compiler) compileCallInternal( - fn interface{}, args []*Query, internal, indexing bool) error { + fn any, args []*Query, internal bool, indexing int, +) error { if len(args) == 0 { c.append(&code{op: opcall, v: fn}) return nil } - idx := c.newVariable() - c.append(&code{op: opstore, v: idx}) - if indexing && len(args) > 1 { + v := c.newVariable() + c.append(&code{op: opstore, v: v}) + if indexing >= 0 { c.append(&code{op: opexpbegin}) } for i := len(args) - 1; i >= 0; i-- { - pc := c.pc() + 1 // skip opjump (ref: compileFuncDef) + pc := len(c.codes) + 1 // skip opjump (ref: compileFuncDef) name := "lambda:" + strconv.Itoa(pc) if err := c.compileFuncDef(&FuncDef{Name: name, Body: args[i]}, false); err != nil { return err } if internal { - switch c.pc() - pc { + switch len(c.codes) - pc { case 2: // optimize identity argument (opscope, opret) j := len(c.codes) - 3 - c.codes[j] = &code{op: opload, v: idx} + c.codes[j] = &code{op: opload, v: v} c.codes = c.codes[:j+1] s := c.scopes[len(c.scopes)-1] s.funcs = s.funcs[:len(s.funcs)-1] @@ -1406,7 +1536,7 @@ func (c *compiler) compileCallInternal( c.codes[j] = &code{op: oppush, v: c.codes[j+2].v} c.codes = c.codes[:j+1] } else { - c.codes[j] = &code{op: opload, v: idx} + c.codes[j] = &code{op: opload, v: v} c.codes[j+1] = c.codes[j+2] c.codes = c.codes[:j+2] } @@ -1414,14 +1544,14 @@ func (c *compiler) compileCallInternal( s.funcs = s.funcs[:len(s.funcs)-1] c.deleteCodeInfo(name) default: - c.append(&code{op: opload, v: idx}) + c.append(&code{op: opload, v: v}) c.append(&code{op: oppushpc, v: pc}) c.append(&code{op: opcallpc}) } } else { c.append(&code{op: oppushpc, v: pc}) } - if indexing && i == 1 { + if i == indexing { if c.codes[len(c.codes)-2].op == opexpbegin { c.codes[len(c.codes)-2] = c.codes[len(c.codes)-1] c.codes = c.codes[:len(c.codes)-1] @@ -1430,7 +1560,11 @@ func (c *compiler) compileCallInternal( } } } - c.append(&code{op: opload, v: idx}) + if indexing > 0 { + c.append(&code{op: oppush, v: nil}) + } else { + c.append(&code{op: opload, v: v}) + } c.append(&code{op: opcall, v: fn}) return nil } @@ -1439,8 +1573,8 @@ func (c *compiler) append(code *code) { c.codes = append(c.codes, code) } -func (c *compiler) pc() int { - return len(c.codes) +func (c *compiler) appends(codes ...*code) { + c.codes = append(c.codes, codes...) 
} func (c *compiler) lazy(f func() *code) func() { diff --git a/vendor/github.com/itchyny/gojq/debug.go b/vendor/github.com/itchyny/gojq/debug.go index 4a346fb2..ad3d7216 100644 --- a/vendor/github.com/itchyny/gojq/debug.go +++ b/vendor/github.com/itchyny/gojq/debug.go @@ -1,5 +1,5 @@ -//go:build debug -// +build debug +//go:build gojq_debug +// +build gojq_debug package gojq @@ -32,7 +32,7 @@ type codeinfo struct { pc int } -func (c *compiler) appendCodeInfo(x interface{}) { +func (c *compiler) appendCodeInfo(x any) { if !debug { return } @@ -47,7 +47,7 @@ func (c *compiler) appendCodeInfo(x interface{}) { if c.codes[len(c.codes)-1] != nil && c.codes[len(c.codes)-1].op == opret && strings.HasPrefix(name, "end of ") { diff = -1 } - c.codeinfos = append(c.codeinfos, codeinfo{name, c.pc() + diff}) + c.codeinfos = append(c.codeinfos, codeinfo{name, len(c.codes) + diff}) } func (c *compiler) deleteCodeInfo(name string) { @@ -182,7 +182,7 @@ func debugOperand(c *code) string { switch v := c.v.(type) { case int: return strconv.Itoa(v) - case [3]interface{}: + case [3]any: return fmt.Sprintf("%s/%d", v[2], v[1]) default: panic(c) @@ -192,17 +192,21 @@ func debugOperand(c *code) string { } } -func debugValue(v interface{}) string { +func debugValue(v any) string { switch v := v.(type) { case Iter: return fmt.Sprintf("gojq.Iter(%#v)", v) + case []pathValue: + return fmt.Sprintf("[]gojq.pathValue(%v)", v) case [2]int: return fmt.Sprintf("[%d,%d]", v[0], v[1]) case [3]int: return fmt.Sprintf("[%d,%d,%d]", v[0], v[1], v[2]) - case [3]interface{}: + case [3]any: return fmt.Sprintf("[%v,%v,%v]", v[0], v[1], v[2]) + case allocator: + return fmt.Sprintf("%v", v) default: - return previewValue(v) + return Preview(v) } } diff --git a/vendor/github.com/itchyny/gojq/deepequal.go b/vendor/github.com/itchyny/gojq/deepequal.go deleted file mode 100644 index 37c56e42..00000000 --- a/vendor/github.com/itchyny/gojq/deepequal.go +++ /dev/null @@ -1,48 +0,0 @@ -package gojq - -import ( - "math" - "math/big" -) - -func deepEqual(l, r interface{}) bool { - return binopTypeSwitch(l, r, - func(l, r int) interface{} { - return l == r - }, - func(l, r float64) interface{} { - return l == r || math.IsNaN(l) && math.IsNaN(r) - }, - func(l, r *big.Int) interface{} { - return l.Cmp(r) == 0 - }, - func(l, r string) interface{} { - return l == r - }, - func(l, r []interface{}) interface{} { - if len(l) != len(r) { - return false - } - for i, v := range l { - if !deepEqual(v, r[i]) { - return false - } - } - return true - }, - func(l, r map[string]interface{}) interface{} { - if len(l) != len(r) { - return false - } - for k, v := range l { - if !deepEqual(v, r[k]) { - return false - } - } - return true - }, - func(l, r interface{}) interface{} { - return l == r - }, - ).(bool) -} diff --git a/vendor/github.com/itchyny/gojq/encoder.go b/vendor/github.com/itchyny/gojq/encoder.go index c7905f96..3233e8a9 100644 --- a/vendor/github.com/itchyny/gojq/encoder.go +++ b/vendor/github.com/itchyny/gojq/encoder.go @@ -14,25 +14,29 @@ import ( // Marshal returns the jq-flavored JSON encoding of v. // -// This method only accepts limited types (nil, bool, int, float64, *big.Int, -// string, []interface{} and map[string]interface{}) because these are the -// possible types a gojq iterator can emit. This method marshals NaN to null, -// truncates infinities to (+|-) math.MaxFloat64, uses \b and \f in strings, -// and does not escape '<' and '>' for embedding in HTML. 
These behaviors are -// based on the marshaler of jq command and different from Go standard library -// method json.Marshal. -func Marshal(v interface{}) ([]byte, error) { +// This method accepts only limited types (nil, bool, int, float64, *big.Int, +// string, []any, and map[string]any) because these are the possible types a +// gojq iterator can emit. This method marshals NaN to null, truncates +// infinities to (+|-) math.MaxFloat64, uses \b and \f in strings, and does not +// escape '<', '>', '&', '\u2028', and '\u2029'. These behaviors are based on +// the marshaler of jq command, and different from json.Marshal in the Go +// standard library. Note that the result is not safe to embed in HTML. +func Marshal(v any) ([]byte, error) { var b bytes.Buffer (&encoder{w: &b}).encode(v) return b.Bytes(), nil } -func jsonMarshal(v interface{}) string { +func jsonMarshal(v any) string { var sb strings.Builder (&encoder{w: &sb}).encode(v) return sb.String() } +func jsonEncodeString(sb *strings.Builder, v string) { + (&encoder{w: sb}).encodeString(v) +} + type encoder struct { w interface { io.Writer @@ -42,7 +46,7 @@ type encoder struct { buf [64]byte } -func (e *encoder) encode(v interface{}) { +func (e *encoder) encode(v any) { switch v := v.(type) { case nil: e.w.WriteString("null") @@ -60,12 +64,12 @@ func (e *encoder) encode(v interface{}) { e.w.Write(v.Append(e.buf[:0], 10)) case string: e.encodeString(v) - case []interface{}: + case []any: e.encodeArray(v) - case map[string]interface{}: - e.encodeMap(v) + case map[string]any: + e.encodeObject(v) default: - panic(fmt.Sprintf("invalid value: %v", v)) + panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v)) } } @@ -80,12 +84,12 @@ func (e *encoder) encodeFloat64(f float64) { } else if f <= -math.MaxFloat64 { f = -math.MaxFloat64 } - fmt := byte('f') + format := byte('f') if x := math.Abs(f); x != 0 && x < 1e-6 || x >= 1e21 { - fmt = 'e' + format = 'e' } - buf := strconv.AppendFloat(e.buf[:0], f, fmt, -1, 64) - if fmt == 'e' { + buf := strconv.AppendFloat(e.buf[:0], f, format, -1, 64) + if format == 'e' { // clean up e-09 to e-9 if n := len(buf); n >= 4 && buf[n-4] == 'e' && buf[n-3] == '-' && buf[n-2] == '0' { buf[n-2] = buf[n-1] @@ -101,30 +105,31 @@ func (e *encoder) encodeString(s string) { start := 0 for i := 0; i < len(s); { if b := s[i]; b < utf8.RuneSelf { - if ']' <= b && b <= '~' || '#' <= b && b <= '[' || b == ' ' || b == '!' 
{ + if ' ' <= b && b <= '~' && b != '"' && b != '\\' { i++ continue } if start < i { e.w.WriteString(s[start:i]) } - e.w.WriteByte('\\') switch b { - case '\\', '"': - e.w.WriteByte(b) + case '"': + e.w.WriteString(`\"`) + case '\\': + e.w.WriteString(`\\`) case '\b': - e.w.WriteByte('b') + e.w.WriteString(`\b`) case '\f': - e.w.WriteByte('f') + e.w.WriteString(`\f`) case '\n': - e.w.WriteByte('n') + e.w.WriteString(`\n`) case '\r': - e.w.WriteByte('r') + e.w.WriteString(`\r`) case '\t': - e.w.WriteByte('t') + e.w.WriteString(`\t`) default: const hex = "0123456789abcdef" - e.w.WriteString("u00") + e.w.WriteString(`\u00`) e.w.WriteByte(hex[b>>4]) e.w.WriteByte(hex[b&0xF]) } @@ -150,7 +155,7 @@ func (e *encoder) encodeString(s string) { e.w.WriteByte('"') } -func (e *encoder) encodeArray(vs []interface{}) { +func (e *encoder) encodeArray(vs []any) { e.w.WriteByte('[') for i, v := range vs { if i > 0 { @@ -161,11 +166,11 @@ func (e *encoder) encodeArray(vs []interface{}) { e.w.WriteByte(']') } -func (e *encoder) encodeMap(vs map[string]interface{}) { +func (e *encoder) encodeObject(vs map[string]any) { e.w.WriteByte('{') type keyVal struct { key string - val interface{} + val any } kvs := make([]keyVal, len(vs)) var i int diff --git a/vendor/github.com/itchyny/gojq/env.go b/vendor/github.com/itchyny/gojq/env.go index dd1859c4..bf058eda 100644 --- a/vendor/github.com/itchyny/gojq/env.go +++ b/vendor/github.com/itchyny/gojq/env.go @@ -7,7 +7,7 @@ type env struct { stack *stack paths *stack scopes *scopeStack - values []interface{} + values []any codes []*code codeinfos []codeinfo forks []fork @@ -15,7 +15,7 @@ type env struct { offset int expdepth int label int - args [32]interface{} // len(env.args) > maxarity + args [32]any // len(env.args) > maxarity ctx context.Context } diff --git a/vendor/github.com/itchyny/gojq/error.go b/vendor/github.com/itchyny/gojq/error.go index c13e9777..695463f3 100644 --- a/vendor/github.com/itchyny/gojq/error.go +++ b/vendor/github.com/itchyny/gojq/error.go @@ -1,23 +1,18 @@ package gojq -import ( - "fmt" - "math/big" - "strconv" - "strings" -) +import "strconv" // ValueError is an interface for errors with a value for internal function. // Return an error implementing this interface when you want to catch error // values (not error messages) by try-catch, just like built-in error function. -// Refer to WithFunction to add a custom internal function. +// Refer to [WithFunction] to add a custom internal function. 
type ValueError interface { error - Value() interface{} + Value() any } type expectedObjectError struct { - v interface{} + v any } func (err *expectedObjectError) Error() string { @@ -25,7 +20,7 @@ func (err *expectedObjectError) Error() string { } type expectedArrayError struct { - v interface{} + v any } func (err *expectedArrayError) Error() string { @@ -33,7 +28,7 @@ func (err *expectedArrayError) Error() string { } type expectedStringError struct { - v interface{} + v any } func (err *expectedStringError) Error() string { @@ -41,7 +36,7 @@ func (err *expectedStringError) Error() string { } type iteratorError struct { - v interface{} + v any } func (err *iteratorError) Error() string { @@ -49,15 +44,15 @@ func (err *iteratorError) Error() string { } type arrayIndexTooLargeError struct { - v interface{} + v any } func (err *arrayIndexTooLargeError) Error() string { - return "array index too large: " + previewValue(err.v) + return "array index too large: " + Preview(err.v) } type objectKeyNotStringError struct { - v interface{} + v any } func (err *objectKeyNotStringError) Error() string { @@ -65,15 +60,23 @@ func (err *objectKeyNotStringError) Error() string { } type arrayIndexNotNumberError struct { - v interface{} + v any } func (err *arrayIndexNotNumberError) Error() string { return "expected a number for indexing an array but got: " + typeErrorPreview(err.v) } +type stringIndexNotNumberError struct { + v any +} + +func (err *stringIndexNotNumberError) Error() string { + return "expected a number for indexing a string but got: " + typeErrorPreview(err.v) +} + type expectedStartEndError struct { - v interface{} + v any } func (err *expectedStartEndError) Error() string { @@ -82,7 +85,7 @@ func (err *expectedStartEndError) Error() string { type lengthMismatchError struct { name string - v, x []interface{} + v, x []any } func (err *lengthMismatchError) Error() string { @@ -105,7 +108,7 @@ func (err *funcNotFoundError) Error() string { type funcTypeError struct { name string - v interface{} + v any } func (err *funcTypeError) Error() string { @@ -113,7 +116,7 @@ func (err *funcTypeError) Error() string { } type exitCodeError struct { - value interface{} + value any code int halt bool } @@ -129,7 +132,7 @@ func (err *exitCodeError) IsEmptyError() bool { return err.value == nil } -func (err *exitCodeError) Value() interface{} { +func (err *exitCodeError) Value() any { return err.value } @@ -141,25 +144,41 @@ func (err *exitCodeError) IsHaltError() bool { return err.halt } -type funcContainsError struct { - l, r interface{} +type containsTypeError struct { + l, r any } -func (err *funcContainsError) Error() string { - return "cannot check contains(" + previewValue(err.r) + "): " + typeErrorPreview(err.l) +func (err *containsTypeError) Error() string { + return "cannot check contains(" + Preview(err.r) + "): " + typeErrorPreview(err.l) } type hasKeyTypeError struct { - l, r interface{} + l, r any } func (err *hasKeyTypeError) Error() string { return "cannot check whether " + typeErrorPreview(err.l) + " has a key: " + typeErrorPreview(err.r) } +type flattenDepthError struct { + v float64 +} + +func (err *flattenDepthError) Error() string { + return "flatten depth must not be negative: " + typeErrorPreview(err.v) +} + +type joinTypeError struct { + v any +} + +func (err *joinTypeError) Error() string { + return "cannot join: " + typeErrorPreview(err.v) +} + type unaryTypeError struct { name string - v interface{} + v any } func (err *unaryTypeError) Error() string { @@ -168,7 +187,7 @@ func 
(err *unaryTypeError) Error() string { type binopTypeError struct { name string - l, r interface{} + l, r any } func (err *binopTypeError) Error() string { @@ -176,7 +195,7 @@ func (err *binopTypeError) Error() string { } type zeroDivisionError struct { - l, r interface{} + l, r any } func (err *zeroDivisionError) Error() string { @@ -184,11 +203,11 @@ func (err *zeroDivisionError) Error() string { } type zeroModuloError struct { - l, r interface{} + l, r any } func (err *zeroModuloError) Error() string { - return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r) + "" + return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r) } type formatNotFoundError struct { @@ -199,21 +218,13 @@ func (err *formatNotFoundError) Error() string { return "format not defined: " + err.n } -type formatCsvTsvRowError struct { +type formatRowError struct { typ string - v interface{} + v any } -func (err *formatCsvTsvRowError) Error() string { - return "invalid " + err.typ + " row: " + typeErrorPreview(err.v) -} - -type formatShError struct { - v interface{} -} - -func (err *formatShError) Error() string { - return "cannot escape for shell: " + typeErrorPreview(err.v) +func (err *formatRowError) Error() string { + return "@" + err.typ + " cannot format an array including: " + typeErrorPreview(err.v) } type tooManyVariableValuesError struct{} @@ -248,7 +259,7 @@ func (err *variableNameError) Error() string { type breakError struct { n string - v interface{} + v any } func (err *breakError) Error() string { @@ -268,7 +279,7 @@ func (err *tryEndError) Error() string { } type invalidPathError struct { - v interface{} + v any } func (err *invalidPathError) Error() string { @@ -276,7 +287,7 @@ func (err *invalidPathError) Error() string { } type invalidPathIterError struct { - v interface{} + v any } func (err *invalidPathIterError) Error() string { @@ -284,24 +295,24 @@ func (err *invalidPathIterError) Error() string { } type getpathError struct { - v, path interface{} + v, path any } func (err *getpathError) Error() string { - return "cannot getpath with " + previewValue(err.path) + " against: " + typeErrorPreview(err.v) + "" + return "cannot getpath with " + Preview(err.path) + " against: " + typeErrorPreview(err.v) } type queryParseError struct { - typ, fname, contents string - err error + fname, contents string + err error } -func (err *queryParseError) QueryParseError() (string, string, string, error) { - return err.typ, err.fname, err.contents, err.err +func (err *queryParseError) QueryParseError() (string, string, error) { + return err.fname, err.contents, err.err } func (err *queryParseError) Error() string { - return "invalid " + err.typ + ": " + err.fname + ": " + err.err.Error() + return "invalid query: " + err.fname + ": " + err.err.Error() } type jsonParseError struct { @@ -317,113 +328,13 @@ func (err *jsonParseError) Error() string { return "invalid json: " + err.fname + ": " + err.err.Error() } -func typeErrorPreview(v interface{}) string { - if _, ok := v.(Iter); ok { - return "gojq.Iter" - } - p := preview(v) - if p != "" { - p = " (" + p + ")" - } - return typeof(v) + p -} - -func typeof(v interface{}) string { - switch v := v.(type) { +func typeErrorPreview(v any) string { + switch v.(type) { case nil: return "null" - case bool: - return "boolean" - case int, float64, *big.Int: - return "number" - case string: - return "string" - case []interface{}: - return "array" - case map[string]interface{}: - return "object" + case Iter: + return 
"gojq.Iter" default: - panic(fmt.Sprintf("invalid value: %v", v)) - } -} - -type limitedWriter struct { - buf []byte - off int -} - -func (w *limitedWriter) Write(bs []byte) (int, error) { - n := copy(w.buf[w.off:], bs) - if w.off += n; w.off == len(w.buf) { - panic(nil) - } - return n, nil -} - -func (w *limitedWriter) WriteByte(b byte) error { - w.buf[w.off] = b - if w.off++; w.off == len(w.buf) { - panic(nil) - } - return nil -} - -func (w *limitedWriter) WriteString(s string) (int, error) { - n := copy(w.buf[w.off:], s) - if w.off += n; w.off == len(w.buf) { - panic(nil) - } - return n, nil -} - -func (w *limitedWriter) String() string { - return string(w.buf[:w.off]) -} - -func jsonLimitedMarshal(v interface{}, n int) (s string) { - w := &limitedWriter{buf: make([]byte, n)} - defer func() { - recover() - s = w.String() - }() - (&encoder{w: w}).encode(v) - return -} - -func preview(v interface{}) string { - if v == nil { - return "" - } - s := jsonLimitedMarshal(v, 32) - if l := 30; len(s) > l { - var trailing string - switch v.(type) { - case string: - trailing = ` ..."` - case []interface{}: - trailing = " ...]" - case map[string]interface{}: - trailing = " ...}" - default: - trailing = " ..." - } - var sb strings.Builder - sb.Grow(l + 5) - for _, c := range s { - sb.WriteRune(c) - if sb.Len() >= l-len(trailing) { - sb.WriteString(trailing) - break - } - } - s = sb.String() - } - return s -} - -func previewValue(v interface{}) string { - if v == nil { - return "null" + return TypeOf(v) + " (" + Preview(v) + ")" } - return preview(v) } diff --git a/vendor/github.com/itchyny/gojq/execute.go b/vendor/github.com/itchyny/gojq/execute.go index d861c628..d43ef3e9 100644 --- a/vendor/github.com/itchyny/gojq/execute.go +++ b/vendor/github.com/itchyny/gojq/execute.go @@ -2,11 +2,12 @@ package gojq import ( "context" - "fmt" + "math" + "reflect" "sort" ) -func (env *env) execute(bc *Code, v interface{}, vars ...interface{}) Iter { +func (env *env) execute(bc *Code, v any, vars ...any) Iter { env.codes = bc.codes env.codeinfos = bc.codeinfos env.push(v) @@ -17,7 +18,7 @@ func (env *env) execute(bc *Code, v interface{}, vars ...interface{}) Iter { return env } -func (env *env) Next() (interface{}, bool) { +func (env *env) Next() (any, bool) { var err error pc, callpc, index := env.pc, len(env.codes)-1, -1 backtrack, hasCtx := env.backtrack, env.ctx != context.Background() @@ -42,9 +43,9 @@ loop: case oppop: env.pop() case opdup: - x := env.pop() - env.push(x) - env.push(x) + v := env.pop() + env.push(v) + env.push(v) case opconst: env.pop() env.push(code.v) @@ -57,7 +58,7 @@ loop: break loop } n := code.v.(int) - m := make(map[string]interface{}, n) + m := make(map[string]any, n) for i := 0; i < n; i++ { v, k := env.pop(), env.pop() s, ok := k.(string) @@ -70,7 +71,7 @@ loop: env.push(m) case opappend: i := env.index(code.v.([2]int)) - env.values[i] = append(env.values[i].([]interface{}), env.pop()) + env.values[i] = append(env.values[i].([]any), env.pop()) case opfork: if backtrack { if err != nil { @@ -78,9 +79,8 @@ loop: } pc, backtrack = code.v.(int), false goto loop - } else { - env.pushfork(pc) } + env.pushfork(pc) case opforktrybegin: if backtrack { if err == nil { @@ -90,6 +90,8 @@ loop: case *tryEndError: err = er.err break loop + case *breakError: + break loop case ValueError: if er, ok := er.(*exitCodeError); ok && er.halt { break loop @@ -107,18 +109,16 @@ loop: } pc, backtrack, err = code.v.(int), false, nil goto loop - } else { - env.pushfork(pc) } + env.pushfork(pc) case 
opforktryend: if backtrack { if err != nil { err = &tryEndError{err} } break loop - } else { - env.pushfork(pc) } + env.pushfork(pc) case opforkalt: if backtrack { if err == nil { @@ -126,9 +126,8 @@ loop: } pc, backtrack, err = code.v.(int), false, nil goto loop - } else { - env.pushfork(pc) } + env.pushfork(pc) case opforklabel: if backtrack { label := env.pop() @@ -136,13 +135,12 @@ loop: err = nil } break loop - } else { - env.push(env.label) - env.pushfork(pc) - env.pop() - env.values[env.index(code.v.([2]int))] = env.label - env.label++ } + env.push(env.label) + env.pushfork(pc) + env.pop() + env.values[env.index(code.v.([2]int))] = env.label + env.label++ case opbacktrack: break loop case opjump: @@ -153,6 +151,30 @@ loop: pc = code.v.(int) goto loop } + case opindex, opindexarray: + if backtrack { + break loop + } + p, v := code.v, env.pop() + if code.op == opindexarray && v != nil { + if _, ok := v.([]any); !ok { + err = &expectedArrayError{v} + break loop + } + } + w := funcIndex2(nil, v, p) + if e, ok := w.(error); ok { + err = e + break loop + } + env.push(w) + if !env.paths.empty() && env.expdepth == 0 { + if !env.pathIntact(v) { + err = &invalidPathError{v} + break loop + } + env.paths.push(pathValue{path: p, value: w}) + } case opcall: if backtrack { break loop @@ -161,13 +183,13 @@ loop: case int: pc, callpc, index = v, pc, env.scopes.index goto loop - case [3]interface{}: + case [3]any: argcnt := v[1].(int) x, args := env.pop(), env.args[:argcnt] for i := 0; i < argcnt; i++ { args[i] = env.pop() } - w := v[0].(func(interface{}, []interface{}) interface{})(x, args) + w := v[0].(func(any, []any) any)(x, args) if e, ok := w.(error); ok { if er, ok := e.(*exitCodeError); !ok || er.value != nil || er.halt { err = e @@ -175,14 +197,31 @@ loop: break loop } env.push(w) - if !env.paths.empty() { - var ps []interface{} - ps, err = env.pathEntries(v[2].(string), x, args) - if err != nil { - break loop - } - for _, p := range ps { - env.paths.push(pathValue{path: p, value: w}) + if !env.paths.empty() && env.expdepth == 0 { + switch v[2].(string) { + case "_index": + if x = args[0]; !env.pathIntact(x) { + err = &invalidPathError{x} + break loop + } + env.paths.push(pathValue{path: args[1], value: w}) + case "_slice": + if x = args[0]; !env.pathIntact(x) { + err = &invalidPathError{x} + break loop + } + env.paths.push(pathValue{ + path: map[string]any{"start": args[2], "end": args[1]}, + value: w, + }) + case "getpath": + if !env.pathIntact(x) { + err = &invalidPathError{x} + break loop + } + for _, p := range args[0].([]any) { + env.paths.push(pathValue{path: p, value: w}) + } } } default: @@ -199,7 +238,7 @@ loop: goto loop case opscope: xs := code.v.([3]int) - var saveindex, outerindex, limit int + var saveindex, outerindex int if index == env.scopes.index { if callpc >= 0 { saveindex = index @@ -207,7 +246,7 @@ loop: callpc, saveindex = env.popscope() } } else { - env.scopes.save(&saveindex, &limit) + saveindex, _ = env.scopes.save() env.scopes.index = index } if outerindex = index; outerindex >= 0 { @@ -218,7 +257,7 @@ loop: env.scopes.push(scope{xs[0], env.offset, callpc, saveindex, outerindex}) env.offset += xs[1] if env.offset > len(env.values) { - vs := make([]interface{}, env.offset*2) + vs := make([]any, env.offset*2) copy(vs, env.values) env.values = vs } @@ -230,7 +269,7 @@ loop: if env.scopes.empty() { return env.pop(), true } - case opeach: + case opiter: if err != nil { break loop } @@ -239,9 +278,8 @@ loop: switch v := env.pop().(type) { case []pathValue: xs = v - case 
[]interface{}: - if !env.paths.empty() && env.expdepth == 0 && - !deepEqual(v, env.paths.top().(pathValue).value) { + case []any: + if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) { err = &invalidPathIterError{v} break loop } @@ -252,9 +290,8 @@ loop: for i, v := range v { xs[i] = pathValue{path: i, value: v} } - case map[string]interface{}: - if !env.paths.empty() && env.expdepth == 0 && - !deepEqual(v, env.paths.top().(pathValue).value) { + case map[string]any: + if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) { err = &invalidPathIterError{v} break loop } @@ -271,10 +308,6 @@ loop: return xs[i].path.(string) < xs[j].path.(string) }) case Iter: - if !env.paths.empty() && env.expdepth == 0 { - err = &invalidPathIterError{v} - break loop - } if w, ok := v.Next(); ok { env.push(v) env.pushfork(pc) @@ -289,6 +322,7 @@ loop: break loop default: err = &iteratorError{v} + env.push(emptyIter{}) break loop } if len(xs) > 1 { @@ -312,18 +346,13 @@ loop: if backtrack { break loop } - if env.expdepth > 0 { - panic(fmt.Sprintf("unexpected expdepth: %d", env.expdepth)) - } env.pop() - x := env.pop() - if deepEqual(x, env.paths.top().(pathValue).value) { - env.push(env.poppaths()) - env.expdepth = env.paths.pop().(int) - } else { - err = &invalidPathError{x} + if v := env.pop(); !env.pathIntact(v) { + err = &invalidPathError{v} break loop } + env.push(env.poppaths()) + env.expdepth = env.paths.pop().(int) default: panic(code.op) } @@ -338,11 +367,11 @@ loop: return nil, false } -func (env *env) push(v interface{}) { +func (env *env) push(v any) { env.stack.push(v) } -func (env *env) pop() interface{} { +func (env *env) pop() any { return env.stack.pop() } @@ -357,9 +386,9 @@ func (env *env) popscope() (int, int) { func (env *env) pushfork(pc int) { f := fork{pc: pc, expdepth: env.expdepth} - env.stack.save(&f.stackindex, &f.stacklimit) - env.scopes.save(&f.scopeindex, &f.scopelimit) - env.paths.save(&f.pathindex, &f.pathlimit) + f.stackindex, f.stacklimit = env.stack.save() + f.scopeindex, f.scopelimit = env.scopes.save() + f.pathindex, f.pathlimit = env.paths.save() env.forks = append(env.forks, f) env.debugForks(pc, ">>>") } @@ -386,39 +415,28 @@ func (env *env) index(v [2]int) int { } type pathValue struct { - path, value interface{} + path, value any } -func (env *env) pathEntries(name string, x interface{}, args []interface{}) ([]interface{}, error) { - switch name { - case "_index": - if env.expdepth > 0 { - return nil, nil - } else if !deepEqual(args[0], env.paths.top().(pathValue).value) { - return nil, &invalidPathError{x} - } - return []interface{}{args[1]}, nil - case "_slice": - if env.expdepth > 0 { - return nil, nil - } else if !deepEqual(args[0], env.paths.top().(pathValue).value) { - return nil, &invalidPathError{x} +func (env *env) pathIntact(v any) bool { + w := env.paths.top().(pathValue).value + switch v := v.(type) { + case []any, map[string]any: + switch w.(type) { + case []any, map[string]any: + v, w := reflect.ValueOf(v), reflect.ValueOf(w) + return v.Pointer() == w.Pointer() && v.Len() == w.Len() } - return []interface{}{map[string]interface{}{"start": args[2], "end": args[1]}}, nil - case "getpath": - if env.expdepth > 0 { - return nil, nil - } else if !deepEqual(x, env.paths.top().(pathValue).value) { - return nil, &invalidPathError{x} + case float64: + if w, ok := w.(float64); ok { + return v == w || math.IsNaN(v) && math.IsNaN(w) } - return args[0].([]interface{}), nil - default: - return nil, nil } + return v == w } -func (env *env) 
poppaths() []interface{} { - var xs []interface{} +func (env *env) poppaths() []any { + var xs []any for { p := env.paths.pop().(pathValue) if p.path == nil { diff --git a/vendor/github.com/itchyny/gojq/func.go b/vendor/github.com/itchyny/gojq/func.go index ca4caa69..d94a7a47 100644 --- a/vendor/github.com/itchyny/gojq/func.go +++ b/vendor/github.com/itchyny/gojq/func.go @@ -4,9 +4,11 @@ import ( "encoding/base64" "encoding/json" "fmt" + "io" "math" "math/big" "net/url" + "reflect" "regexp" "sort" "strconv" @@ -30,7 +32,7 @@ const ( type function struct { argcount int iter bool - callback func(interface{}, []interface{}) interface{} + callback func(any, []any) any } func (fn function) accept(cnt int) bool { @@ -51,15 +53,26 @@ func init() { "utf8bytelength": argFunc0(funcUtf8ByteLength), "keys": argFunc0(funcKeys), "has": argFunc1(funcHas), + "to_entries": argFunc0(funcToEntries), + "from_entries": argFunc0(funcFromEntries), "add": argFunc0(funcAdd), "tonumber": argFunc0(funcToNumber), "tostring": argFunc0(funcToString), "type": argFunc0(funcType), "reverse": argFunc0(funcReverse), "contains": argFunc1(funcContains), + "indices": argFunc1(funcIndices), + "index": argFunc1(funcIndex), + "rindex": argFunc1(funcRindex), + "startswith": argFunc1(funcStartsWith), + "endswith": argFunc1(funcEndsWith), + "ltrimstr": argFunc1(funcLtrimstr), + "rtrimstr": argFunc1(funcRtrimstr), "explode": argFunc0(funcExplode), "implode": argFunc0(funcImplode), "split": {argcount1 | argcount2, false, funcSplit}, + "ascii_downcase": argFunc0(funcASCIIDowncase), + "ascii_upcase": argFunc0(funcASCIIUpcase), "tojson": argFunc0(funcToJSON), "fromjson": argFunc0(funcFromJSON), "format": argFunc1(funcFormat), @@ -70,11 +83,8 @@ func init() { "_tosh": argFunc0(funcToSh), "_tobase64": argFunc0(funcToBase64), "_tobase64d": argFunc0(funcToBase64d), - "_index": argFunc2(funcIndex), + "_index": argFunc2(funcIndex2), "_slice": argFunc3(funcSlice), - "_indices": argFunc1(funcIndices), - "_lindex": argFunc1(funcLindex), - "_rindex": argFunc1(funcRindex), "_plus": argFunc0(funcOpPlus), "_negate": argFunc0(funcOpNegate), "_add": argFunc2(funcOpAdd), @@ -89,13 +99,18 @@ func init() { "_less": argFunc2(funcOpLt), "_greatereq": argFunc2(funcOpGe), "_lesseq": argFunc2(funcOpLe), + "flatten": {argcount0 | argcount1, false, funcFlatten}, "_range": {argcount3, true, funcRange}, + "min": argFunc0(funcMin), "_min_by": argFunc1(funcMinBy), + "max": argFunc0(funcMax), "_max_by": argFunc1(funcMaxBy), + "sort": argFunc0(funcSort), "_sort_by": argFunc1(funcSortBy), "_group_by": argFunc1(funcGroupBy), + "unique": argFunc0(funcUnique), "_unique_by": argFunc1(funcUniqueBy), - "_join": argFunc1(funcJoin), + "join": argFunc1(funcJoin), "sin": mathFunc("sin", math.Sin), "cos": mathFunc("cos", math.Cos), "tan": mathFunc("tan", math.Tan), @@ -176,47 +191,47 @@ func init() { "strptime": argFunc1(funcStrptime), "now": argFunc0(funcNow), "_match": argFunc3(funcMatch), + "_capture": argFunc0(funcCapture), "error": {argcount0 | argcount1, false, funcError}, "halt": argFunc0(funcHalt), "halt_error": {argcount0 | argcount1, false, funcHaltError}, - "_type_error": argFunc1(internalfuncTypeError), } } -func argFunc0(fn func(interface{}) interface{}) function { +func argFunc0(f func(any) any) function { return function{ - argcount0, false, func(v interface{}, _ []interface{}) interface{} { - return fn(v) + argcount0, false, func(v any, _ []any) any { + return f(v) }, } } -func argFunc1(fn func(_, _ interface{}) interface{}) function { +func argFunc1(f func(_, _ 
any) any) function { return function{ - argcount1, false, func(v interface{}, args []interface{}) interface{} { - return fn(v, args[0]) + argcount1, false, func(v any, args []any) any { + return f(v, args[0]) }, } } -func argFunc2(fn func(_, _, _ interface{}) interface{}) function { +func argFunc2(f func(_, _, _ any) any) function { return function{ - argcount2, false, func(v interface{}, args []interface{}) interface{} { - return fn(v, args[0], args[1]) + argcount2, false, func(v any, args []any) any { + return f(v, args[0], args[1]) }, } } -func argFunc3(fn func(_, _, _, _ interface{}) interface{}) function { +func argFunc3(f func(_, _, _, _ any) any) function { return function{ - argcount3, false, func(v interface{}, args []interface{}) interface{} { - return fn(v, args[0], args[1], args[2]) + argcount3, false, func(v any, args []any) any { + return f(v, args[0], args[1], args[2]) }, } } func mathFunc(name string, f func(float64) float64) function { - return argFunc0(func(v interface{}) interface{} { + return argFunc0(func(v any) any { x, ok := toFloat(v) if !ok { return &funcTypeError{name, v} @@ -226,7 +241,7 @@ func mathFunc(name string, f func(float64) float64) function { } func mathFunc2(name string, f func(_, _ float64) float64) function { - return argFunc2(func(_, x, y interface{}) interface{} { + return argFunc2(func(_, x, y any) any { l, ok := toFloat(x) if !ok { return &funcTypeError{name, x} @@ -240,7 +255,7 @@ func mathFunc2(name string, f func(_, _ float64) float64) function { } func mathFunc3(name string, f func(_, _, _ float64) float64) function { - return argFunc3(func(_, a, b, c interface{}) interface{} { + return argFunc3(func(_, a, b, c any) any { x, ok := toFloat(a) if !ok { return &funcTypeError{name, a} @@ -257,14 +272,10 @@ func mathFunc3(name string, f func(_, _, _ float64) float64) function { }) } -func funcLength(v interface{}) interface{} { +func funcLength(v any) any { switch v := v.(type) { - case []interface{}: - return len(v) - case map[string]interface{}: - return len(v) - case string: - return len([]rune(v)) + case nil: + return 0 case int: if v >= 0 { return v @@ -277,128 +288,207 @@ func funcLength(v interface{}) interface{} { return v } return new(big.Int).Abs(v) - case nil: - return 0 + case string: + return len([]rune(v)) + case []any: + return len(v) + case map[string]any: + return len(v) default: return &funcTypeError{"length", v} } } -func funcUtf8ByteLength(v interface{}) interface{} { - switch v := v.(type) { - case string: - return len(v) - default: +func funcUtf8ByteLength(v any) any { + s, ok := v.(string) + if !ok { return &funcTypeError{"utf8bytelength", v} } + return len(s) } -func funcKeys(v interface{}) interface{} { +func funcKeys(v any) any { switch v := v.(type) { - case []interface{}: - w := make([]interface{}, len(v)) + case []any: + w := make([]any, len(v)) for i := range v { w[i] = i } return w - case map[string]interface{}: - w := make([]string, len(v)) - var i int - for k := range v { + case map[string]any: + w := make([]any, len(v)) + for i, k := range keys(v) { w[i] = k - i++ } - sort.Strings(w) - u := make([]interface{}, len(v)) - for i, x := range w { - u[i] = x - } - return u + return w default: return &funcTypeError{"keys", v} } } -func funcHas(v, x interface{}) interface{} { +func keys(v map[string]any) []string { + w := make([]string, len(v)) + var i int + for k := range v { + w[i] = k + i++ + } + sort.Strings(w) + return w +} + +func values(v any) ([]any, bool) { + switch v := v.(type) { + case []any: + return v, true 
+ case map[string]any: + vs := make([]any, len(v)) + for i, k := range keys(v) { + vs[i] = v[k] + } + return vs, true + default: + return nil, false + } +} + +func funcHas(v, x any) any { switch v := v.(type) { - case []interface{}: + case []any: if x, ok := toInt(x); ok { return 0 <= x && x < len(v) } - return &hasKeyTypeError{v, x} - case map[string]interface{}: - switch x := x.(type) { - case string: + case map[string]any: + if x, ok := x.(string); ok { _, ok := v[x] return ok - default: - return &hasKeyTypeError{v, x} } case nil: return false - default: - return &hasKeyTypeError{v, x} } + return &hasKeyTypeError{v, x} } -func funcAdd(v interface{}) interface{} { - if vs, ok := v.(map[string]interface{}); ok { - xs := make([]string, len(vs)) - var i int - for k := range vs { - xs[i] = k - i++ +func funcToEntries(v any) any { + switch v := v.(type) { + case []any: + w := make([]any, len(v)) + for i, x := range v { + w[i] = map[string]any{"key": i, "value": x} + } + return w + case map[string]any: + w := make([]any, len(v)) + for i, k := range keys(v) { + w[i] = map[string]any{"key": k, "value": v[k]} } - sort.Strings(xs) - us := make([]interface{}, len(vs)) - for i, x := range xs { - us[i] = vs[x] + return w + default: + return &funcTypeError{"to_entries", v} + } +} + +func funcFromEntries(v any) any { + vs, ok := v.([]any) + if !ok { + return &funcTypeError{"from_entries", v} + } + w := make(map[string]any, len(vs)) + for _, v := range vs { + switch v := v.(type) { + case map[string]any: + var ( + key string + value any + ok bool + ) + for _, k := range [4]string{"key", "Key", "name", "Name"} { + if k := v[k]; k != nil && k != false { + if key, ok = k.(string); !ok { + return &objectKeyNotStringError{k} + } + break + } + } + if !ok { + return &objectKeyNotStringError{nil} + } + for _, k := range [2]string{"value", "Value"} { + if value, ok = v[k]; ok { + break + } + } + w[key] = value + default: + return &funcTypeError{"from_entries", v} } - v = us } - vs, ok := v.([]interface{}) + return w +} + +func funcAdd(v any) any { + vs, ok := values(v) if !ok { return &funcTypeError{"add", v} } v = nil for _, x := range vs { - switch y := x.(type) { - case map[string]interface{}: + switch x := x.(type) { + case nil: + continue + case string: switch w := v.(type) { case nil: - m := make(map[string]interface{}, len(y)) - for k, e := range y { - m[k] = e - } - v = m + var sb strings.Builder + sb.WriteString(x) + v = &sb continue - case map[string]interface{}: - for k, e := range y { - w[k] = e - } + case *strings.Builder: + w.WriteString(x) continue } - case []interface{}: + case []any: switch w := v.(type) { case nil: - s := make([]interface{}, len(y)) - copy(s, y) + s := make([]any, len(x)) + copy(s, x) v = s continue - case []interface{}: - v = append(w, y...) + case []any: + v = append(w, x...) 
+ continue + } + case map[string]any: + switch w := v.(type) { + case nil: + m := make(map[string]any, len(x)) + for k, e := range x { + m[k] = e + } + v = m + continue + case map[string]any: + for k, e := range x { + w[k] = e + } continue } } + if sb, ok := v.(*strings.Builder); ok { + v = sb.String() + } v = funcOpAdd(nil, v, x) if err, ok := v.(error); ok { return err } } + if sb, ok := v.(*strings.Builder); ok { + v = sb.String() + } return v } -func funcToNumber(v interface{}) interface{} { +func funcToNumber(v any) any { switch v := v.(type) { case int, float64, *big.Int: return v @@ -406,130 +496,227 @@ func funcToNumber(v interface{}) interface{} { if !newLexer(v).validNumber() { return fmt.Errorf("invalid number: %q", v) } - return normalizeNumber(json.Number(v)) + return toNumber(v) default: return &funcTypeError{"tonumber", v} } } -func funcToString(v interface{}) interface{} { +func toNumber(v string) any { + return normalizeNumber(json.Number(v)) +} + +func funcToString(v any) any { if s, ok := v.(string); ok { return s } return funcToJSON(v) } -func funcType(v interface{}) interface{} { - return typeof(v) +func funcType(v any) any { + return TypeOf(v) } -func funcReverse(v interface{}) interface{} { - vs, ok := v.([]interface{}) +func funcReverse(v any) any { + vs, ok := v.([]any) if !ok { - return &expectedArrayError{v} + return &funcTypeError{"reverse", v} } - ws := make([]interface{}, len(vs)) + ws := make([]any, len(vs)) for i, v := range vs { ws[len(ws)-i-1] = v } return ws } -func funcContains(v, x interface{}) interface{} { - switch v := v.(type) { - case nil: - if x == nil { - return true - } - case bool: - switch x := x.(type) { - case bool: - if v == x { - return true - } - } - } +func funcContains(v, x any) any { return binopTypeSwitch(v, x, - func(l, r int) interface{} { return l == r }, - func(l, r float64) interface{} { return l == r }, - func(l, r *big.Int) interface{} { return l.Cmp(r) == 0 }, - func(l, r string) interface{} { return strings.Contains(l, r) }, - func(l, r []interface{}) interface{} { - for _, x := range r { - var found bool - for _, y := range l { - if funcContains(y, x) == true { - found = true - break + func(l, r int) any { return l == r }, + func(l, r float64) any { return l == r }, + func(l, r *big.Int) any { return l.Cmp(r) == 0 }, + func(l, r string) any { return strings.Contains(l, r) }, + func(l, r []any) any { + R: + for _, r := range r { + for _, l := range l { + if funcContains(l, r) == true { + continue R } } - if !found { - return false - } + return false } return true }, - func(l, r map[string]interface{}) interface{} { - for k, rk := range r { - lk, ok := l[k] - if !ok { - return false - } - c := funcContains(lk, rk) - if _, ok := c.(error); ok { - return false - } - if c == false { + func(l, r map[string]any) any { + if len(l) < len(r) { + return false + } + for k, r := range r { + if l, ok := l[k]; !ok || funcContains(l, r) != true { return false } } return true }, - func(l, r interface{}) interface{} { return &funcContainsError{l, r} }, + func(l, r any) any { + if l == r { + return true + } + return &containsTypeError{l, r} + }, ) } -func funcExplode(v interface{}) interface{} { +func funcIndices(v, x any) any { + return indexFunc(v, x, indices) +} + +func indices(vs, xs []any) any { + var rs []any + if len(xs) == 0 { + return rs + } + for i := 0; i <= len(vs)-len(xs); i++ { + if compare(vs[i:i+len(xs)], xs) == 0 { + rs = append(rs, i) + } + } + return rs +} + +func funcIndex(v, x any) any { + return indexFunc(v, x, func(vs, 
xs []any) any { + if len(xs) == 0 { + return nil + } + for i := 0; i <= len(vs)-len(xs); i++ { + if compare(vs[i:i+len(xs)], xs) == 0 { + return i + } + } + return nil + }) +} + +func funcRindex(v, x any) any { + return indexFunc(v, x, func(vs, xs []any) any { + if len(xs) == 0 { + return nil + } + for i := len(vs) - len(xs); i >= 0; i-- { + if compare(vs[i:i+len(xs)], xs) == 0 { + return i + } + } + return nil + }) +} + +func indexFunc(v, x any, f func(_, _ []any) any) any { switch v := v.(type) { + case nil: + return nil + case []any: + switch x := x.(type) { + case []any: + return f(v, x) + default: + return f(v, []any{x}) + } case string: - return explode(v) + if x, ok := x.(string); ok { + return f(explode(v), explode(x)) + } + return &expectedStringError{x} default: + return &expectedArrayError{v} + } +} + +func funcStartsWith(v, x any) any { + s, ok := v.(string) + if !ok { + return &funcTypeError{"startswith", v} + } + t, ok := x.(string) + if !ok { + return &funcTypeError{"startswith", x} + } + return strings.HasPrefix(s, t) +} + +func funcEndsWith(v, x any) any { + s, ok := v.(string) + if !ok { + return &funcTypeError{"endswith", v} + } + t, ok := x.(string) + if !ok { + return &funcTypeError{"endswith", x} + } + return strings.HasSuffix(s, t) +} + +func funcLtrimstr(v, x any) any { + s, ok := v.(string) + if !ok { + return v + } + t, ok := x.(string) + if !ok { + return v + } + return strings.TrimPrefix(s, t) +} + +func funcRtrimstr(v, x any) any { + s, ok := v.(string) + if !ok { + return v + } + t, ok := x.(string) + if !ok { + return v + } + return strings.TrimSuffix(s, t) +} + +func funcExplode(v any) any { + s, ok := v.(string) + if !ok { return &funcTypeError{"explode", v} } + return explode(s) } -func explode(s string) []interface{} { - rs := []int32(s) - xs := make([]interface{}, len(rs)) - for i, r := range rs { +func explode(s string) []any { + xs := make([]any, len([]rune(s))) + var i int + for _, r := range s { xs[i] = int(r) + i++ } return xs } -func funcImplode(v interface{}) interface{} { - switch v := v.(type) { - case []interface{}: - return implode(v) - default: +func funcImplode(v any) any { + vs, ok := v.([]any) + if !ok { return &funcTypeError{"implode", v} } -} - -func implode(v []interface{}) interface{} { var sb strings.Builder - sb.Grow(len(v)) - for _, r := range v { - if r, ok := toInt(r); ok && 0 <= r && r <= utf8.MaxRune { + sb.Grow(len(vs)) + for _, v := range vs { + if r, ok := toInt(v); ok && 0 <= r && r <= utf8.MaxRune { sb.WriteRune(rune(r)) } else { - return &funcTypeError{"implode", v} + return &funcTypeError{"implode", vs} } } return sb.String() } -func funcSplit(v interface{}, args []interface{}) interface{} { +func funcSplit(v any, args []any) any { s, ok := v.(string) if !ok { return &funcTypeError{"split", v} @@ -556,43 +743,71 @@ func funcSplit(v interface{}, args []interface{}) interface{} { } ss = r.Split(s, -1) } - xs := make([]interface{}, len(ss)) + xs := make([]any, len(ss)) for i, s := range ss { xs[i] = s } return xs } -func funcToJSON(v interface{}) interface{} { - return jsonMarshal(v) +func funcASCIIDowncase(v any) any { + s, ok := v.(string) + if !ok { + return &funcTypeError{"ascii_downcase", v} + } + return strings.Map(func(r rune) rune { + if 'A' <= r && r <= 'Z' { + return r + ('a' - 'A') + } + return r + }, s) } -func funcFromJSON(v interface{}) interface{} { - switch v := v.(type) { - case string: - var w interface{} - err := json.Unmarshal([]byte(v), &w) - if err != nil { - return err +func funcASCIIUpcase(v any) any 
{ + s, ok := v.(string) + if !ok { + return &funcTypeError{"ascii_upcase", v} + } + return strings.Map(func(r rune) rune { + if 'a' <= r && r <= 'z' { + return r - ('a' - 'A') } - return w - default: + return r + }, s) +} + +func funcToJSON(v any) any { + return jsonMarshal(v) +} + +func funcFromJSON(v any) any { + s, ok := v.(string) + if !ok { return &funcTypeError{"fromjson", v} } + var w any + dec := json.NewDecoder(strings.NewReader(s)) + dec.UseNumber() + if err := dec.Decode(&w); err != nil { + return err + } + if _, err := dec.Token(); err != io.EOF { + return &funcTypeError{"fromjson", v} + } + return normalizeNumbers(w) } -func funcFormat(v, x interface{}) interface{} { - switch x := x.(type) { - case string: - fmt := "@" + x - f := formatToFunc(fmt) - if f == nil { - return &formatNotFoundError{fmt} - } - return internalFuncs[f.Name].callback(v, nil) - default: +func funcFormat(v, x any) any { + s, ok := x.(string) + if !ok { return &funcTypeError{"format", x} } + format := "@" + s + f := formatToFunc(format) + if f == nil { + return &formatNotFoundError{format} + } + return internalFuncs[f.Name].callback(v, nil) } var htmlEscaper = strings.NewReplacer( @@ -603,7 +818,7 @@ var htmlEscaper = strings.NewReplacer( `"`, """, ) -func funcToHTML(v interface{}) interface{} { +func funcToHTML(v any) any { switch x := funcToString(v).(type) { case string: return htmlEscaper.Replace(x) @@ -612,7 +827,7 @@ func funcToHTML(v interface{}) interface{} { } } -func funcToURI(v interface{}) interface{} { +func funcToURI(v any) any { switch x := funcToString(v).(type) { case string: return url.QueryEscape(x) @@ -621,9 +836,14 @@ func funcToURI(v interface{}) interface{} { } } -func funcToCSV(v interface{}) interface{} { - return funcToCSVTSV("csv", v, ",", func(s string) string { - return `"` + strings.ReplaceAll(s, `"`, `""`) + `"` +var csvEscaper = strings.NewReplacer( + `"`, `""`, + "\x00", `\0`, +) + +func funcToCSV(v any) any { + return formatJoin("csv", v, ",", func(s string) string { + return `"` + csvEscaper.Replace(s) + `"` }) } @@ -632,72 +852,49 @@ var tsvEscaper = strings.NewReplacer( "\r", `\r`, "\n", `\n`, "\\", `\\`, + "\x00", `\0`, ) -func funcToTSV(v interface{}) interface{} { - return funcToCSVTSV("tsv", v, "\t", func(s string) string { - return tsvEscaper.Replace(s) - }) +func funcToTSV(v any) any { + return formatJoin("tsv", v, "\t", tsvEscaper.Replace) } -func funcToCSVTSV(typ string, v interface{}, sep string, escape func(string) string) interface{} { - switch xs := v.(type) { - case []interface{}: - ys := make([]string, len(xs)) - for i, x := range xs { - y, err := toCSVTSV(typ, x, escape) - if err != nil { - return err - } - ys[i] = y - } - return strings.Join(ys, sep) - default: - return &expectedArrayError{v} - } -} +var shEscaper = strings.NewReplacer( + "'", `'\''`, + "\x00", `\0`, +) -func toCSVTSV(typ string, v interface{}, escape func(string) string) (string, error) { - switch v := v.(type) { - case map[string]interface{}, []interface{}: - return "", &formatCsvTsvRowError{typ, v} - case string: - return escape(v), nil - default: - if s := jsonMarshal(v); s != "null" { - return s, nil - } - return "", nil +func funcToSh(v any) any { + if _, ok := v.([]any); !ok { + v = []any{v} } + return formatJoin("sh", v, " ", func(s string) string { + return "'" + shEscaper.Replace(s) + "'" + }) } -func funcToSh(v interface{}) interface{} { - var xs []interface{} - if w, ok := v.([]interface{}); ok { - xs = w - } else { - xs = []interface{}{v} +func formatJoin(typ string, v any, 
sep string, escape func(string) string) any { + vs, ok := v.([]any) + if !ok { + return &funcTypeError{"@" + typ, v} } - var s strings.Builder - for i, x := range xs { - if i > 0 { - s.WriteByte(' ') - } - switch x := x.(type) { - case map[string]interface{}, []interface{}: - return &formatShError{x} + ss := make([]string, len(vs)) + for i, v := range vs { + switch v := v.(type) { + case []any, map[string]any: + return &formatRowError{typ, v} case string: - s.WriteByte('\'') - s.WriteString(strings.ReplaceAll(x, "'", `'\''`)) - s.WriteByte('\'') + ss[i] = escape(v) default: - s.WriteString(jsonMarshal(x)) + if s := jsonMarshal(v); s != "null" || typ == "sh" { + ss[i] = s + } } } - return s.String() + return strings.Join(ss, sep) } -func funcToBase64(v interface{}) interface{} { +func funcToBase64(v any) any { switch x := funcToString(v).(type) { case string: return base64.StdEncoding.EncodeToString([]byte(x)) @@ -706,7 +903,7 @@ func funcToBase64(v interface{}) interface{} { } } -func funcToBase64d(v interface{}) interface{} { +func funcToBase64d(v any) any { switch x := funcToString(v).(type) { case string: if i := strings.IndexRune(x, base64.StdPadding); i >= 0 { @@ -722,48 +919,39 @@ func funcToBase64d(v interface{}) interface{} { } } -func funcIndex(_, v, x interface{}) interface{} { +func funcIndex2(_, v, x any) any { switch x := x.(type) { case string: switch v := v.(type) { case nil: return nil - case map[string]interface{}: + case map[string]any: return v[x] default: return &expectedObjectError{v} } case int, float64, *big.Int: - idx, _ := toInt(x) + i, _ := toInt(x) switch v := v.(type) { case nil: return nil - case []interface{}: - return funcIndexSlice(nil, nil, &idx, v) + case []any: + return index(v, i) case string: - switch v := funcIndexSlice(nil, nil, &idx, explode(v)).(type) { - case []interface{}: - return implode(v) - case int: - return implode([]interface{}{v}) - case nil: - return "" - default: - panic(v) - } + return indexString(v, i) default: return &expectedArrayError{v} } - case []interface{}: + case []any: switch v := v.(type) { case nil: return nil - case []interface{}: + case []any: return indices(v, x) default: return &expectedArrayError{v} } - case map[string]interface{}: + case map[string]any: if v == nil { return nil } @@ -777,192 +965,163 @@ func funcIndex(_, v, x interface{}) interface{} { } return funcSlice(nil, v, end, start) default: - return &objectKeyNotStringError{x} + switch v.(type) { + case []any: + return &arrayIndexNotNumberError{x} + case string: + return &stringIndexNotNumberError{x} + default: + return &objectKeyNotStringError{x} + } } } -func indices(vs, xs []interface{}) interface{} { - var rs []interface{} - if len(xs) == 0 { - return rs +func index(vs []any, i int) any { + i = clampIndex(i, -1, len(vs)) + if 0 <= i && i < len(vs) { + return vs[i] } - for i := 0; i < len(vs) && i < len(vs)-len(xs)+1; i++ { - var neq bool - for j, y := range xs { - if neq = compare(vs[i+j], y) != 0; neq { - break + return nil +} + +func indexString(s string, i int) any { + l := len([]rune(s)) + i = clampIndex(i, -1, l) + if 0 <= i && i < l { + for _, r := range s { + if i--; i < 0 { + return string(r) } } - if !neq { - rs = append(rs, i) - } } - return rs + return nil } -func funcSlice(_, v, end, start interface{}) (r interface{}) { - if w, ok := v.(string); ok { - v = explode(w) - defer func() { - switch s := r.(type) { - case []interface{}: - r = implode(s) - case int: - r = implode([]interface{}{s}) - case nil: - r = "" - case error: - default: - 
panic(r) - } - }() - } +func funcSlice(_, v, e, s any) (r any) { switch v := v.(type) { case nil: return nil - case []interface{}: - if start != nil { - if start, ok := toInt(start); ok { - if end != nil { - if end, ok := toInt(end); ok { - return funcIndexSlice(&start, &end, nil, v) - } - return &arrayIndexNotNumberError{end} - } - return funcIndexSlice(&start, nil, nil, v) - } - return &arrayIndexNotNumberError{start} - } - if end != nil { - if end, ok := toInt(end); ok { - return funcIndexSlice(nil, &end, nil, v) - } - return &arrayIndexNotNumberError{end} - } - return v + case []any: + return slice(v, e, s) + case string: + return sliceString(v, e, s) default: return &expectedArrayError{v} } } -func funcIndexSlice(start, end, index *int, a []interface{}) interface{} { - aa := a - if index != nil { - i := toIndex(aa, *index) - if i < 0 { - return nil +func slice(vs []any, e, s any) any { + var start, end int + if s != nil { + if i, ok := toInt(s); ok { + start = clampIndex(i, 0, len(vs)) + } else { + return &arrayIndexNotNumberError{s} } - return a[i] } - if end != nil { - i := toIndex(aa, *end) - if i == -1 { - i = len(a) - } else if i == -2 { - i = 0 + if e != nil { + if i, ok := toInt(e); ok { + end = clampIndex(i, start, len(vs)) + } else { + return &arrayIndexNotNumberError{e} + } + } else { + end = len(vs) + } + return vs[start:end] +} + +func sliceString(v string, e, s any) any { + var start, end int + l := len([]rune(v)) + if s != nil { + if i, ok := toInt(s); ok { + start = clampIndex(i, 0, l) + } else { + return &stringIndexNotNumberError{s} + } + } + if e != nil { + if i, ok := toInt(e); ok { + end = clampIndex(i, start, l) + } else { + return &stringIndexNotNumberError{e} + } + } else { + end = l + } + if start < l { + for i := range v { + if start--; start < 0 { + start = i + break + } } - a = a[:i] + } else { + start = len(v) } - if start != nil { - i := toIndex(aa, *start) - if i == -1 || len(a) < i { - i = len(a) - } else if i == -2 { - i = 0 + if end < l { + for i := range v { + if end--; end < 0 { + end = i + break + } } - a = a[i:] + } else { + end = len(v) } - return a + return v[start:end] } -func toIndex(a []interface{}, i int) int { - l := len(a) - switch { - case i < -l: - return -2 - case i < 0: - return l + i - case i < l: +func clampIndex(i, min, max int) int { + if i < 0 { + i += max + } + if i < min { + return min + } else if i < max { return i - default: - return -1 + } else { + return max } } -func funcIndices(v, x interface{}) interface{} { - return indexFunc(v, x, indices) -} - -func funcLindex(v, x interface{}) interface{} { - return indexFunc(v, x, func(vs, xs []interface{}) interface{} { - if len(xs) == 0 { - return nil - } - for i := 0; i < len(vs) && i < len(vs)-len(xs)+1; i++ { - var neq bool - for j, y := range xs { - if neq = compare(vs[i+j], y) != 0; neq { - break - } - } - if !neq { - return i - } - } - return nil - }) -} - -func funcRindex(v, x interface{}) interface{} { - return indexFunc(v, x, func(vs, xs []interface{}) interface{} { - if len(xs) == 0 { - return nil - } - i := len(vs) - 1 - if j := len(vs) - len(xs); j < i { - i = j +func funcFlatten(v any, args []any) any { + vs, ok := values(v) + if !ok { + return &funcTypeError{"flatten", v} + } + var depth float64 + if len(args) == 0 { + depth = -1 + } else { + depth, ok = toFloat(args[0]) + if !ok { + return &funcTypeError{"flatten", args[0]} } - for ; i >= 0; i-- { - var neq bool - for j, y := range xs { - if neq = compare(vs[i+j], y) != 0; neq { - break - } - } - if !neq { - return i 
- } + if depth < 0 { + return &flattenDepthError{depth} } - return nil - }) + } + return flatten(nil, vs, depth) } -func indexFunc(v, x interface{}, f func(_, _ []interface{}) interface{}) interface{} { - switch v := v.(type) { - case nil: - return nil - case []interface{}: - switch x := x.(type) { - case []interface{}: - return f(v, x) - default: - return f(v, []interface{}{x}) - } - case string: - if x, ok := x.(string); ok { - return f(explode(v), explode(x)) +func flatten(xs, vs []any, depth float64) []any { + for _, v := range vs { + if vs, ok := v.([]any); ok && depth != 0 { + xs = flatten(xs, vs, depth-1) + } else { + xs = append(xs, v) } - return &expectedStringError{x} - default: - return &expectedArrayError{v} } + return xs } type rangeIter struct { - value, end, step interface{} + value, end, step any } -func (iter *rangeIter) Next() (interface{}, bool) { +func (iter *rangeIter) Next() (any, bool) { if compare(iter.step, 0)*compare(iter.value, iter.end) >= 0 { return nil, false } @@ -971,7 +1130,7 @@ func (iter *rangeIter) Next() (interface{}, bool) { return v, true } -func funcRange(_ interface{}, xs []interface{}) interface{} { +func funcRange(_ any, xs []any) any { for _, x := range xs { switch x.(type) { case int, float64, *big.Int: @@ -982,43 +1141,59 @@ func funcRange(_ interface{}, xs []interface{}) interface{} { return &rangeIter{xs[0], xs[1], xs[2]} } -func funcMinBy(v, x interface{}) interface{} { - vs, ok := v.([]interface{}) +func funcMin(v any) any { + vs, ok := v.([]any) if !ok { - return &expectedArrayError{v} + return &funcTypeError{"min", v} } - xs, ok := x.([]interface{}) + return minMaxBy(vs, vs, true) +} + +func funcMinBy(v, x any) any { + vs, ok := v.([]any) + if !ok { + return &funcTypeError{"min_by", v} + } + xs, ok := x.([]any) if !ok { - return &expectedArrayError{x} + return &funcTypeError{"min_by", x} } if len(vs) != len(xs) { return &lengthMismatchError{"min_by", vs, xs} } - return funcMinMaxBy(vs, xs, true) + return minMaxBy(vs, xs, true) } -func funcMaxBy(v, x interface{}) interface{} { - vs, ok := v.([]interface{}) +func funcMax(v any) any { + vs, ok := v.([]any) if !ok { - return &expectedArrayError{v} + return &funcTypeError{"max", v} + } + return minMaxBy(vs, vs, false) +} + +func funcMaxBy(v, x any) any { + vs, ok := v.([]any) + if !ok { + return &funcTypeError{"max_by", v} } - xs, ok := x.([]interface{}) + xs, ok := x.([]any) if !ok { - return &expectedArrayError{x} + return &funcTypeError{"max_by", x} } if len(vs) != len(xs) { return &lengthMismatchError{"max_by", vs, xs} } - return funcMinMaxBy(vs, xs, false) + return minMaxBy(vs, xs, false) } -func funcMinMaxBy(vs, xs []interface{}, isMin bool) interface{} { +func minMaxBy(vs, xs []any, isMin bool) any { if len(vs) == 0 { return nil } i, j, x := 0, 0, xs[0] for i++; i < len(xs); i++ { - if (compare(x, xs[i]) > 0) == isMin { + if compare(x, xs[i]) > 0 == isMin { j, x = i, xs[i] } } @@ -1026,45 +1201,83 @@ func funcMinMaxBy(vs, xs []interface{}, isMin bool) interface{} { } type sortItem struct { - value, key interface{} + value, key any +} + +func sortItems(name string, v, x any) ([]*sortItem, error) { + vs, ok := v.([]any) + if !ok { + return nil, &funcTypeError{name, v} + } + xs, ok := x.([]any) + if !ok { + return nil, &funcTypeError{name, x} + } + if len(vs) != len(xs) { + return nil, &lengthMismatchError{name, vs, xs} + } + items := make([]*sortItem, len(vs)) + for i, v := range vs { + items[i] = &sortItem{v, xs[i]} + } + sort.SliceStable(items, func(i, j int) bool { + return 
compare(items[i].key, items[j].key) < 0 + }) + return items, nil +} + +func funcSort(v any) any { + return sortBy("sort", v, v) } -func funcSortBy(v, x interface{}) interface{} { - items, err := sortItems("sort_by", v, x) +func funcSortBy(v, x any) any { + return sortBy("sort_by", v, x) +} + +func sortBy(name string, v, x any) any { + items, err := sortItems(name, v, x) if err != nil { return err } - rs := make([]interface{}, len(items)) + rs := make([]any, len(items)) for i, x := range items { rs[i] = x.value } return rs } -func funcGroupBy(v, x interface{}) interface{} { +func funcGroupBy(v, x any) any { items, err := sortItems("group_by", v, x) if err != nil { return err } - var rs []interface{} - var last interface{} + var rs []any + var last any for i, r := range items { if i == 0 || compare(last, r.key) != 0 { - rs, last = append(rs, []interface{}{r.value}), r.key + rs, last = append(rs, []any{r.value}), r.key } else { - rs[len(rs)-1] = append(rs[len(rs)-1].([]interface{}), r.value) + rs[len(rs)-1] = append(rs[len(rs)-1].([]any), r.value) } } return rs } -func funcUniqueBy(v, x interface{}) interface{} { - items, err := sortItems("unique_by", v, x) +func funcUnique(v any) any { + return uniqueBy("unique", v, v) +} + +func funcUniqueBy(v, x any) any { + return uniqueBy("unique_by", v, x) +} + +func uniqueBy(name string, v, x any) any { + items, err := sortItems(name, v, x) if err != nil { return err } - var rs []interface{} - var last interface{} + var rs []any + var last any for i, r := range items { if i == 0 || compare(last, r.key) != 0 { rs, last = append(rs, r.value), r.key @@ -1073,10 +1286,10 @@ func funcUniqueBy(v, x interface{}) interface{} { return rs } -func funcJoin(v, x interface{}) interface{} { - vs, ok := v.([]interface{}) +func funcJoin(v, x any) any { + vs, ok := values(v) if !ok { - return &expectedArrayError{v} + return &funcTypeError{"join", v} } if len(vs) == 0 { return "" @@ -1086,48 +1299,26 @@ func funcJoin(v, x interface{}) interface{} { return &funcTypeError{"join", x} } ss := make([]string, len(vs)) - for i, e := range vs { - switch e := e.(type) { + for i, v := range vs { + switch v := v.(type) { case nil: case string: - ss[i] = e + ss[i] = v case bool: - if e { + if v { ss[i] = "true" } else { ss[i] = "false" } case int, float64, *big.Int: - ss[i] = jsonMarshal(e) + ss[i] = jsonMarshal(v) default: - return &unaryTypeError{"join", e} + return &joinTypeError{v} } } return strings.Join(ss, sep) } -func sortItems(name string, v, x interface{}) ([]*sortItem, error) { - vs, ok := v.([]interface{}) - if !ok { - return nil, &expectedArrayError{v} - } - xs, ok := x.([]interface{}) - if !ok { - return nil, &expectedArrayError{x} - } - if len(vs) != len(xs) { - return nil, &lengthMismatchError{name, vs, xs} - } - items := make([]*sortItem, len(vs)) - for i, v := range vs { - items[i] = &sortItem{v, xs[i]} - } - sort.SliceStable(items, func(i, j int) bool { - return compare(items[i].key, items[j].key) < 0 - }) - return items, nil -} - func funcSignificand(v float64) float64 { if math.IsNaN(v) || math.IsInf(v, 0) || v == 0.0 { return v @@ -1139,22 +1330,22 @@ func funcExp10(v float64) float64 { return math.Pow(10, v) } -func funcFrexp(v interface{}) interface{} { +func funcFrexp(v any) any { x, ok := toFloat(v) if !ok { return &funcTypeError{"frexp", v} } f, e := math.Frexp(x) - return []interface{}{f, e} + return []any{f, e} } -func funcModf(v interface{}) interface{} { +func funcModf(v any) any { x, ok := toFloat(v) if !ok { return &funcTypeError{"modf", v} } i, f 
:= math.Modf(x) - return []interface{}{f, i} + return []any{f, i} } func funcLgamma(v float64) float64 { @@ -1190,25 +1381,25 @@ func funcYn(l, r float64) float64 { return math.Yn(int(l), r) } -func funcInfinite(interface{}) interface{} { +func funcInfinite(any) any { return math.Inf(1) } -func funcIsfinite(v interface{}) interface{} { +func funcIsfinite(v any) any { x, ok := toFloat(v) return ok && !math.IsInf(x, 0) } -func funcIsinfinite(v interface{}) interface{} { +func funcIsinfinite(v any) any { x, ok := toFloat(v) return ok && math.IsInf(x, 0) } -func funcNan(interface{}) interface{} { +func funcNan(any) any { return math.NaN() } -func funcIsnan(v interface{}) interface{} { +func funcIsnan(v any) any { x, ok := toFloat(v) if !ok { if v == nil { @@ -1219,18 +1410,65 @@ func funcIsnan(v interface{}) interface{} { return math.IsNaN(x) } -func funcIsnormal(v interface{}) interface{} { - x, ok := toFloat(v) - return ok && !math.IsNaN(x) && !math.IsInf(x, 0) && x != 0.0 +func funcIsnormal(v any) any { + if v, ok := toFloat(v); ok { + e := math.Float64bits(v) & 0x7ff0000000000000 >> 52 + return 0 < e && e < 0x7ff + } + return false +} + +// An `allocator` creates new maps and slices, stores the allocated addresses. +// This allocator is used to reduce allocations on assignment operator (`=`), +// update-assignment operator (`|=`), and the `map_values`, `del`, `delpaths` +// functions. +type allocator map[uintptr]struct{} + +func funcAllocator(any, []any) any { + return allocator{} +} + +func (a allocator) allocated(v any) bool { + _, ok := a[reflect.ValueOf(v).Pointer()] + return ok +} + +func (a allocator) makeObject(l int) map[string]any { + v := make(map[string]any, l) + if a != nil { + a[reflect.ValueOf(v).Pointer()] = struct{}{} + } + return v +} + +func (a allocator) makeArray(l, c int) []any { + if c < l { + c = l + } + v := make([]any, l, c) + if a != nil { + a[reflect.ValueOf(v).Pointer()] = struct{}{} + } + return v +} + +func funcSetpath(v, p, n any) any { + // There is no need to use an allocator on a single update. + return setpath(v, p, n, nil) +} + +// Used in compiler#compileAssign and compiler#compileModify. +func funcSetpathWithAllocator(v any, args []any) any { + return setpath(v, args[0], args[1], args[2].(allocator)) } -func funcSetpath(v, p, w interface{}) interface{} { - path, ok := p.([]interface{}) +func setpath(v, p, n any, a allocator) any { + path, ok := p.([]any) if !ok { return &funcTypeError{"setpath", p} } var err error - if v, err = updatePaths(v, path, w, false); err != nil { + if v, err = update(v, path, n, a); err != nil { if err, ok := err.(*funcTypeError); ok { err.name = "setpath" } @@ -1239,188 +1477,74 @@ func funcSetpath(v, p, w interface{}) interface{} { return v } -func funcDelpaths(v, p interface{}) interface{} { - paths, ok := p.([]interface{}) +func funcDelpaths(v, p any) any { + return delpaths(v, p, allocator{}) +} + +// Used in compiler#compileAssign and compiler#compileModify. +func funcDelpathsWithAllocator(v any, args []any) any { + return delpaths(v, args[0], args[1].(allocator)) +} + +func delpaths(v, p any, a allocator) any { + paths, ok := p.([]any) if !ok { return &funcTypeError{"delpaths", p} } + if len(paths) == 0 { + return v + } // Fills the paths with an empty value and then delete them. We cannot delete // in each loop because array indices should not change. For example, // jq -n "[0, 1, 2, 3] | delpaths([[1], [2]])" #=> [0, 3]. 
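That comment is the heart of the new delpaths: every targeted position is first overwritten with an empty sentinel, and the structure is compacted once at the end, so deleting index 1 can never shift what index 2 refers to. A standalone sketch of the same fill-then-compact strategy on a flat slice (deleteIndices is an illustrative helper, not the vendored code):

package sketch

// deleteIndices removes the listed indices from xs by first marking each
// position with a sentinel and then dropping the sentinels in a single pass,
// so earlier deletions never shift later indices.
// deleteIndices([]any{0, 1, 2, 3}, []int{1, 2}) yields [0 3], like the jq
// example in the comment above.
func deleteIndices(xs []any, indices []int) []any {
	type sentinel struct{}
	for _, i := range indices {
		if i >= 0 && i < len(xs) {
			xs[i] = sentinel{}
		}
	}
	out := xs[:0] // compact in place over the same backing array
	for _, x := range xs {
		if _, marked := x.(sentinel); !marked {
			out = append(out, x)
		}
	}
	return out
}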
var empty struct{} var err error for _, p := range paths { - path, ok := p.([]interface{}) + path, ok := p.([]any) if !ok { return &funcTypeError{"delpaths", p} } - if v, err = updatePaths(v, path, empty, true); err != nil { + if v, err = update(v, path, empty, a); err != nil { return err } } return deleteEmpty(v) } -func updatePaths(v interface{}, path []interface{}, w interface{}, delpaths bool) (interface{}, error) { +func update(v any, path []any, n any, a allocator) (any, error) { if len(path) == 0 { - return w, nil + return n, nil } - switch x := path[0].(type) { + switch p := path[0].(type) { case string: - if v == nil { - if delpaths { - return v, nil - } - v = make(map[string]interface{}) - } - switch uu := v.(type) { - case map[string]interface{}: - if _, ok := uu[x]; !ok && delpaths { - return v, nil - } - u, err := updatePaths(uu[x], path[1:], w, delpaths) - if err != nil { - return nil, err - } - vs := make(map[string]interface{}, len(uu)) - for k, v := range uu { - vs[k] = v - } - vs[x] = u - return vs, nil + switch v := v.(type) { + case nil: + return updateObject(nil, p, path[1:], n, a) + case map[string]any: + return updateObject(v, p, path[1:], n, a) case struct{}: return v, nil default: return nil, &expectedObjectError{v} } case int, float64, *big.Int: - if v == nil { - if delpaths { - return v, nil - } - v = []interface{}{} - } - switch uu := v.(type) { - case []interface{}: - y, _ := toInt(x) - l := len(uu) - var copied bool - if copied = y >= l; copied { - if delpaths { - return v, nil - } - if y > 0x3ffffff { - return nil, &arrayIndexTooLargeError{y} - } - l = y + 1 - ys := make([]interface{}, l) - copy(ys, uu) - uu = ys - } else if y < -l { - if delpaths { - return v, nil - } - return nil, &funcTypeError{v: y} - } else if y < 0 { - y += l - } - u, err := updatePaths(uu[y], path[1:], w, delpaths) - if err != nil { - return nil, err - } - if copied { - uu[y] = u - return uu, nil - } - vs := make([]interface{}, l) - copy(vs, uu) - vs[y] = u - return vs, nil + i, _ := toInt(p) + switch v := v.(type) { + case nil: + return updateArrayIndex(nil, i, path[1:], n, a) + case []any: + return updateArrayIndex(v, i, path[1:], n, a) case struct{}: return v, nil default: return nil, &expectedArrayError{v} } - case map[string]interface{}: - if len(x) == 0 { - switch v.(type) { - case []interface{}: - return nil, &arrayIndexNotNumberError{x} - default: - return nil, &objectKeyNotStringError{x} - } - } - if v == nil { - v = []interface{}{} - } - switch uu := v.(type) { - case []interface{}: - var start, end int - if x, ok := toInt(x["start"]); ok { - x := toIndex(uu, x) - if x > len(uu) || x == -1 { - start = len(uu) - } else if x == -2 { - start = 0 - } else { - start = x - } - } - if x, ok := toInt(x["end"]); ok { - x := toIndex(uu, x) - if x == -1 { - end = len(uu) - } else if x < start { - end = start - } else { - end = x - } - } else { - end = len(uu) - } - if delpaths { - if start >= end { - return uu, nil - } - if len(path) > 1 { - u, err := updatePaths(uu[start:end], path[1:], w, delpaths) - if err != nil { - return nil, err - } - switch us := u.(type) { - case []interface{}: - vs := make([]interface{}, len(uu)) - copy(vs, uu) - copy(vs[start:end], us) - return vs, nil - default: - return nil, &expectedArrayError{u} - } - } - vs := make([]interface{}, len(uu)) - copy(vs, uu) - for y := start; y < end; y++ { - vs[y] = w - } - return vs, nil - } - if len(path) > 1 { - u, err := updatePaths(uu[start:end], path[1:], w, delpaths) - if err != nil { - return nil, err - } - w = u - } 
- switch v := w.(type) { - case []interface{}: - vs := make([]interface{}, start+len(v)+len(uu)-end) - copy(vs, uu[:start]) - copy(vs[start:], v) - copy(vs[start+len(v):], uu[end:]) - return vs, nil - default: - return nil, &expectedArrayError{v} - } + case map[string]any: + switch v := v.(type) { + case nil: + return updateArraySlice(nil, p, path[1:], n, a) + case []any: + return updateArraySlice(v, p, path[1:], n, a) case struct{}: return v, nil default: @@ -1428,47 +1552,192 @@ func updatePaths(v interface{}, path []interface{}, w interface{}, delpaths bool } default: switch v.(type) { - case []interface{}: - return nil, &arrayIndexNotNumberError{x} + case []any: + return nil, &arrayIndexNotNumberError{p} default: - return nil, &objectKeyNotStringError{x} + return nil, &objectKeyNotStringError{p} + } + } +} + +func updateObject(v map[string]any, k string, path []any, n any, a allocator) (any, error) { + x, ok := v[k] + if !ok && n == struct{}{} { + return v, nil + } + u, err := update(x, path, n, a) + if err != nil { + return nil, err + } + if a.allocated(v) { + v[k] = u + return v, nil + } + w := a.makeObject(len(v) + 1) + for k, v := range v { + w[k] = v + } + w[k] = u + return w, nil +} + +func updateArrayIndex(v []any, i int, path []any, n any, a allocator) (any, error) { + var x any + if j := clampIndex(i, -1, len(v)); j < 0 { + if n == struct{}{} { + return v, nil + } + return nil, &funcTypeError{v: i} + } else if j < len(v) { + i = j + x = v[i] + } else { + if n == struct{}{} { + return v, nil } + if i >= 0x8000000 { + return nil, &arrayIndexTooLargeError{i} + } + } + u, err := update(x, path, n, a) + if err != nil { + return nil, err + } + l, c := len(v), cap(v) + if a.allocated(v) { + if i < c { + if i >= l { + v = v[:i+1] + } + v[i] = u + return v, nil + } + c *= 2 + } + if i >= l { + l = i + 1 + } + w := a.makeArray(l, c) + copy(w, v) + w[i] = u + return w, nil +} + +func updateArraySlice(v []any, m map[string]any, path []any, n any, a allocator) (any, error) { + s, ok := m["start"] + if !ok { + return nil, &expectedStartEndError{m} + } + e, ok := m["end"] + if !ok { + return nil, &expectedStartEndError{m} + } + var start, end int + if i, ok := toInt(s); ok { + start = clampIndex(i, 0, len(v)) + } + if i, ok := toInt(e); ok { + end = clampIndex(i, start, len(v)) + } else { + end = len(v) + } + if start == end && n == struct{}{} { + return v, nil + } + u, err := update(v[start:end], path, n, a) + if err != nil { + return nil, err + } + switch u := u.(type) { + case []any: + var w []any + if len(u) == end-start && a.allocated(v) { + w = v + } else { + w = a.makeArray(len(v)-(end-start)+len(u), 0) + copy(w, v[:start]) + copy(w[start+len(u):], v[end:]) + } + copy(w[start:], u) + return w, nil + case struct{}: + var w []any + if a.allocated(v) { + w = v + } else { + w = a.makeArray(len(v), 0) + copy(w, v) + } + for i := start; i < end; i++ { + w[i] = u + } + return w, nil + default: + return nil, &expectedArrayError{u} + } +} + +func deleteEmpty(v any) any { + switch v := v.(type) { + case struct{}: + return nil + case map[string]any: + for k, w := range v { + if w == struct{}{} { + delete(v, k) + } else { + v[k] = deleteEmpty(w) + } + } + return v + case []any: + var j int + for _, w := range v { + if w != struct{}{} { + v[j] = deleteEmpty(w) + j++ + } + } + for i := j; i < len(v); i++ { + v[i] = nil + } + return v[:j] + default: + return v } } -func funcGetpath(v, p interface{}) interface{} { - keys, ok := p.([]interface{}) +func funcGetpath(v, p any) any { + keys, ok := 
p.([]any) if !ok { return &funcTypeError{"getpath", p} } u := v for _, x := range keys { switch v.(type) { - case map[string]interface{}: - case []interface{}: - case nil: + case nil, []any, map[string]any: + v = funcIndex2(nil, v, x) + if _, ok := v.(error); ok { + return &getpathError{u, p} + } default: return &getpathError{u, p} } - v = funcIndex(nil, v, x) - if _, ok := v.(error); ok { - return &getpathError{u, p} - } } return v } -func funcTranspose(v interface{}) interface{} { - vss, ok := v.([]interface{}) +func funcTranspose(v any) any { + vss, ok := v.([]any) if !ok { return &funcTypeError{"transpose", v} } if len(vss) == 0 { - return []interface{}{} + return []any{} } var l int for _, vs := range vss { - vs, ok := vs.([]interface{}) + vs, ok := vs.([]any) if !ok { return &funcTypeError{"transpose", v} } @@ -1476,23 +1745,23 @@ func funcTranspose(v interface{}) interface{} { l = k } } - wss := make([][]interface{}, l) - xs := make([]interface{}, l) + wss := make([][]any, l) + xs := make([]any, l) for i, k := 0, len(vss); i < l; i++ { - s := make([]interface{}, k) + s := make([]any, k) wss[i] = s xs[i] = s } for i, vs := range vss { - for j, v := range vs.([]interface{}) { + for j, v := range vs.([]any) { wss[j][i] = v } } return xs } -func funcBsearch(v, t interface{}) interface{} { - vs, ok := v.([]interface{}) +func funcBsearch(v, t any) any { + vs, ok := v.([]any) if !ok { return &funcTypeError{"bsearch", v} } @@ -1505,23 +1774,23 @@ func funcBsearch(v, t interface{}) interface{} { return -i - 1 } -func funcGmtime(v interface{}) interface{} { +func funcGmtime(v any) any { if v, ok := toFloat(v); ok { return epochToArray(v, time.UTC) } return &funcTypeError{"gmtime", v} } -func funcLocaltime(v interface{}) interface{} { +func funcLocaltime(v any) any { if v, ok := toFloat(v); ok { return epochToArray(v, time.Local) } return &funcTypeError{"localtime", v} } -func epochToArray(v float64, loc *time.Location) []interface{} { +func epochToArray(v float64, loc *time.Location) []any { t := time.Unix(int64(v), int64((v-math.Floor(v))*1e9)).In(loc) - return []interface{}{ + return []any{ t.Year(), int(t.Month()) - 1, t.Day(), @@ -1533,22 +1802,26 @@ func epochToArray(v float64, loc *time.Location) []interface{} { } } -func funcMktime(v interface{}) interface{} { - if a, ok := v.([]interface{}); ok { +func funcMktime(v any) any { + if a, ok := v.([]any); ok { t, err := arrayToTime("mktime", a, time.UTC) if err != nil { return err } - return float64(t.Unix()) + return timeToEpoch(t) } return &funcTypeError{"mktime", v} } -func funcStrftime(v, x interface{}) interface{} { +func timeToEpoch(t time.Time) float64 { + return float64(t.Unix()) + float64(t.Nanosecond())/1e9 +} + +func funcStrftime(v, x any) any { if w, ok := toFloat(v); ok { v = epochToArray(w, time.UTC) } - if a, ok := v.([]interface{}); ok { + if a, ok := v.([]any); ok { if format, ok := x.(string); ok { t, err := arrayToTime("strftime", a, time.UTC) if err != nil { @@ -1561,11 +1834,11 @@ func funcStrftime(v, x interface{}) interface{} { return &funcTypeError{"strftime", v} } -func funcStrflocaltime(v, x interface{}) interface{} { +func funcStrflocaltime(v, x any) any { if w, ok := toFloat(v); ok { v = epochToArray(w, time.Local) } - if a, ok := v.([]interface{}); ok { + if a, ok := v.([]any); ok { if format, ok := x.(string); ok { t, err := arrayToTime("strflocaltime", a, time.Local) if err != nil { @@ -1578,7 +1851,7 @@ func funcStrflocaltime(v, x interface{}) interface{} { return &funcTypeError{"strflocaltime", v} } -func 
funcStrptime(v, x interface{}) interface{} { +func funcStrptime(v, x any) any { if v, ok := v.(string); ok { if format, ok := x.(string); ok { t, err := timefmt.Parse(v, format) @@ -1589,14 +1862,14 @@ func funcStrptime(v, x interface{}) interface{} { if t == s { return &funcTypeError{"strptime", v} } - return epochToArray(float64(t.Unix())+float64(t.Nanosecond())/1e9, time.UTC) + return epochToArray(timeToEpoch(t), time.UTC) } return &funcTypeError{"strptime", x} } return &funcTypeError{"strptime", v} } -func arrayToTime(name string, a []interface{}, loc *time.Location) (time.Time, error) { +func arrayToTime(name string, a []any, loc *time.Location) (time.Time, error) { var t time.Time if len(a) != 8 { return t, &funcTypeError{name, a} @@ -1636,12 +1909,11 @@ func arrayToTime(name string, a []interface{}, loc *time.Location) (time.Time, e return time.Date(y, time.Month(m), d, h, min, sec, nsec, loc), nil } -func funcNow(interface{}) interface{} { - t := time.Now() - return float64(t.Unix()) + float64(t.Nanosecond())/1e9 +func funcNow(any) any { + return timeToEpoch(time.Now()) } -func funcMatch(v, re, fs, testing interface{}) interface{} { +func funcMatch(v, re, fs, testing any) any { var flags string if fs != nil { v, ok := fs.(string) @@ -1674,16 +1946,16 @@ func funcMatch(v, re, fs, testing interface{}) interface{} { xs = [][]int{got} } } - res, names := make([]interface{}, len(xs)), r.SubexpNames() + res, names := make([]any, len(xs)), r.SubexpNames() for i, x := range xs { - captures := make([]interface{}, (len(x)-2)/2) + captures := make([]any, (len(x)-2)/2) for j := 1; j < len(x)/2; j++ { - var name interface{} + var name any if n := names[j]; n != "" { name = n } if x[j*2] < 0 { - captures[j-1] = map[string]interface{}{ + captures[j-1] = map[string]any{ "name": name, "offset": -1, "length": 0, @@ -1691,14 +1963,14 @@ func funcMatch(v, re, fs, testing interface{}) interface{} { } continue } - captures[j-1] = map[string]interface{}{ + captures[j-1] = map[string]any{ "name": name, "offset": len([]rune(s[:x[j*2]])), "length": len([]rune(s[:x[j*2+1]])) - len([]rune(s[:x[j*2]])), "string": s[x[j*2]:x[j*2+1]], } } - res[i] = map[string]interface{}{ + res[i] = map[string]any{ "offset": len([]rune(s[:x[0]])), "length": len([]rune(s[:x[1]])) - len([]rune(s[:x[0]])), "string": s[x[0]:x[1]], @@ -1728,7 +2000,28 @@ func compileRegexp(re, flags string) (*regexp.Regexp, error) { return r, nil } -func funcError(v interface{}, args []interface{}) interface{} { +func funcCapture(v any) any { + vs, ok := v.(map[string]any) + if !ok { + return &expectedObjectError{v} + } + v = vs["captures"] + captures, ok := v.([]any) + if !ok { + return &expectedArrayError{v} + } + w := make(map[string]any, len(captures)) + for _, capture := range captures { + if capture, ok := capture.(map[string]any); ok { + if name, ok := capture["name"].(string); ok { + w[name] = capture["string"] + } + } + } + return w +} + +func funcError(v any, args []any) any { if len(args) > 0 { v = args[0] } @@ -1739,11 +2032,11 @@ func funcError(v interface{}, args []interface{}) interface{} { return &exitCodeError{v, code, false} } -func funcHalt(interface{}) interface{} { +func funcHalt(any) any { return &exitCodeError{nil, 0, true} } -func funcHaltError(v interface{}, args []interface{}) interface{} { +func funcHaltError(v any, args []any) any { code := 5 if len(args) > 0 { var ok bool @@ -1754,14 +2047,7 @@ func funcHaltError(v interface{}, args []interface{}) interface{} { return &exitCodeError{v, code, true} } -func 
internalfuncTypeError(v, x interface{}) interface{} { - if x, ok := x.(string); ok { - return &funcTypeError{x, v} - } - return &funcTypeError{"_type_error", v} -} - -func toInt(x interface{}) (int, bool) { +func toInt(x any) (int, bool) { switch x := x.(type) { case int: return x, true @@ -1769,30 +2055,30 @@ func toInt(x interface{}) (int, bool) { return floatToInt(x), true case *big.Int: if x.IsInt64() { - if i := x.Int64(); minInt <= i && i <= maxInt { + if i := x.Int64(); math.MinInt <= i && i <= math.MaxInt { return int(i), true } } if x.Sign() > 0 { - return maxInt, true + return math.MaxInt, true } - return minInt, true + return math.MinInt, true default: return 0, false } } func floatToInt(x float64) int { - if minInt <= x && x <= maxInt { + if math.MinInt <= x && x <= math.MaxInt { return int(x) } if x > 0 { - return maxInt + return math.MaxInt } - return minInt + return math.MinInt } -func toFloat(x interface{}) (float64, bool) { +func toFloat(x any) (float64, bool) { switch x := x.(type) { case int: return float64(x), true diff --git a/vendor/github.com/itchyny/gojq/go.dev.mod b/vendor/github.com/itchyny/gojq/go.dev.mod index 46e652e2..9a0579ca 100644 --- a/vendor/github.com/itchyny/gojq/go.dev.mod +++ b/vendor/github.com/itchyny/gojq/go.dev.mod @@ -1,8 +1,8 @@ module github.com/itchyny/gojq -go 1.16 +go 1.18 require ( github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972 // indirect - github.com/itchyny/timefmt-go v0.1.3 // indirect + github.com/itchyny/timefmt-go v0.1.5 // indirect ) diff --git a/vendor/github.com/itchyny/gojq/go.dev.sum b/vendor/github.com/itchyny/gojq/go.dev.sum index 467aa0b9..66aee6c5 100644 --- a/vendor/github.com/itchyny/gojq/go.dev.sum +++ b/vendor/github.com/itchyny/gojq/go.dev.sum @@ -1,4 +1,4 @@ github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972 h1:XYWolmPDLTY9B1O5o/Ad811/mtVkaHWMiZdbPLm/nDA= github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972/go.mod h1:jTXcxGeQMJfFN3wWjtzb4aAaWDDN+QbezE0HjH1XfNk= -github.com/itchyny/timefmt-go v0.1.3 h1:7M3LGVDsqcd0VZH2U+x393obrzZisp7C0uEe921iRkU= -github.com/itchyny/timefmt-go v0.1.3/go.mod h1:0osSSCQSASBJMsIZnhAaF1C2fCBTJZXrnj37mG8/c+A= +github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE= +github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8= diff --git a/vendor/github.com/itchyny/gojq/gojq.go b/vendor/github.com/itchyny/gojq/gojq.go index 8f53b356..e078c809 100644 --- a/vendor/github.com/itchyny/gojq/gojq.go +++ b/vendor/github.com/itchyny/gojq/gojq.go @@ -1,5 +1,5 @@ -// Package gojq provides the parser and interpreter of gojq. +// Package gojq provides the parser and the interpreter of gojq. +// Please refer to [Usage as a library] for introduction. // -// Please refer to https://github.com/itchyny/gojq#usage-as-a-library for -// introduction of the usage as a library. +// [Usage as a library]: https://github.com/itchyny/gojq#usage-as-a-library package gojq diff --git a/vendor/github.com/itchyny/gojq/iter.go b/vendor/github.com/itchyny/gojq/iter.go index 0cee25ba..d0bed960 100644 --- a/vendor/github.com/itchyny/gojq/iter.go +++ b/vendor/github.com/itchyny/gojq/iter.go @@ -2,11 +2,11 @@ package gojq // Iter is an interface for an iterator. type Iter interface { - Next() (interface{}, bool) + Next() (any, bool) } -// NewIter creates a new Iter from values. -func NewIter(values ...interface{}) Iter { +// NewIter creates a new [Iter] from values. 
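The Iter interface changes only in spelling here (interface{} becomes any); consuming it is unchanged. For reference, the usual loop when running a query — values come back one at a time from Next, and query errors are yielded as values implementing error. A usage sketch against the documented gojq API (the query and input are made up for illustration):

package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	query, err := gojq.Parse(".users[] | .name")
	if err != nil {
		log.Fatal(err)
	}
	input := map[string]any{"users": []any{
		map[string]any{"name": "alice"},
		map[string]any{"name": "bob"},
	}}
	iter := query.Run(input) // returns a gojq.Iter
	for {
		v, ok := iter.Next()
		if !ok {
			break // iterator exhausted
		}
		if err, isErr := v.(error); isErr {
			log.Fatal(err) // errors are emitted as values, not panics
		}
		fmt.Println(v) // alice, then bob
	}
}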
+func NewIter(values ...any) Iter { switch len(values) { case 0: return emptyIter{} @@ -20,16 +20,16 @@ func NewIter(values ...interface{}) Iter { type emptyIter struct{} -func (emptyIter) Next() (interface{}, bool) { +func (emptyIter) Next() (any, bool) { return nil, false } type unitIter struct { - value interface{} + value any done bool } -func (iter *unitIter) Next() (interface{}, bool) { +func (iter *unitIter) Next() (any, bool) { if iter.done { return nil, false } @@ -37,9 +37,9 @@ func (iter *unitIter) Next() (interface{}, bool) { return iter.value, true } -type sliceIter []interface{} +type sliceIter []any -func (iter *sliceIter) Next() (interface{}, bool) { +func (iter *sliceIter) Next() (any, bool) { if len(*iter) == 0 { return nil, false } diff --git a/vendor/github.com/itchyny/gojq/lexer.go b/vendor/github.com/itchyny/gojq/lexer.go index d36a6838..82bb2b6b 100644 --- a/vendor/github.com/itchyny/gojq/lexer.go +++ b/vendor/github.com/itchyny/gojq/lexer.go @@ -1,8 +1,7 @@ package gojq import ( - "strconv" - "strings" + "encoding/json" "unicode/utf8" ) @@ -235,9 +234,9 @@ func (l *lexer) Lex(lval *yySymType) (tokenType int) { return tok default: if ch >= utf8.RuneSelf { - r, _ := utf8.DecodeRuneInString(l.source[l.offset-1:]) + r, size := utf8.DecodeRuneInString(l.source[l.offset-1:]) + l.offset += size l.token = string(r) - l.offset += len(l.token) } } return int(ch) @@ -381,82 +380,129 @@ func (l *lexer) validNumber() bool { } func (l *lexer) scanString(start int) (int, string) { - var quote, newline bool + var decode bool + var controls int unquote := func(src string, quote bool) (string, error) { - if quote { - src = "\"" + src + "\"" + if !decode { + if quote { + return src, nil + } + return src[1 : len(src)-1], nil } - if newline { - src = strings.ReplaceAll(src, "\n", "\\n") + var buf []byte + if !quote && controls == 0 { + buf = []byte(src) + } else { + buf = quoteAndEscape(src, quote, controls) } - return strconv.Unquote(src) + if err := json.Unmarshal(buf, &src); err != nil { + return "", err + } + return src, nil } - for i, m := l.offset, len(l.source); i < m; i++ { + for i := l.offset; i < len(l.source); i++ { ch := l.source[i] switch ch { case '\\': - quote = !quote - case '\n': - newline = true - case '"': - if !quote { - if !l.inString { - l.offset = i + 1 - l.token = l.source[start:l.offset] - str, err := unquote(l.token, false) - if err != nil { - return tokInvalid, "" + if i++; i >= len(l.source) { + break + } + switch l.source[i] { + case 'u': + for j := 1; j <= 4; j++ { + if i+j >= len(l.source) || !isHex(l.source[i+j]) { + l.offset = i + j + l.token = l.source[i-1 : l.offset] + return tokInvalidEscapeSequence, "" } - return tokString, str } - if i > l.offset { - l.offset = i - l.token = l.source[start:l.offset] - str, err := unquote(l.token, true) - if err != nil { - return tokInvalid, "" - } - return tokString, str + i += 4 + fallthrough + case '"', '/', '\\', 'b', 'f', 'n', 'r', 't': + decode = true + case '(': + if !l.inString { + l.inString = true + return tokStringStart, "" } - l.inString = false - l.offset = i + 1 - return tokStringEnd, "" - } - quote = false - case '(': - if quote { - if l.inString { - if i > l.offset+1 { - l.offset = i - 1 - l.token = l.source[start:l.offset] - str, err := unquote(l.token, true) - if err != nil { - return tokInvalid, "" - } - return tokString, str - } - l.offset = i + 1 + if i == l.offset+1 { + l.offset += 2 l.inString = false return tokStringQuery, "" } - l.inString = true - return tokStringStart, "" + l.offset = i - 1 
+ l.token = l.source[start:l.offset] + str, err := unquote(l.token, true) + if err != nil { + return tokInvalid, "" + } + return tokString, str + default: + l.offset = i + 1 + l.token = l.source[l.offset-2 : l.offset] + return tokInvalidEscapeSequence, "" } - default: - if quote { - if !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || - '0' <= ch && ch <= '9' || ch == '\'' || ch == '"') { - l.offset = i + 1 - l.token = l.source[l.offset-2 : l.offset] + case '"': + if !l.inString { + l.offset = i + 1 + l.token = l.source[start:l.offset] + str, err := unquote(l.token, false) + if err != nil { return tokInvalid, "" } - quote = false + return tokString, str + } + if i > l.offset { + l.offset = i + l.token = l.source[start:l.offset] + str, err := unquote(l.token, true) + if err != nil { + return tokInvalid, "" + } + return tokString, str + } + l.inString = false + l.offset = i + 1 + return tokStringEnd, "" + default: + if !decode { + decode = ch > '~' + } + if ch < ' ' { // ref: unquoteBytes in encoding/json + controls++ } } } l.offset = len(l.source) - l.token = l.source[start:l.offset] - return tokInvalid, "" + l.token = "" + return tokUnterminatedString, "" +} + +func quoteAndEscape(src string, quote bool, controls int) []byte { + size := len(src) + controls*5 + if quote { + size += 2 + } + buf := make([]byte, size) + var j int + if quote { + buf[0] = '"' + buf[len(buf)-1] = '"' + j++ + } + for i := 0; i < len(src); i++ { + if ch := src[i]; ch < ' ' { + const hex = "0123456789abcdef" + copy(buf[j:], `\u00`) + buf[j+4] = hex[ch>>4] + buf[j+5] = hex[ch&0xF] + j += 6 + } else { + buf[j] = ch + j++ + } + } + return buf } type parseError struct { @@ -466,24 +512,18 @@ type parseError struct { } func (err *parseError) Error() string { - var message string - prefix := "unexpected" - switch { - case err.tokenType == eof: - message = "" - case err.tokenType == tokInvalid: - prefix = "invalid" - fallthrough - case err.tokenType >= utf8.RuneSelf: - if strings.HasPrefix(err.token, "\"") { - message = err.token - } else { - message = "\"" + err.token + "\"" - } + switch err.tokenType { + case eof: + return "unexpected EOF" + case tokInvalid: + return "invalid token " + jsonMarshal(err.token) + case tokInvalidEscapeSequence: + return `invalid escape sequence "` + err.token + `" in string literal` + case tokUnterminatedString: + return "unterminated string literal" default: - message = strconv.Quote(string(rune(err.tokenType))) + return "unexpected token " + jsonMarshal(err.token) } - return prefix + " token " + message } func (err *parseError) Token() (string, int) { @@ -492,12 +532,7 @@ func (err *parseError) Token() (string, int) { func (l *lexer) Error(string) { offset, token := l.offset, l.token - switch { - case l.tokenType == eof: - offset++ - case l.tokenType >= utf8.RuneSelf: - offset -= len(token) - 1 - default: + if l.tokenType != eof && l.tokenType < utf8.RuneSelf { token = string(rune(l.tokenType)) } l.err = &parseError{offset, token, l.tokenType} @@ -518,6 +553,12 @@ func isIdent(ch byte, tail bool) bool { tail && isNumber(ch) } +func isHex(ch byte) bool { + return 'a' <= ch && ch <= 'f' || + 'A' <= ch && ch <= 'F' || + isNumber(ch) +} + func isNumber(ch byte) bool { return '0' <= ch && ch <= '9' } diff --git a/vendor/github.com/itchyny/gojq/math.go b/vendor/github.com/itchyny/gojq/math.go deleted file mode 100644 index 55d64765..00000000 --- a/vendor/github.com/itchyny/gojq/math.go +++ /dev/null @@ -1,10 +0,0 @@ -package gojq - -import "math/bits" - -const ( - maxInt = 
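The lexer rewrite above delegates escape handling to encoding/json: a jq string literal is only valid JSON once raw control characters (which jq tolerates inside strings) have been rewritten to \u00XX, which is what quoteAndEscape does before json.Unmarshal decodes the buffer. A compact sketch of that decode path (unquote is an illustrative helper, without the single-allocation buffer sizing of the real function):

package sketch

import (
	"encoding/json"
	"fmt"
	"strings"
)

// unquote decodes a double-quoted string literal (quotes included) by handing
// it to encoding/json, after escaping any raw control characters so that the
// buffer is valid JSON — the same idea as quoteAndEscape above.
func unquote(quoted string) (string, error) {
	var b strings.Builder
	for i := 0; i < len(quoted); i++ {
		if ch := quoted[i]; ch < ' ' {
			fmt.Fprintf(&b, `\u%04x`, ch) // e.g. a literal newline becomes \u000a
		} else {
			b.WriteByte(ch)
		}
	}
	var s string
	err := json.Unmarshal([]byte(b.String()), &s)
	return s, err
}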
1<<(bits.UintSize-1) - 1 // math.MaxInt64 or math.MaxInt32 - minInt = -maxInt - 1 // math.MinInt64 or math.MinInt32 - maxHalfInt = 1<<(bits.UintSize/2-1) - 1 // math.MaxInt32 or math.MaxInt16 - minHalfInt = -maxHalfInt - 1 // math.MinInt32 or math.MinInt16 -) diff --git a/vendor/github.com/itchyny/gojq/module_loader.go b/vendor/github.com/itchyny/gojq/module_loader.go index b6cebca9..6e9ba48c 100644 --- a/vendor/github.com/itchyny/gojq/module_loader.go +++ b/vendor/github.com/itchyny/gojq/module_loader.go @@ -9,15 +9,26 @@ import ( "strings" ) -type moduleLoader struct { - paths []string -} +// ModuleLoader is the interface for loading modules. +// +// Implement following optional methods. Use [NewModuleLoader] to load local modules. +// +// LoadModule(string) (*Query, error) +// LoadModuleWithMeta(string, map[string]any) (*Query, error) +// LoadInitModules() ([]*Query, error) +// LoadJSON(string) (any, error) +// LoadJSONWithMeta(string, map[string]any) (any, error) +type ModuleLoader any -// NewModuleLoader creates a new ModuleLoader reading local modules in the paths. +// NewModuleLoader creates a new [ModuleLoader] reading local modules in the paths. func NewModuleLoader(paths []string) ModuleLoader { return &moduleLoader{expandHomeDir(paths)} } +type moduleLoader struct { + paths []string +} + func (l *moduleLoader) LoadInitModules() ([]*Query, error) { var qs []*Query for _, path := range l.paths { @@ -40,14 +51,14 @@ func (l *moduleLoader) LoadInitModules() ([]*Query, error) { } q, err := parseModule(path, string(cnt)) if err != nil { - return nil, &queryParseError{"query in module", path, string(cnt), err} + return nil, &queryParseError{path, string(cnt), err} } qs = append(qs, q) } return qs, nil } -func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]interface{}) (*Query, error) { +func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]any) (*Query, error) { path, err := l.lookupModule(name, ".jq", meta) if err != nil { return nil, err @@ -58,12 +69,12 @@ func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]interface } q, err := parseModule(path, string(cnt)) if err != nil { - return nil, &queryParseError{"query in module", path, string(cnt), err} + return nil, &queryParseError{path, string(cnt), err} } return q, nil } -func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{}) (interface{}, error) { +func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]any) (any, error) { path, err := l.lookupModule(name, ".json", meta) if err != nil { return nil, err @@ -73,11 +84,11 @@ func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{} return nil, err } defer f.Close() - var vals []interface{} + var vals []any dec := json.NewDecoder(f) dec.UseNumber() for { - var val interface{} + var val any if err := dec.Decode(&val); err != nil { if err == io.EOF { break @@ -96,7 +107,7 @@ func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]interface{} return vals, nil } -func (l *moduleLoader) lookupModule(name, extension string, meta map[string]interface{}) (string, error) { +func (l *moduleLoader) lookupModule(name, extension string, meta map[string]any) (string, error) { paths := l.paths if path := searchPath(meta); path != "" { paths = append([]string{path}, paths...) 
@@ -135,7 +146,7 @@ func parseModule(path, cnt string) (*Query, error) { return q, nil } -func searchPath(meta map[string]interface{}) string { +func searchPath(meta map[string]any) string { x, ok := meta["search"] if !ok { return "" diff --git a/vendor/github.com/itchyny/gojq/normalize.go b/vendor/github.com/itchyny/gojq/normalize.go index afca122b..2bfcd215 100644 --- a/vendor/github.com/itchyny/gojq/normalize.go +++ b/vendor/github.com/itchyny/gojq/normalize.go @@ -7,8 +7,8 @@ import ( "strings" ) -func normalizeNumber(v json.Number) interface{} { - if i, err := v.Int64(); err == nil && minInt <= i && i <= maxInt { +func normalizeNumber(v json.Number) any { + if i, err := v.Int64(); err == nil && math.MinInt <= i && i <= math.MaxInt { return int(i) } if strings.ContainsAny(v.String(), ".eE") { @@ -25,22 +25,22 @@ func normalizeNumber(v json.Number) interface{} { return math.Inf(1) } -func normalizeNumbers(v interface{}) interface{} { +func normalizeNumbers(v any) any { switch v := v.(type) { case json.Number: return normalizeNumber(v) case *big.Int: if v.IsInt64() { - if i := v.Int64(); minInt <= i && i <= maxInt { + if i := v.Int64(); math.MinInt <= i && i <= math.MaxInt { return int(i) } } return v case int64: - if v > maxInt || v < minInt { - return new(big.Int).SetInt64(v) + if math.MinInt <= v && v <= math.MaxInt { + return int(v) } - return int(v) + return big.NewInt(v) case int32: return int(v) case int16: @@ -48,68 +48,36 @@ func normalizeNumbers(v interface{}) interface{} { case int8: return int(v) case uint: - if v > maxInt { - return new(big.Int).SetUint64(uint64(v)) + if v <= math.MaxInt { + return int(v) } - return int(v) + return new(big.Int).SetUint64(uint64(v)) case uint64: - if v > maxInt { - return new(big.Int).SetUint64(v) + if v <= math.MaxInt { + return int(v) } - return int(v) + return new(big.Int).SetUint64(v) case uint32: - if uint64(v) > maxInt { - return new(big.Int).SetUint64(uint64(v)) + if uint64(v) <= math.MaxInt { + return int(v) } - return int(v) + return new(big.Int).SetUint64(uint64(v)) case uint16: return int(v) case uint8: return int(v) case float32: return float64(v) - case map[string]interface{}: - for k, x := range v { - v[k] = normalizeNumbers(x) - } - return v - case []interface{}: + case []any: for i, x := range v { v[i] = normalizeNumbers(x) } return v - default: - return v - } -} - -// It's ok to delete destructively because this function is used right after -// updatePaths, where it shallow-copies maps or slices on updates. -func deleteEmpty(v interface{}) interface{} { - switch v := v.(type) { - case struct{}: - return nil - case map[string]interface{}: - for k, w := range v { - if w == struct{}{} { - delete(v, k) - } else { - v[k] = deleteEmpty(w) - } + case map[string]any: + for k, x := range v { + v[k] = normalizeNumbers(x) } return v - case []interface{}: - var j int - for _, w := range v { - if w != struct{}{} { - v[j] = deleteEmpty(w) - j++ - } - } - for i := j; i < len(v); i++ { - v[i] = nil - } - return v[:j] default: return v } diff --git a/vendor/github.com/itchyny/gojq/operator.go b/vendor/github.com/itchyny/gojq/operator.go index 80e13ef9..73a548e0 100644 --- a/vendor/github.com/itchyny/gojq/operator.go +++ b/vendor/github.com/itchyny/gojq/operator.go @@ -37,7 +37,7 @@ const ( OpUpdateAlt ) -// String implements Stringer. +// String implements [fmt.Stringer]. func (op Operator) String() string { switch op { case OpPipe: @@ -93,7 +93,7 @@ func (op Operator) String() string { } } -// GoString implements GoStringer. 
+// GoString implements [fmt.GoStringer]. func (op Operator) GoString() (str string) { defer func() { str = "gojq." + str }() switch op { @@ -208,23 +208,19 @@ func (op Operator) getFunc() string { } func binopTypeSwitch( - l, r interface{}, - callbackInts func(_, _ int) interface{}, - callbackFloats func(_, _ float64) interface{}, - callbackBigInts func(_, _ *big.Int) interface{}, - callbackStrings func(_, _ string) interface{}, - callbackArrays func(_, _ []interface{}) interface{}, - callbackMaps func(_, _ map[string]interface{}) interface{}, - fallback func(_, _ interface{}) interface{}) interface{} { + l, r any, + callbackInts func(_, _ int) any, + callbackFloats func(_, _ float64) any, + callbackBigInts func(_, _ *big.Int) any, + callbackStrings func(_, _ string) any, + callbackArrays func(_, _ []any) any, + callbackMaps func(_, _ map[string]any) any, + fallback func(_, _ any) any) any { switch l := l.(type) { case int: switch r := r.(type) { case int: - if minHalfInt <= l && l <= maxHalfInt && - minHalfInt <= r && r <= maxHalfInt { - return callbackInts(l, r) - } - return callbackBigInts(big.NewInt(int64(l)), big.NewInt(int64(r))) + return callbackInts(l, r) case float64: return callbackFloats(float64(l), r) case *big.Int: @@ -261,16 +257,16 @@ func binopTypeSwitch( default: return fallback(l, r) } - case []interface{}: + case []any: switch r := r.(type) { - case []interface{}: + case []any: return callbackArrays(l, r) default: return fallback(l, r) } - case map[string]interface{}: + case map[string]any: switch r := r.(type) { - case map[string]interface{}: + case map[string]any: return callbackMaps(l, r) default: return fallback(l, r) @@ -280,7 +276,7 @@ func binopTypeSwitch( } } -func funcOpPlus(v interface{}) interface{} { +func funcOpPlus(v any) any { switch v := v.(type) { case int: return v @@ -293,7 +289,7 @@ func funcOpPlus(v interface{}) interface{} { } } -func funcOpNegate(v interface{}) interface{} { +func funcOpNegate(v any) any { switch v := v.(type) { case int: return -v @@ -306,28 +302,38 @@ func funcOpNegate(v interface{}) interface{} { } } -func funcOpAdd(_, l, r interface{}) interface{} { - if l == nil { - return r - } else if r == nil { - return l - } +func funcOpAdd(_, l, r any) any { return binopTypeSwitch(l, r, - func(l, r int) interface{} { return l + r }, - func(l, r float64) interface{} { return l + r }, - func(l, r *big.Int) interface{} { return new(big.Int).Add(l, r) }, - func(l, r string) interface{} { return l + r }, - func(l, r []interface{}) interface{} { + func(l, r int) any { + if v := l + r; (v >= l) == (r >= 0) { + return v + } + x, y := big.NewInt(int64(l)), big.NewInt(int64(r)) + return x.Add(x, y) + }, + func(l, r float64) any { return l + r }, + func(l, r *big.Int) any { return new(big.Int).Add(l, r) }, + func(l, r string) any { return l + r }, + func(l, r []any) any { + if len(l) == 0 { + return r + } if len(r) == 0 { return l - } else if len(l) == 0 { - return r } - v := make([]interface{}, 0, len(l)+len(r)) - return append(append(v, l...), r...) 
+ v := make([]any, len(l)+len(r)) + copy(v, l) + copy(v[len(l):], r) + return v }, - func(l, r map[string]interface{}) interface{} { - m := make(map[string]interface{}, len(l)+len(r)) + func(l, r map[string]any) any { + if len(l) == 0 { + return r + } + if len(r) == 0 { + return l + } + m := make(map[string]any, len(l)+len(r)) for k, v := range l { m[k] = v } @@ -336,63 +342,71 @@ func funcOpAdd(_, l, r interface{}) interface{} { } return m }, - func(l, r interface{}) interface{} { return &binopTypeError{"add", l, r} }, + func(l, r any) any { + if l == nil { + return r + } + if r == nil { + return l + } + return &binopTypeError{"add", l, r} + }, ) } -func funcOpSub(_, l, r interface{}) interface{} { +func funcOpSub(_, l, r any) any { return binopTypeSwitch(l, r, - func(l, r int) interface{} { return l - r }, - func(l, r float64) interface{} { return l - r }, - func(l, r *big.Int) interface{} { return new(big.Int).Sub(l, r) }, - func(l, r string) interface{} { return &binopTypeError{"subtract", l, r} }, - func(l, r []interface{}) interface{} { - a := make([]interface{}, 0, len(l)) - for _, v := range l { - var found bool - for _, w := range r { - if compare(v, w) == 0 { - found = true - break + func(l, r int) any { + if v := l - r; (v <= l) == (r >= 0) { + return v + } + x, y := big.NewInt(int64(l)), big.NewInt(int64(r)) + return x.Sub(x, y) + }, + func(l, r float64) any { return l - r }, + func(l, r *big.Int) any { return new(big.Int).Sub(l, r) }, + func(l, r string) any { return &binopTypeError{"subtract", l, r} }, + func(l, r []any) any { + v := make([]any, 0, len(l)) + L: + for _, l := range l { + for _, r := range r { + if compare(l, r) == 0 { + continue L } } - if !found { - a = append(a, v) - } + v = append(v, l) } - return a + return v }, - func(l, r map[string]interface{}) interface{} { return &binopTypeError{"subtract", l, r} }, - func(l, r interface{}) interface{} { return &binopTypeError{"subtract", l, r} }, + func(l, r map[string]any) any { return &binopTypeError{"subtract", l, r} }, + func(l, r any) any { return &binopTypeError{"subtract", l, r} }, ) } -func funcOpMul(_, l, r interface{}) interface{} { +func funcOpMul(_, l, r any) any { return binopTypeSwitch(l, r, - func(l, r int) interface{} { return l * r }, - func(l, r float64) interface{} { return l * r }, - func(l, r *big.Int) interface{} { return new(big.Int).Mul(l, r) }, - func(l, r string) interface{} { return &binopTypeError{"multiply", l, r} }, - func(l, r []interface{}) interface{} { return &binopTypeError{"multiply", l, r} }, - deepMergeObjects, - func(l, r interface{}) interface{} { - multiplyString := func(s string, cnt float64) interface{} { - if cnt <= 0.0 || cnt > float64(maxHalfInt/(16*(len(s)+1))) || math.IsNaN(cnt) { - return nil - } - if cnt < 1.0 { - return s - } - return strings.Repeat(s, int(cnt)) + func(l, r int) any { + if v := l * r; r == 0 || v/r == l { + return v } + x, y := big.NewInt(int64(l)), big.NewInt(int64(r)) + return x.Mul(x, y) + }, + func(l, r float64) any { return l * r }, + func(l, r *big.Int) any { return new(big.Int).Mul(l, r) }, + func(l, r string) any { return &binopTypeError{"multiply", l, r} }, + func(l, r []any) any { return &binopTypeError{"multiply", l, r} }, + deepMergeObjects, + func(l, r any) any { if l, ok := l.(string); ok { - if f, ok := toFloat(r); ok { - return multiplyString(l, f) + if r, ok := toFloat(r); ok { + return repeatString(l, r) } } if r, ok := r.(string); ok { - if f, ok := toFloat(l); ok { - return multiplyString(r, f) + if l, ok := toFloat(l); ok { + 
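The arithmetic hunks above stop pre-promoting small ints to *big.Int and instead detect overflow after the fact, falling back to math/big only when the cheap check fails. The addition check in isolation (addInt is a sketch of the technique, not the vendored function):

package sketch

import "math/big"

// addInt adds two ints, returning an int when the sum fits and a *big.Int when
// it overflows. Go's signed + wraps on overflow, and the wrap is detectable:
// adding a non-negative r must not move the result below l, and adding a
// negative r must — the same (v >= l) == (r >= 0) test used by funcOpAdd above.
func addInt(l, r int) any {
	if v := l + r; (v >= l) == (r >= 0) {
		return v // no overflow, stay in the fast int path
	}
	x := big.NewInt(int64(l))
	return x.Add(x, big.NewInt(int64(r)))
}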
return repeatString(r, l) } } return &binopTypeError{"multiply", l, r} @@ -400,15 +414,15 @@ func funcOpMul(_, l, r interface{}) interface{} { ) } -func deepMergeObjects(l, r map[string]interface{}) interface{} { - m := make(map[string]interface{}, len(l)+len(r)) +func deepMergeObjects(l, r map[string]any) any { + m := make(map[string]any, len(l)+len(r)) for k, v := range l { m[k] = v } for k, v := range r { if mk, ok := m[k]; ok { - if mk, ok := mk.(map[string]interface{}); ok { - if w, ok := v.(map[string]interface{}); ok { + if mk, ok := mk.(map[string]any); ok { + if w, ok := v.(map[string]any); ok { v = deepMergeObjects(mk, w) } } @@ -418,9 +432,19 @@ func deepMergeObjects(l, r map[string]interface{}) interface{} { return m } -func funcOpDiv(_, l, r interface{}) interface{} { +func repeatString(s string, n float64) any { + if n <= 0.0 || len(s) > 0 && n > float64(0x10000000/len(s)) || math.IsNaN(n) { + return nil + } + if int(n) < 1 { + return s + } + return strings.Repeat(s, int(n)) +} + +func funcOpDiv(_, l, r any) any { return binopTypeSwitch(l, r, - func(l, r int) interface{} { + func(l, r int) any { if r == 0 { if l == 0 { return math.NaN() @@ -432,7 +456,7 @@ func funcOpDiv(_, l, r interface{}) interface{} { } return float64(l) / float64(r) }, - func(l, r float64) interface{} { + func(l, r float64) any { if r == 0.0 { if l == 0.0 { return math.NaN() @@ -441,7 +465,7 @@ func funcOpDiv(_, l, r interface{}) interface{} { } return l / r }, - func(l, r *big.Int) interface{} { + func(l, r *big.Int) any { if r.Sign() == 0 { if l.Sign() == 0 { return math.NaN() @@ -454,78 +478,78 @@ func funcOpDiv(_, l, r interface{}) interface{} { } return bigToFloat(l) / bigToFloat(r) }, - func(l, r string) interface{} { + func(l, r string) any { if l == "" { - return []interface{}{} + return []any{} } xs := strings.Split(l, r) - vs := make([]interface{}, len(xs)) + vs := make([]any, len(xs)) for i, x := range xs { vs[i] = x } return vs }, - func(l, r []interface{}) interface{} { return &binopTypeError{"divide", l, r} }, - func(l, r map[string]interface{}) interface{} { return &binopTypeError{"divide", l, r} }, - func(l, r interface{}) interface{} { return &binopTypeError{"divide", l, r} }, + func(l, r []any) any { return &binopTypeError{"divide", l, r} }, + func(l, r map[string]any) any { return &binopTypeError{"divide", l, r} }, + func(l, r any) any { return &binopTypeError{"divide", l, r} }, ) } -func funcOpMod(_, l, r interface{}) interface{} { +func funcOpMod(_, l, r any) any { return binopTypeSwitch(l, r, - func(l, r int) interface{} { + func(l, r int) any { if r == 0 { return &zeroModuloError{l, r} } return l % r }, - func(l, r float64) interface{} { + func(l, r float64) any { ri := floatToInt(r) if ri == 0 { return &zeroModuloError{l, r} } return floatToInt(l) % ri }, - func(l, r *big.Int) interface{} { + func(l, r *big.Int) any { if r.Sign() == 0 { return &zeroModuloError{l, r} } return new(big.Int).Rem(l, r) }, - func(l, r string) interface{} { return &binopTypeError{"modulo", l, r} }, - func(l, r []interface{}) interface{} { return &binopTypeError{"modulo", l, r} }, - func(l, r map[string]interface{}) interface{} { return &binopTypeError{"modulo", l, r} }, - func(l, r interface{}) interface{} { return &binopTypeError{"modulo", l, r} }, + func(l, r string) any { return &binopTypeError{"modulo", l, r} }, + func(l, r []any) any { return &binopTypeError{"modulo", l, r} }, + func(l, r map[string]any) any { return &binopTypeError{"modulo", l, r} }, + func(l, r any) any { return 
&binopTypeError{"modulo", l, r} }, ) } -func funcOpAlt(_, l, r interface{}) interface{} { +func funcOpAlt(_, l, r any) any { if l == nil || l == false { return r } return l } -func funcOpEq(_, l, r interface{}) interface{} { +func funcOpEq(_, l, r any) any { return compare(l, r) == 0 } -func funcOpNe(_, l, r interface{}) interface{} { +func funcOpNe(_, l, r any) any { return compare(l, r) != 0 } -func funcOpGt(_, l, r interface{}) interface{} { +func funcOpGt(_, l, r any) any { return compare(l, r) > 0 } -func funcOpLt(_, l, r interface{}) interface{} { +func funcOpLt(_, l, r any) any { return compare(l, r) < 0 } -func funcOpGe(_, l, r interface{}) interface{} { +func funcOpGe(_, l, r any) any { return compare(l, r) >= 0 } -func funcOpLe(_, l, r interface{}) interface{} { +func funcOpLe(_, l, r any) any { return compare(l, r) <= 0 } diff --git a/vendor/github.com/itchyny/gojq/option.go b/vendor/github.com/itchyny/gojq/option.go index 5eb771cc..f1a110fa 100644 --- a/vendor/github.com/itchyny/gojq/option.go +++ b/vendor/github.com/itchyny/gojq/option.go @@ -6,7 +6,7 @@ import "fmt" type CompilerOption func(*compiler) // WithModuleLoader is a compiler option for module loader. -// If you want to load modules from the filesystem, use NewModuleLoader. +// If you want to load modules from the filesystem, use [NewModuleLoader]. func WithModuleLoader(moduleLoader ModuleLoader) CompilerOption { return func(c *compiler) { c.moduleLoader = moduleLoader @@ -15,7 +15,7 @@ func WithModuleLoader(moduleLoader ModuleLoader) CompilerOption { // WithEnvironLoader is a compiler option for environment variables loader. // The OS environment variables are not accessible by default due to security -// reason. You can pass os.Environ if you allow to access it. +// reasons. You can specify [os.Environ] as argument if you allow to access. func WithEnvironLoader(environLoader func() []string) CompilerOption { return func(c *compiler) { c.environLoader = environLoader @@ -23,7 +23,7 @@ func WithEnvironLoader(environLoader func() []string) CompilerOption { } // WithVariables is a compiler option for variable names. The variables can be -// used in the query. You have to give the values to code.Run in the same order. +// used in the query. You have to give the values to [*Code.Run] in the same order. func WithVariables(variables []string) CompilerOption { return func(c *compiler) { c.variables = variables @@ -35,31 +35,28 @@ func WithVariables(variables []string) CompilerOption { // values should satisfy 0 <= minarity <= maxarity <= 30, otherwise panics. // On handling numbers, you should take account to int, float64 and *big.Int. // These are the number types you are allowed to return, so do not return int64. -// Refer to ValueError to return a value error just like built-in error function. -// If you want to emit multiple values, call the empty function, accept a filter -// for its argument, or call another built-in function, then use LoadInitModules -// of the module loader. -func WithFunction(name string, minarity, maxarity int, - f func(interface{}, []interface{}) interface{}) CompilerOption { +// Refer to [ValueError] to return a value error just like built-in error +// function. If you want to emit multiple values, call the empty function, +// accept a filter for its argument, or call another built-in function, then +// use LoadInitModules of the module loader. 
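The reworded WithFunction documentation above is worth pairing with a concrete call: a custom function receives the input value plus its arguments as []any and returns int, float64, *big.Int, string, bool, nil, []any, map[string]any, or an error value. A usage sketch against the signature shown in this hunk (the function name "twice" and the query are made up for illustration):

package main

import (
	"fmt"
	"log"

	"github.com/itchyny/gojq"
)

func main() {
	query, err := gojq.Parse("twice(3), twice(.)")
	if err != nil {
		log.Fatal(err)
	}
	code, err := gojq.Compile(query,
		// name "twice", exactly one argument (minarity == maxarity == 1)
		gojq.WithFunction("twice", 1, 1, func(v any, args []any) any {
			if n, ok := args[0].(int); ok {
				return n * 2
			}
			return fmt.Errorf("twice: expected an int, got %v", args[0])
		}),
	)
	if err != nil {
		log.Fatal(err)
	}
	iter := code.Run(5)
	for {
		v, ok := iter.Next()
		if !ok {
			break
		}
		if err, isErr := v.(error); isErr {
			log.Fatal(err)
		}
		fmt.Println(v) // 6, then 10
	}
}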
+func WithFunction(name string, minarity, maxarity int, f func(any, []any) any) CompilerOption { return withFunction(name, minarity, maxarity, false, f) } // WithIterFunction is a compiler option for adding a custom iterator function. -// This is like the WithFunction option, but you can add a function which +// This is like the [WithFunction] option, but you can add a function which // returns an Iter to emit multiple values. You cannot define both iterator and // non-iterator functions of the same name (with possibly different arities). -// See also NewIter, which can be used to convert values or an error to an Iter. -func WithIterFunction(name string, minarity, maxarity int, - f func(interface{}, []interface{}) Iter) CompilerOption { +// See also [NewIter], which can be used to convert values or an error to an Iter. +func WithIterFunction(name string, minarity, maxarity int, f func(any, []any) Iter) CompilerOption { return withFunction(name, minarity, maxarity, true, - func(v interface{}, args []interface{}) interface{} { + func(v any, args []any) any { return f(v, args) }, ) } -func withFunction(name string, minarity, maxarity int, iter bool, - f func(interface{}, []interface{}) interface{}) CompilerOption { +func withFunction(name string, minarity, maxarity int, iter bool, f func(any, []any) any) CompilerOption { if !(0 <= minarity && minarity <= maxarity && maxarity <= 30) { panic(fmt.Sprintf("invalid arity for %q: %d, %d", name, minarity, maxarity)) } @@ -74,7 +71,7 @@ func withFunction(name string, minarity, maxarity int, iter bool, } c.customFuncs[name] = function{ argcount | fn.argcount, iter, - func(x interface{}, xs []interface{}) interface{} { + func(x any, xs []any) any { if argcount&(1< 0 { @@ -30,10 +35,10 @@ func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef { return xs } -//line parser.go.y:28 +//line parser.go.y:33 type yySymType struct { yys int - value interface{} + value any token string operator Operator } @@ -61,24 +66,26 @@ const tokModuleVariable = 57365 const tokIndex = 57366 const tokNumber = 57367 const tokFormat = 57368 -const tokInvalid = 57369 -const tokString = 57370 -const tokStringStart = 57371 -const tokStringQuery = 57372 -const tokStringEnd = 57373 -const tokIf = 57374 -const tokThen = 57375 -const tokElif = 57376 -const tokElse = 57377 -const tokEnd = 57378 -const tokTry = 57379 -const tokCatch = 57380 -const tokReduce = 57381 -const tokForeach = 57382 -const tokRecurse = 57383 -const tokFuncDefPost = 57384 -const tokTermPost = 57385 -const tokEmptyCatch = 57386 +const tokString = 57369 +const tokStringStart = 57370 +const tokStringQuery = 57371 +const tokStringEnd = 57372 +const tokIf = 57373 +const tokThen = 57374 +const tokElif = 57375 +const tokElse = 57376 +const tokEnd = 57377 +const tokTry = 57378 +const tokCatch = 57379 +const tokReduce = 57380 +const tokForeach = 57381 +const tokRecurse = 57382 +const tokFuncDefPost = 57383 +const tokTermPost = 57384 +const tokEmptyCatch = 57385 +const tokInvalid = 57386 +const tokInvalidEscapeSequence = 57387 +const tokUnterminatedString = 57388 var yyToknames = [...]string{ "$end", @@ -107,7 +114,6 @@ var yyToknames = [...]string{ "tokIndex", "tokNumber", "tokFormat", - "tokInvalid", "tokString", "tokStringStart", "tokStringQuery", @@ -125,6 +131,9 @@ var yyToknames = [...]string{ "tokFuncDefPost", "tokTermPost", "tokEmptyCatch", + "tokInvalid", + "tokInvalidEscapeSequence", + "tokUnterminatedString", "'|'", "','", "'+'", @@ -150,15 +159,15 @@ const yyEofCode = 1 const yyErrCode = 2 const 
yyInitialStackSize = 16 -//line parser.go.y:687 +//line parser.go.y:693 //line yacctab:1 -var yyExca = [...]int{ +var yyExca = [...]int16{ -1, 1, 1, -1, -2, 0, -1, 97, - 53, 0, + 55, 0, -2, 104, -1, 130, 5, 0, @@ -167,167 +176,170 @@ var yyExca = [...]int{ 9, 0, -2, 35, -1, 194, - 56, 114, + 58, 114, -2, 54, } const yyPrivate = 57344 -const yyLast = 1094 +const yyLast = 1127 -var yyAct = [...]int{ +var yyAct = [...]int16{ 86, 214, 174, 112, 12, 203, 9, 175, 111, 31, 190, 6, 156, 140, 117, 47, 95, 97, 93, 94, - 89, 227, 49, 75, 76, 7, 77, 78, 79, 240, - 235, 103, 239, 106, 164, 123, 226, 119, 107, 108, - 105, 234, 102, 75, 76, 113, 77, 78, 79, 163, - 122, 104, 211, 75, 76, 210, 77, 78, 79, 158, - 159, 264, 259, 243, 72, 74, 80, 81, 82, 83, - 84, 229, 73, 127, 275, 128, 129, 130, 131, 132, - 133, 134, 135, 136, 137, 138, 80, 81, 82, 83, - 84, 228, 73, 147, 72, 74, 80, 81, 82, 83, - 84, 145, 73, 141, 278, 161, 246, 277, 157, 225, - 166, 165, 144, 126, 125, 167, 88, 42, 43, 245, - 124, 258, 224, 206, 179, 180, 181, 44, 183, 184, - 73, 242, 177, 154, 153, 178, 142, 186, 49, 173, - 267, 100, 143, 92, 91, 90, 92, 191, 99, 197, - 150, 120, 200, 192, 201, 202, 188, 256, 257, 207, - 88, 182, 98, 198, 199, 209, 219, 7, 216, 101, - 215, 215, 218, 213, 113, 155, 185, 75, 76, 3, - 77, 78, 79, 42, 43, 221, 222, 28, 91, 90, - 92, 179, 180, 181, 230, 204, 205, 232, 8, 177, - 223, 27, 178, 80, 81, 82, 83, 84, 220, 73, - 85, 157, 241, 176, 46, 149, 237, 110, 72, 74, - 80, 81, 82, 83, 84, 88, 73, 152, 182, 196, - 79, 191, 195, 255, 7, 253, 254, 192, 248, 247, - 236, 160, 249, 250, 96, 262, 260, 261, 215, 263, - 11, 121, 189, 91, 90, 92, 11, 268, 269, 187, - 270, 82, 83, 84, 139, 73, 272, 273, 80, 81, - 82, 83, 84, 208, 73, 279, 10, 5, 271, 280, - 51, 52, 4, 53, 54, 55, 56, 57, 58, 59, - 60, 61, 62, 115, 116, 170, 2, 171, 169, 1, - 0, 42, 43, 0, 0, 63, 64, 65, 66, 67, - 68, 69, 70, 71, 0, 0, 20, 0, 17, 37, - 24, 25, 26, 38, 40, 39, 41, 23, 29, 30, - 114, 42, 43, 0, 212, 15, 0, 0, 0, 0, - 16, 0, 13, 14, 22, 0, 0, 0, 0, 0, - 33, 34, 0, 0, 0, 21, 0, 36, 0, 148, - 32, 0, 146, 35, 51, 52, 0, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 115, 116, 0, + 89, 141, 49, 7, 179, 180, 181, 240, 246, 264, + 239, 103, 177, 106, 178, 227, 164, 119, 107, 108, + 105, 245, 102, 75, 76, 113, 77, 78, 79, 123, + 226, 163, 211, 225, 259, 210, 142, 179, 180, 181, + 158, 159, 143, 182, 122, 177, 224, 178, 219, 7, + 235, 234, 104, 127, 243, 128, 129, 130, 131, 132, + 133, 134, 135, 136, 137, 138, 72, 74, 80, 81, + 82, 83, 84, 147, 73, 88, 182, 196, 73, 229, + 195, 145, 7, 150, 228, 161, 166, 165, 157, 126, + 125, 124, 144, 88, 258, 167, 80, 81, 82, 83, + 84, 206, 73, 44, 242, 91, 90, 92, 183, 184, + 82, 83, 84, 154, 73, 153, 267, 186, 49, 173, + 42, 43, 100, 91, 90, 92, 99, 191, 120, 197, + 256, 257, 200, 192, 201, 202, 188, 75, 76, 207, + 77, 78, 79, 198, 199, 209, 42, 43, 216, 92, + 215, 215, 218, 213, 113, 98, 75, 76, 185, 77, + 78, 79, 204, 205, 101, 221, 222, 170, 155, 171, + 169, 3, 28, 27, 230, 96, 220, 232, 176, 46, + 223, 11, 80, 81, 82, 83, 84, 11, 73, 78, + 79, 157, 241, 110, 8, 152, 237, 255, 236, 72, + 74, 80, 81, 82, 83, 84, 85, 73, 79, 278, + 160, 191, 277, 121, 189, 253, 254, 192, 248, 247, + 187, 139, 249, 250, 208, 262, 260, 261, 215, 263, + 80, 81, 82, 83, 84, 149, 73, 268, 269, 10, + 270, 5, 4, 2, 1, 88, 272, 273, 80, 81, + 82, 83, 84, 0, 73, 279, 0, 0, 271, 280, + 51, 52, 0, 53, 54, 55, 56, 57, 58, 59, + 60, 61, 62, 115, 116, 91, 90, 92, 0, 0, + 42, 43, 0, 87, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 88, 0, 20, 0, 17, 37, 
24, + 25, 26, 38, 40, 39, 41, 23, 29, 30, 42, + 43, 0, 114, 15, 0, 0, 212, 0, 16, 0, + 13, 14, 22, 91, 90, 92, 0, 0, 0, 0, + 0, 33, 34, 0, 0, 0, 21, 0, 36, 0, + 148, 32, 0, 146, 35, 51, 52, 0, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, 115, 116, 0, 0, 0, 0, 0, 42, 43, 0, 0, 63, 64, 65, 66, 67, 68, 69, 70, 71, 18, 19, 20, 0, 17, 37, 24, 25, 26, 38, 40, 39, - 41, 23, 29, 30, 114, 42, 43, 0, 109, 15, - 0, 0, 0, 0, 16, 0, 13, 14, 22, 0, + 41, 23, 29, 30, 42, 43, 0, 114, 15, 0, + 0, 109, 0, 16, 0, 13, 14, 22, 0, 0, + 0, 0, 0, 0, 0, 0, 33, 34, 0, 0, + 0, 21, 0, 36, 0, 0, 32, 0, 20, 35, + 17, 37, 24, 25, 26, 38, 40, 39, 41, 23, + 29, 30, 42, 43, 0, 0, 15, 0, 0, 0, + 0, 16, 0, 13, 14, 22, 0, 0, 0, 0, 0, 0, 0, 0, 33, 34, 0, 0, 0, 21, - 0, 36, 0, 0, 32, 0, 20, 35, 17, 37, - 24, 25, 26, 38, 40, 39, 41, 23, 29, 30, - 0, 42, 43, 0, 0, 15, 0, 0, 0, 0, - 16, 0, 13, 14, 22, 0, 87, 0, 0, 0, - 33, 34, 0, 0, 0, 21, 88, 36, 0, 0, - 32, 0, 231, 35, 20, 0, 17, 37, 24, 25, - 26, 38, 40, 39, 41, 23, 29, 30, 0, 42, - 43, 0, 0, 15, 91, 90, 92, 0, 16, 0, - 13, 14, 22, 0, 0, 0, 0, 0, 33, 34, - 0, 0, 0, 21, 0, 36, 0, 0, 32, 0, - 118, 35, 20, 0, 17, 37, 24, 25, 26, 38, - 40, 39, 41, 23, 29, 30, 0, 42, 43, 0, - 0, 15, 0, 77, 78, 79, 16, 0, 13, 14, - 22, 0, 0, 0, 0, 0, 33, 34, 0, 0, - 0, 21, 0, 36, 0, 0, 32, 51, 52, 35, - 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, - 48, 0, 0, 80, 81, 82, 83, 84, 50, 73, - 0, 0, 63, 64, 65, 66, 67, 68, 69, 70, + 0, 36, 0, 0, 32, 0, 231, 35, 20, 0, + 17, 37, 24, 25, 26, 38, 40, 39, 41, 23, + 29, 30, 42, 43, 0, 0, 15, 0, 0, 0, + 0, 16, 0, 13, 14, 22, 0, 0, 0, 0, + 0, 0, 0, 0, 33, 34, 0, 0, 0, 21, + 0, 36, 0, 0, 32, 0, 118, 35, 20, 0, + 17, 37, 24, 25, 26, 38, 40, 39, 41, 23, + 29, 30, 42, 43, 0, 0, 15, 0, 77, 78, + 79, 16, 0, 13, 14, 22, 0, 0, 0, 0, + 0, 0, 0, 0, 33, 34, 0, 0, 0, 21, + 0, 36, 0, 0, 32, 51, 52, 35, 53, 54, + 55, 56, 57, 58, 59, 60, 61, 62, 48, 0, + 80, 81, 82, 83, 84, 50, 73, 0, 0, 63, + 64, 65, 66, 67, 68, 69, 70, 71, 51, 52, + 0, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 62, 48, 0, 0, 0, 0, 0, 0, 50, 0, + 0, 172, 63, 64, 65, 66, 67, 68, 69, 70, 71, 51, 52, 0, 53, 54, 55, 56, 57, 58, - 59, 60, 61, 62, 48, 0, 0, 0, 0, 0, - 0, 172, 50, 0, 0, 0, 63, 64, 65, 66, - 67, 68, 69, 70, 71, 51, 52, 0, 53, 54, - 55, 56, 57, 58, 59, 60, 61, 62, 115, 194, - 78, 79, 0, 0, 0, 45, 42, 43, 0, 0, - 63, 64, 65, 66, 67, 68, 69, 70, 71, 37, - 24, 25, 26, 38, 40, 39, 41, 23, 29, 30, - 0, 42, 43, 75, 76, 193, 77, 78, 79, 80, - 81, 82, 83, 84, 22, 73, 0, 0, 0, 0, - 33, 34, 0, 0, 0, 21, 0, 36, 0, 0, - 32, 75, 76, 35, 77, 78, 79, 0, 0, 0, - 0, 0, 0, 0, 72, 74, 80, 81, 82, 83, - 84, 0, 73, 0, 0, 0, 75, 76, 252, 77, - 78, 79, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 72, 74, 80, 81, 82, 83, 84, 0, - 73, 0, 0, 0, 75, 76, 233, 77, 78, 79, - 0, 0, 0, 0, 0, 0, 0, 72, 74, 80, - 81, 82, 83, 84, 0, 73, 0, 0, 0, 75, - 76, 168, 77, 78, 79, 0, 0, 0, 0, 0, + 59, 60, 61, 62, 115, 194, 0, 0, 0, 0, + 0, 42, 43, 0, 45, 63, 64, 65, 66, 67, + 68, 69, 70, 71, 37, 24, 25, 26, 38, 40, + 39, 41, 23, 29, 30, 42, 43, 75, 76, 0, + 77, 78, 79, 193, 0, 0, 0, 0, 22, 0, + 0, 0, 0, 0, 0, 0, 0, 33, 34, 0, + 0, 0, 21, 0, 36, 0, 0, 32, 75, 76, + 35, 77, 78, 79, 0, 0, 0, 0, 0, 0, + 72, 74, 80, 81, 82, 83, 84, 0, 73, 0, + 0, 0, 75, 76, 252, 77, 78, 79, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 72, 74, 80, 81, 82, 83, 84, 0, 73, + 0, 0, 0, 75, 76, 233, 77, 78, 79, 0, 0, 0, 0, 0, 0, 72, 74, 80, 81, 82, - 83, 84, 0, 73, 0, 0, 75, 76, 281, 77, - 78, 79, 0, 0, 0, 0, 0, 0, 0, 0, + 83, 84, 0, 73, 0, 0, 0, 75, 76, 168, + 77, 78, 79, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 72, 74, 80, 81, + 82, 83, 84, 0, 73, 0, 0, 75, 76, 281, + 77, 78, 79, 0, 0, 0, 0, 0, 0, 0, 72, 74, 80, 81, 82, 83, 84, 0, 73, 0, 0, 75, 76, 276, 77, 78, 79, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 72, 74, 80, - 81, 82, 83, 84, 0, 73, 0, 0, 75, 76, - 251, 77, 78, 79, 0, 0, 0, 0, 0, 0, - 0, 0, 72, 74, 80, 81, 82, 83, 84, 0, - 73, 0, 0, 75, 76, 244, 77, 78, 79, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, - 74, 80, 81, 82, 83, 84, 0, 73, 0, 0, - 75, 76, 217, 77, 78, 79, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 72, 74, 80, 81, 82, 83, 84, 0, 73, 0, + 0, 75, 76, 251, 77, 78, 79, 0, 0, 0, 0, 0, 0, 0, 72, 74, 80, 81, 82, 83, - 84, 0, 73, 0, 0, 75, 76, 162, 77, 78, + 84, 0, 73, 0, 0, 75, 76, 244, 77, 78, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 72, 74, 80, 81, 82, 83, 84, 0, 73, - 0, 266, 75, 76, 0, 77, 78, 79, 0, 0, - 0, 0, 0, 0, 0, 0, 72, 74, 80, 81, - 82, 83, 84, 0, 73, 0, 265, 75, 76, 0, - 77, 78, 79, 0, 0, 0, 75, 76, 0, 77, - 78, 79, 0, 72, 74, 80, 81, 82, 83, 84, - 0, 73, 0, 238, 75, 76, 274, 77, 78, 79, - 0, 0, 0, 0, 0, 151, 0, 0, 72, 74, - 80, 81, 82, 83, 84, 0, 73, 72, 74, 80, - 81, 82, 83, 84, 0, 73, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 72, 74, 80, 81, 82, - 83, 84, 0, 73, + 0, 0, 0, 0, 72, 74, 80, 81, 82, 83, + 84, 0, 73, 0, 0, 75, 76, 217, 77, 78, + 79, 0, 0, 0, 0, 0, 0, 0, 72, 74, + 80, 81, 82, 83, 84, 0, 73, 0, 0, 75, + 76, 162, 77, 78, 79, 0, 0, 0, 0, 0, + 75, 76, 0, 77, 78, 79, 0, 0, 72, 74, + 80, 81, 82, 83, 84, 0, 73, 0, 275, 75, + 76, 0, 77, 78, 79, 0, 0, 0, 0, 0, + 0, 0, 72, 74, 80, 81, 82, 83, 84, 0, + 73, 0, 266, 72, 74, 80, 81, 82, 83, 84, + 0, 73, 0, 265, 75, 76, 0, 77, 78, 79, + 0, 0, 72, 74, 80, 81, 82, 83, 84, 0, + 73, 0, 238, 0, 0, 0, 75, 76, 0, 77, + 78, 79, 274, 0, 0, 75, 76, 0, 77, 78, + 79, 0, 0, 0, 0, 0, 0, 72, 74, 80, + 81, 82, 83, 84, 151, 73, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, + 74, 80, 81, 82, 83, 84, 0, 73, 72, 74, + 80, 81, 82, 83, 84, 0, 73, } -var yyPact = [...]int{ - 169, -1000, -1000, -35, -1000, 387, 72, 614, -1000, 1040, - -1000, 529, 462, 673, 673, 529, 529, 141, 120, 113, - 149, 89, -1000, -1000, -1000, -1000, -1000, -6, -1000, -1000, - 155, -1000, 529, 673, 673, 357, 481, 130, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -11, -1000, 64, 58, - 57, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, +var yyPact = [...]int16{ + 181, -1000, -1000, -39, -1000, 387, 66, 621, -1000, 1071, + -1000, 535, 289, 678, 678, 535, 535, 154, 119, 115, + 164, 113, -1000, -1000, -1000, -1000, -1000, 13, -1000, -1000, + 139, -1000, 535, 678, 678, 358, 485, 127, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, 1, -1000, 53, 52, + 51, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 529, -1000, 529, 529, 529, 529, 529, 529, - 529, 529, 529, 529, 529, -1000, 1040, 82, -1000, -1000, - -1000, 89, 303, 201, 136, 1022, 529, 96, 88, 161, - -35, 3, -1000, -1000, 529, -1000, 909, 92, 92, -1000, - -12, -1000, 55, 54, 529, -1000, -1000, -1000, -1000, 752, - -1000, 267, -1000, 580, 174, 174, 174, 1040, 39, 39, - 556, 662, 221, 156, 212, 212, 77, 77, 77, 131, - -1000, -1000, 82, 648, -1000, -1000, -1000, 173, 529, 82, - 82, 529, -1000, 529, 529, 175, 68, -1000, 529, 175, - -3, 1040, -1000, -1000, 273, 673, 673, 884, -1000, -1000, - -1000, 529, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 107, -1000, -1000, 529, 82, 63, -1000, -25, - -1000, 35, 15, 529, -1000, -1000, 433, 727, -16, -27, - 1040, -1000, 1040, -35, -1000, -1000, 
-1000, 988, -26, -1000, - -1000, 529, -1000, -1000, 86, 92, 86, 7, 857, -1000, - 60, -1000, 1040, -1000, -1000, 82, -1000, 648, 82, 82, - 832, -1000, 699, -1000, 529, 529, 123, 66, -1000, 6, - 175, 1040, 673, 673, -1000, -1000, 174, -1000, -1000, -1000, - -1000, 5, -1000, 961, 936, 104, 529, 529, -1000, 529, - -1000, 92, 86, -1000, 82, 529, 529, -1000, 1013, 1040, - 19, -1000, 805, 49, 529, -1000, -1000, -1000, 529, 1040, - 780, -1000, + -1000, -1000, 535, -1000, 535, 535, 535, 535, 535, 535, + 535, 535, 535, 535, 535, -1000, 1071, 0, -1000, -1000, + -1000, 113, 302, 241, 89, 1062, 535, 98, 86, 174, + -39, 2, -1000, -1000, 535, -1000, 921, 71, 71, -1000, + -12, -1000, 49, 48, 535, -1000, -1000, -1000, -1000, 758, + -1000, 160, -1000, 588, 40, 40, 40, 1071, 153, 153, + 561, 201, 219, 67, 79, 79, 43, 43, 43, 131, + -1000, -1000, 0, 654, -1000, -1000, -1000, 39, 535, 0, + 0, 535, -1000, 535, 535, 162, 64, -1000, 535, 162, + -5, 1071, -1000, -1000, 273, 678, 678, 897, -1000, -1000, + -1000, 535, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, 7, -1000, -1000, 535, 0, 5, -1000, -13, + -1000, 46, 41, 535, -1000, -1000, 435, 734, 12, 11, + 1071, -1000, 1071, -39, -1000, -1000, -1000, 1005, -30, -1000, + -1000, 535, -1000, -1000, 77, 71, 77, 16, 867, -1000, + -20, -1000, 1071, -1000, -1000, 0, -1000, 654, 0, 0, + 843, -1000, 703, -1000, 535, 535, 117, 57, -1000, -4, + 162, 1071, 678, 678, -1000, -1000, 40, -1000, -1000, -1000, + -1000, -29, -1000, 986, 975, 101, 535, 535, -1000, 535, + -1000, 71, 77, -1000, 0, 535, 535, -1000, 1040, 1071, + 951, -1000, 813, 172, 535, -1000, -1000, -1000, 535, 1071, + 789, -1000, } -var yyPgo = [...]int{ - 0, 299, 296, 282, 277, 276, 12, 198, 244, 273, - 0, 264, 13, 259, 252, 10, 4, 9, 251, 20, - 241, 240, 233, 227, 217, 8, 1, 2, 7, 214, - 15, 213, 208, 5, 201, 187, 14, 3, +var yyPgo = [...]int16{ + 0, 264, 263, 262, 261, 259, 12, 214, 195, 244, + 0, 241, 13, 240, 234, 10, 4, 9, 233, 20, + 230, 218, 217, 215, 213, 8, 1, 2, 7, 199, + 15, 198, 196, 5, 193, 192, 14, 3, } -var yyR1 = [...]int{ +var yyR1 = [...]int8{ 0, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 33, 33, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, @@ -346,7 +358,7 @@ var yyR1 = [...]int{ 36, 36, 36, 36, 36, 36, 36, 36, } -var yyR2 = [...]int{ +var yyR2 = [...]int8{ 0, 2, 0, 3, 2, 2, 0, 2, 6, 4, 0, 1, 0, 2, 5, 8, 1, 3, 1, 1, 2, 3, 5, 9, 9, 11, 7, 3, 4, 2, @@ -365,39 +377,39 @@ var yyR2 = [...]int{ 1, 1, 1, 1, 1, 1, 1, 1, } -var yyChk = [...]int{ - -1000, -1, -2, 10, -3, -4, -28, 60, -7, -10, - -5, -8, -16, 39, 40, 32, 37, 15, 11, 12, - 13, 52, 41, 24, 17, 18, 19, -34, -35, 25, - 26, -17, 57, 47, 48, 60, 54, 16, 20, 22, - 21, 23, 28, 29, 55, 61, -29, -30, 20, -36, - 28, 7, 8, 10, 11, 12, 13, 14, 15, 16, - 17, 18, 19, 32, 33, 34, 35, 36, 37, 38, - 39, 40, 45, 53, 46, 4, 5, 7, 8, 9, - 47, 48, 49, 50, 51, -7, -10, 14, 24, -19, - 53, 52, 54, -16, -16, -10, -8, -10, 21, 28, - 28, 20, -19, -17, 57, -17, -10, -16, -16, 61, - -24, -25, -37, -17, 57, 20, 21, -36, 59, -10, - 21, -18, 61, 46, 56, 56, 56, -10, -10, -10, +var yyChk = [...]int16{ + -1000, -1, -2, 10, -3, -4, -28, 62, -7, -10, + -5, -8, -16, 38, 39, 31, 36, 15, 11, 12, + 13, 54, 40, 24, 17, 18, 19, -34, -35, 25, + 26, -17, 59, 49, 50, 62, 56, 16, 20, 22, + 21, 23, 27, 28, 57, 63, -29, -30, 20, -36, + 27, 7, 8, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 31, 32, 33, 34, 35, 36, 37, + 38, 39, 47, 55, 48, 4, 5, 7, 8, 9, + 49, 50, 51, 52, 53, -7, -10, 14, 24, -19, + 55, 54, 56, -16, -16, -10, -8, -10, 21, 27, + 27, 20, -19, -17, 59, 
-17, -10, -16, -16, 63, + -24, -25, -37, -17, 59, 20, 21, -36, 61, -10, + 21, -18, 63, 48, 58, 58, 58, -10, -10, -10, -10, -10, -10, -10, -10, -10, -10, -10, -10, -11, - -12, 21, 54, 60, -19, -17, 59, -10, 56, 14, - 14, 33, -23, 38, 45, 14, -6, -28, 56, 57, - -20, -10, 58, 61, 46, 56, 56, -10, 59, 31, - 28, 30, 61, -30, -27, -28, -31, 25, 28, 17, - 18, 19, 54, -27, -27, 45, 6, -13, -12, -14, - -15, -37, -17, 57, 21, 59, 56, -10, -12, -12, - -10, -10, -10, -33, 20, 21, 55, -10, -9, -33, - 58, 55, 61, -25, -26, -16, -26, 58, -10, 59, - -32, -27, -10, -12, 59, 46, 61, 46, 56, 56, - -10, 59, -10, 59, 57, 57, -21, -6, 55, 58, - 55, -10, 45, 56, 58, 59, 46, -12, -15, -12, - -12, 58, 59, -10, -10, -22, 34, 35, 55, 56, - -33, -16, -26, -27, 56, 55, 55, 36, -10, -10, - -10, -12, -10, -10, 33, 55, 58, 58, 55, -10, - -10, 58, + -12, 21, 56, 62, -19, -17, 61, -10, 58, 14, + 14, 32, -23, 37, 47, 14, -6, -28, 58, 59, + -20, -10, 60, 63, 48, 58, 58, -10, 61, 30, + 27, 29, 63, -30, -27, -28, -31, 25, 27, 17, + 18, 19, 56, -27, -27, 47, 6, -13, -12, -14, + -15, -37, -17, 59, 21, 61, 58, -10, -12, -12, + -10, -10, -10, -33, 20, 21, 57, -10, -9, -33, + 60, 57, 63, -25, -26, -16, -26, 60, -10, 61, + -32, -27, -10, -12, 61, 48, 63, 48, 58, 58, + -10, 61, -10, 61, 59, 59, -21, -6, 57, 60, + 57, -10, 47, 58, 60, 61, 48, -12, -15, -12, + -12, 60, 61, -10, -10, -22, 33, 34, 57, 58, + -33, -16, -26, -27, 58, 57, 57, 35, -10, -10, + -10, -12, -10, -10, 32, 57, 60, 60, 57, -10, + -10, 60, } -var yyDef = [...]int{ +var yyDef = [...]int16{ 2, -2, 6, 0, 1, 12, 0, 0, 4, 5, 7, 12, 41, 0, 0, 0, 0, 0, 0, 0, 0, 55, 56, 57, 60, 61, 62, 63, 65, 66, @@ -429,31 +441,31 @@ var yyDef = [...]int{ 0, 25, } -var yyTok1 = [...]int{ +var yyTok1 = [...]int8{ 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 51, 3, 3, - 57, 58, 49, 47, 46, 48, 52, 50, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 56, 55, - 3, 3, 3, 53, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 53, 3, 3, + 59, 60, 51, 49, 48, 50, 54, 52, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 58, 57, + 3, 3, 3, 55, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 54, 3, 59, 3, 3, 3, 3, 3, 3, + 3, 56, 3, 61, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 60, 45, 61, + 3, 3, 3, 62, 47, 63, } -var yyTok2 = [...]int{ +var yyTok2 = [...]int8{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, - 42, 43, 44, + 42, 43, 44, 45, 46, } -var yyTok3 = [...]int{ +var yyTok3 = [...]int8{ 0, } @@ -535,9 +547,9 @@ func yyErrorMessage(state, lookAhead int) string { expected := make([]int, 0, 4) // Look for shiftable tokens. - base := yyPact[state] + base := int(yyPact[state]) for tok := TOKSTART; tok-1 < len(yyToknames); tok++ { - if n := base + tok; n >= 0 && n < yyLast && yyChk[yyAct[n]] == tok { + if n := base + tok; n >= 0 && n < yyLast && int(yyChk[int(yyAct[n])]) == tok { if len(expected) == cap(expected) { return res } @@ -547,13 +559,13 @@ func yyErrorMessage(state, lookAhead int) string { if yyDef[state] == -2 { i := 0 - for yyExca[i] != -1 || yyExca[i+1] != state { + for yyExca[i] != -1 || int(yyExca[i+1]) != state { i += 2 } // Look for tokens that we accept or reduce. 
for i += 2; yyExca[i] >= 0; i += 2 { - tok := yyExca[i] + tok := int(yyExca[i]) if tok < TOKSTART || yyExca[i+1] == 0 { continue } @@ -584,30 +596,30 @@ func yylex1(lex yyLexer, lval *yySymType) (char, token int) { token = 0 char = lex.Lex(lval) if char <= 0 { - token = yyTok1[0] + token = int(yyTok1[0]) goto out } if char < len(yyTok1) { - token = yyTok1[char] + token = int(yyTok1[char]) goto out } if char >= yyPrivate { if char < yyPrivate+len(yyTok2) { - token = yyTok2[char-yyPrivate] + token = int(yyTok2[char-yyPrivate]) goto out } } for i := 0; i < len(yyTok3); i += 2 { - token = yyTok3[i+0] + token = int(yyTok3[i+0]) if token == char { - token = yyTok3[i+1] + token = int(yyTok3[i+1]) goto out } } out: if token == 0 { - token = yyTok2[1] /* unknown char */ + token = int(yyTok2[1]) /* unknown char */ } if yyDebug >= 3 { __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char)) @@ -662,7 +674,7 @@ yystack: yyS[yyp].yys = yystate yynewstate: - yyn = yyPact[yystate] + yyn = int(yyPact[yystate]) if yyn <= yyFlag { goto yydefault /* simple state */ } @@ -673,8 +685,8 @@ yynewstate: if yyn < 0 || yyn >= yyLast { goto yydefault } - yyn = yyAct[yyn] - if yyChk[yyn] == yytoken { /* valid shift */ + yyn = int(yyAct[yyn]) + if int(yyChk[yyn]) == yytoken { /* valid shift */ yyrcvr.char = -1 yytoken = -1 yyVAL = yyrcvr.lval @@ -687,7 +699,7 @@ yynewstate: yydefault: /* default state action */ - yyn = yyDef[yystate] + yyn = int(yyDef[yystate]) if yyn == -2 { if yyrcvr.char < 0 { yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval) @@ -696,18 +708,18 @@ yydefault: /* look through exception table */ xi := 0 for { - if yyExca[xi+0] == -1 && yyExca[xi+1] == yystate { + if yyExca[xi+0] == -1 && int(yyExca[xi+1]) == yystate { break } xi += 2 } for xi += 2; ; xi += 2 { - yyn = yyExca[xi+0] + yyn = int(yyExca[xi+0]) if yyn < 0 || yyn == yytoken { break } } - yyn = yyExca[xi+1] + yyn = int(yyExca[xi+1]) if yyn < 0 { goto ret0 } @@ -729,10 +741,10 @@ yydefault: /* find a state where "error" is a legal shift action */ for yyp >= 0 { - yyn = yyPact[yyS[yyp].yys] + yyErrCode + yyn = int(yyPact[yyS[yyp].yys]) + yyErrCode if yyn >= 0 && yyn < yyLast { - yystate = yyAct[yyn] /* simulate a shift of "error" */ - if yyChk[yystate] == yyErrCode { + yystate = int(yyAct[yyn]) /* simulate a shift of "error" */ + if int(yyChk[yystate]) == yyErrCode { goto yystack } } @@ -768,7 +780,7 @@ yydefault: yypt := yyp _ = yypt // guard against "declared and not used" - yyp -= yyR2[yyn] + yyp -= int(yyR2[yyn]) // yyp is now the index of $0. Perform the default action. Iff the // reduced production is ε, $1 is possibly out of range. 
if yyp+1 >= len(yyS) { @@ -779,16 +791,16 @@ yydefault: yyVAL = yyS[yyp+1] /* consult goto table to find next state */ - yyn = yyR1[yyn] - yyg := yyPgo[yyn] + yyn = int(yyR1[yyn]) + yyg := int(yyPgo[yyn]) yyj := yyg + yyS[yyp].yys + 1 if yyj >= yyLast { - yystate = yyAct[yyg] + yystate = int(yyAct[yyg]) } else { - yystate = yyAct[yyj] - if yyChk[yystate] != -yyn { - yystate = yyAct[yyg] + yystate = int(yyAct[yyj]) + if int(yyChk[yystate]) != -yyn { + yystate = int(yyAct[yyg]) } } // dummy call; replaced with literal code @@ -796,7 +808,7 @@ yydefault: case 1: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:67 +//line parser.go.y:73 { if yyDollar[1].value != nil { yyDollar[2].value.(*Query).Meta = yyDollar[1].value.(*ConstObject) @@ -805,25 +817,25 @@ yydefault: } case 2: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:74 +//line parser.go.y:80 { yyVAL.value = nil } case 3: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:78 +//line parser.go.y:84 { yyVAL.value = yyDollar[2].value } case 4: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:84 +//line parser.go.y:90 { yyVAL.value = &Query{Imports: yyDollar[1].value.([]*Import), FuncDefs: reverseFuncDef(yyDollar[2].value.([]*FuncDef)), Term: &Term{Type: TermTypeIdentity}} } case 5: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:88 +//line parser.go.y:94 { if yyDollar[1].value != nil { yyDollar[2].value.(*Query).Imports = yyDollar[1].value.([]*Import) @@ -832,144 +844,144 @@ yydefault: } case 6: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:95 +//line parser.go.y:101 { yyVAL.value = []*Import(nil) } case 7: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:99 +//line parser.go.y:105 { yyVAL.value = append(yyDollar[1].value.([]*Import), yyDollar[2].value.(*Import)) } case 8: yyDollar = yyS[yypt-6 : yypt+1] -//line parser.go.y:105 +//line parser.go.y:111 { yyVAL.value = &Import{ImportPath: yyDollar[2].token, ImportAlias: yyDollar[4].token, Meta: yyDollar[5].value.(*ConstObject)} } case 9: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:109 +//line parser.go.y:115 { yyVAL.value = &Import{IncludePath: yyDollar[2].token, Meta: yyDollar[3].value.(*ConstObject)} } case 10: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:115 +//line parser.go.y:121 { yyVAL.value = (*ConstObject)(nil) } case 11: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:118 +//line parser.go.y:124 { } case 12: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:122 +//line parser.go.y:128 { yyVAL.value = []*FuncDef(nil) } case 13: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:126 +//line parser.go.y:132 { yyVAL.value = append(yyDollar[2].value.([]*FuncDef), yyDollar[1].value.(*FuncDef)) } case 14: yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:132 +//line parser.go.y:138 { yyVAL.value = &FuncDef{Name: yyDollar[2].token, Body: yyDollar[4].value.(*Query)} } case 15: yyDollar = yyS[yypt-8 : yypt+1] -//line parser.go.y:136 +//line parser.go.y:142 { yyVAL.value = &FuncDef{yyDollar[2].token, yyDollar[4].value.([]string), yyDollar[7].value.(*Query)} } case 16: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:142 +//line parser.go.y:148 { yyVAL.value = []string{yyDollar[1].token} } case 17: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:146 +//line parser.go.y:152 { yyVAL.value = append(yyDollar[1].value.([]string), yyDollar[3].token) } case 18: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:151 +//line parser.go.y:157 { } case 19: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:152 +//line parser.go.y:158 { } case 20: 
yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:156 +//line parser.go.y:162 { yyDollar[2].value.(*Query).FuncDefs = prependFuncDef(yyDollar[2].value.(*Query).FuncDefs, yyDollar[1].value.(*FuncDef)) yyVAL.value = yyDollar[2].value } case 21: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:161 +//line parser.go.y:167 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpPipe, Right: yyDollar[3].value.(*Query)} } case 22: yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:165 +//line parser.go.y:171 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Bind: &Bind{yyDollar[3].value.([]*Pattern), yyDollar[5].value.(*Query)}}) yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)} } case 23: yyDollar = yyS[yypt-9 : yypt+1] -//line parser.go.y:170 +//line parser.go.y:176 { yyVAL.value = &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query)}}} } case 24: yyDollar = yyS[yypt-9 : yypt+1] -//line parser.go.y:174 +//line parser.go.y:180 { yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), nil}}} } case 25: yyDollar = yyS[yypt-11 : yypt+1] -//line parser.go.y:178 +//line parser.go.y:184 { yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), yyDollar[10].value.(*Query)}}} } case 26: yyDollar = yyS[yypt-7 : yypt+1] -//line parser.go.y:182 +//line parser.go.y:188 { yyVAL.value = &Query{Term: &Term{Type: TermTypeIf, If: &If{yyDollar[2].value.(*Query), yyDollar[4].value.(*Query), yyDollar[5].value.([]*IfElif), yyDollar[6].value.(*Query)}}} } case 27: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:186 +//line parser.go.y:192 { yyVAL.value = &Query{Term: &Term{Type: TermTypeTry, Try: &Try{yyDollar[2].value.(*Query), yyDollar[3].value.(*Query)}}} } case 28: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:190 +//line parser.go.y:196 { yyVAL.value = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{yyDollar[2].token, yyDollar[4].value.(*Query)}}} } case 29: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:194 +//line parser.go.y:200 { if t := yyDollar[1].value.(*Query).Term; t != nil { t.SuffixList = append(t.SuffixList, &Suffix{Optional: true}) @@ -979,175 +991,175 @@ yydefault: } case 30: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:202 +//line parser.go.y:208 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpComma, Right: yyDollar[3].value.(*Query)} } case 31: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:206 +//line parser.go.y:212 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)} } case 32: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:210 +//line parser.go.y:216 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)} } case 33: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:214 +//line parser.go.y:220 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpOr, Right: yyDollar[3].value.(*Query)} } case 34: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:218 +//line parser.go.y:224 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAnd, Right: yyDollar[3].value.(*Query)} 
} case 35: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:222 +//line parser.go.y:228 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)} } case 36: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:226 +//line parser.go.y:232 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAdd, Right: yyDollar[3].value.(*Query)} } case 37: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:230 +//line parser.go.y:236 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpSub, Right: yyDollar[3].value.(*Query)} } case 38: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:234 +//line parser.go.y:240 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMul, Right: yyDollar[3].value.(*Query)} } case 39: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:238 +//line parser.go.y:244 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpDiv, Right: yyDollar[3].value.(*Query)} } case 40: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:242 +//line parser.go.y:248 { yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMod, Right: yyDollar[3].value.(*Query)} } case 41: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:246 +//line parser.go.y:252 { yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)} } case 42: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:252 +//line parser.go.y:258 { yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)} } case 43: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:256 +//line parser.go.y:262 { yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern)) } case 44: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:262 +//line parser.go.y:268 { yyVAL.value = &Pattern{Name: yyDollar[1].token} } case 45: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:266 +//line parser.go.y:272 { yyVAL.value = &Pattern{Array: yyDollar[2].value.([]*Pattern)} } case 46: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:270 +//line parser.go.y:276 { yyVAL.value = &Pattern{Object: yyDollar[2].value.([]*PatternObject)} } case 47: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:276 +//line parser.go.y:282 { yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)} } case 48: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:280 +//line parser.go.y:286 { yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern)) } case 49: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:286 +//line parser.go.y:292 { yyVAL.value = []*PatternObject{yyDollar[1].value.(*PatternObject)} } case 50: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:290 +//line parser.go.y:296 { yyVAL.value = append(yyDollar[1].value.([]*PatternObject), yyDollar[3].value.(*PatternObject)) } case 51: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:296 +//line parser.go.y:302 { yyVAL.value = &PatternObject{Key: yyDollar[1].token, Val: yyDollar[3].value.(*Pattern)} } case 52: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:300 +//line parser.go.y:306 { yyVAL.value = &PatternObject{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*Pattern)} } case 53: yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:304 +//line parser.go.y:310 { yyVAL.value = &PatternObject{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*Pattern)} } case 54: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:308 +//line parser.go.y:314 { - yyVAL.value = &PatternObject{KeyOnly: yyDollar[1].token} + yyVAL.value = &PatternObject{Key: 
yyDollar[1].token} } case 55: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:314 +//line parser.go.y:320 { yyVAL.value = &Term{Type: TermTypeIdentity} } case 56: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:318 +//line parser.go.y:324 { yyVAL.value = &Term{Type: TermTypeRecurse} } case 57: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:322 +//line parser.go.y:328 { yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Name: yyDollar[1].token}} } case 58: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:326 +//line parser.go.y:332 { if yyDollar[2].value.(*Suffix).Iter { yyVAL.value = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{yyDollar[2].value.(*Suffix)}} @@ -1157,569 +1169,569 @@ yydefault: } case 59: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:334 +//line parser.go.y:340 { yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Str: yyDollar[2].value.(*String)}} } case 60: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:338 +//line parser.go.y:344 { yyVAL.value = &Term{Type: TermTypeNull} } case 61: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:342 +//line parser.go.y:348 { yyVAL.value = &Term{Type: TermTypeTrue} } case 62: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:346 +//line parser.go.y:352 { yyVAL.value = &Term{Type: TermTypeFalse} } case 63: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:350 +//line parser.go.y:356 { yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}} } case 64: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:354 +//line parser.go.y:360 { yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token, Args: yyDollar[3].value.([]*Query)}} } case 65: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:358 +//line parser.go.y:364 { yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}} } case 66: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:362 +//line parser.go.y:368 { yyVAL.value = &Term{Type: TermTypeNumber, Number: yyDollar[1].token} } case 67: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:366 +//line parser.go.y:372 { yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token} } case 68: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:370 +//line parser.go.y:376 { yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token, Str: yyDollar[2].value.(*String)} } case 69: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:374 +//line parser.go.y:380 { yyVAL.value = &Term{Type: TermTypeString, Str: yyDollar[1].value.(*String)} } case 70: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:378 +//line parser.go.y:384 { yyVAL.value = &Term{Type: TermTypeQuery, Query: yyDollar[2].value.(*Query)} } case 71: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:382 +//line parser.go.y:388 { yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, yyDollar[2].value.(*Term)}} } case 72: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:386 +//line parser.go.y:392 { yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, yyDollar[2].value.(*Term)}} } case 73: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:390 +//line parser.go.y:396 { yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{}} } case 74: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:394 +//line parser.go.y:400 { yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}} } case 75: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:398 +//line parser.go.y:404 { yyVAL.value = &Term{Type: 
TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}} } case 76: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:402 +//line parser.go.y:408 { yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{}} } case 77: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:406 +//line parser.go.y:412 { yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{yyDollar[2].value.(*Query)}} } case 78: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:410 +//line parser.go.y:416 { yyVAL.value = &Term{Type: TermTypeBreak, Break: yyDollar[2].token} } case 79: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:414 +//line parser.go.y:420 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Name: yyDollar[2].token}}) } case 80: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:418 +//line parser.go.y:424 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[2].value.(*Suffix)) } case 81: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:422 +//line parser.go.y:428 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Optional: true}) } case 82: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:426 +//line parser.go.y:432 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[3].value.(*Suffix)) } case 83: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:430 +//line parser.go.y:436 { yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Str: yyDollar[3].value.(*String)}}) } case 84: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:436 +//line parser.go.y:442 { yyVAL.value = &String{Str: yyDollar[1].token} } case 85: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:440 +//line parser.go.y:446 { yyVAL.value = &String{Queries: yyDollar[2].value.([]*Query)} } case 86: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:446 +//line parser.go.y:452 { yyVAL.value = []*Query{} } case 87: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:450 +//line parser.go.y:456 { yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: yyDollar[2].token}}}) } case 88: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:454 +//line parser.go.y:460 { yylex.(*lexer).inString = true yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeQuery, Query: yyDollar[3].value.(*Query)}}) } case 89: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:460 +//line parser.go.y:466 { } case 90: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:461 +//line parser.go.y:467 { } case 91: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:464 +//line parser.go.y:470 { } case 92: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:465 +//line parser.go.y:471 { } case 93: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:469 +//line parser.go.y:475 { yyVAL.value = &Suffix{Iter: true} } case 94: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:473 +//line parser.go.y:479 { yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query)}} } case 95: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:477 +//line parser.go.y:483 { yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), IsSlice: true}} } case 96: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:481 +//line parser.go.y:487 { yyVAL.value = &Suffix{Index: &Index{End: yyDollar[3].value.(*Query), IsSlice: true}} } case 97: 
yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:485 +//line parser.go.y:491 { yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), End: yyDollar[4].value.(*Query), IsSlice: true}} } case 98: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:491 +//line parser.go.y:497 { yyVAL.value = []*Query{yyDollar[1].value.(*Query)} } case 99: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:495 +//line parser.go.y:501 { yyVAL.value = append(yyDollar[1].value.([]*Query), yyDollar[3].value.(*Query)) } case 100: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:501 +//line parser.go.y:507 { yyVAL.value = []*IfElif(nil) } case 101: yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:505 +//line parser.go.y:511 { yyVAL.value = append(yyDollar[1].value.([]*IfElif), &IfElif{yyDollar[3].value.(*Query), yyDollar[5].value.(*Query)}) } case 102: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:511 +//line parser.go.y:517 { yyVAL.value = (*Query)(nil) } case 103: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:515 +//line parser.go.y:521 { yyVAL.value = yyDollar[2].value } case 104: yyDollar = yyS[yypt-0 : yypt+1] -//line parser.go.y:521 +//line parser.go.y:527 { yyVAL.value = (*Query)(nil) } case 105: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:525 +//line parser.go.y:531 { yyVAL.value = yyDollar[2].value } case 106: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:531 +//line parser.go.y:537 { yyVAL.value = []*ObjectKeyVal{yyDollar[1].value.(*ObjectKeyVal)} } case 107: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:535 +//line parser.go.y:541 { yyVAL.value = append(yyDollar[1].value.([]*ObjectKeyVal), yyDollar[3].value.(*ObjectKeyVal)) } case 108: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:541 +//line parser.go.y:547 { yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ObjectVal)} } case 109: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:545 +//line parser.go.y:551 { yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*ObjectVal)} } case 110: yyDollar = yyS[yypt-5 : yypt+1] -//line parser.go.y:549 +//line parser.go.y:555 { yyVAL.value = &ObjectKeyVal{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*ObjectVal)} } case 111: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:553 +//line parser.go.y:559 { - yyVAL.value = &ObjectKeyVal{KeyOnly: yyDollar[1].token} + yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token} } case 112: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:557 +//line parser.go.y:563 { - yyVAL.value = &ObjectKeyVal{KeyOnlyString: yyDollar[1].value.(*String)} + yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String)} } case 113: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:562 +//line parser.go.y:568 { } case 114: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:563 +//line parser.go.y:569 { } case 115: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:564 +//line parser.go.y:570 { } case 116: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:568 +//line parser.go.y:574 { yyVAL.value = &ObjectVal{[]*Query{{Term: yyDollar[1].value.(*Term)}}} } case 117: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:572 +//line parser.go.y:578 { yyVAL.value = &ObjectVal{append(yyDollar[1].value.(*ObjectVal).Queries, &Query{Term: yyDollar[3].value.(*Term)})} } case 118: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:578 +//line parser.go.y:584 { yyVAL.value = &ConstTerm{Object: yyDollar[1].value.(*ConstObject)} } case 119: yyDollar = 
yyS[yypt-1 : yypt+1] -//line parser.go.y:582 +//line parser.go.y:588 { yyVAL.value = &ConstTerm{Array: yyDollar[1].value.(*ConstArray)} } case 120: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:586 +//line parser.go.y:592 { yyVAL.value = &ConstTerm{Number: yyDollar[1].token} } case 121: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:590 +//line parser.go.y:596 { yyVAL.value = &ConstTerm{Str: yyDollar[1].token} } case 122: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:594 +//line parser.go.y:600 { yyVAL.value = &ConstTerm{Null: true} } case 123: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:598 +//line parser.go.y:604 { yyVAL.value = &ConstTerm{True: true} } case 124: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:602 +//line parser.go.y:608 { yyVAL.value = &ConstTerm{False: true} } case 125: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:608 +//line parser.go.y:614 { yyVAL.value = &ConstObject{} } case 126: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:612 +//line parser.go.y:618 { yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)} } case 127: yyDollar = yyS[yypt-4 : yypt+1] -//line parser.go.y:616 +//line parser.go.y:622 { yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)} } case 128: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:622 +//line parser.go.y:628 { yyVAL.value = []*ConstObjectKeyVal{yyDollar[1].value.(*ConstObjectKeyVal)} } case 129: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:626 +//line parser.go.y:632 { yyVAL.value = append(yyDollar[1].value.([]*ConstObjectKeyVal), yyDollar[3].value.(*ConstObjectKeyVal)) } case 130: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:632 +//line parser.go.y:638 { yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)} } case 131: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:636 +//line parser.go.y:642 { yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)} } case 132: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:640 +//line parser.go.y:646 { yyVAL.value = &ConstObjectKeyVal{KeyString: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)} } case 133: yyDollar = yyS[yypt-2 : yypt+1] -//line parser.go.y:646 +//line parser.go.y:652 { yyVAL.value = &ConstArray{} } case 134: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:650 +//line parser.go.y:656 { yyVAL.value = &ConstArray{yyDollar[2].value.([]*ConstTerm)} } case 135: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:656 +//line parser.go.y:662 { yyVAL.value = []*ConstTerm{yyDollar[1].value.(*ConstTerm)} } case 136: yyDollar = yyS[yypt-3 : yypt+1] -//line parser.go.y:660 +//line parser.go.y:666 { yyVAL.value = append(yyDollar[1].value.([]*ConstTerm), yyDollar[3].value.(*ConstTerm)) } case 137: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:665 +//line parser.go.y:671 { } case 138: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:666 +//line parser.go.y:672 { } case 139: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:667 +//line parser.go.y:673 { } case 140: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:668 +//line parser.go.y:674 { } case 141: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:669 +//line parser.go.y:675 { } case 142: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:670 +//line parser.go.y:676 { } case 143: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:671 +//line parser.go.y:677 { } case 144: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:672 +//line 
parser.go.y:678 { } case 145: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:673 +//line parser.go.y:679 { } case 146: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:674 +//line parser.go.y:680 { } case 147: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:675 +//line parser.go.y:681 { } case 148: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:676 +//line parser.go.y:682 { } case 149: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:677 +//line parser.go.y:683 { } case 150: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:678 +//line parser.go.y:684 { } case 151: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:679 +//line parser.go.y:685 { } case 152: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:680 +//line parser.go.y:686 { } case 153: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:681 +//line parser.go.y:687 { } case 154: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:682 +//line parser.go.y:688 { } case 155: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:683 +//line parser.go.y:689 { } case 156: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:684 +//line parser.go.y:690 { } case 157: yyDollar = yyS[yypt-1 : yypt+1] -//line parser.go.y:685 +//line parser.go.y:691 { } } diff --git a/vendor/github.com/itchyny/gojq/parser.go.y b/vendor/github.com/itchyny/gojq/parser.go.y index 5e2bc081..380c3cf6 100644 --- a/vendor/github.com/itchyny/gojq/parser.go.y +++ b/vendor/github.com/itchyny/gojq/parser.go.y @@ -1,7 +1,12 @@ %{ package gojq -// Parse parses a query. +// Parse a query string, and returns the query struct. +// +// If parsing failed, the returned error has the method Token() (string, int), +// which reports the invalid token and the byte offset in the query string. The +// token is empty if the error occurred after scanning the entire query string. +// The byte offset is the scanned bytes when the error occurred. func Parse(src string) (*Query, error) { l := newLexer(src) if yyParse(l) > 0 { @@ -26,7 +31,7 @@ func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef { %} %union { - value interface{} + value any token string operator Operator } @@ -41,11 +46,12 @@ func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef { %token tokModule tokImport tokInclude tokDef tokAs tokLabel tokBreak %token tokNull tokTrue tokFalse %token tokIdent tokVariable tokModuleIdent tokModuleVariable -%token tokIndex tokNumber tokFormat tokInvalid +%token tokIndex tokNumber tokFormat %token tokString tokStringStart tokStringQuery tokStringEnd %token tokIf tokThen tokElif tokElse tokEnd %token tokTry tokCatch tokReduce tokForeach %token tokRecurse tokFuncDefPost tokTermPost tokEmptyCatch +%token tokInvalid tokInvalidEscapeSequence tokUnterminatedString %nonassoc tokFuncDefPost tokTermPost %right '|' @@ -306,7 +312,7 @@ objectpattern } | tokVariable { - $$ = &PatternObject{KeyOnly: $1} + $$ = &PatternObject{Key: $1} } term @@ -551,11 +557,11 @@ objectkeyval } | objectkey { - $$ = &ObjectKeyVal{KeyOnly: $1} + $$ = &ObjectKeyVal{Key: $1} } | string { - $$ = &ObjectKeyVal{KeyOnlyString: $1.(*String)} + $$ = &ObjectKeyVal{KeyString: $1.(*String)} } objectkey diff --git a/vendor/github.com/itchyny/gojq/preview.go b/vendor/github.com/itchyny/gojq/preview.go new file mode 100644 index 00000000..e082eb56 --- /dev/null +++ b/vendor/github.com/itchyny/gojq/preview.go @@ -0,0 +1,77 @@ +package gojq + +import "unicode/utf8" + +// Preview returns the preview string of v. 
The preview string is basically the +// same as the jq-flavored JSON encoding returned by [Marshal], but is truncated +// by 30 bytes, and more efficient than truncating the result of [Marshal]. +// +// This method is used by error messages of built-in operators and functions, +// and accepts only limited types (nil, bool, int, float64, *big.Int, string, +// []any, and map[string]any). Note that the maximum width and trailing strings +// on truncation may be changed in the future. +func Preview(v any) string { + bs := jsonLimitedMarshal(v, 32) + if l := 30; len(bs) > l { + var trailing string + switch v.(type) { + case string: + trailing = ` ..."` + case []any: + trailing = " ...]" + case map[string]any: + trailing = " ...}" + default: + trailing = " ..." + } + for len(bs) > l-len(trailing) { + _, size := utf8.DecodeLastRune(bs) + bs = bs[:len(bs)-size] + } + bs = append(bs, trailing...) + } + return string(bs) +} + +func jsonLimitedMarshal(v any, n int) (bs []byte) { + w := &limitedWriter{buf: make([]byte, n)} + defer func() { + _ = recover() + bs = w.Bytes() + }() + (&encoder{w: w}).encode(v) + return +} + +type limitedWriter struct { + buf []byte + off int +} + +func (w *limitedWriter) Write(bs []byte) (int, error) { + n := copy(w.buf[w.off:], bs) + if w.off += n; w.off == len(w.buf) { + panic(struct{}{}) + } + return n, nil +} + +func (w *limitedWriter) WriteByte(b byte) error { + w.buf[w.off] = b + if w.off++; w.off == len(w.buf) { + panic(struct{}{}) + } + return nil +} + +func (w *limitedWriter) WriteString(s string) (int, error) { + n := copy(w.buf[w.off:], s) + if w.off += n; w.off == len(w.buf) { + panic(struct{}{}) + } + return n, nil +} + +func (w *limitedWriter) Bytes() []byte { + return w.buf[:w.off] +} diff --git a/vendor/github.com/itchyny/gojq/query.go b/vendor/github.com/itchyny/gojq/query.go index 98b65027..5f20b4ff 100644 --- a/vendor/github.com/itchyny/gojq/query.go +++ b/vendor/github.com/itchyny/gojq/query.go @@ -2,8 +2,6 @@ package gojq import ( "context" - "encoding/json" - "strconv" "strings" ) @@ -21,13 +19,14 @@ type Query struct { // Run the query. // -// It is safe to call this method of a *Query in multiple goroutines. -func (e *Query) Run(v interface{}) Iter { +// It is safe to call this method in goroutines, to reuse a parsed [*Query]. +// But for arguments, do not give values sharing same data between goroutines. +func (e *Query) Run(v any) Iter { return e.RunWithContext(context.Background(), v) } // RunWithContext runs the query with context. -func (e *Query) RunWithContext(ctx context.Context, v interface{}) Iter { +func (e *Query) RunWithContext(ctx context.Context, v any) Iter { code, err := Compile(e) if err != nil { return NewIter(err) @@ -93,11 +92,18 @@ func (e *Query) minify() { } } -func (e *Query) toIndices() []interface{} { - if e.FuncDefs != nil || e.Right != nil || e.Term == nil { +func (e *Query) toIndexKey() any { + if e.Term == nil { return nil } - return e.Term.toIndices() + return e.Term.toIndexKey() +} + +func (e *Query) toIndices(xs []any) []any { + if e.Term == nil { + return nil + } + return e.Term.toIndices(xs) } // Import ... 
@@ -117,12 +123,12 @@ func (e *Import) String() string { func (e *Import) writeTo(s *strings.Builder) { if e.ImportPath != "" { s.WriteString("import ") - s.WriteString(strconv.Quote(e.ImportPath)) + jsonEncodeString(s, e.ImportPath) s.WriteString(" as ") s.WriteString(e.ImportAlias) } else { s.WriteString("include ") - s.WriteString(strconv.Quote(e.IncludePath)) + jsonEncodeString(s, e.IncludePath) } if e.Meta != nil { s.WriteByte(' ') @@ -308,25 +314,48 @@ func (e *Term) toFunc() string { } } -func (e *Term) toIndices() []interface{} { - if e.Index != nil { - xs := e.Index.toIndices() - if xs == nil { +func (e *Term) toIndexKey() any { + switch e.Type { + case TermTypeNumber: + return toNumber(e.Number) + case TermTypeUnary: + return e.Unary.toNumber() + case TermTypeString: + if e.Str.Queries == nil { + return e.Str.Str + } + return nil + default: + return nil + } +} + +func (e *Term) toIndices(xs []any) []any { + switch e.Type { + case TermTypeIndex: + if xs = e.Index.toIndices(xs); xs == nil { return nil } - for _, s := range e.SuffixList { - x := s.toIndices() - if x == nil { - return nil - } - xs = append(xs, x...) + case TermTypeQuery: + if xs = e.Query.toIndices(xs); xs == nil { + return nil } - return xs - } else if e.Query != nil && len(e.SuffixList) == 0 { - return e.Query.toIndices() - } else { + default: return nil } + for _, s := range e.SuffixList { + if xs = s.toIndices(xs); xs == nil { + return nil + } + } + return xs +} + +func (e *Term) toNumber() any { + if e.Type == TermTypeNumber { + return toNumber(e.Number) + } + return nil } // Unary ... @@ -350,6 +379,14 @@ func (e *Unary) minify() { e.Term.minify() } +func (e *Unary) toNumber() any { + v := e.Term.toNumber() + if v != nil && e.Op == OpSub { + v = funcOpNegate(v) + } + return v +} + // Pattern ... type Pattern struct { Name string @@ -393,7 +430,6 @@ type PatternObject struct { KeyString *String KeyQuery *Query Val *Pattern - KeyOnly string } func (e *PatternObject) String() string { @@ -416,9 +452,6 @@ func (e *PatternObject) writeTo(s *strings.Builder) { s.WriteString(": ") e.Val.writeTo(s) } - if e.KeyOnly != "" { - s.WriteString(e.KeyOnly) - } } // Index ... 
@@ -450,24 +483,22 @@ func (e *Index) writeTo(s *strings.Builder) { func (e *Index) writeSuffixTo(s *strings.Builder) { if e.Name != "" { s.WriteString(e.Name) + } else if e.Str != nil { + e.Str.writeTo(s) } else { - if e.Str != nil { - e.Str.writeTo(s) - } else { - s.WriteByte('[') - if e.IsSlice { - if e.Start != nil { - e.Start.writeTo(s) - } - s.WriteByte(':') - if e.End != nil { - e.End.writeTo(s) - } - } else { + s.WriteByte('[') + if e.IsSlice { + if e.Start != nil { e.Start.writeTo(s) } - s.WriteByte(']') + s.WriteByte(':') + if e.End != nil { + e.End.writeTo(s) + } + } else { + e.Start.writeTo(s) } + s.WriteByte(']') } } @@ -483,11 +514,38 @@ func (e *Index) minify() { } } -func (e *Index) toIndices() []interface{} { - if e.Name == "" { - return nil +func (e *Index) toIndexKey() any { + if e.Name != "" { + return e.Name + } else if e.Str != nil { + if e.Str.Queries == nil { + return e.Str.Str + } + } else if !e.IsSlice { + return e.Start.toIndexKey() + } else { + var start, end any + ok := true + if e.Start != nil { + start = e.Start.toIndexKey() + ok = start != nil + } + if e.End != nil && ok { + end = e.End.toIndexKey() + ok = end != nil + } + if ok { + return map[string]any{"start": start, "end": end} + } + } + return nil +} + +func (e *Index) toIndices(xs []any) []any { + if k := e.toIndexKey(); k != nil { + return append(xs, k) } - return []interface{}{e.Name} + return nil } // Func ... @@ -543,7 +601,7 @@ func (e *String) String() string { func (e *String) writeTo(s *strings.Builder) { if e.Queries == nil { - s.WriteString(strconv.Quote(e.Str)) + jsonEncodeString(s, e.Str) return } s.WriteByte('"') @@ -599,12 +657,10 @@ func (e *Object) minify() { // ObjectKeyVal ... type ObjectKeyVal struct { - Key string - KeyString *String - KeyQuery *Query - Val *ObjectVal - KeyOnly string - KeyOnlyString *String + Key string + KeyString *String + KeyQuery *Query + Val *ObjectVal } func (e *ObjectKeyVal) String() string { @@ -627,11 +683,6 @@ func (e *ObjectKeyVal) writeTo(s *strings.Builder) { s.WriteString(": ") e.Val.writeTo(s) } - if e.KeyOnly != "" { - s.WriteString(e.KeyOnly) - } else if e.KeyOnlyString != nil { - e.KeyOnlyString.writeTo(s) - } } func (e *ObjectKeyVal) minify() { @@ -643,9 +694,6 @@ func (e *ObjectKeyVal) minify() { if e.Val != nil { e.Val.minify() } - if e.KeyOnlyString != nil { - e.KeyOnlyString.minify() - } } // ObjectVal ... @@ -737,21 +785,21 @@ func (e *Suffix) minify() { } } -func (e *Suffix) toTerm() (*Term, bool) { +func (e *Suffix) toTerm() *Term { if e.Index != nil { - return &Term{Type: TermTypeIndex, Index: e.Index}, true + return &Term{Type: TermTypeIndex, Index: e.Index} } else if e.Iter { - return &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}, true + return &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}} } else { - return nil, false + return nil } } -func (e *Suffix) toIndices() []interface{} { +func (e *Suffix) toIndices(xs []any) []any { if e.Index == nil { return nil } - return e.Index.toIndices() + return e.Index.toIndices(xs) } // Bind ... 
@@ -1005,17 +1053,17 @@ func (e *ConstTerm) writeTo(s *strings.Builder) { } else if e.False { s.WriteString("false") } else { - s.WriteString(strconv.Quote(e.Str)) + jsonEncodeString(s, e.Str) } } -func (e *ConstTerm) toValue() interface{} { +func (e *ConstTerm) toValue() any { if e.Object != nil { return e.Object.ToValue() } else if e.Array != nil { return e.Array.toValue() } else if e.Number != "" { - return normalizeNumber(json.Number(e.Number)) + return toNumber(e.Number) } else if e.Null { return nil } else if e.True { @@ -1053,12 +1101,12 @@ func (e *ConstObject) writeTo(s *strings.Builder) { s.WriteString(" }") } -// ToValue converts the object to map[string]interface{}. -func (e *ConstObject) ToValue() map[string]interface{} { +// ToValue converts the object to map[string]any. +func (e *ConstObject) ToValue() map[string]any { if e == nil { return nil } - v := make(map[string]interface{}, len(e.KeyVals)) + v := make(map[string]any, len(e.KeyVals)) for _, e := range e.KeyVals { key := e.Key if key == "" { @@ -1114,8 +1162,8 @@ func (e *ConstArray) writeTo(s *strings.Builder) { s.WriteByte(']') } -func (e *ConstArray) toValue() []interface{} { - v := make([]interface{}, len(e.Elems)) +func (e *ConstArray) toValue() []any { + v := make([]any, len(e.Elems)) for i, e := range e.Elems { v[i] = e.toValue() } diff --git a/vendor/github.com/itchyny/gojq/release.go b/vendor/github.com/itchyny/gojq/release.go index 196fb772..c34dfb45 100644 --- a/vendor/github.com/itchyny/gojq/release.go +++ b/vendor/github.com/itchyny/gojq/release.go @@ -1,11 +1,11 @@ -//go:build !debug -// +build !debug +//go:build !gojq_debug +// +build !gojq_debug package gojq type codeinfo struct{} -func (c *compiler) appendCodeInfo(interface{}) {} +func (c *compiler) appendCodeInfo(any) {} func (c *compiler) deleteCodeInfo(string) {} diff --git a/vendor/github.com/itchyny/gojq/scope_stack.go b/vendor/github.com/itchyny/gojq/scope_stack.go index 82d620be..e140ca15 100644 --- a/vendor/github.com/itchyny/gojq/scope_stack.go +++ b/vendor/github.com/itchyny/gojq/scope_stack.go @@ -39,11 +39,12 @@ func (s *scopeStack) empty() bool { return s.index < 0 } -func (s *scopeStack) save(index, limit *int) { - *index, *limit = s.index, s.limit +func (s *scopeStack) save() (index, limit int) { + index, limit = s.index, s.limit if s.index > s.limit { s.limit = s.index } + return } func (s *scopeStack) restore(index, limit int) { diff --git a/vendor/github.com/itchyny/gojq/stack.go b/vendor/github.com/itchyny/gojq/stack.go index f629d28e..a0e265c8 100644 --- a/vendor/github.com/itchyny/gojq/stack.go +++ b/vendor/github.com/itchyny/gojq/stack.go @@ -7,7 +7,7 @@ type stack struct { } type block struct { - value interface{} + value any next int } @@ -15,7 +15,7 @@ func newStack() *stack { return &stack{index: -1, limit: -1} } -func (s *stack) push(v interface{}) { +func (s *stack) push(v any) { b := block{v, s.index} i := s.index + 1 if i <= s.limit { @@ -29,13 +29,13 @@ func (s *stack) push(v interface{}) { } } -func (s *stack) pop() interface{} { +func (s *stack) pop() any { b := s.data[s.index] s.index = b.next return b.value } -func (s *stack) top() interface{} { +func (s *stack) top() any { return s.data[s.index].value } @@ -43,11 +43,12 @@ func (s *stack) empty() bool { return s.index < 0 } -func (s *stack) save(index, limit *int) { - *index, *limit = s.index, s.limit +func (s *stack) save() (index, limit int) { + index, limit = s.index, s.limit if s.index > s.limit { s.limit = s.index } + return } func (s *stack) restore(index, limit 
int) { diff --git a/vendor/github.com/itchyny/gojq/term_type.go b/vendor/github.com/itchyny/gojq/term_type.go index 1670948c..941e7ba9 100644 --- a/vendor/github.com/itchyny/gojq/term_type.go +++ b/vendor/github.com/itchyny/gojq/term_type.go @@ -1,6 +1,6 @@ package gojq -// TermType represents the type of Term. +// TermType represents the type of [Term]. type TermType int // TermType list. @@ -27,7 +27,7 @@ const ( TermTypeQuery ) -// GoString implements GoStringer. +// GoString implements [fmt.GoStringer]. func (termType TermType) GoString() (str string) { defer func() { str = "gojq." + str }() switch termType { diff --git a/vendor/github.com/itchyny/gojq/type.go b/vendor/github.com/itchyny/gojq/type.go new file mode 100644 index 00000000..bb388e20 --- /dev/null +++ b/vendor/github.com/itchyny/gojq/type.go @@ -0,0 +1,29 @@ +package gojq + +import ( + "fmt" + "math/big" +) + +// TypeOf returns the jq-flavored type name of v. +// +// This method is used by built-in type/0 function, and accepts only limited +// types (nil, bool, int, float64, *big.Int, string, []any, and map[string]any). +func TypeOf(v any) string { + switch v.(type) { + case nil: + return "null" + case bool: + return "boolean" + case int, float64, *big.Int: + return "number" + case string: + return "string" + case []any: + return "array" + case map[string]any: + return "object" + default: + panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v)) + } +} diff --git a/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md index 52b37d05..61a4e9dc 100644 --- a/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md +++ b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md @@ -1,4 +1,11 @@ # Changelog +## [v0.1.5](https://github.com/itchyny/timefmt-go/compare/v0.1.4..v0.1.5) (2022-12-01) +* support parsing time zone offset with name using both `%z` and `%Z` + +## [v0.1.4](https://github.com/itchyny/timefmt-go/compare/v0.1.3..v0.1.4) (2022-09-01) +* improve documents +* drop support for Go 1.16 + ## [v0.1.3](https://github.com/itchyny/timefmt-go/compare/v0.1.2..v0.1.3) (2021-04-14) * implement `ParseInLocation` for configuring the default location diff --git a/vendor/github.com/itchyny/timefmt-go/LICENSE b/vendor/github.com/itchyny/timefmt-go/LICENSE index 4d650fa8..84d6cb03 100644 --- a/vendor/github.com/itchyny/timefmt-go/LICENSE +++ b/vendor/github.com/itchyny/timefmt-go/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2020,2021 itchyny +Copyright (c) 2020-2022 itchyny Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/github.com/itchyny/timefmt-go/Makefile b/vendor/github.com/itchyny/timefmt-go/Makefile index bacef7bf..a87cb286 100644 --- a/vendor/github.com/itchyny/timefmt-go/Makefile +++ b/vendor/github.com/itchyny/timefmt-go/Makefile @@ -1,20 +1,19 @@ GOBIN ?= $(shell go env GOPATH)/bin -export GO111MODULE=on .PHONY: all all: test .PHONY: test test: - go test -v ./... + go test -v -race ./... .PHONY: lint -lint: $(GOBIN)/golint +lint: $(GOBIN)/staticcheck go vet ./... - golint -set_exit_status ./... + staticcheck -checks all,-ST1000 ./... 
-$(GOBIN)/golint: - cd && go get golang.org/x/lint/golint +$(GOBIN)/staticcheck: + go install honnef.co/go/tools/cmd/staticcheck@latest .PHONY: clean clean: diff --git a/vendor/github.com/itchyny/timefmt-go/README.md b/vendor/github.com/itchyny/timefmt-go/README.md index 078b1e1a..f01af961 100644 --- a/vendor/github.com/itchyny/timefmt-go/README.md +++ b/vendor/github.com/itchyny/timefmt-go/README.md @@ -1,7 +1,7 @@ # timefmt-go [![CI Status](https://github.com/itchyny/timefmt-go/workflows/CI/badge.svg)](https://github.com/itchyny/timefmt-go/actions) [![Go Report Card](https://goreportcard.com/badge/github.com/itchyny/timefmt-go)](https://goreportcard.com/report/github.com/itchyny/timefmt-go) -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/timefmt-go/blob/main/LICENSE) +[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/itchyny/timefmt-go/blob/main/LICENSE) [![release](https://img.shields.io/github/release/itchyny/timefmt-go/all.svg)](https://github.com/itchyny/timefmt-go/releases) [![pkg.go.dev](https://pkg.go.dev/badge/github.com/itchyny/timefmt-go)](https://pkg.go.dev/github.com/itchyny/timefmt-go) @@ -35,14 +35,15 @@ func main() { Please refer to [`man 3 strftime`](https://linux.die.net/man/3/strftime) and [`man 3 strptime`](https://linux.die.net/man/3/strptime) for formatters. +As an extension, `%f` directive is supported for zero-padded microseconds, which originates from Python. Note that `E` and `O` modifier characters are not supported. ## Comparison to other libraries - This library - provides both formatting and parsing functions in pure Go language, - - depends only on the Go standard libraries not to grows up the module file. + - depends only on the Go standard libraries not to grow up dependency. - `Format` (`strftime`) implements glibc extensions including - - width specifier like `%10A, %10B %2k:%M`, + - width specifier like `%6Y %10B %4Z` (limited to 1024 bytes), - omitting padding modifier like `%-y-%-m-%-d`, - space padding modifier like `%_y-%_m-%_d`, - upper case modifier like `%^a %^b`, diff --git a/vendor/github.com/itchyny/timefmt-go/format.go b/vendor/github.com/itchyny/timefmt-go/format.go index 1d9b3672..eea976ee 100644 --- a/vendor/github.com/itchyny/timefmt-go/format.go +++ b/vendor/github.com/itchyny/timefmt-go/format.go @@ -1,6 +1,7 @@ package timefmt import ( + "math" "strconv" "time" ) @@ -10,8 +11,7 @@ func Format(t time.Time, format string) string { return string(AppendFormat(make([]byte, 0, 64), t, format)) } -// AppendFormat appends formatted time to the bytes. -// You can use this method to reduce allocations. +// AppendFormat appends formatted time string to the buffer. 
func AppendFormat(buf []byte, t time.Time, format string) []byte { year, month, day := t.Date() hour, min, sec := t.Clock() @@ -74,7 +74,7 @@ func AppendFormat(buf []byte, t time.Time, format string) []byte { b = format[i] if b <= '9' && '0' <= b { width = width*10 + int(b&0x0F) - if width >= int((^uint(0)>>1)/10) { + if width >= math.MaxInt/10 { width = maxWidth } } else { diff --git a/vendor/github.com/itchyny/timefmt-go/parse.go b/vendor/github.com/itchyny/timefmt-go/parse.go index 2d2b5f4d..83b0df2c 100644 --- a/vendor/github.com/itchyny/timefmt-go/parse.go +++ b/vendor/github.com/itchyny/timefmt-go/parse.go @@ -25,7 +25,7 @@ func parse(source, format string, loc, base *time.Location) (t time.Time, err er } }() var j, century, yday, colons int - var pm bool + var pm, hasZoneName, hasZoneOffset bool var pending string for i, l := 0, len(source); i < len(format); i++ { if b := format[i]; b == '%' { @@ -158,14 +158,14 @@ func parse(source, format string, loc, base *time.Location) (t time.Time, err er hour, min, sec = t.Clock() month = int(mon) case 'f': - var msec, k, d int - if msec, k, err = parseNumber(source, j, 6, 'f'); err != nil { + var usec, k, d int + if usec, k, err = parseNumber(source, j, 6, 'f'); err != nil { return } - nsec = msec * 1000 for j, d = k, k-j; d < 6; d++ { - nsec *= 10 + usec *= 10 } + nsec = usec * 1000 case 'Z': k := j for ; k < l; k++ { @@ -178,7 +178,14 @@ func parse(source, format string, loc, base *time.Location) (t time.Time, err er err = fmt.Errorf(`cannot parse %q with "%%Z"`, source[j:k]) return } - loc = t.Location() + if hasZoneOffset { + name, _ := t.Zone() + _, offset := locationZone(loc) + loc = time.FixedZone(name, offset) + } else { + loc = t.Location() + } + hasZoneName = true j = k case 'z': if j >= l { @@ -231,7 +238,12 @@ func parse(source, format string, loc, base *time.Location) (t time.Time, err er j = k } } - loc, colons = time.FixedZone("", sign*((hour*60+min)*60+sec)), 0 + var name string + if hasZoneName { + name, _ = locationZone(loc) + } + loc, colons = time.FixedZone(name, sign*((hour*60+min)*60+sec)), 0 + hasZoneOffset = true case 'Z': loc, colons, j = time.UTC, 0, j+1 default: @@ -328,6 +340,10 @@ func parse(source, format string, loc, base *time.Location) (t time.Time, err er return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), nil } +func locationZone(loc *time.Location) (name string, offset int) { + return time.Date(2000, time.January, 1, 0, 0, 0, 0, loc).Zone() +} + type parseFormatError byte func (err parseFormatError) Error() string { diff --git a/vendor/github.com/itchyny/timefmt-go/timefmt.go b/vendor/github.com/itchyny/timefmt-go/timefmt.go new file mode 100644 index 00000000..45bf6ae9 --- /dev/null +++ b/vendor/github.com/itchyny/timefmt-go/timefmt.go @@ -0,0 +1,2 @@ +// Package timefmt provides functions for formatting and parsing date time strings. 
+package timefmt diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE b/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE deleted file mode 100644 index 14127cd8..00000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/LICENSE +++ /dev/null @@ -1,9 +0,0 @@ -(The MIT License) - -Copyright (c) 2017 marvin + konsorten GmbH (open-source@konsorten.de) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md b/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md deleted file mode 100644 index 949b77e3..00000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# Windows Terminal Sequences - -This library allow for enabling Windows terminal color support for Go. - -See [Console Virtual Terminal Sequences](https://docs.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences) for details. - -## Usage - -```go -import ( - "syscall" - - sequences "github.com/konsorten/go-windows-terminal-sequences" -) - -func main() { - sequences.EnableVirtualTerminalProcessing(syscall.Stdout, true) -} - -``` - -## Authors - -The tool is sponsored by the [marvin + konsorten GmbH](http://www.konsorten.de). - -We thank all the authors who provided code to this library: - -* Felix Kollmann - -## License - -(The MIT License) - -Copyright (c) 2018 marvin + konsorten GmbH (open-source@konsorten.de) - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
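
The parse.go hunk above starts tracking whether a zone name (`%Z`) and a numeric offset (`%z`) have both been seen, which matches the v0.1.5 entry in the timefmt-go CHANGELOG earlier in this section ("support parsing time zone offset with name using both `%z` and `%Z`"). As a minimal sketch of what that enables for a consumer of the vendored package — assuming the upstream import path and the exported `Parse(source, format string) (time.Time, error)` wrapper around the internal `parse` shown in the hunk; the timestamp and format here are illustrative only:

```go
package main

import (
	"fmt"

	timefmt "github.com/itchyny/timefmt-go"
)

func main() {
	// With timefmt-go v0.1.5 (per the CHANGELOG above), a format may carry
	// both the zone name (%Z) and the numeric offset (%z); the resulting
	// location keeps the parsed name together with the parsed offset.
	t, err := timefmt.Parse("2022-12-01 09:00:00 JST +0900", "%Y-%m-%d %H:%M:%S %Z %z")
	if err != nil {
		panic(err)
	}
	name, offset := t.Zone() // with this input, typically "JST" and 9*60*60
	fmt.Println(t, name, offset)
}
```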
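Similarly, the new vendor/github.com/itchyny/gojq/type.go earlier in this section exports a `TypeOf` helper whose doc comment spells out the Go types it accepts. A small illustrative sketch of calling it, assuming the package is imported by its upstream path; values outside the documented set would panic, so the loop sticks to supported ones:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/itchyny/gojq"
)

func main() {
	// TypeOf maps the Go representations gojq uses for JSON values to
	// jq-flavored type names; any other type panics, per its doc comment.
	values := []any{
		nil, true, 42, 3.14, big.NewInt(1 << 62), "hello",
		[]any{1, 2, 3}, map[string]any{"k": "v"},
	}
	for _, v := range values {
		fmt.Printf("%-24T %s\n", v, gojq.TypeOf(v))
	}
}
```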
diff --git a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go b/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go deleted file mode 100644 index ef18d8f9..00000000 --- a/vendor/github.com/konsorten/go-windows-terminal-sequences/sequences.go +++ /dev/null @@ -1,36 +0,0 @@ -// +build windows - -package sequences - -import ( - "syscall" - "unsafe" -) - -var ( - kernel32Dll *syscall.LazyDLL = syscall.NewLazyDLL("Kernel32.dll") - setConsoleMode *syscall.LazyProc = kernel32Dll.NewProc("SetConsoleMode") -) - -func EnableVirtualTerminalProcessing(stream syscall.Handle, enable bool) error { - const ENABLE_VIRTUAL_TERMINAL_PROCESSING uint32 = 0x4 - - var mode uint32 - err := syscall.GetConsoleMode(syscall.Stdout, &mode) - if err != nil { - return err - } - - if enable { - mode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING - } else { - mode &^= ENABLE_VIRTUAL_TERMINAL_PROCESSING - } - - ret, _, err := setConsoleMode.Call(uintptr(unsafe.Pointer(stream)), uintptr(mode)) - if ret == 0 { - return err - } - - return nil -} diff --git a/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go b/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go index 7ae7091f..88908773 100644 --- a/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go +++ b/vendor/github.com/lithammer/fuzzysearch/fuzzy/fuzzy.go @@ -3,7 +3,6 @@ package fuzzy import ( - "bytes" "unicode" "unicode/utf8" @@ -53,9 +52,12 @@ func MatchNormalizedFold(source, target string) bool { } func match(source, target string, transformer transform.Transformer) bool { - source = stringTransform(source, transformer) - target = stringTransform(target, transformer) + sourceT := stringTransform(source, transformer) + targetT := stringTransform(target, transformer) + return matchTransformed(sourceT, targetT) +} +func matchTransformed(source, target string) bool { lenDiff := len(target) - len(source) if lenDiff < 0 { @@ -101,10 +103,13 @@ func FindNormalizedFold(source string, targets []string) []string { } func find(source string, targets []string, transformer transform.Transformer) []string { + sourceT := stringTransform(source, transformer) + var matches []string for _, target := range targets { - if match(source, target, transformer) { + targetT := stringTransform(target, transformer) + if matchTransformed(sourceT, targetT) { matches = append(matches, target) } } @@ -194,10 +199,13 @@ func RankFindNormalizedFold(source string, targets []string) Ranks { } func rankFind(source string, targets []string, transformer transform.Transformer) Ranks { + sourceT := stringTransform(source, transformer) + var r Ranks for index, target := range targets { - if match(source, target, transformer) { + targetT := stringTransform(target, transformer) + if matchTransformed(sourceT, targetT) { distance := LevenshteinDistance(source, target) r = append(r, Rank{source, target, distance, index}) } @@ -251,19 +259,30 @@ func stringTransform(s string, t transform.Transformer) (transformed string) { type unicodeFoldTransformer struct{ transform.NopResetter } func (unicodeFoldTransformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { - runes := bytes.Runes(src) - var lowerRunes []rune - for _, r := range runes { - lowerRunes = append(lowerRunes, unicode.ToLower(r)) - } - - srcBytes := []byte(string(lowerRunes)) - n := copy(dst, srcBytes) - if n < len(srcBytes) { - err = transform.ErrShortDst + // Converting src to a string allocates. + // In theory, it need not; see https://go.dev/issue/27148. 
+ // It is possible to write this loop using utf8.DecodeRune + // and thereby avoid allocations, but it is noticeably slower. + // So just let's wait for the compiler to get smarter. + for _, r := range string(src) { + if r == utf8.RuneError { + // Go spec for ranging over a string says: + // If the iteration encounters an invalid UTF-8 sequence, + // the second value will be 0xFFFD, the Unicode replacement character, + // and the next iteration will advance a single byte in the string. + nSrc++ + } else { + nSrc += utf8.RuneLen(r) + } + r = unicode.ToLower(r) + x := utf8.RuneLen(r) + if x > len(dst[nDst:]) { + err = transform.ErrShortDst + break + } + nDst += utf8.EncodeRune(dst[nDst:], r) } - - return n, n, err + return nDst, nSrc, err } type nopTransformer struct{ transform.NopResetter } diff --git a/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go b/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go index 4fb5838c..c0fc1910 100644 --- a/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go +++ b/vendor/github.com/lithammer/fuzzysearch/fuzzy/levenshtein.go @@ -33,11 +33,13 @@ func LevenshteinDistance(s, t string) int { return column[len(r1)] } -func min(a, b, c int) int { - if a < b && a < c { +func min2(a, b int) int { + if a < b { return a - } else if b < c { - return b } - return c + return b +} + +func min(a, b, c int) int { + return min2(min2(a, b), c) } diff --git a/vendor/github.com/logrusorgru/aurora/.gitignore b/vendor/github.com/logrusorgru/aurora/.gitignore deleted file mode 100644 index dbcb7cc7..00000000 --- a/vendor/github.com/logrusorgru/aurora/.gitignore +++ /dev/null @@ -1,34 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof -*.out - -# coverage - -cover.html - -# benchcmp - -*.cmp - diff --git a/vendor/github.com/logrusorgru/aurora/.travis.yml b/vendor/github.com/logrusorgru/aurora/.travis.yml deleted file mode 100644 index 570e361b..00000000 --- a/vendor/github.com/logrusorgru/aurora/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -language: go -go: - - tip -before_install: - - go get github.com/axw/gocov/gocov - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover -script: - - $HOME/gopath/bin/goveralls -service=travis-ci diff --git a/vendor/github.com/logrusorgru/aurora/AUTHORS.md b/vendor/github.com/logrusorgru/aurora/AUTHORS.md deleted file mode 100644 index 0ee9e3eb..00000000 --- a/vendor/github.com/logrusorgru/aurora/AUTHORS.md +++ /dev/null @@ -1,8 +0,0 @@ -AUTHORS -======= - -- Konstantin Ivanov @logrusorgru -- Mattias Eriksson @snaggen -- Ousmane Traore @otraore -- Simon Legner @simon04 -- Sevenate @sevenate diff --git a/vendor/github.com/logrusorgru/aurora/CHANGELOG.md b/vendor/github.com/logrusorgru/aurora/CHANGELOG.md deleted file mode 100644 index ad0a2025..00000000 --- a/vendor/github.com/logrusorgru/aurora/CHANGELOG.md +++ /dev/null @@ -1,59 +0,0 @@ -Changes -======= - ---- -16:05:02 -Thursday, July 2, 2020 - -Change license from the WTFPL to the Unlicense due to pkg.go.dev restriction. 
- ---- -15:39:40 -Wednesday, April 17, 2019 - -- Bright background and foreground colors -- 8-bit indexed colors `Index`, `BgIndex` -- 24 grayscale colors `Gray`, `BgGray` -- `Yellow` and `BgYellow` methods, mark Brow and BgBrown as deprecated - Following specifications, correct name of the colors are yellow, but - by historical reason they are called brown. Both, the `Yellow` and the - `Brown` methods (including `Bg+`) represents the same colors. The Brown - are leaved for backward compatibility until Go modules production release. -- Additional formats - + `Faint` that is opposite to the `Bold` - + `DoublyUnderline` - + `Fraktur` - + `Italic` - + `Underline` - + `SlowBlink` with `Blink` alias - + `RapidBlink` - + `Reverse` that is alias for the `Inverse` - + `Conceal` with `Hidden` alias - + `CrossedOut` with `StrikeThrough` alias - + `Framed` - + `Encircled` - + `Overlined` -- Add AUTHORS.md file and change all copyright notices. -- `Reset` method to create clear value. `Reset` method that replaces - `Bleach` method. The `Bleach` method was marked as deprecated. - ---- - -14:25:49 -Friday, August 18, 2017 - -- LICENSE.md changed to LICENSE -- fix email in README.md -- add "no warranty" to README.md -- set proper copyright date - ---- - -16:59:28 -Tuesday, November 8, 2016 - -- Rid out off sync.Pool -- Little optimizations (very little) -- Improved benchmarks - ---- diff --git a/vendor/github.com/logrusorgru/aurora/LICENSE b/vendor/github.com/logrusorgru/aurora/LICENSE deleted file mode 100644 index 68a49daa..00000000 --- a/vendor/github.com/logrusorgru/aurora/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -This is free and unencumbered software released into the public domain. - -Anyone is free to copy, modify, publish, use, compile, sell, or -distribute this software, either in source code form or as a compiled -binary, for any purpose, commercial or non-commercial, and by any -means. - -In jurisdictions that recognize copyright laws, the author or authors -of this software dedicate any and all copyright interest in the -software to the public domain. We make this dedication for the benefit -of the public at large and to the detriment of our heirs and -successors. We intend this dedication to be an overt act of -relinquishment in perpetuity of all present and future rights to this -software under copyright law. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
- -For more information, please refer to diff --git a/vendor/github.com/logrusorgru/aurora/README.md b/vendor/github.com/logrusorgru/aurora/README.md deleted file mode 100644 index e0afce1c..00000000 --- a/vendor/github.com/logrusorgru/aurora/README.md +++ /dev/null @@ -1,314 +0,0 @@ -Aurora -====== - -[![go.dev reference](https://img.shields.io/badge/go.dev-reference-007d9c?logo=go&logoColor=white)](https://pkg.go.dev/github.com/logrusorgru/aurora?tab=doc) -[![Unlicense](https://img.shields.io/badge/license-unlicense-blue.svg)](http://unlicense.org/) -[![Build Status](https://travis-ci.org/logrusorgru/aurora.svg)](https://travis-ci.org/logrusorgru/aurora) -[![Coverage Status](https://coveralls.io/repos/logrusorgru/aurora/badge.svg?branch=master)](https://coveralls.io/r/logrusorgru/aurora?branch=master) -[![GoReportCard](https://goreportcard.com/badge/logrusorgru/aurora)](https://goreportcard.com/report/logrusorgru/aurora) -[![Gitter](https://img.shields.io/badge/chat-on_gitter-46bc99.svg?logo=data:image%2Fsvg%2Bxml%3Bbase64%2CPHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIGhlaWdodD0iMTQiIHdpZHRoPSIxNCI%2BPGcgZmlsbD0iI2ZmZiI%2BPHJlY3QgeD0iMCIgeT0iMyIgd2lkdGg9IjEiIGhlaWdodD0iNSIvPjxyZWN0IHg9IjIiIHk9IjQiIHdpZHRoPSIxIiBoZWlnaHQ9IjciLz48cmVjdCB4PSI0IiB5PSI0IiB3aWR0aD0iMSIgaGVpZ2h0PSI3Ii8%2BPHJlY3QgeD0iNiIgeT0iNCIgd2lkdGg9IjEiIGhlaWdodD0iNCIvPjwvZz48L3N2Zz4%3D&logoWidth=10)](https://gitter.im/logrusorgru/aurora) - -Ultimate ANSI colors for Golang. The package supports Printf/Sprintf etc. - - -![aurora logo](https://github.com/logrusorgru/aurora/blob/master/gopher_aurora.png) - -# TOC - -- [Installation](#installation) -- [Usage](#usage) - + [Simple](#simple) - + [Printf](#printf) - + [aurora.Sprintf](#aurorasprintf) - + [Enable/Disable colors](#enabledisable-colors) -- [Chains](#chains) -- [Colorize](#colorize) -- [Grayscale](#grayscale) -- [8-bit colors](#8-bit-colors) -- [Supported Colors & Formats](#supported-colors--formats) - + [All colors](#all-colors) - + [Standard and bright colors](#standard-and-bright-colors) - + [Formats are likely supported](#formats-are-likely-supported) - + [Formats are likely unsupported](#formats-are-likely-unsupported) -- [Limitations](#limitations) - + [Windows](#windows) - + [TTY](#tty) -- [Licensing](#licensing) - -# Installation - -Get -``` -go get -u github.com/logrusorgru/aurora -``` -Test -``` -go test -cover github.com/logrusorgru/aurora -``` - -# Usage - -### Simple - -```go -package main - -import ( - "fmt" - - . "github.com/logrusorgru/aurora" -) - -func main() { - fmt.Println("Hello,", Magenta("Aurora")) - fmt.Println(Bold(Cyan("Cya!"))) -} - -``` - -![simple png](https://github.com/logrusorgru/aurora/blob/master/simple.png) - -### Printf - -```go -package main - -import ( - "fmt" - - . "github.com/logrusorgru/aurora" -) - -func main() { - fmt.Printf("Got it %d times\n", Green(1240)) - fmt.Printf("PI is %+1.2e\n", Cyan(3.14)) -} - -``` - -![printf png](https://github.com/logrusorgru/aurora/blob/master/printf.png) - -### aurora.Sprintf - -```go -package main - -import ( - "fmt" - - . 
"github.com/logrusorgru/aurora" -) - -func main() { - fmt.Println(Sprintf(Magenta("Got it %d times"), Green(1240))) -} - -``` - -![sprintf png](https://github.com/logrusorgru/aurora/blob/master/sprintf.png) - -### Enable/Disable colors - -```go -package main - -import ( - "fmt" - "flag" - - "github.com/logrusorgru/aurora" -) - -// colorizer -var au aurora.Aurora - -var colors = flag.Bool("colors", false, "enable or disable colors") - -func init() { - flag.Parse() - au = aurora.NewAurora(*colors) -} - -func main() { - // use colorizer - fmt.Println(au.Green("Hello")) -} - -``` -Without flags: -![disable png](https://github.com/logrusorgru/aurora/blob/master/disable.png) - -With `-colors` flag: -![enable png](https://github.com/logrusorgru/aurora/blob/master/enable.png) - -# Chains - -The following samples are equal - -```go -x := BgMagenta(Bold(Red("x"))) -``` - -```go -x := Red("x").Bold().BgMagenta() -``` - -The second is more readable - -# Colorize - -There is `Colorize` function that allows to choose some colors and -format from a side - -```go - -func getColors() Color { - // some stuff that returns appropriate colors and format -} - -// [...] - -func main() { - fmt.Println(Colorize("Greeting", getColors())) -} - -``` -Less complicated example - -```go -x := Colorize("Greeting", GreenFg|GrayBg|BoldFm) -``` - -Unlike other color functions and methods (such as Red/BgBlue etc) -a `Colorize` clears previous colors - -```go -x := Red("x").Colorize(BgGreen) // will be with green background only -``` - -# Grayscale - -```go -fmt.Println(" ", - Gray(1-1, " 00-23 ").BgGray(24-1), - Gray(4-1, " 03-19 ").BgGray(20-1), - Gray(8-1, " 07-15 ").BgGray(16-1), - Gray(12-1, " 11-11 ").BgGray(12-1), - Gray(16-1, " 15-07 ").BgGray(8-1), - Gray(20-1, " 19-03 ").BgGray(4-1), - Gray(24-1, " 23-00 ").BgGray(1-1), -) -``` - -![grayscale png](https://github.com/logrusorgru/aurora/blob/master/aurora_grayscale.png) - -# 8-bit colors - -Methods `Index` and `BgIndex` implements 8-bit colors. 
- -| Index/BgIndex | Meaning | Foreground | Background | -| -------------- | --------------- | ---------- | ---------- | -| 0- 7 | standard colors | 30- 37 | 40- 47 | -| 8- 15 | bright colors | 90- 97 | 100-107 | -| 16-231 | 216 colors | 38;5;n | 48;5;n | -| 232-255 | 24 grayscale | 38;5;n | 48;5;n | - - -# Supported colors & formats - -- formats - + bold (1) - + faint (2) - + doubly-underline (21) - + fraktur (20) - + italic (3) - + underline (4) - + slow blink (5) - + rapid blink (6) - + reverse video (7) - + conceal (8) - + crossed out (9) - + framed (51) - + encircled (52) - + overlined (53) -- background and foreground colors, including bright - + black - + red - + green - + yellow (brown) - + blue - + magenta - + cyan - + white - + 24 grayscale colors - + 216 8-bit colors - -### All colors - -![linux png](https://github.com/logrusorgru/aurora/blob/master/aurora_colors_black.png) -![white png](https://github.com/logrusorgru/aurora/blob/master/aurora_colors_white.png) - -### Standard and bright colors - -![linux black standard png](https://github.com/logrusorgru/aurora/blob/master/aurora_black_standard.png) -![linux white standard png](https://github.com/logrusorgru/aurora/blob/master/aurora_white_standard.png) - -### Formats are likely supported - -![formats supported gif](https://github.com/logrusorgru/aurora/blob/master/aurora_formats.gif) - -### Formats are likely unsupported - -![formats rarely supported png](https://github.com/logrusorgru/aurora/blob/master/aurora_rarely_supported.png) - -# Limitations - -There is no way to represent `%T` and `%p` with colors using -a standard approach - -```go -package main - -import ( - "fmt" - - . "github.com/logrusorgru/aurora" -) - -func main() { - r := Red("red") - var i int - fmt.Printf("%T %p\n", r, Green(&i)) -} -``` - -Output will be without colors - -``` -aurora.value %!p(aurora.value={0xc42000a310 768 0}) -``` - -The obvious workaround is `Red(fmt.Sprintf("%T", some))` - -### Windows - -The Aurora provides ANSI colors only, so there is no support for Windows. That said, there are workarounds available. -Check out these comments to learn more: - -- [Using go-colorable](https://github.com/logrusorgru/aurora/issues/2#issuecomment-299014211). -- [Using registry for Windows 10](https://github.com/logrusorgru/aurora/issues/10#issue-476361247). - -### TTY - -The Aurora has no internal TTY detectors by design. Take a look - [this comment](https://github.com/logrusorgru/aurora/issues/2#issuecomment-299030108) if you want turn -on colors for a terminal only, and turn them off for a file. - -### Licensing - -Copyright © 2016-2020 The Aurora Authors. This work is free. -It comes without any warranty, to the extent permitted by applicable -law. You can redistribute it and/or modify it under the terms of the -the Unlicense. See the LICENSE file for more details. - - diff --git a/vendor/github.com/logrusorgru/aurora/aurora.go b/vendor/github.com/logrusorgru/aurora/aurora.go deleted file mode 100644 index 3b302305..00000000 --- a/vendor/github.com/logrusorgru/aurora/aurora.go +++ /dev/null @@ -1,725 +0,0 @@ -// -// Copyright (c) 2016-2020 The Aurora Authors. All rights reserved. -// This program is free software. It comes without any warranty, -// to the extent permitted by applicable law. You can redistribute -// it and/or modify it under the terms of the Unlicense. See LICENSE -// file for more details or see below. -// - -// -// This is free and unencumbered software released into the public domain. 
-// -// Anyone is free to copy, modify, publish, use, compile, sell, or -// distribute this software, either in source code form or as a compiled -// binary, for any purpose, commercial or non-commercial, and by any -// means. -// -// In jurisdictions that recognize copyright laws, the author or authors -// of this software dedicate any and all copyright interest in the -// software to the public domain. We make this dedication for the benefit -// of the public at large and to the detriment of our heirs and -// successors. We intend this dedication to be an overt act of -// relinquishment in perpetuity of all present and future rights to this -// software under copyright law. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -// For more information, please refer to -// - -// Package aurora implements ANSI-colors -package aurora - -import ( - "fmt" -) - -// An Aurora implements colorizer interface. -// It also can be a non-colorizer -type Aurora interface { - - // Reset wraps given argument returning Value - // without formats and colors. - Reset(arg interface{}) Value - - // - // Formats - // - // - // Bold or increased intensity (1). - Bold(arg interface{}) Value - // Faint, decreased intensity (2). - Faint(arg interface{}) Value - // - // DoublyUnderline or Bold off, double-underline - // per ECMA-48 (21). - DoublyUnderline(arg interface{}) Value - // Fraktur, rarely supported (20). - Fraktur(arg interface{}) Value - // - // Italic, not widely supported, sometimes - // treated as inverse (3). - Italic(arg interface{}) Value - // Underline (4). - Underline(arg interface{}) Value - // - // SlowBlink, blinking less than 150 - // per minute (5). - SlowBlink(arg interface{}) Value - // RapidBlink, blinking 150+ per minute, - // not widely supported (6). - RapidBlink(arg interface{}) Value - // Blink is alias for the SlowBlink. - Blink(arg interface{}) Value - // - // Reverse video, swap foreground and - // background colors (7). - Reverse(arg interface{}) Value - // Inverse is alias for the Reverse - Inverse(arg interface{}) Value - // - // Conceal, hidden, not widely supported (8). - Conceal(arg interface{}) Value - // Hidden is alias for the Conceal - Hidden(arg interface{}) Value - // - // CrossedOut, characters legible, but - // marked for deletion (9). - CrossedOut(arg interface{}) Value - // StrikeThrough is alias for the CrossedOut. - StrikeThrough(arg interface{}) Value - // - // Framed (51). - Framed(arg interface{}) Value - // Encircled (52). - Encircled(arg interface{}) Value - // - // Overlined (53). 
- Overlined(arg interface{}) Value - - // - // Foreground colors - // - // - // Black foreground color (30) - Black(arg interface{}) Value - // Red foreground color (31) - Red(arg interface{}) Value - // Green foreground color (32) - Green(arg interface{}) Value - // Yellow foreground color (33) - Yellow(arg interface{}) Value - // Brown foreground color (33) - // - // Deprecated: use Yellow instead, following specification - Brown(arg interface{}) Value - // Blue foreground color (34) - Blue(arg interface{}) Value - // Magenta foreground color (35) - Magenta(arg interface{}) Value - // Cyan foreground color (36) - Cyan(arg interface{}) Value - // White foreground color (37) - White(arg interface{}) Value - // - // Bright foreground colors - // - // BrightBlack foreground color (90) - BrightBlack(arg interface{}) Value - // BrightRed foreground color (91) - BrightRed(arg interface{}) Value - // BrightGreen foreground color (92) - BrightGreen(arg interface{}) Value - // BrightYellow foreground color (93) - BrightYellow(arg interface{}) Value - // BrightBlue foreground color (94) - BrightBlue(arg interface{}) Value - // BrightMagenta foreground color (95) - BrightMagenta(arg interface{}) Value - // BrightCyan foreground color (96) - BrightCyan(arg interface{}) Value - // BrightWhite foreground color (97) - BrightWhite(arg interface{}) Value - // - // Other - // - // Index of pre-defined 8-bit foreground color - // from 0 to 255 (38;5;n). - // - // 0- 7: standard colors (as in ESC [ 30–37 m) - // 8- 15: high intensity colors (as in ESC [ 90–97 m) - // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) - // 232-255: grayscale from black to white in 24 steps - // - Index(n uint8, arg interface{}) Value - // Gray from 0 to 23. - Gray(n uint8, arg interface{}) Value - - // - // Background colors - // - // - // BgBlack background color (40) - BgBlack(arg interface{}) Value - // BgRed background color (41) - BgRed(arg interface{}) Value - // BgGreen background color (42) - BgGreen(arg interface{}) Value - // BgYellow background color (43) - BgYellow(arg interface{}) Value - // BgBrown background color (43) - // - // Deprecated: use BgYellow instead, following specification - BgBrown(arg interface{}) Value - // BgBlue background color (44) - BgBlue(arg interface{}) Value - // BgMagenta background color (45) - BgMagenta(arg interface{}) Value - // BgCyan background color (46) - BgCyan(arg interface{}) Value - // BgWhite background color (47) - BgWhite(arg interface{}) Value - // - // Bright background colors - // - // BgBrightBlack background color (100) - BgBrightBlack(arg interface{}) Value - // BgBrightRed background color (101) - BgBrightRed(arg interface{}) Value - // BgBrightGreen background color (102) - BgBrightGreen(arg interface{}) Value - // BgBrightYellow background color (103) - BgBrightYellow(arg interface{}) Value - // BgBrightBlue background color (104) - BgBrightBlue(arg interface{}) Value - // BgBrightMagenta background color (105) - BgBrightMagenta(arg interface{}) Value - // BgBrightCyan background color (106) - BgBrightCyan(arg interface{}) Value - // BgBrightWhite background color (107) - BgBrightWhite(arg interface{}) Value - // - // Other - // - // BgIndex of 8-bit pre-defined background color - // from 0 to 255 (48;5;n). 
- // - // 0- 7: standard colors (as in ESC [ 40–47 m) - // 8- 15: high intensity colors (as in ESC [100–107 m) - // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) - // 232-255: grayscale from black to white in 24 steps - // - BgIndex(n uint8, arg interface{}) Value - // BgGray from 0 to 23. - BgGray(n uint8, arg interface{}) Value - - // - // Special - // - // Colorize removes existing colors and - // formats of the argument and applies given. - Colorize(arg interface{}, color Color) Value - - // - // Support methods - // - // Sprintf allows to use colored format. - Sprintf(format interface{}, args ...interface{}) string -} - -// NewAurora returns a new Aurora interface that -// will support or not support colors depending -// the enableColors argument -func NewAurora(enableColors bool) Aurora { - if enableColors { - return aurora{} - } - return auroraClear{} -} - -// no colors - -type auroraClear struct{} - -func (auroraClear) Reset(arg interface{}) Value { return valueClear{arg} } - -func (auroraClear) Bold(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Faint(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) DoublyUnderline(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Fraktur(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Italic(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Underline(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) SlowBlink(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) RapidBlink(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Blink(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Reverse(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Inverse(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Conceal(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Hidden(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) CrossedOut(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) StrikeThrough(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Framed(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Encircled(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Overlined(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Black(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Red(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Green(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Yellow(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Brown(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Blue(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Magenta(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Cyan(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) White(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightBlack(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightRed(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightGreen(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) 
BrightYellow(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightBlue(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightMagenta(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightCyan(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BrightWhite(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Index(_ uint8, arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Gray(_ uint8, arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBlack(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgRed(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgGreen(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgYellow(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrown(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBlue(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgMagenta(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgCyan(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgWhite(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightBlack(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightRed(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightGreen(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightYellow(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightBlue(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightMagenta(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightCyan(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgBrightWhite(arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgIndex(_ uint8, arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) BgGray(_ uint8, arg interface{}) Value { - return valueClear{arg} -} - -func (auroraClear) Colorize(arg interface{}, _ Color) Value { - return valueClear{arg} -} - -func (auroraClear) Sprintf(format interface{}, args ...interface{}) string { - if str, ok := format.(string); ok { - return fmt.Sprintf(str, args...) - } - return fmt.Sprintf(fmt.Sprint(format), args...) 
-} - -// colorized - -type aurora struct{} - -func (aurora) Reset(arg interface{}) Value { - return Reset(arg) -} - -func (aurora) Bold(arg interface{}) Value { - return Bold(arg) -} - -func (aurora) Faint(arg interface{}) Value { - return Faint(arg) -} - -func (aurora) DoublyUnderline(arg interface{}) Value { - return DoublyUnderline(arg) -} - -func (aurora) Fraktur(arg interface{}) Value { - return Fraktur(arg) -} - -func (aurora) Italic(arg interface{}) Value { - return Italic(arg) -} - -func (aurora) Underline(arg interface{}) Value { - return Underline(arg) -} - -func (aurora) SlowBlink(arg interface{}) Value { - return SlowBlink(arg) -} - -func (aurora) RapidBlink(arg interface{}) Value { - return RapidBlink(arg) -} - -func (aurora) Blink(arg interface{}) Value { - return Blink(arg) -} - -func (aurora) Reverse(arg interface{}) Value { - return Reverse(arg) -} - -func (aurora) Inverse(arg interface{}) Value { - return Inverse(arg) -} - -func (aurora) Conceal(arg interface{}) Value { - return Conceal(arg) -} - -func (aurora) Hidden(arg interface{}) Value { - return Hidden(arg) -} - -func (aurora) CrossedOut(arg interface{}) Value { - return CrossedOut(arg) -} - -func (aurora) StrikeThrough(arg interface{}) Value { - return StrikeThrough(arg) -} - -func (aurora) Framed(arg interface{}) Value { - return Framed(arg) -} - -func (aurora) Encircled(arg interface{}) Value { - return Encircled(arg) -} - -func (aurora) Overlined(arg interface{}) Value { - return Overlined(arg) -} - -func (aurora) Black(arg interface{}) Value { - return Black(arg) -} - -func (aurora) Red(arg interface{}) Value { - return Red(arg) -} - -func (aurora) Green(arg interface{}) Value { - return Green(arg) -} - -func (aurora) Yellow(arg interface{}) Value { - return Yellow(arg) -} - -func (aurora) Brown(arg interface{}) Value { - return Brown(arg) -} - -func (aurora) Blue(arg interface{}) Value { - return Blue(arg) -} - -func (aurora) Magenta(arg interface{}) Value { - return Magenta(arg) -} - -func (aurora) Cyan(arg interface{}) Value { - return Cyan(arg) -} - -func (aurora) White(arg interface{}) Value { - return White(arg) -} - -func (aurora) BrightBlack(arg interface{}) Value { - return BrightBlack(arg) -} - -func (aurora) BrightRed(arg interface{}) Value { - return BrightRed(arg) -} - -func (aurora) BrightGreen(arg interface{}) Value { - return BrightGreen(arg) -} - -func (aurora) BrightYellow(arg interface{}) Value { - return BrightYellow(arg) -} - -func (aurora) BrightBlue(arg interface{}) Value { - return BrightBlue(arg) -} - -func (aurora) BrightMagenta(arg interface{}) Value { - return BrightMagenta(arg) -} - -func (aurora) BrightCyan(arg interface{}) Value { - return BrightCyan(arg) -} - -func (aurora) BrightWhite(arg interface{}) Value { - return BrightWhite(arg) -} - -func (aurora) Index(index uint8, arg interface{}) Value { - return Index(index, arg) -} - -func (aurora) Gray(n uint8, arg interface{}) Value { - return Gray(n, arg) -} - -func (aurora) BgBlack(arg interface{}) Value { - return BgBlack(arg) -} - -func (aurora) BgRed(arg interface{}) Value { - return BgRed(arg) -} - -func (aurora) BgGreen(arg interface{}) Value { - return BgGreen(arg) -} - -func (aurora) BgYellow(arg interface{}) Value { - return BgYellow(arg) -} - -func (aurora) BgBrown(arg interface{}) Value { - return BgBrown(arg) -} - -func (aurora) BgBlue(arg interface{}) Value { - return BgBlue(arg) -} - -func (aurora) BgMagenta(arg interface{}) Value { - return BgMagenta(arg) -} - -func (aurora) BgCyan(arg interface{}) Value { - return 
BgCyan(arg) -} - -func (aurora) BgWhite(arg interface{}) Value { - return BgWhite(arg) -} - -func (aurora) BgBrightBlack(arg interface{}) Value { - return BgBrightBlack(arg) -} - -func (aurora) BgBrightRed(arg interface{}) Value { - return BgBrightRed(arg) -} - -func (aurora) BgBrightGreen(arg interface{}) Value { - return BgBrightGreen(arg) -} - -func (aurora) BgBrightYellow(arg interface{}) Value { - return BgBrightYellow(arg) -} - -func (aurora) BgBrightBlue(arg interface{}) Value { - return BgBrightBlue(arg) -} - -func (aurora) BgBrightMagenta(arg interface{}) Value { - return BgBrightMagenta(arg) -} - -func (aurora) BgBrightCyan(arg interface{}) Value { - return BgBrightCyan(arg) -} - -func (aurora) BgBrightWhite(arg interface{}) Value { - return BgBrightWhite(arg) -} - -func (aurora) BgIndex(n uint8, arg interface{}) Value { - return BgIndex(n, arg) -} - -func (aurora) BgGray(n uint8, arg interface{}) Value { - return BgGray(n, arg) -} - -func (aurora) Colorize(arg interface{}, color Color) Value { - return Colorize(arg, color) -} - -func (aurora) Sprintf(format interface{}, args ...interface{}) string { - return Sprintf(format, args...) -} diff --git a/vendor/github.com/logrusorgru/aurora/aurora_black_standard.png b/vendor/github.com/logrusorgru/aurora/aurora_black_standard.png deleted file mode 100644 index 83658d60..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_black_standard.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_colors_black.png b/vendor/github.com/logrusorgru/aurora/aurora_colors_black.png deleted file mode 100644 index 61b1602a..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_colors_black.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_colors_white.png b/vendor/github.com/logrusorgru/aurora/aurora_colors_white.png deleted file mode 100644 index ec42b072..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_colors_white.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_formats.gif b/vendor/github.com/logrusorgru/aurora/aurora_formats.gif deleted file mode 100644 index 670dd1c3..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_formats.gif and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_grayscale.png b/vendor/github.com/logrusorgru/aurora/aurora_grayscale.png deleted file mode 100644 index 40ecac6a..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_grayscale.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_rarely_supported.png b/vendor/github.com/logrusorgru/aurora/aurora_rarely_supported.png deleted file mode 100644 index 51b7b6d2..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_rarely_supported.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/aurora_white_standard.png b/vendor/github.com/logrusorgru/aurora/aurora_white_standard.png deleted file mode 100644 index 1fe99d78..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/aurora_white_standard.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/color.go b/vendor/github.com/logrusorgru/aurora/color.go deleted file mode 100644 index 486fb674..00000000 --- a/vendor/github.com/logrusorgru/aurora/color.go +++ /dev/null @@ -1,398 +0,0 @@ -// -// Copyright (c) 2016-2020 The Aurora Authors. All rights reserved. -// This program is free software. 
It comes without any warranty, -// to the extent permitted by applicable law. You can redistribute -// it and/or modify it under the terms of the Unlicense. See LICENSE -// file for more details or see below. -// - -// -// This is free and unencumbered software released into the public domain. -// -// Anyone is free to copy, modify, publish, use, compile, sell, or -// distribute this software, either in source code form or as a compiled -// binary, for any purpose, commercial or non-commercial, and by any -// means. -// -// In jurisdictions that recognize copyright laws, the author or authors -// of this software dedicate any and all copyright interest in the -// software to the public domain. We make this dedication for the benefit -// of the public at large and to the detriment of our heirs and -// successors. We intend this dedication to be an overt act of -// relinquishment in perpetuity of all present and future rights to this -// software under copyright law. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -// For more information, please refer to -// - -package aurora - -// A Color type is a color. It can contain -// one background color, one foreground color -// and a format, including ideogram related -// formats. -type Color uint - -/* - - Developer note. - - The int type is architecture depended and can be - represented as int32 or int64. - - Thus, we can use 32-bits only to be fast and - cross-platform. - - All supported formats requires 14 bits. It is - first 14 bits. - - A foreground color requires 8 bit + 1 bit (presence flag). - And the same for background color. 
- - The Color representations - - [ bg 8 bit ] [fg 8 bit ] [ fg/bg 2 bits ] [ fm 14 bits ] - - https://play.golang.org/p/fq2zcNstFoF - -*/ - -// Special formats -const ( - BoldFm Color = 1 << iota // 1 - FaintFm // 2 - ItalicFm // 3 - UnderlineFm // 4 - SlowBlinkFm // 5 - RapidBlinkFm // 6 - ReverseFm // 7 - ConcealFm // 8 - CrossedOutFm // 9 - - FrakturFm // 20 - DoublyUnderlineFm // 21 or bold off for some systems - - FramedFm // 51 - EncircledFm // 52 - OverlinedFm // 53 - - InverseFm = ReverseFm // alias to ReverseFm - BlinkFm = SlowBlinkFm // alias to SlowBlinkFm - HiddenFm = ConcealFm // alias to ConcealFm - StrikeThroughFm = CrossedOutFm // alias to CrossedOutFm - - maskFm = BoldFm | FaintFm | - ItalicFm | UnderlineFm | - SlowBlinkFm | RapidBlinkFm | - ReverseFm | - ConcealFm | CrossedOutFm | - - FrakturFm | DoublyUnderlineFm | - - FramedFm | EncircledFm | OverlinedFm - - flagFg Color = 1 << 14 // presence flag (14th bit) - flagBg Color = 1 << 15 // presence flag (15th bit) - - shiftFg = 16 // shift for foreground (starting from 16th bit) - shiftBg = 24 // shift for background (starting from 24th bit) -) - -// Foreground colors and related formats -const ( - - // 8 bits - - // [ 0; 7] - 30-37 - // [ 8; 15] - 90-97 - // [ 16; 231] - RGB - // [232; 255] - grayscale - - BlackFg Color = (iota << shiftFg) | flagFg // 30, 90 - RedFg // 31, 91 - GreenFg // 32, 92 - YellowFg // 33, 93 - BlueFg // 34, 94 - MagentaFg // 35, 95 - CyanFg // 36, 96 - WhiteFg // 37, 97 - - BrightFg Color = ((1 << 3) << shiftFg) | flagFg // -> 90 - - // the BrightFg itself doesn't represent - // a color, thus it has not flagFg - - // 5 bits - - // BrownFg represents brown foreground color. - // - // Deprecated: use YellowFg instead, following specifications - BrownFg = YellowFg - - // - maskFg = (0xff << shiftFg) | flagFg -) - -// Background colors and related formats -const ( - - // 8 bits - - // [ 0; 7] - 40-47 - // [ 8; 15] - 100-107 - // [ 16; 231] - RGB - // [232; 255] - grayscale - - BlackBg Color = (iota << shiftBg) | flagBg // 40, 100 - RedBg // 41, 101 - GreenBg // 42, 102 - YellowBg // 43, 103 - BlueBg // 44, 104 - MagentaBg // 45, 105 - CyanBg // 46, 106 - WhiteBg // 47, 107 - - BrightBg Color = ((1 << 3) << shiftBg) | flagBg // -> 100 - - // the BrightBg itself doesn't represent - // a color, thus it has not flagBg - - // 5 bits - - // BrownBg represents brown foreground color. - // - // Deprecated: use YellowBg instead, following specifications - BrownBg = YellowBg - - // - maskBg = (0xff << shiftBg) | flagBg -) - -const ( - availFlags = "-+# 0" - esc = "\033[" - clear = esc + "0m" -) - -// IsValid returns true always -// -// Deprecated: don't use this method anymore -func (c Color) IsValid() bool { - return true -} - -// Nos returns string like 1;7;31;45. It -// may be an empty string for empty color. -// If the zero is true, then the string -// is prepended with 0; -func (c Color) Nos(zero bool) string { - return string(c.appendNos(make([]byte, 0, 59), zero)) -} - -func appendCond(bs []byte, cond, semi bool, vals ...byte) []byte { - if !cond { - return bs - } - return appendSemi(bs, semi, vals...) -} - -// if the semi is true, then prepend with semicolon -func appendSemi(bs []byte, semi bool, vals ...byte) []byte { - if semi { - bs = append(bs, ';') - } - return append(bs, vals...) 
-} - -func itoa(t byte) string { - var ( - a [3]byte - j = 2 - ) - for i := 0; i < 3; i, j = i+1, j-1 { - a[j] = '0' + t%10 - if t = t / 10; t == 0 { - break - } - } - return string(a[j:]) -} - -func (c Color) appendFg(bs []byte, zero bool) []byte { - - if zero || c&maskFm != 0 { - bs = append(bs, ';') - } - - // 0- 7 : 30-37 - // 8-15 : 90-97 - // > 15 : 38;5;val - - switch fg := (c & maskFg) >> shiftFg; { - case fg <= 7: - // '3' and the value itself - bs = append(bs, '3', '0'+byte(fg)) - case fg <= 15: - // '9' and the value itself - bs = append(bs, '9', '0'+byte(fg&^0x08)) // clear bright flag - default: - bs = append(bs, '3', '8', ';', '5', ';') - bs = append(bs, itoa(byte(fg))...) - } - return bs -} - -func (c Color) appendBg(bs []byte, zero bool) []byte { - - if zero || c&(maskFm|maskFg) != 0 { - bs = append(bs, ';') - } - - // 0- 7 : 40- 47 - // 8-15 : 100-107 - // > 15 : 48;5;val - - switch fg := (c & maskBg) >> shiftBg; { - case fg <= 7: - // '3' and the value itself - bs = append(bs, '4', '0'+byte(fg)) - case fg <= 15: - // '1', '0' and the value itself - bs = append(bs, '1', '0', '0'+byte(fg&^0x08)) // clear bright flag - default: - bs = append(bs, '4', '8', ';', '5', ';') - bs = append(bs, itoa(byte(fg))...) - } - return bs -} - -func (c Color) appendFm9(bs []byte, zero bool) []byte { - - bs = appendCond(bs, c&ItalicFm != 0, - zero || c&(BoldFm|FaintFm) != 0, - '3') - bs = appendCond(bs, c&UnderlineFm != 0, - zero || c&(BoldFm|FaintFm|ItalicFm) != 0, - '4') - // don't combine slow and rapid blink using only - // on of them, preferring slow blink - if c&SlowBlinkFm != 0 { - bs = appendSemi(bs, - zero || c&(BoldFm|FaintFm|ItalicFm|UnderlineFm) != 0, - '5') - } else if c&RapidBlinkFm != 0 { - bs = appendSemi(bs, - zero || c&(BoldFm|FaintFm|ItalicFm|UnderlineFm) != 0, - '6') - } - - // including 1-2 - const mask6i = BoldFm | FaintFm | - ItalicFm | UnderlineFm | - SlowBlinkFm | RapidBlinkFm - - bs = appendCond(bs, c&ReverseFm != 0, - zero || c&(mask6i) != 0, - '7') - bs = appendCond(bs, c&ConcealFm != 0, - zero || c&(mask6i|ReverseFm) != 0, - '8') - bs = appendCond(bs, c&CrossedOutFm != 0, - zero || c&(mask6i|ReverseFm|ConcealFm) != 0, - '9') - - return bs -} - -// append 1;3;38;5;216 like string that represents ANSI -// color of the Color; the zero argument requires -// appending of '0' before to reset previous format -// and colors -func (c Color) appendNos(bs []byte, zero bool) []byte { - - if zero { - bs = append(bs, '0') // reset previous - } - - // formats - // - - if c&maskFm != 0 { - - // 1-2 - - // don't combine bold and faint using only on of them, preferring bold - - if c&BoldFm != 0 { - bs = appendSemi(bs, zero, '1') - } else if c&FaintFm != 0 { - bs = appendSemi(bs, zero, '2') - } - - // 3-9 - - const mask9 = ItalicFm | UnderlineFm | - SlowBlinkFm | RapidBlinkFm | - ReverseFm | ConcealFm | CrossedOutFm - - if c&mask9 != 0 { - bs = c.appendFm9(bs, zero) - } - - // 20-21 - - const ( - mask21 = FrakturFm | DoublyUnderlineFm - mask9i = BoldFm | FaintFm | mask9 - ) - - if c&mask21 != 0 { - bs = appendCond(bs, c&FrakturFm != 0, - zero || c&mask9i != 0, - '2', '0') - bs = appendCond(bs, c&DoublyUnderlineFm != 0, - zero || c&(mask9i|FrakturFm) != 0, - '2', '1') - } - - // 50-53 - - const ( - mask53 = FramedFm | EncircledFm | OverlinedFm - mask21i = mask9i | mask21 - ) - - if c&mask53 != 0 { - bs = appendCond(bs, c&FramedFm != 0, - zero || c&mask21i != 0, - '5', '1') - bs = appendCond(bs, c&EncircledFm != 0, - zero || c&(mask21i|FramedFm) != 0, - '5', '2') - bs = appendCond(bs, 
c&OverlinedFm != 0, - zero || c&(mask21i|FramedFm|EncircledFm) != 0, - '5', '3') - } - - } - - // foreground - if c&maskFg != 0 { - bs = c.appendFg(bs, zero) - } - - // background - if c&maskBg != 0 { - bs = c.appendBg(bs, zero) - } - - return bs -} diff --git a/vendor/github.com/logrusorgru/aurora/disable.png b/vendor/github.com/logrusorgru/aurora/disable.png deleted file mode 100644 index 0dd1d63f..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/disable.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/enable.png b/vendor/github.com/logrusorgru/aurora/enable.png deleted file mode 100644 index a488367c..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/enable.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/gopher_aurora.png b/vendor/github.com/logrusorgru/aurora/gopher_aurora.png deleted file mode 100644 index 8f61bb20..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/gopher_aurora.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/printf.png b/vendor/github.com/logrusorgru/aurora/printf.png deleted file mode 100644 index 5978844a..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/printf.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/simple.png b/vendor/github.com/logrusorgru/aurora/simple.png deleted file mode 100644 index 50edf042..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/simple.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/sprintf.go b/vendor/github.com/logrusorgru/aurora/sprintf.go deleted file mode 100644 index b92d5933..00000000 --- a/vendor/github.com/logrusorgru/aurora/sprintf.go +++ /dev/null @@ -1,68 +0,0 @@ -// -// Copyright (c) 2016-2020 The Aurora Authors. All rights reserved. -// This program is free software. It comes without any warranty, -// to the extent permitted by applicable law. You can redistribute -// it and/or modify it under the terms of the Unlicense. See LICENSE -// file for more details or see below. -// - -// -// This is free and unencumbered software released into the public domain. -// -// Anyone is free to copy, modify, publish, use, compile, sell, or -// distribute this software, either in source code form or as a compiled -// binary, for any purpose, commercial or non-commercial, and by any -// means. -// -// In jurisdictions that recognize copyright laws, the author or authors -// of this software dedicate any and all copyright interest in the -// software to the public domain. We make this dedication for the benefit -// of the public at large and to the detriment of our heirs and -// successors. We intend this dedication to be an overt act of -// relinquishment in perpetuity of all present and future rights to this -// software under copyright law. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -// For more information, please refer to -// - -package aurora - -import ( - "fmt" -) - -// Sprintf allows to use Value as format. 
For example -// -// v := Sprintf(Red("total: +3.5f points"), Blue(3.14)) -// -// In this case "total:" and "points" will be red, but -// 3.14 will be blue. But, in another example -// -// v := Sprintf(Red("total: +3.5f points"), 3.14) -// -// full string will be red. And no way to clear 3.14 to -// default format and color -func Sprintf(format interface{}, args ...interface{}) string { - switch ft := format.(type) { - case string: - return fmt.Sprintf(ft, args...) - case Value: - for i, v := range args { - if val, ok := v.(Value); ok { - args[i] = val.setTail(ft.Color()) - continue - } - } - return fmt.Sprintf(ft.String(), args...) - } - // unknown type of format (we hope it's a string) - return fmt.Sprintf(fmt.Sprint(format), args...) -} diff --git a/vendor/github.com/logrusorgru/aurora/sprintf.png b/vendor/github.com/logrusorgru/aurora/sprintf.png deleted file mode 100644 index df2b2cc0..00000000 Binary files a/vendor/github.com/logrusorgru/aurora/sprintf.png and /dev/null differ diff --git a/vendor/github.com/logrusorgru/aurora/value.go b/vendor/github.com/logrusorgru/aurora/value.go deleted file mode 100644 index feda25af..00000000 --- a/vendor/github.com/logrusorgru/aurora/value.go +++ /dev/null @@ -1,745 +0,0 @@ -// -// Copyright (c) 2016-2020 The Aurora Authors. All rights reserved. -// This program is free software. It comes without any warranty, -// to the extent permitted by applicable law. You can redistribute -// it and/or modify it under the terms of the Unlicense. See LICENSE -// file for more details or see below. -// - -// -// This is free and unencumbered software released into the public domain. -// -// Anyone is free to copy, modify, publish, use, compile, sell, or -// distribute this software, either in source code form or as a compiled -// binary, for any purpose, commercial or non-commercial, and by any -// means. -// -// In jurisdictions that recognize copyright laws, the author or authors -// of this software dedicate any and all copyright interest in the -// software to the public domain. We make this dedication for the benefit -// of the public at large and to the detriment of our heirs and -// successors. We intend this dedication to be an overt act of -// relinquishment in perpetuity of all present and future rights to this -// software under copyright law. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -// For more information, please refer to -// - -package aurora - -import ( - "fmt" - "strconv" - "unicode/utf8" -) - -// A Value represents any printable value -// with it's color -type Value interface { - // String returns string with colors. If there are any color - // or format the string will be terminated with \033[0m - fmt.Stringer - // Format implements fmt.Formatter interface - fmt.Formatter - // Color returns value's color - Color() Color - // Value returns value's value (welcome to the tautology club) - Value() interface{} - - // internals - tail() Color - setTail(Color) Value - - // Bleach returns copy of original value without colors - // - // Deprecated: use Reset instead. 
- Bleach() Value - // Reset colors and formats - Reset() Value - - // - // Formats - // - // - // Bold or increased intensity (1). - Bold() Value - // Faint, decreased intensity, reset the Bold (2). - Faint() Value - // - // DoublyUnderline or Bold off, double-underline - // per ECMA-48 (21). It depends. - DoublyUnderline() Value - // Fraktur, rarely supported (20). - Fraktur() Value - // - // Italic, not widely supported, sometimes - // treated as inverse (3). - Italic() Value - // Underline (4). - Underline() Value - // - // SlowBlink, blinking less than 150 - // per minute (5). - SlowBlink() Value - // RapidBlink, blinking 150+ per minute, - // not widely supported (6). - RapidBlink() Value - // Blink is alias for the SlowBlink. - Blink() Value - // - // Reverse video, swap foreground and - // background colors (7). - Reverse() Value - // Inverse is alias for the Reverse - Inverse() Value - // - // Conceal, hidden, not widely supported (8). - Conceal() Value - // Hidden is alias for the Conceal - Hidden() Value - // - // CrossedOut, characters legible, but - // marked for deletion (9). - CrossedOut() Value - // StrikeThrough is alias for the CrossedOut. - StrikeThrough() Value - // - // Framed (51). - Framed() Value - // Encircled (52). - Encircled() Value - // - // Overlined (53). - Overlined() Value - - // - // Foreground colors - // - // - // Black foreground color (30) - Black() Value - // Red foreground color (31) - Red() Value - // Green foreground color (32) - Green() Value - // Yellow foreground color (33) - Yellow() Value - // Brown foreground color (33) - // - // Deprecated: use Yellow instead, following specification - Brown() Value - // Blue foreground color (34) - Blue() Value - // Magenta foreground color (35) - Magenta() Value - // Cyan foreground color (36) - Cyan() Value - // White foreground color (37) - White() Value - // - // Bright foreground colors - // - // BrightBlack foreground color (90) - BrightBlack() Value - // BrightRed foreground color (91) - BrightRed() Value - // BrightGreen foreground color (92) - BrightGreen() Value - // BrightYellow foreground color (93) - BrightYellow() Value - // BrightBlue foreground color (94) - BrightBlue() Value - // BrightMagenta foreground color (95) - BrightMagenta() Value - // BrightCyan foreground color (96) - BrightCyan() Value - // BrightWhite foreground color (97) - BrightWhite() Value - // - // Other - // - // Index of pre-defined 8-bit foreground color - // from 0 to 255 (38;5;n). - // - // 0- 7: standard colors (as in ESC [ 30–37 m) - // 8- 15: high intensity colors (as in ESC [ 90–97 m) - // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) - // 232-255: grayscale from black to white in 24 steps - // - Index(n uint8) Value - // Gray from 0 to 24. 
- Gray(n uint8) Value - - // - // Background colors - // - // - // BgBlack background color (40) - BgBlack() Value - // BgRed background color (41) - BgRed() Value - // BgGreen background color (42) - BgGreen() Value - // BgYellow background color (43) - BgYellow() Value - // BgBrown background color (43) - // - // Deprecated: use BgYellow instead, following specification - BgBrown() Value - // BgBlue background color (44) - BgBlue() Value - // BgMagenta background color (45) - BgMagenta() Value - // BgCyan background color (46) - BgCyan() Value - // BgWhite background color (47) - BgWhite() Value - // - // Bright background colors - // - // BgBrightBlack background color (100) - BgBrightBlack() Value - // BgBrightRed background color (101) - BgBrightRed() Value - // BgBrightGreen background color (102) - BgBrightGreen() Value - // BgBrightYellow background color (103) - BgBrightYellow() Value - // BgBrightBlue background color (104) - BgBrightBlue() Value - // BgBrightMagenta background color (105) - BgBrightMagenta() Value - // BgBrightCyan background color (106) - BgBrightCyan() Value - // BgBrightWhite background color (107) - BgBrightWhite() Value - // - // Other - // - // BgIndex of 8-bit pre-defined background color - // from 0 to 255 (48;5;n). - // - // 0- 7: standard colors (as in ESC [ 40–47 m) - // 8- 15: high intensity colors (as in ESC [100–107 m) - // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) - // 232-255: grayscale from black to white in 24 steps - // - BgIndex(n uint8) Value - // BgGray from 0 to 24. - BgGray(n uint8) Value - - // - // Special - // - // Colorize removes existing colors and - // formats of the argument and applies given. - Colorize(color Color) Value -} - -// Value without colors - -type valueClear struct { - value interface{} -} - -func (vc valueClear) String() string { return fmt.Sprint(vc.value) } - -func (vc valueClear) Color() Color { return 0 } -func (vc valueClear) Value() interface{} { return vc.value } - -func (vc valueClear) tail() Color { return 0 } -func (vc valueClear) setTail(Color) Value { return vc } - -func (vc valueClear) Bleach() Value { return vc } -func (vc valueClear) Reset() Value { return vc } - -func (vc valueClear) Bold() Value { return vc } -func (vc valueClear) Faint() Value { return vc } -func (vc valueClear) DoublyUnderline() Value { return vc } -func (vc valueClear) Fraktur() Value { return vc } -func (vc valueClear) Italic() Value { return vc } -func (vc valueClear) Underline() Value { return vc } -func (vc valueClear) SlowBlink() Value { return vc } -func (vc valueClear) RapidBlink() Value { return vc } -func (vc valueClear) Blink() Value { return vc } -func (vc valueClear) Reverse() Value { return vc } -func (vc valueClear) Inverse() Value { return vc } -func (vc valueClear) Conceal() Value { return vc } -func (vc valueClear) Hidden() Value { return vc } -func (vc valueClear) CrossedOut() Value { return vc } -func (vc valueClear) StrikeThrough() Value { return vc } -func (vc valueClear) Framed() Value { return vc } -func (vc valueClear) Encircled() Value { return vc } -func (vc valueClear) Overlined() Value { return vc } - -func (vc valueClear) Black() Value { return vc } -func (vc valueClear) Red() Value { return vc } -func (vc valueClear) Green() Value { return vc } -func (vc valueClear) Yellow() Value { return vc } -func (vc valueClear) Brown() Value { return vc } -func (vc valueClear) Blue() Value { return vc } -func (vc valueClear) Magenta() Value { return vc } -func (vc valueClear) 
Cyan() Value { return vc } -func (vc valueClear) White() Value { return vc } -func (vc valueClear) BrightBlack() Value { return vc } -func (vc valueClear) BrightRed() Value { return vc } -func (vc valueClear) BrightGreen() Value { return vc } -func (vc valueClear) BrightYellow() Value { return vc } -func (vc valueClear) BrightBlue() Value { return vc } -func (vc valueClear) BrightMagenta() Value { return vc } -func (vc valueClear) BrightCyan() Value { return vc } -func (vc valueClear) BrightWhite() Value { return vc } -func (vc valueClear) Index(uint8) Value { return vc } -func (vc valueClear) Gray(uint8) Value { return vc } - -func (vc valueClear) BgBlack() Value { return vc } -func (vc valueClear) BgRed() Value { return vc } -func (vc valueClear) BgGreen() Value { return vc } -func (vc valueClear) BgYellow() Value { return vc } -func (vc valueClear) BgBrown() Value { return vc } -func (vc valueClear) BgBlue() Value { return vc } -func (vc valueClear) BgMagenta() Value { return vc } -func (vc valueClear) BgCyan() Value { return vc } -func (vc valueClear) BgWhite() Value { return vc } -func (vc valueClear) BgBrightBlack() Value { return vc } -func (vc valueClear) BgBrightRed() Value { return vc } -func (vc valueClear) BgBrightGreen() Value { return vc } -func (vc valueClear) BgBrightYellow() Value { return vc } -func (vc valueClear) BgBrightBlue() Value { return vc } -func (vc valueClear) BgBrightMagenta() Value { return vc } -func (vc valueClear) BgBrightCyan() Value { return vc } -func (vc valueClear) BgBrightWhite() Value { return vc } -func (vc valueClear) BgIndex(uint8) Value { return vc } -func (vc valueClear) BgGray(uint8) Value { return vc } -func (vc valueClear) Colorize(Color) Value { return vc } - -func (vc valueClear) Format(s fmt.State, verb rune) { - // it's enough for many cases (%-+020.10f) - // % - 1 - // availFlags - 3 (5) - // width - 2 - // prec - 3 (.23) - // verb - 1 - // -------------- - // 10 - format := make([]byte, 1, 10) - format[0] = '%' - var f byte - for i := 0; i < len(availFlags); i++ { - if f = availFlags[i]; s.Flag(int(f)) { - format = append(format, f) - } - } - var width, prec int - var ok bool - if width, ok = s.Width(); ok { - format = strconv.AppendInt(format, int64(width), 10) - } - if prec, ok = s.Precision(); ok { - format = append(format, '.') - format = strconv.AppendInt(format, int64(prec), 10) - } - if verb > utf8.RuneSelf { - format = append(format, string(verb)...) 
- } else { - format = append(format, byte(verb)) - } - fmt.Fprintf(s, string(format), vc.value) -} - -// Value within colors - -type value struct { - value interface{} // value as it - color Color // this color - tailColor Color // tail color -} - -func (v value) String() string { - if v.color != 0 { - if v.tailColor != 0 { - return esc + v.color.Nos(true) + "m" + - fmt.Sprint(v.value) + - esc + v.tailColor.Nos(true) + "m" - } - return esc + v.color.Nos(false) + "m" + fmt.Sprint(v.value) + clear - } - return fmt.Sprint(v.value) -} - -func (v value) Color() Color { return v.color } - -func (v value) Bleach() Value { - v.color, v.tailColor = 0, 0 - return v -} - -func (v value) Reset() Value { - v.color, v.tailColor = 0, 0 - return v -} - -func (v value) tail() Color { return v.tailColor } - -func (v value) setTail(t Color) Value { - v.tailColor = t - return v -} - -func (v value) Value() interface{} { return v.value } - -func (v value) Format(s fmt.State, verb rune) { - - // it's enough for many cases (%-+020.10f) - // % - 1 - // availFlags - 3 (5) - // width - 2 - // prec - 3 (.23) - // verb - 1 - // -------------- - // 10 - // + - // \033[ 5 - // 0;1;3;4;5;7;8;9;20;21;51;52;53 30 - // 38;5;216 8 - // 48;5;216 8 - // m 1 - // + - // \033[0m 7 - // - // x2 (possible tail color) - // - // 10 + 59 * 2 = 128 - - format := make([]byte, 0, 128) - if v.color != 0 { - format = append(format, esc...) - format = v.color.appendNos(format, v.tailColor != 0) - format = append(format, 'm') - } - format = append(format, '%') - var f byte - for i := 0; i < len(availFlags); i++ { - if f = availFlags[i]; s.Flag(int(f)) { - format = append(format, f) - } - } - var width, prec int - var ok bool - if width, ok = s.Width(); ok { - format = strconv.AppendInt(format, int64(width), 10) - } - if prec, ok = s.Precision(); ok { - format = append(format, '.') - format = strconv.AppendInt(format, int64(prec), 10) - } - if verb > utf8.RuneSelf { - format = append(format, string(verb)...) - } else { - format = append(format, byte(verb)) - } - if v.color != 0 { - if v.tailColor != 0 { - // set next (previous) format clearing current one - format = append(format, esc...) - format = v.tailColor.appendNos(format, true) - format = append(format, 'm') - } else { - format = append(format, clear...) 
// just clear - } - } - fmt.Fprintf(s, string(format), v.value) -} - -func (v value) Bold() Value { - v.color = (v.color &^ FaintFm) | BoldFm - return v -} - -func (v value) Faint() Value { - v.color = (v.color &^ BoldFm) | FaintFm - return v -} - -func (v value) DoublyUnderline() Value { - v.color |= DoublyUnderlineFm - return v -} - -func (v value) Fraktur() Value { - v.color |= FrakturFm - return v -} - -func (v value) Italic() Value { - v.color |= ItalicFm - return v -} - -func (v value) Underline() Value { - v.color |= UnderlineFm - return v -} - -func (v value) SlowBlink() Value { - v.color = (v.color &^ RapidBlinkFm) | SlowBlinkFm - return v -} - -func (v value) RapidBlink() Value { - v.color = (v.color &^ SlowBlinkFm) | RapidBlinkFm - return v -} - -func (v value) Blink() Value { - return v.SlowBlink() -} - -func (v value) Reverse() Value { - v.color |= ReverseFm - return v -} - -func (v value) Inverse() Value { - return v.Reverse() -} - -func (v value) Conceal() Value { - v.color |= ConcealFm - return v -} - -func (v value) Hidden() Value { - return v.Conceal() -} - -func (v value) CrossedOut() Value { - v.color |= CrossedOutFm - return v -} - -func (v value) StrikeThrough() Value { - return v.CrossedOut() -} - -func (v value) Framed() Value { - v.color |= FramedFm - return v -} - -func (v value) Encircled() Value { - v.color |= EncircledFm - return v -} - -func (v value) Overlined() Value { - v.color |= OverlinedFm - return v -} - -func (v value) Black() Value { - v.color = (v.color &^ maskFg) | BlackFg - return v -} - -func (v value) Red() Value { - v.color = (v.color &^ maskFg) | RedFg - return v -} - -func (v value) Green() Value { - v.color = (v.color &^ maskFg) | GreenFg - return v -} - -func (v value) Yellow() Value { - v.color = (v.color &^ maskFg) | YellowFg - return v -} - -func (v value) Brown() Value { - return v.Yellow() -} - -func (v value) Blue() Value { - v.color = (v.color &^ maskFg) | BlueFg - return v -} - -func (v value) Magenta() Value { - v.color = (v.color &^ maskFg) | MagentaFg - return v -} - -func (v value) Cyan() Value { - v.color = (v.color &^ maskFg) | CyanFg - return v -} - -func (v value) White() Value { - v.color = (v.color &^ maskFg) | WhiteFg - return v -} - -func (v value) BrightBlack() Value { - v.color = (v.color &^ maskFg) | BrightFg | BlackFg - return v -} - -func (v value) BrightRed() Value { - v.color = (v.color &^ maskFg) | BrightFg | RedFg - return v -} - -func (v value) BrightGreen() Value { - v.color = (v.color &^ maskFg) | BrightFg | GreenFg - return v -} - -func (v value) BrightYellow() Value { - v.color = (v.color &^ maskFg) | BrightFg | YellowFg - return v -} - -func (v value) BrightBlue() Value { - v.color = (v.color &^ maskFg) | BrightFg | BlueFg - return v -} - -func (v value) BrightMagenta() Value { - v.color = (v.color &^ maskFg) | BrightFg | MagentaFg - return v -} - -func (v value) BrightCyan() Value { - v.color = (v.color &^ maskFg) | BrightFg | CyanFg - return v -} - -func (v value) BrightWhite() Value { - v.color = (v.color &^ maskFg) | BrightFg | WhiteFg - return v -} - -func (v value) Index(n uint8) Value { - v.color = (v.color &^ maskFg) | (Color(n) << shiftFg) | flagFg - return v -} - -func (v value) Gray(n uint8) Value { - if n > 23 { - n = 23 - } - v.color = (v.color &^ maskFg) | (Color(232+n) << shiftFg) | flagFg - return v -} - -func (v value) BgBlack() Value { - v.color = (v.color &^ maskBg) | BlackBg - return v -} - -func (v value) BgRed() Value { - v.color = (v.color &^ maskBg) | RedBg - return v -} - -func (v 
value) BgGreen() Value { - v.color = (v.color &^ maskBg) | GreenBg - return v -} - -func (v value) BgYellow() Value { - v.color = (v.color &^ maskBg) | YellowBg - return v -} - -func (v value) BgBrown() Value { - return v.BgYellow() -} - -func (v value) BgBlue() Value { - v.color = (v.color &^ maskBg) | BlueBg - return v -} - -func (v value) BgMagenta() Value { - v.color = (v.color &^ maskBg) | MagentaBg - return v -} - -func (v value) BgCyan() Value { - v.color = (v.color &^ maskBg) | CyanBg - return v -} - -func (v value) BgWhite() Value { - v.color = (v.color &^ maskBg) | WhiteBg - return v -} - -func (v value) BgBrightBlack() Value { - v.color = (v.color &^ maskBg) | BrightBg | BlackBg - return v -} - -func (v value) BgBrightRed() Value { - v.color = (v.color &^ maskBg) | BrightBg | RedBg - return v -} - -func (v value) BgBrightGreen() Value { - v.color = (v.color &^ maskBg) | BrightBg | GreenBg - return v -} - -func (v value) BgBrightYellow() Value { - v.color = (v.color &^ maskBg) | BrightBg | YellowBg - return v -} - -func (v value) BgBrightBlue() Value { - v.color = (v.color &^ maskBg) | BrightBg | BlueBg - return v -} - -func (v value) BgBrightMagenta() Value { - v.color = (v.color &^ maskBg) | BrightBg | MagentaBg - return v -} - -func (v value) BgBrightCyan() Value { - v.color = (v.color &^ maskBg) | BrightBg | CyanBg - return v -} - -func (v value) BgBrightWhite() Value { - v.color = (v.color &^ maskBg) | BrightBg | WhiteBg - return v -} - -func (v value) BgIndex(n uint8) Value { - v.color = (v.color &^ maskBg) | (Color(n) << shiftBg) | flagBg - return v -} - -func (v value) BgGray(n uint8) Value { - if n > 23 { - n = 23 - } - v.color = (v.color &^ maskBg) | (Color(232+n) << shiftBg) | flagBg - return v -} - -func (v value) Colorize(color Color) Value { - v.color = color - return v -} diff --git a/vendor/github.com/logrusorgru/aurora/wrap.go b/vendor/github.com/logrusorgru/aurora/wrap.go deleted file mode 100644 index 44e1aa6c..00000000 --- a/vendor/github.com/logrusorgru/aurora/wrap.go +++ /dev/null @@ -1,558 +0,0 @@ -// -// Copyright (c) 2016-2020 The Aurora Authors. All rights reserved. -// This program is free software. It comes without any warranty, -// to the extent permitted by applicable law. You can redistribute -// it and/or modify it under the terms of the Unlicense. See LICENSE -// file for more details or see below. -// - -// -// This is free and unencumbered software released into the public domain. -// -// Anyone is free to copy, modify, publish, use, compile, sell, or -// distribute this software, either in source code form or as a compiled -// binary, for any purpose, commercial or non-commercial, and by any -// means. -// -// In jurisdictions that recognize copyright laws, the author or authors -// of this software dedicate any and all copyright interest in the -// software to the public domain. We make this dedication for the benefit -// of the public at large and to the detriment of our heirs and -// successors. We intend this dedication to be an overt act of -// relinquishment in perpetuity of all present and future rights to this -// software under copyright law. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
-// IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -// OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -// For more information, please refer to -// - -package aurora - -// Colorize wraps given value into Value with -// given colors. For example -// -// s := Colorize("some", BlueFg|GreenBg|BoldFm) -// -// returns a Value with blue foreground, green -// background and bold. Unlike functions like -// Red/BgBlue/Bold etc. This function clears -// all previous colors and formats. Thus -// -// s := Colorize(Red("some"), BgBlue) -// -// clears red color from value -func Colorize(arg interface{}, color Color) Value { - if val, ok := arg.(value); ok { - val.color = color - return val - } - return value{arg, color, 0} -} - -// Reset wraps given argument returning Value -// without formats and colors. -func Reset(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Reset() - } - return value{value: arg} -} - -// -// Formats -// - -// Bold or increased intensity (1). -func Bold(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Bold() - } - return value{value: arg, color: BoldFm} -} - -// Faint decreases intensity (2). -// The Faint rejects the Bold. -func Faint(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Faint() - } - return value{value: arg, color: FaintFm} -} - -// DoublyUnderline or Bold off, double-underline -// per ECMA-48 (21). -func DoublyUnderline(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.DoublyUnderline() - } - return value{value: arg, color: DoublyUnderlineFm} -} - -// Fraktur is rarely supported (20). -func Fraktur(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Fraktur() - } - return value{value: arg, color: FrakturFm} -} - -// Italic is not widely supported, sometimes -// treated as inverse (3). -func Italic(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Italic() - } - return value{value: arg, color: ItalicFm} -} - -// Underline (4). -func Underline(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Underline() - } - return value{value: arg, color: UnderlineFm} -} - -// SlowBlink makes text blink less than -// 150 per minute (5). -func SlowBlink(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.SlowBlink() - } - return value{value: arg, color: SlowBlinkFm} -} - -// RapidBlink makes text blink 150+ per -// minute. It is not widely supported (6). -func RapidBlink(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.RapidBlink() - } - return value{value: arg, color: RapidBlinkFm} -} - -// Blink is alias for the SlowBlink. -func Blink(arg interface{}) Value { - return SlowBlink(arg) -} - -// Reverse video, swap foreground and -// background colors (7). -func Reverse(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Reverse() - } - return value{value: arg, color: ReverseFm} -} - -// Inverse is alias for the Reverse -func Inverse(arg interface{}) Value { - return Reverse(arg) -} - -// Conceal hides text, preserving an ability to select -// the text and copy it. It is not widely supported (8). 
-func Conceal(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Conceal() - } - return value{value: arg, color: ConcealFm} -} - -// Hidden is alias for the Conceal -func Hidden(arg interface{}) Value { - return Conceal(arg) -} - -// CrossedOut makes characters legible, but -// marked for deletion (9). -func CrossedOut(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.CrossedOut() - } - return value{value: arg, color: CrossedOutFm} -} - -// StrikeThrough is alias for the CrossedOut. -func StrikeThrough(arg interface{}) Value { - return CrossedOut(arg) -} - -// Framed (51). -func Framed(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Framed() - } - return value{value: arg, color: FramedFm} -} - -// Encircled (52). -func Encircled(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Encircled() - } - return value{value: arg, color: EncircledFm} -} - -// Overlined (53). -func Overlined(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Overlined() - } - return value{value: arg, color: OverlinedFm} -} - -// -// Foreground colors -// -// - -// Black foreground color (30) -func Black(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Black() - } - return value{value: arg, color: BlackFg} -} - -// Red foreground color (31) -func Red(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Red() - } - return value{value: arg, color: RedFg} -} - -// Green foreground color (32) -func Green(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Green() - } - return value{value: arg, color: GreenFg} -} - -// Yellow foreground color (33) -func Yellow(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Yellow() - } - return value{value: arg, color: YellowFg} -} - -// Brown foreground color (33) -// -// Deprecated: use Yellow instead, following specification -func Brown(arg interface{}) Value { - return Yellow(arg) -} - -// Blue foreground color (34) -func Blue(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Blue() - } - return value{value: arg, color: BlueFg} -} - -// Magenta foreground color (35) -func Magenta(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Magenta() - } - return value{value: arg, color: MagentaFg} -} - -// Cyan foreground color (36) -func Cyan(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Cyan() - } - return value{value: arg, color: CyanFg} -} - -// White foreground color (37) -func White(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.White() - } - return value{value: arg, color: WhiteFg} -} - -// -// Bright foreground colors -// - -// BrightBlack foreground color (90) -func BrightBlack(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightBlack() - } - return value{value: arg, color: BrightFg | BlackFg} -} - -// BrightRed foreground color (91) -func BrightRed(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightRed() - } - return value{value: arg, color: BrightFg | RedFg} -} - -// BrightGreen foreground color (92) -func BrightGreen(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightGreen() - } - return value{value: arg, color: BrightFg | GreenFg} -} - -// BrightYellow foreground color (93) -func BrightYellow(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightYellow() - } - return value{value: arg, color: 
BrightFg | YellowFg} -} - -// BrightBlue foreground color (94) -func BrightBlue(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightBlue() - } - return value{value: arg, color: BrightFg | BlueFg} -} - -// BrightMagenta foreground color (95) -func BrightMagenta(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightMagenta() - } - return value{value: arg, color: BrightFg | MagentaFg} -} - -// BrightCyan foreground color (96) -func BrightCyan(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightCyan() - } - return value{value: arg, color: BrightFg | CyanFg} -} - -// BrightWhite foreground color (97) -func BrightWhite(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BrightWhite() - } - return value{value: arg, color: BrightFg | WhiteFg} -} - -// -// Other -// - -// Index of pre-defined 8-bit foreground color -// from 0 to 255 (38;5;n). -// -// 0- 7: standard colors (as in ESC [ 30–37 m) -// 8- 15: high intensity colors (as in ESC [ 90–97 m) -// 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) -// 232-255: grayscale from black to white in 24 steps -// -func Index(n uint8, arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Index(n) - } - return value{value: arg, color: (Color(n) << shiftFg) | flagFg} -} - -// Gray from 0 to 24. -func Gray(n uint8, arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.Gray(n) - } - if n > 23 { - n = 23 - } - return value{value: arg, color: (Color(232+n) << shiftFg) | flagFg} -} - -// -// Background colors -// -// - -// BgBlack background color (40) -func BgBlack(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBlack() - } - return value{value: arg, color: BlackBg} -} - -// BgRed background color (41) -func BgRed(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgRed() - } - return value{value: arg, color: RedBg} -} - -// BgGreen background color (42) -func BgGreen(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgGreen() - } - return value{value: arg, color: GreenBg} -} - -// BgYellow background color (43) -func BgYellow(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgYellow() - } - return value{value: arg, color: YellowBg} -} - -// BgBrown background color (43) -// -// Deprecated: use BgYellow instead, following specification -func BgBrown(arg interface{}) Value { - return BgYellow(arg) -} - -// BgBlue background color (44) -func BgBlue(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBlue() - } - return value{value: arg, color: BlueBg} -} - -// BgMagenta background color (45) -func BgMagenta(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgMagenta() - } - return value{value: arg, color: MagentaBg} -} - -// BgCyan background color (46) -func BgCyan(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgCyan() - } - return value{value: arg, color: CyanBg} -} - -// BgWhite background color (47) -func BgWhite(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgWhite() - } - return value{value: arg, color: WhiteBg} -} - -// -// Bright background colors -// - -// BgBrightBlack background color (100) -func BgBrightBlack(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightBlack() - } - return value{value: arg, color: BrightBg | BlackBg} -} - -// BgBrightRed background color (101) -func 
BgBrightRed(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightRed() - } - return value{value: arg, color: BrightBg | RedBg} -} - -// BgBrightGreen background color (102) -func BgBrightGreen(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightGreen() - } - return value{value: arg, color: BrightBg | GreenBg} -} - -// BgBrightYellow background color (103) -func BgBrightYellow(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightYellow() - } - return value{value: arg, color: BrightBg | YellowBg} -} - -// BgBrightBlue background color (104) -func BgBrightBlue(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightBlue() - } - return value{value: arg, color: BrightBg | BlueBg} -} - -// BgBrightMagenta background color (105) -func BgBrightMagenta(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightMagenta() - } - return value{value: arg, color: BrightBg | MagentaBg} -} - -// BgBrightCyan background color (106) -func BgBrightCyan(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightCyan() - } - return value{value: arg, color: BrightBg | CyanBg} -} - -// BgBrightWhite background color (107) -func BgBrightWhite(arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgBrightWhite() - } - return value{value: arg, color: BrightBg | WhiteBg} -} - -// -// Other -// - -// BgIndex of 8-bit pre-defined background color -// from 0 to 255 (48;5;n). -// -// 0- 7: standard colors (as in ESC [ 40–47 m) -// 8- 15: high intensity colors (as in ESC [100–107 m) -// 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5) -// 232-255: grayscale from black to white in 24 steps -// -func BgIndex(n uint8, arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgIndex(n) - } - return value{value: arg, color: (Color(n) << shiftBg) | flagBg} -} - -// BgGray from 0 to 24. -func BgGray(n uint8, arg interface{}) Value { - if val, ok := arg.(Value); ok { - return val.BgGray(n) - } - if n > 23 { - n = 23 - } - return value{value: arg, color: (Color(n+232) << shiftBg) | flagBg} -} diff --git a/vendor/github.com/magefile/mage/sh/cmd.go b/vendor/github.com/magefile/mage/sh/cmd.go index 35d232a4..312de65a 100644 --- a/vendor/github.com/magefile/mage/sh/cmd.go +++ b/vendor/github.com/magefile/mage/sh/cmd.go @@ -89,14 +89,14 @@ func OutputWith(env map[string]string, cmd string, args ...string) (string, erro return strings.TrimSuffix(buf.String(), "\n"), err } -// Exec executes the command, piping its stderr to mage's stderr and -// piping its stdout to the given writer. If the command fails, it will return -// an error that, if returned from a target or mg.Deps call, will cause mage to -// exit with the same code as the command failed with. Env is a list of -// environment variables to set when running the command, these override the -// current environment variables set (which are also passed to the command). cmd -// and args may include references to environment variables in $FOO format, in -// which case these will be expanded before the command is run. +// Exec executes the command, piping its stdout and stderr to the given +// writers. If the command fails, it will return an error that, if returned +// from a target or mg.Deps call, will cause mage to exit with the same code as +// the command failed with. 
Env is a list of environment variables to set when +// running the command, these override the current environment variables set +// (which are also passed to the command). cmd and args may include references +// to environment variables in $FOO format, in which case these will be +// expanded before the command is run. // // Ran reports if the command ran (rather than was not found or not executable). // Code reports the exit code the command returned if it ran. If err == nil, ran diff --git a/vendor/github.com/pterm/pterm/.gitignore b/vendor/github.com/pterm/pterm/.gitignore index e4354651..dc83dde7 100644 --- a/vendor/github.com/pterm/pterm/.gitignore +++ b/vendor/github.com/pterm/pterm/.gitignore @@ -15,7 +15,7 @@ vendor/ # This is where we test stuff -/experimenting/ +/experimenting/* /.history /.vscode diff --git a/vendor/github.com/pterm/pterm/.golangci.yml b/vendor/github.com/pterm/pterm/.golangci.yml index 57ba35d8..a964631f 100644 --- a/vendor/github.com/pterm/pterm/.golangci.yml +++ b/vendor/github.com/pterm/pterm/.golangci.yml @@ -28,7 +28,6 @@ linters: - gosec - govet - ineffassign - - interfacer - unconvert - gosimple - godox diff --git a/vendor/github.com/pterm/pterm/CHANGELOG.md b/vendor/github.com/pterm/pterm/CHANGELOG.md deleted file mode 100644 index d8f4c8e3..00000000 --- a/vendor/github.com/pterm/pterm/CHANGELOG.md +++ /dev/null @@ -1,1237 +0,0 @@ - -## [Unreleased] - -### Features -- **logger:** implemented structured logging -- **logger:** implemented structured logging -- **logger:** implemented structured logging -- **logger:** added logger -- **logger:** create logger -- **rgb:** made it possible to use RGB colors as background -- **rgb:** made it possible to use RGB colors as background -- **rgb:** made it possible to use RGB colors as background - -### Bug Fixes -- **rgb:** fix Fade maxValue == current not displaying the last color - - - -## [v0.12.57] - 2023-03-28 -### Code Refactoring -- use `pterm.Print` instead of `fmt.Print` functions - - - -## [v0.12.56] - 2023-03-14 -### Bug Fixes -- **table:** fixed panic when multiple lines contained color in a single row - - - -## [v0.12.55] - 2023-03-04 -### Features -- **table:** multiline support for table printer -- **table:** multiline support for table printer - -### Code Refactoring -- **table:** fixed linting - - - -## [v0.12.54] - 2023-01-22 -### Bug Fixes -- **tree:** print top node [#443](https://github.com/pterm/pterm/issues/443) - - - -## [v0.12.53] - 2023-01-05 -### Features -- **color:** added `color.ToStyle()` -- **color:** added `color.ToStyle()` -- **progressbar:** added optional title to `Start` method - -### Bug Fixes -- **prefix:** fixed line numbers in different print functions - - - -## [v0.12.52] - 2023-01-05 -### Features -- **multiselect:** added theme support for checkmarks -- **multiselect:** added theme support for checkmarks - -### Test -- **multiselect:** fixed test - -### Code Refactoring -- **progressbar:** make add more safe - - - -## [v0.12.51] - 2022-12-24 -### Bug Fixes -- Make sure the confirm printer can clean up after Ctrl+C - - - -## [v0.12.50] - 2022-11-22 -### Bug Fixes -- revert original test & add new test -- slice bounds out of range on select printer - - - -## [v0.12.49] - 2022-10-03 - - -## [v0.12.48] - 2022-10-02 -### Features -- custom select/confirm key for interactive printer -- add flag to disable filter/search for interactive printer - - - -## [v0.12.47] - 2022-09-19 -### Features -- adding interactive continue printer - -### Bug Fixes -- typo -- append the selected 
value to the prompt - -### Code Refactoring -- ignore invalid custom handles -- initiazile handles on getSuffix -- comment format -- address renaming PR comments -- show full handles by default -- use a map for the options - -### Reverts -- refactor: use a map for the options - - - -## [v0.12.46] - 2022-09-05 -### Features -- **putils:** add `CenterText` in putils - -### Bug Fixes -- **textinput:** fixed overwriting the default values - - - -## [v0.12.45] - 2022-07-26 -### Bug Fixes -- make sure the interactive printers can cleanup after Ctrl+C -- the interactive confirm answers should match the confirm/reject text - -### Test -- add tests for custom answers - - - -## [v0.12.44] - 2022-07-22 - - -## [v0.12.43] - 2022-07-17 -### Bug Fixes -- **spinner:** fix line didn't clear properly -- **table:** fixed column length calculation for Chinese strings - - - -## [v0.12.42] - 2022-06-21 -### Features -- **input:** added text input printer - - - -## [v0.12.41] - 2022-04-12 - - -## [v0.12.40] - 2022-03-28 -### Features -- added a custom writer for all printers - - - -## [v0.12.39] - 2022-03-18 -### Features -- use fallback color in `BigTextPrinter` when `RGB` is not supported - -### Test -- fix `BigTextPrinter` test -- removed `testdata` -- removed snapshot testing - - - -## [v0.12.38] - 2022-03-09 -### Features -- added `NewLettersFromStringWithRGB` -- added `NewLettersFromStringWithRGB` - -### Test -- **bigtext:** fix formatting error in bigtext test - - - -## [v0.12.37] - 2022-02-17 -### Features -- **progressbar:** Add option to set the `MaxWidth` of the progressbar - -### Test -- **progressbar:** added 100% test coverage for new features - -### Code Refactoring -- **putils:** Improved styling - - - -## [v0.12.36] - 2022-02-01 -### Features -- proposal horizontal table header row and row separators fixes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators changes -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators -- proposal horizontal table header row and row separators - - - -## [v0.12.35] - 2022-02-01 -### Code Refactoring -- fix linting -- regenerate snapshots - - - -## [v0.12.34] - 2022-01-16 -### Bug Fixes -- **progressbar:** refresh progressbars on every PTerm print ([#302](https://github.com/pterm/pterm/issues/302)) - -### Test -- removed `AreaPrinter` test output -- **table:** changed mock reader from `os.Stdin` to `outBuf` - - - -## [v0.12.33] - 2021-10-24 -### Features -- add `PrintOnErrorf` for every `TextPrinter` ([#279](https://github.com/pterm/pterm/issues/279)) -- **coverage:** add unit test -- **progressbar:** made updating the progressbar title easier. 
([#267](https://github.com/pterm/pterm/issues/267)) -- **table:** increase test coverage -- **table:** revamp to follow practice -- **table:** add support for right data alignment - -### Bug Fixes -- **idea:** revert unwanted config changes -- **linter:** do linter recommendation to delete fallthrough - - - -## [v0.12.32] - 2021-10-15 -### Features -- added `AreaPrinter.Clear()` - -### Bug Fixes -- progressbar method name -- **header:** fixed length calculation for Chinese strings - -### Code Refactoring -- change bitSize size - - - -## [v0.12.31] - 2021-09-21 -### Features -- **prefix:** added `LineNumberOffset` to `PrefixPrinter` - - - -## [v0.12.30] - 2021-08-16 -### Bug Fixes -- **style:** resetting to previous color also resets attributes - -### Code Refactoring -- adapt new testza function name - - - -## [v0.12.29] - 2021-07-19 -### Features -- **putils:** add `PrintAverageExecutionTime` - -### Test -- fix rgb error test -- fix internal test import cycle -- move tests into own package - -### Code Refactoring -- replace `testify` with `testza` - - - -## [v0.12.28] - 2021-07-17 -### Features -- **spinner:** add option to show a timer - -### Bug Fixes -- **bar chart:** fix panic when rendering empty horizontal bar chart - -### Test -- **spinner:** try to fix RawOutput text -- **spinner:** add raw output test - -### Code Refactoring -- **spinner:** better raw output logic -- **spinner:** refactor - - - -## [v0.12.27] - 2021-07-05 -### Bug Fixes -- **style:** fix multiline style coloring - -### Test -- **style:** fix multiline style coloring -- **style:** fix multiline style coloring - - - -## [v0.12.26] - 2021-07-01 -### Bug Fixes -- **spinner:** Override previous text in `UpdateText` - - - -## [v0.12.25] - 2021-07-01 -### Features -- **table:** add `Boxed` option - -### Test -- add tests for boxed `TablePrinter` - - - -## [v0.12.24] - 2021-06-13 -### Features -- **boxprinter:** replace line breaks in title with space -- **boxprinter:** add title center position to `BoxPrinter` -- **boxprinter:** add title & title position to `BoxPrinter` -- **boxprinter:** add title & title position to `BoxPrinter` -- **putils:** add `TableDataFromSeparatedValues` -- **putils:** add `TableDataFromTSV` -- **putils:** add `TableDataFromCSV` -- **putils:** add function to convert TSV to `TableData` -- **putils:** add function to convert CSV to `TableData` - -### Test -- add test for putils `TableData` generation -- **boxprinter:** add tests for title center position to `BoxPrinter` -- **boxprinter:** add tests for title & title position - -### Code Refactoring -- **boxprinter:** prefix title positions with `Title` -- **putils:** add `rowSeparator` to `TableFromSeparatedValues` - - - -## [v0.12.23] - 2021-06-07 -### Features -- Add util functions to create tables from slices of structs ([#217](https://github.com/pterm/pterm/issues/217)) - -### Bug Fixes -- **headerprinter:** don't panic if content width > terminal width - -### Test -- **prefix:** `pterm.Error` default no line number shown - -### Code Refactoring -- **prefix:** `pterm.Error` default no line number shown - - - -## [v0.12.22] - 2021-05-30 -### Features -- make spinner update faster - -### Performance Improvements -- improve performance of `SpinnerPrinter` - - - -## [v0.12.21] - 2021-05-30 -### Features -- print lines above active spinners -- **putils:** add `DownloadFileWithProgressbar` - -### Test -- clear active spinners after tests complete - -### Code Refactoring -- **putils:** change internal variable name - - - -## [v0.12.20] - 
2021-05-29 -### Features -- force color output by default - - - -## [v0.12.19] - 2021-05-29 -### Features -- add `PrintOnError` for all printers and interface -- **putils:** add `putils` package ([#206](https://github.com/pterm/pterm/issues/206)) - -### Bug Fixes -- **header:** fix multiline header - -### Test -- add tests for all printers for `PrintOnError` - -### Code Refactoring -- make `PrintOnError` return `*TextPrinter` -- **area:** better height calculation - - - -## [v0.12.18] - 2021-05-22 -### Features -- add `AreaPrinter` -- **area:** add `Center` option -- **area:** add `Fullscreen` option -- **area:** add `GetContent` function -- **area:** add `AreaPrinter` - -### Test -- **area:** fix tests for `AreaPrinter` -- **area:** add `AreaPrinter` tests - -### Code Refactoring -- fix linting errors - - - -## [v0.12.17] - 2021-05-14 -### Bug Fixes -- fix `pterm.Fatal.Printfln` not panicking -- **prefix:** fix `pterm.Fatal.Printfln` not panicking and had output in debug mode - -### Test -- **prefix:** add tests for `Sprintfln` and `Printfln` function when in debug mode - - - -## [v0.12.16] - 2021-05-13 -### Code Refactoring -- **prefix:** make `PrintOnError` accept multiple inputs - - - -## [v0.12.15] - 2021-05-13 -### Features -- add raw output mode for `BarChart` -- add disable styling boolean option -- **bigtext:** add raw output mode -- **centerprinter:** add raw output mode -- **headerprinter:** add raw output mode -- **panelprinter:** add raw output mode -- **paragraph:** add raw output mode -- **prefix:** add `PrintIfError` -- **prefix:** add raw output mode -- **progressbar:** add raw output mode -- **spinner:** add raw output mode - -### Bug Fixes -- **prefix:** fix `PrintOnError` - -### Test -- add tests with `RawOutput` enabled -- add interface tests for `Color` and `RGB` -- added tests for `DisableStyling` and `EnableStyling` - -### Code Refactoring -- correct behaviour of Enable-/DisableStyling -- fix variable names - - - -## [v0.12.14] - 2021-05-09 -### Features -- **basic-text:** add `Sprintfln` and `Printfln` function -- **boxprinter:** add `Sprintfln` and `Printfln` function -- **centerprinter:** add `Sprintfln` and `Printfln` function -- **color:** add `Sprintfln` and `Printfln` function -- **header:** add `Sprintfln` and `Printfln` function -- **paragraph:** add `Sprintfln` and `Printfln` function -- **prefix:** add `Sprintfln` and `Printfln` function -- **print:** add `Sprintfln` and `Printfln` function -- **printer-interface:** add `Sprintfln` and `Printfln` to the interface -- **rgb:** add `Sprintfln` and `Printfln` function -- **section:** add `Sprintfln` and `Printfln` function - -### Bug Fixes -- **header:** fix inline color in `Header` - -### Test -- add tests for `Sprintfln` and `Printfln` function - -### Code Refactoring -- refactor `Sprintfln` and `Printfln` func. 
for better performance - -### Reverts -- ci: change color scheme for rendered examples - - - -## [v0.12.13] - 2021-04-10 -### Bug Fixes -- **bigtext:** fix height of some characters [#180](https://github.com/pterm/pterm/issues/180) -- **color:** make color implement `TextPrinter` - -### Test -- add interface tests - -### Code Refactoring -- **examples:** center the intro of `demo` -- **examples:** add note to box printer - - - -## [v0.12.12] - 2021-03-01 -### Features -- **prefixprinter:** Add option to show line number of caller - -### Code Refactoring -- **examples:** Update `PrefixPrinter` example - - - -## [v0.12.11] - 2021-02-26 -### Code Refactoring -- refactor print logic of `BoxPrinter` -- refactor print logic of `CenterPrinter` - - - -## [v0.12.10] - 2021-02-26 -### Bug Fixes -- correct `pterm.Println()` behaviour to fit to `fmt.Println()` - - - -## [v0.12.9] - 2021-02-23 -### Bug Fixes -- correct `pterm.Println()` behaviour to fit to `fmt.Println()` -- change terminal package import path to updated version - - - -## [v0.12.8] - 2020-12-11 -### Features -- **boxprinter:** add `WithHorizontalString` to `BoxPrinter` -- **boxprinter:** add `BoxPrinter` -- **panel:** add optional border for `Panel` -- **panelprinter:** add theme support to `PanelPrinter` -- **theme:** add `BoxStyle` and `BoxTextStyle` -- **theme:** add optional theme for border in `Panel` - -### Bug Fixes -- revert change horizontal string change - -### Test -- **boxprinter:** add test -- **boxprinter:** test multiple lines in one box -- **boxprinter:** add tests for `BoxPrinter` -- **panelprinter:** add tests for adding box printer -- **panelprinter:** add tests for optional border for `Panel` -- **theme:** add tests for `BoxStyle` and `BoxTextStyle` - -### Code Refactoring -- remove analytics -- **boxprinter:** return theme when style is nil -- **boxprinter:** change `DefaultBox` top and bottom padding to 0 -- **boxprinter:** fix spacing between boxes and in boxes -- **boxprinter:** refactor code -- **boxprinter:** change from `RenderablePrinter` to `TextPrinter` -- **panelprinter:** add `BoxPrinter` to surround panels with a fully custom box -- **panelprinter:** optional border for `Panel` - - - -## [v0.12.7] - 2020-11-24 -### Features -- add values to chart -- add horizontal `BarChartPrinter` -- add `BarChartPrinter` -- add `BarChartPrinter` -- add `BarChartPrinter` -- **theme:** add theme support to `BarChart` - -### Bug Fixes -- center bars over colored labels in `BarChart` - -### Test -- add tests to `BarChartPrinter` - - - -## [v0.12.6] - 2020-11-17 -### Bug Fixes -- disabling output works as expected now ([#149](https://github.com/pterm/pterm/issues/149)) - - - -## [v0.12.5] - 2020-11-17 -### Bug Fixes -- fix `PrefixPrinter` with multiple trailing newline endings. 
- - - -## [v0.12.4] - 2020-11-17 -### Bug Fixes -- fix `Printf` of `PrefixPrinter` - - - -## [v0.12.3] - 2020-11-12 -### Test -- reduce tests -- different test logic for rgb printing -- add better test names for `RGB` tests - - - -## [v0.12.2] - 2020-11-05 -### Features -- color each line separately when using multi line input - -### Bug Fixes -- fix internal `GetStringMaxWidth` max width - -### Test -- **basictext:** proxy print functions to DevNull -- **progressbar:** proxy print functions to DevNull - -### Code Refactoring -- use `pterm.Sprint` to print - - - -## [v0.12.1] - 2020-11-04 -### Bug Fixes -- **panel:** Fix output when input is colored - -### Performance Improvements -- **header:** calculate margin faster - - - -## [v0.12.0] - 2020-11-04 -### Features -- **panel:** add an option to make a padding beneath `panel` -- **panel:** add an option to make columns the same length - -### Bug Fixes -- **panel:** add invalid check for `padding` in `panel` - -### Test -- **bulletlist:** `BulletListItem` remove `Render` and `Srender` -- **bulletlist:** change `BulletList` to `BulletListPrinter` -- **panel:** add invalid check for `padding` in `panel` -- **panel:** add test for `WithBottomPadding` -- **panel:** add test for `WithSameColumnWidth` & multiple `panel` -- **panel:** add test for `WithSameColumnWidth` -- **progressbar:** change directory name `progressbar_test` to `progressbar_printer_test` -- **progressbar:** change `Progressbar` to `ProgressbarPrinter` -- **spinner:** change directory name `spinner_test` to `spinner_printer_test` -- **spinner:** change `Spinner` to `SpinnerPrinter` -- **table:** change `Table` to `TablePrinter` -- **tree:** change `Tree` to `TreePrinter` - -### Code Refactoring -- make all printer names end with `Printer` ([#134](https://github.com/pterm/pterm/issues/134)) -- **bulletlist:** remove `DefaultBulletListItem` -- **bulletlist:** `BulletListItem` remove `Render` and `Srender` -- **bulletlist:** `BulletListItem` is no renderable anymore -- **bulletlist:** change `BulletList` to `BulletListPrinter` -- **progressbar:** change `ActiveProgressbars` to `ActiveProgressbarPrinters` -- **progressbar:** change directory name `progressbar` to `progressbar_printer` -- **progressbar:** change `Progressbar` to `ProgressbarPrinter` -- **spinner:** change directory name `spinner` to `spinner_printer` -- **spinner:** change `Spinner` to `SpinnerPrinter` -- **table:** change `Table` to `TablePrinter` -- **tree:** change `Tree` to `TreePrinter` - -### BREAKING CHANGE - -Removed `DefaultBulletListItem`. - -Change names of printers which didn't end with `Printer`. Every printer name ends with `Printer` now to fit into the new naming convention. 
- -change `ActiveProgressbars` to `ActiveProgressbarPrinters` - -`BulletListItem` is no renderable anymore, removed `Render` and `Srender` - -`BulletListItem` is no renderable anymore, removed `Render` and `Srender` - -`BulletListItem` is no renderable anymore - -change `Tree` to `TreePrinter` to unify the naming scheme - -change `Tree` to `TreePrinter` to unify the naming scheme - -change `Table` to `TablePrinter` to unify the naming scheme - -change `Table` to `TablePrinter` to unify the naming scheme - -change `Spinner` to `SpinnerPrinter` to unify the naming scheme - -change `Spinner` to `SpinnerPrinter` to unify the naming scheme - -change `Progressbar` to `ProgressbarPrinter` to unify the naming scheme - -change `Progressbar` to `ProgressbarPrinter` to unify the naming scheme - -change `BulletList` to `BulletListPrinter` to unify the naming scheme - -change `BulletList` to `BulletListPrinter` to unify the naming scheme - - - -## [v0.11.0] - 2020-11-03 -### Features -- add `PanelPrinter` - -### Bug Fixes -- **centerprinter:** make centerprinter func return pointer - -### BREAKING CHANGE - -make centerprinter func `WithCenterEachLineSeparately` return a pointer of centerprinter - - - -## [v0.10.1] - 2020-11-02 -### Features -- add `CenterPrinter` - - - -## [v0.10.0] - 2020-11-01 -### Features -- make printers return errors -- add `DisableOutput()` and `EnableOutput()` ([#108](https://github.com/pterm/pterm/issues/108)) - -### Code Refactoring -- ignore errors where no errors can occur -- **theme:** change `ListTextStyle` to `BulletListTextStyle` ([#104](https://github.com/pterm/pterm/issues/104)) -- **theme:** change `ProgressbarBarStyle` to `FgCyan` ([#106](https://github.com/pterm/pterm/issues/106)) -- **theme:** change white to default color in `Theme` ([#103](https://github.com/pterm/pterm/issues/103)) - -### BREAKING CHANGE - -Interface of `RenderablePrinter` and `LivePrinter` changed. - -The global variable `DisableOutput` was renamed to `Output`. - - - -## [v0.9.3] - 2020-10-31 -### Features -- add a levelList converter for TreeListPrinter -- add `TreeListPrinter` as a renderable printer -- add `TreeListPrinter` as a renderable printer -- **theme:** add theme support for `Tree` - -### Test -- **tree:** add `Tree` tests - -### Code Refactoring -- clean up `Tree` -- **theme:** change `TreeTextStyle` to `FgDefault` -- **tree:** add Indent to control the spacing between levels and changed docs(examples) -- **tree:** add more spacing between levels -- **tree:** refactor `Tree` code and write tests for `Tree` -- **tree:** refactor `Tree` code and write tests for `Tree` -- **tree:** refactor `Tree` code -- **tree:** refactor `Tree` code -- **tree:** refactor `Tree` code - - - -## [v0.9.2] - 2020-10-29 -### Features -- add option to disable and enable colors - - - -## [v0.9.1] - 2020-10-27 -### Code Refactoring -- make the prefix of `Info` smaller again - - - -## [v0.9.0] - 2020-10-27 -### Features -- add `Debug` `PrefixPrinter` -- add support for enabling and disabling debug messages - -### Bug Fixes -- progressbar disappears when done and something is printed after - -### Test -- add debugger tests to `PrefixPrinter` -- add progressbar tests - -### Code Refactoring -- remove `UpdateDelay` from `Progressbar` -- change `NewList` to `NewBulletList` -- change `NewList` to `NewBulletList` -- deprecate `UpdateDelay` in `Progressbar` - -### BREAKING CHANGE - -Removed `UpdateDelay` from `Progressbar`. It's no longer used. 
The Progressbar automatically updates on every change to the current value. - -Changed `NewList` to `NewBulletList`. - - - -## [v0.8.1] - 2020-10-26 -### Features -- add fade from one RGB over several RGBs to another RGB - -### Code Refactoring -- refactor doc -- refactor code - - - -## [v0.8.0] - 2020-10-24 -### Features -- add `BigTextPrinter` ([#75](https://github.com/pterm/pterm/issues/75)) -- use level of section printer -- add `BulletListPrinter` ([#67](https://github.com/pterm/pterm/issues/67)) - -### Test -- test that `%s` won't fail to print - -### Code Refactoring -- make `BigTextPrinter` release ready -- change `LineCharacter` to `BarCharacter` ([#70](https://github.com/pterm/pterm/issues/70)) - -### BREAKING CHANGE - -Changed `LineCharacter` to `BarCharacter`. - - - -## [v0.7.0] - 2020-10-20 -### Features -- **progressbar:** add RemoveWhenDone - -### Bug Fixes -- make theme accept pointer styles -- make Spinner accept pointer Style -- make WithMessageStyle accept Style pointer -- add nil check to SectionPrinter Style -- section printer Style to pointer - -### Test -- add tests color and style -- add tests to root print functions -- add tests to progressbar -- add tests to terminal -- add tests to theme -- fix internal percentage test -- add tests to Spinner -- add tests for TablePrinter -- special tests for special statements -- complete PrefixPrinter tests -- add PrefixPrinter tests -- rename HeaderPrinter tests -- complete HeaderPrinter tests -- add ParagraphPrinter tests -- add HeaderPrinter tests -- make unit test system check different types -- add SectionPrinter tests -- implement test utils -- add rgb tests - -### Code Refactoring -- use log output -- remove obsolete if -- fit progressbar to new percentage calculation method -- make fatal panic -- rename parameters -- don't show empty line when removing a progressbar - - - -## [v0.6.1] - 2020-10-20 -### Bug Fixes -- fix RGB methods - - - -## [v0.6.0] - 2020-10-19 -### Features -- add BasicTextPrinter -- add theme support to section and table printer -- add theme support to spinner -- add theme support to headers -- add template support for progressbars -- add default theme - -### Test -- **benchmark:** fix spinner benchmark - -### Code Refactoring -- make printers accept pointers to styles -- remove emojis to comply with cross-platform policy -- change LivePrinter interface to pointer output -- change TextPrinter interface to pointer output - -### BREAKING CHANGE - -All printers only accept pointers as any `Style` attribute. - -LivePrinter now requires to return a pointer. - -TextPrinter now requires to return a pointer. 
- - - -## [v0.5.1] - 2020-10-14 -### Features -- add ability to disable output ([#44](https://github.com/pterm/pterm/issues/44)) -- add `Srender` to `RenderPrinter` interface -- add csv table support ([#42](https://github.com/pterm/pterm/issues/42)) -- add HEX to RGB converter in `RGB` ([#41](https://github.com/pterm/pterm/issues/41)) -- add theme to generated animations -- add color fade example ([#38](https://github.com/pterm/pterm/issues/38)) -- implement `TextPrinter` into `RGB` -- implement color fade to `Progressbar` ([#37](https://github.com/pterm/pterm/issues/37)) -- add color fade function and `RBG` ([#34](https://github.com/pterm/pterm/issues/34)) -- change `Section` style - -### Code Refactoring -- declare function name as `WithCSVReader` - - - -## [v0.5.0] - 2020-10-08 -### Features -- implement `LivePrinter` in `Spinner` -- add `BottomPadding` to `SectionPrinter` -- add `RenderPrinter` interface -- implement `LivePrinter` in `Progressbar` -- add `LivePrinter` interface -- add `TablePrinter` ([#27](https://github.com/pterm/pterm/issues/27)) -- add `ParagraphPrinter` ([#24](https://github.com/pterm/pterm/issues/24)) - -### Test -- add `Print` equals `Sprint` tests for `GenericPrinter` -- add `Spinner` benchmarks - -### Code Refactoring -- rename spinner_printer.go to spinner.go -- rename `GenericPrinter` to `TextPrinter` - -### BREAKING CHANGE - -The `GenericPrinter` is now called `TextPrinter`. - - - -## [v0.4.1] - 2020-10-07 - - -## [v0.4.0] - 2020-10-07 -### Features -- add `Add` to `Style` -- add options shorthands to `SectionPrinter` - -### Test -- ignore writer close errors in stdout capture - -### Code Refactoring -- use `Style` instead of colors -- refactor function parameters to fit expectation -- rename `RemoveColors` to `RemoveColorFromString` - -### BREAKING CHANGE - -use `Style` instead of colors - -Refactor function parameters to fit expectation. 
-Affects: `WithStyle(colors -> style)`, `WithScope(string, colors -> scope)` - -rename `RemoveColors` to `RemoveColorFromString` - - - -## [v0.3.2] - 2020-10-06 -### Features -- add `SectionPrinter` - -### Bug Fixes -- fix `Sprintf` function of `HeaderPrinter` - -### Test -- add tests for `HeaderPrinter` and `SectionPrinter` - - - -## [v0.3.1] - 2020-10-06 -### Features -- add `BarFiller` to `Progressbar` - -### Test -- fix import cycle -- change to inbuilt `SetDefaultOutput` option -- add more benchmarks -- add benchmarks -- add tests to `GenericPrinter` and default print methods - -### Code Refactoring -- set default `BarFiller` to space -- move tests directly into `pterm` module - - - -## [v0.3.0] - 2020-10-05 -### Bug Fixes -- fix `WithXYZ(b ...bool)` to detect booleans correctly - -### Code Refactoring -- remove `Version` constant -- change `WithXXX(b bool)` to `WithXXX(b ...bool)` -- change `SetXXX` to `WithXXX` -- change `Header` to `DefaultHeader` - -### BREAKING CHANGE - -remove `Version` constant - -rename `SetXXX` to `WithXXX` - -rename `Header` to `DefaultHeader` - - - -## [v0.2.4] - 2020-10-04 -### Bug Fixes -- `Printf` works again - - - -## [v0.2.3] - 2020-10-04 -### Features -- automatically print above `Progressbar` - -### Code Refactoring -- remove goroutine from `Progressbar` - - - -## [v0.2.2] - 2020-10-04 -### Features -- add `Fatal` printer - - - -## [v0.2.1] - 2020-10-04 -### Features -- make progressbar configurable -- add percentage helper -- add `RemoveColors` -- add `Progressbar` ([#5](https://github.com/pterm/pterm/issues/5)) -- add `Progressbar` -- add fatal to `PrefixPrinter` ([#4](https://github.com/pterm/pterm/issues/4)) -- **progressbar:** fade percentage color according to value - -### Code Refactoring -- bump version to "v0.2.1" - - - -## [v0.2.0] - 2020-09-30 -### Features -- change style of `Description` printer -- add color in color support -- add `RemoveWhenDone` to `Spinner` -- add multiline support to `PrefixPrinter` -- add `UpdateText` to spinner - -### Bug Fixes -- spinners spin evenly when multiple spinners are started - -### Performance Improvements -- improve spinner performance - -### Code Refactoring -- bump version to "v0.2.0" -- change `WithXXX` to `SetXXX` -- removed `Println` aliases - -### BREAKING CHANGE - -every `WithXXX` is renamed to `SetXXX` - -remove `GetFormattedMessage` from `PrefixPrinter` - -removed `Println` aliases - - - -## [v0.1.0] - 2020-09-28 -### Features -- add spinners -- shorten printer names and add builder methods to printers -- add `Printo` to override printed text -- add `FullWidth` to `HeaderPrinter` -- add terminal size detection - -### Code Refactoring -- bump version to "v0.1.0" -- consistent example code for `Printo` -- better comments for `Printo` -- simplify `HeaderPrinter` - -### BREAKING CHANGE - -printer names changed - -removed `Header` and put it's content directly into `HeaderPrinter` - - - -## [v0.0.1] - 2020-09-21 -### Features -- add aliases to default printers -- add header example -- integrate ci -- add `HeaderPrinter` -- add exported version variable -- add example `override-default-printer` -- change prefix text color to `LightWhite` - -### Bug Fixes -- header should now work in CI - -### Code Refactoring -- bump version to "v0.0.1" -- refactor project -- add comments to functions - - - -## v0.0.0 - 2020-09-18 -### Features -- add changelog template -- configs -- initial commit - - -[Unreleased]: https://github.com/pterm/pterm/compare/v0.12.57...HEAD -[v0.12.57]: 
https://github.com/pterm/pterm/compare/v0.12.56...v0.12.57 -[v0.12.56]: https://github.com/pterm/pterm/compare/v0.12.55...v0.12.56 -[v0.12.55]: https://github.com/pterm/pterm/compare/v0.12.54...v0.12.55 -[v0.12.54]: https://github.com/pterm/pterm/compare/v0.12.53...v0.12.54 -[v0.12.53]: https://github.com/pterm/pterm/compare/v0.12.52...v0.12.53 -[v0.12.52]: https://github.com/pterm/pterm/compare/v0.12.51...v0.12.52 -[v0.12.51]: https://github.com/pterm/pterm/compare/v0.12.50...v0.12.51 -[v0.12.50]: https://github.com/pterm/pterm/compare/v0.12.49...v0.12.50 -[v0.12.49]: https://github.com/pterm/pterm/compare/v0.12.48...v0.12.49 -[v0.12.48]: https://github.com/pterm/pterm/compare/v0.12.47...v0.12.48 -[v0.12.47]: https://github.com/pterm/pterm/compare/v0.12.46...v0.12.47 -[v0.12.46]: https://github.com/pterm/pterm/compare/v0.12.45...v0.12.46 -[v0.12.45]: https://github.com/pterm/pterm/compare/v0.12.44...v0.12.45 -[v0.12.44]: https://github.com/pterm/pterm/compare/v0.12.43...v0.12.44 -[v0.12.43]: https://github.com/pterm/pterm/compare/v0.12.42...v0.12.43 -[v0.12.42]: https://github.com/pterm/pterm/compare/v0.12.41...v0.12.42 -[v0.12.41]: https://github.com/pterm/pterm/compare/v0.12.40...v0.12.41 -[v0.12.40]: https://github.com/pterm/pterm/compare/v0.12.39...v0.12.40 -[v0.12.39]: https://github.com/pterm/pterm/compare/v0.12.38...v0.12.39 -[v0.12.38]: https://github.com/pterm/pterm/compare/v0.12.37...v0.12.38 -[v0.12.37]: https://github.com/pterm/pterm/compare/v0.12.36...v0.12.37 -[v0.12.36]: https://github.com/pterm/pterm/compare/v0.12.35...v0.12.36 -[v0.12.35]: https://github.com/pterm/pterm/compare/v0.12.34...v0.12.35 -[v0.12.34]: https://github.com/pterm/pterm/compare/v0.12.33...v0.12.34 -[v0.12.33]: https://github.com/pterm/pterm/compare/v0.12.32...v0.12.33 -[v0.12.32]: https://github.com/pterm/pterm/compare/v0.12.31...v0.12.32 -[v0.12.31]: https://github.com/pterm/pterm/compare/v0.12.30...v0.12.31 -[v0.12.30]: https://github.com/pterm/pterm/compare/v0.12.29...v0.12.30 -[v0.12.29]: https://github.com/pterm/pterm/compare/v0.12.28...v0.12.29 -[v0.12.28]: https://github.com/pterm/pterm/compare/v0.12.27...v0.12.28 -[v0.12.27]: https://github.com/pterm/pterm/compare/v0.12.26...v0.12.27 -[v0.12.26]: https://github.com/pterm/pterm/compare/v0.12.25...v0.12.26 -[v0.12.25]: https://github.com/pterm/pterm/compare/v0.12.24...v0.12.25 -[v0.12.24]: https://github.com/pterm/pterm/compare/v0.12.23...v0.12.24 -[v0.12.23]: https://github.com/pterm/pterm/compare/v0.12.22...v0.12.23 -[v0.12.22]: https://github.com/pterm/pterm/compare/v0.12.21...v0.12.22 -[v0.12.21]: https://github.com/pterm/pterm/compare/v0.12.20...v0.12.21 -[v0.12.20]: https://github.com/pterm/pterm/compare/v0.12.19...v0.12.20 -[v0.12.19]: https://github.com/pterm/pterm/compare/v0.12.18...v0.12.19 -[v0.12.18]: https://github.com/pterm/pterm/compare/v0.12.17...v0.12.18 -[v0.12.17]: https://github.com/pterm/pterm/compare/v0.12.16...v0.12.17 -[v0.12.16]: https://github.com/pterm/pterm/compare/v0.12.15...v0.12.16 -[v0.12.15]: https://github.com/pterm/pterm/compare/v0.12.14...v0.12.15 -[v0.12.14]: https://github.com/pterm/pterm/compare/v0.12.13...v0.12.14 -[v0.12.13]: https://github.com/pterm/pterm/compare/v0.12.12...v0.12.13 -[v0.12.12]: https://github.com/pterm/pterm/compare/v0.12.11...v0.12.12 -[v0.12.11]: https://github.com/pterm/pterm/compare/v0.12.10...v0.12.11 -[v0.12.10]: https://github.com/pterm/pterm/compare/v0.12.9...v0.12.10 -[v0.12.9]: https://github.com/pterm/pterm/compare/v0.12.8...v0.12.9 -[v0.12.8]: 
https://github.com/pterm/pterm/compare/v0.12.7...v0.12.8 -[v0.12.7]: https://github.com/pterm/pterm/compare/v0.12.6...v0.12.7 -[v0.12.6]: https://github.com/pterm/pterm/compare/v0.12.5...v0.12.6 -[v0.12.5]: https://github.com/pterm/pterm/compare/v0.12.4...v0.12.5 -[v0.12.4]: https://github.com/pterm/pterm/compare/v0.12.3...v0.12.4 -[v0.12.3]: https://github.com/pterm/pterm/compare/v0.12.2...v0.12.3 -[v0.12.2]: https://github.com/pterm/pterm/compare/v0.12.1...v0.12.2 -[v0.12.1]: https://github.com/pterm/pterm/compare/v0.12.0...v0.12.1 -[v0.12.0]: https://github.com/pterm/pterm/compare/v0.11.0...v0.12.0 -[v0.11.0]: https://github.com/pterm/pterm/compare/v0.10.1...v0.11.0 -[v0.10.1]: https://github.com/pterm/pterm/compare/v0.10.0...v0.10.1 -[v0.10.0]: https://github.com/pterm/pterm/compare/v0.9.3...v0.10.0 -[v0.9.3]: https://github.com/pterm/pterm/compare/v0.9.2...v0.9.3 -[v0.9.2]: https://github.com/pterm/pterm/compare/v0.9.1...v0.9.2 -[v0.9.1]: https://github.com/pterm/pterm/compare/v0.9.0...v0.9.1 -[v0.9.0]: https://github.com/pterm/pterm/compare/v0.8.1...v0.9.0 -[v0.8.1]: https://github.com/pterm/pterm/compare/v0.8.0...v0.8.1 -[v0.8.0]: https://github.com/pterm/pterm/compare/v0.7.0...v0.8.0 -[v0.7.0]: https://github.com/pterm/pterm/compare/v0.6.1...v0.7.0 -[v0.6.1]: https://github.com/pterm/pterm/compare/v0.6.0...v0.6.1 -[v0.6.0]: https://github.com/pterm/pterm/compare/v0.5.1...v0.6.0 -[v0.5.1]: https://github.com/pterm/pterm/compare/v0.5.0...v0.5.1 -[v0.5.0]: https://github.com/pterm/pterm/compare/v0.4.1...v0.5.0 -[v0.4.1]: https://github.com/pterm/pterm/compare/v0.4.0...v0.4.1 -[v0.4.0]: https://github.com/pterm/pterm/compare/v0.3.2...v0.4.0 -[v0.3.2]: https://github.com/pterm/pterm/compare/v0.3.1...v0.3.2 -[v0.3.1]: https://github.com/pterm/pterm/compare/v0.3.0...v0.3.1 -[v0.3.0]: https://github.com/pterm/pterm/compare/v0.2.4...v0.3.0 -[v0.2.4]: https://github.com/pterm/pterm/compare/v0.2.3...v0.2.4 -[v0.2.3]: https://github.com/pterm/pterm/compare/v0.2.2...v0.2.3 -[v0.2.2]: https://github.com/pterm/pterm/compare/v0.2.1...v0.2.2 -[v0.2.1]: https://github.com/pterm/pterm/compare/v0.2.0...v0.2.1 -[v0.2.0]: https://github.com/pterm/pterm/compare/v0.1.0...v0.2.0 -[v0.1.0]: https://github.com/pterm/pterm/compare/v0.0.1...v0.1.0 -[v0.0.1]: https://github.com/pterm/pterm/compare/v0.0.0...v0.0.1 diff --git a/vendor/github.com/pterm/pterm/README.md b/vendor/github.com/pterm/pterm/README.md index 73d4b098..c7a37292 100644 --- a/vendor/github.com/pterm/pterm/README.md +++ b/vendor/github.com/pterm/pterm/README.md @@ -35,13 +35,17 @@ Downloads + + + +

@@ -88,25 +92,30 @@ go get github.com/pterm/pterm ### Printers (Components) +
+
 | Feature | Feature | Feature | Feature | Feature |
 | :-------: | :-------: | :-------: | :-------: | :-------: |
 | Area [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/area) |Barchart [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/barchart) |Basictext [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/basictext) |Bigtext [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/bigtext) |Box [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/box) |
-| Bulletlist [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/bulletlist) |Center [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/center) |Coloring [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/coloring) |Demo [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/demo) |Header [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/header) |
+| Bulletlist [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/bulletlist) |Center [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/center) |Coloring [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/coloring) |Header [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/header) |Heatmap [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/heatmap) |
 | Interactive confirm [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/interactive_confirm) |Interactive continue [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/interactive_continue) |Interactive multiselect [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/interactive_multiselect) |Interactive select [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/interactive_select) |Interactive textinput [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/interactive_textinput) |
-| Logger [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/logger) |Panel [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/panel) |Paragraph [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/paragraph) |Prefix [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/prefix) |Progressbar [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/progressbar) |
-| Section [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/section) |Spinner [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/spinner) |Style [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/style) |Table [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/table) |Theme [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/theme) |
-| Tree [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/tree) | | | | |
+| Logger [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/logger) |Multiple-live-printers [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/multiple-live-printers) |Panel [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/panel) |Paragraph [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/paragraph) |Prefix [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/prefix) |
+| Progressbar [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/progressbar) |Section [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/section) |Slog [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/slog) |Spinner [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/spinner) |Style [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/style) |
+| Table [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/table) |Theme [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/theme) |Tree [(Examples)](https://github.com/pterm/pterm/tree/master/_examples/tree) | | |
+
+
+---

-### 🦸‍♂️ Supporters
+### 🦸‍♂️ Sponsors

-|-|User|💸|
-|---|---|---|
-|![Jens Lauterbach](https://avatars.githubusercontent.com/u/1292368?s=25)|[@jenslauterbach](https://github.com/jenslauterbach)|25$|
+
+
+---
@@ -126,9 +135,9 @@ go get github.com/pterm/pterm

-### area/center
+### area/demo

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/center/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/demo/animation.svg)
@@ -141,16 +150,37 @@ import ( "time" "github.com/pterm/pterm" + "github.com/pterm/pterm/putils" ) func main() { + // Print an informational message using PTerm's Info printer. + // This message will stay in place while the area updates. + pterm.Info.Println("The previous text will stay in place, while the area updates.") + + // Print two new lines as spacer. + pterm.Print("\n\n") + + // Start the Area printer from PTerm's DefaultArea, with the Center option. + // The Area printer allows us to update a specific area of the console output. + // The returned 'area' object is used to control the area updates. area, _ := pterm.DefaultArea.WithCenter().Start() - for i := 0; i < 5; i++ { - area.Update(pterm.Sprintf("Current count: %d\nAreas can update their content dynamically!", i)) + // Loop 10 times to update the area with the current time. + for i := 0; i < 10; i++ { + // Get the current time, format it as "15:04:05" (hour:minute:second), and convert it to a string. + // Then, create a BigText from the time string using PTerm's DefaultBigText and putils NewLettersFromString. + // The Srender() function is used to save the BigText as a string. + str, _ := pterm.DefaultBigText.WithLetters(putils.LettersFromString(time.Now().Format("15:04:05"))).Srender() + + // Update the Area contents with the current time string. + area.Update(str) + + // Sleep for a second before the next update. time.Sleep(time.Second) } + // Stop the Area printer after all updates are done. area.Stop() } @@ -158,9 +188,9 @@ func main() {
-### area/default
+### area/center

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/default/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/center/animation.svg)
@@ -176,13 +206,21 @@ import ( ) func main() { - area, _ := pterm.DefaultArea.Start() + // Start a new default area in the center of the terminal. + // The Start() function returns the created area and an error. + area, _ := pterm.DefaultArea.WithCenter().Start() + // Loop 5 times to simulate a dynamic update. for i := 0; i < 5; i++ { + // Update the content of the area with the current count. + // The Sprintf function is used to format the string. area.Update(pterm.Sprintf("Current count: %d\nAreas can update their content dynamically!", i)) + + // Pause for a second to simulate a time-consuming task. time.Sleep(time.Second) } + // Stop the area after all updates are done. area.Stop() } @@ -190,9 +228,9 @@ func main() {
-### area/demo
+### area/default

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/demo/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/area/default/animation.svg)
@@ -208,15 +246,22 @@ import ( ) func main() { - pterm.Info.Println("The previous text will stay in place, while the area updates.") - pterm.Print("\n\n") // Add two new lines as spacer. + // Start a new default area and get a reference to it. + // The second return value is an error which is ignored here. + area, _ := pterm.DefaultArea.Start() - area, _ := pterm.DefaultArea.WithCenter().Start() // Start the Area printer, with the Center option. - for i := 0; i < 10; i++ { - str, _ := pterm.DefaultBigText.WithLetters(pterm.NewLettersFromString(time.Now().Format("15:04:05"))).Srender() // Save current time in str. - area.Update(str) // Update Area contents. + // Loop 5 times + for i := 0; i < 5; i++ { + // Update the content of the area dynamically. + // Here we're just displaying the current count. + area.Update(pterm.Sprintf("Current count: %d\nAreas can update their content dynamically!", i)) + + // Pause for a second before the next update. time.Sleep(time.Second) } + + // Stop the area after all updates are done. + // This will clean up and free resources used by the area. area.Stop() } @@ -242,23 +287,35 @@ import ( ) func main() { + // Start a new fullscreen centered area. + // This area will be used to display the bar chart. area, _ := pterm.DefaultArea.WithFullscreen().WithCenter().Start() + // Ensure the area stops updating when we're done. defer area.Stop() + // Loop to update the bar chart 10 times. for i := 0; i < 10; i++ { + // Create a new bar chart with dynamic bars. + // The bars will change based on the current iteration. barchart := pterm.DefaultBarChart.WithBars(dynamicBars(i)) + // Render the bar chart to a string. + // This string will be used to update the area. content, _ := barchart.Srender() + // Update the area with the new bar chart. area.Update(content) + // Wait for half a second before the next update. time.Sleep(500 * time.Millisecond) } } +// dynamicBars generates a set of bars for the bar chart. +// The bars will change based on the current iteration. func dynamicBars(i int) pterm.Bars { return pterm.Bars{ - {Label: "A", Value: 10}, - {Label: "B", Value: 20 * i}, - {Label: "C", Value: 30}, - {Label: "D", Value: 40 + i}, + {Label: "A", Value: 10}, // A static bar. + {Label: "B", Value: 20 * i}, // A bar that grows with each iteration. + {Label: "C", Value: 30}, // Another static bar. + {Label: "D", Value: 40 + i}, // A bar that grows slowly with each iteration. } } @@ -284,13 +341,21 @@ import ( ) func main() { + // Start a new fullscreen area. This will return an area instance and an error. + // The underscore (_) is used to ignore the error. area, _ := pterm.DefaultArea.WithFullscreen().Start() + // Loop 5 times to update the area content. for i := 0; i < 5; i++ { + // Update the content of the area with the current count. + // The Sprintf function is used to format the string. area.Update(pterm.Sprintf("Current count: %d\nAreas can update their content dynamically!", i)) + + // Pause for a second before the next update. time.Sleep(time.Second) } + // Stop the area after all updates are done. 
area.Stop() } @@ -316,13 +381,22 @@ import ( ) func main() { + // Initialize a new PTerm area with fullscreen and center options + // The Start() function returns the created area and an error (ignored here) area, _ := pterm.DefaultArea.WithFullscreen().WithCenter().Start() + // Loop 5 times to demonstrate dynamic content update for i := 0; i < 5; i++ { + // Update the content of the area with the current count + // The Sprintf function is used to format the string with the count area.Update(pterm.Sprintf("Current count: %d\nAreas can update their content dynamically!", i)) + + // Pause for a second time.Sleep(time.Second) } + // Stop the area after all updates are done + // This will clear the area and return the terminal to its normal state area.Stop() } @@ -330,9 +404,9 @@ func main() {
-### barchart/custom-height
+### barchart/demo

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/custom-height/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/demo/animation.svg)
@@ -341,29 +415,39 @@ func main() { ```go package main -import "github.com/pterm/pterm" +import ( + "github.com/pterm/pterm" +) func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ - {Label: "A", Value: 10}, - {Label: "B", Value: 20}, - {Label: "C", Value: 30}, - {Label: "D", Value: 40}, - {Label: "E", Value: 50}, - {Label: "F", Value: 40}, - {Label: "G", Value: 30}, - {Label: "H", Value: 20}, - {Label: "I", Value: 10}, - }).WithHeight(5).Render() + // Define the bars for the chart + bars := []pterm.Bar{ + {Label: "Bar 1", Value: 5}, + {Label: "Bar 2", Value: 3}, + {Label: "Longer Label", Value: 7}, + } + + // Print an informational message + pterm.Info.Println("Chart example with positive only values (bars use 100% of chart area)") + + // Create a bar chart with the defined bars and render it + // The DefaultBarChart is used as a base, and the bars are added with the WithBars option + // The Render function is then called to display the chart + pterm.DefaultBarChart.WithBars(bars).Render() + + // Create a horizontal bar chart with the defined bars and render it + // The DefaultBarChart is used as a base, the chart is made horizontal with the WithHorizontal option, and the bars are added with the WithBars option + // The Render function is then called to display the chart + pterm.DefaultBarChart.WithHorizontal().WithBars(bars).Render() } ```
-### barchart/custom-width
+### barchart/custom-height

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/custom-width/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/custom-height/animation.svg)
@@ -375,7 +459,9 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ + // Define a slice of Bar structs. Each struct represents a bar in the chart. + // The Label field is the name of the bar and the Value field is the height of the bar. + bars := []pterm.Bar{ {Label: "A", Value: 10}, {Label: "B", Value: 20}, {Label: "C", Value: 30}, @@ -385,16 +471,22 @@ func main() { {Label: "G", Value: 30}, {Label: "H", Value: 20}, {Label: "I", Value: 10}, - }).WithHorizontal().WithWidth(5).Render() + } + + // Create and render a bar chart with the defined bars and a height of 5. + // The WithBars method is used to set the bars of the chart. + // The WithHeight method is used to set the height of the chart. + // The Render method is used to display the chart in the terminal. + pterm.DefaultBarChart.WithBars(bars).WithHeight(5).Render() } ```
-### barchart/default
+### barchart/custom-width

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/default/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/custom-width/animation.svg)
@@ -406,7 +498,8 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ + // Define the data for the bar chart + barData := []pterm.Bar{ {Label: "A", Value: 10}, {Label: "B", Value: 20}, {Label: "C", Value: 30}, @@ -416,16 +509,21 @@ func main() { {Label: "G", Value: 30}, {Label: "H", Value: 20}, {Label: "I", Value: 10}, - }).Render() + } + + // Create a bar chart with the defined data + // The chart is horizontal and has a width of 5 + // The Render() function is called to display the chart + pterm.DefaultBarChart.WithBars(barData).WithHorizontal().WithWidth(5).Render() } ```
-### barchart/demo
+### barchart/default

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/demo/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/barchart/default/animation.svg)
@@ -434,29 +532,27 @@ func main() { ```go package main -import ( - "github.com/pterm/pterm" -) +import "github.com/pterm/pterm" func main() { - positiveBars := pterm.Bars{ - pterm.Bar{ - Label: "Bar 1", - Value: 5, - }, - pterm.Bar{ - Label: "Bar 2", - Value: 3, - }, - pterm.Bar{ - Label: "Longer Label", - Value: 7, - }, + // Define the data for the bar chart. Each bar is represented by a `pterm.Bar` struct. + // The `Label` field represents the label of the bar, and the `Value` field represents the value of the bar. + bars := []pterm.Bar{ + {Label: "A", Value: 10}, + {Label: "B", Value: 20}, + {Label: "C", Value: 30}, + {Label: "D", Value: 40}, + {Label: "E", Value: 50}, + {Label: "F", Value: 40}, + {Label: "G", Value: 30}, + {Label: "H", Value: 20}, + {Label: "I", Value: 10}, } - pterm.Info.Println("Chart example with positive only values (bars use 100% of chart area)") - _ = pterm.DefaultBarChart.WithBars(positiveBars).Render() - _ = pterm.DefaultBarChart.WithHorizontal().WithBars(positiveBars).Render() + // Use the `DefaultBarChart` from the `pterm` package to create a bar chart. + // The `WithBars` method is used to set the bars of the chart. + // The `Render` method is used to display the chart. + pterm.DefaultBarChart.WithBars(bars).Render() } ``` @@ -477,7 +573,8 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ + // Define the data for the bar chart + bars := []pterm.Bar{ {Label: "A", Value: 10}, {Label: "B", Value: 20}, {Label: "C", Value: 30}, @@ -487,7 +584,12 @@ func main() { {Label: "G", Value: 30}, {Label: "H", Value: 20}, {Label: "I", Value: 10}, - }).WithHorizontal().Render() + } + + // Create a bar chart with the defined data + // The chart is displayed horizontally + // The Render() function is called to display the chart + pterm.DefaultBarChart.WithBars(bars).WithHorizontal().Render() } ``` @@ -508,7 +610,8 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ + // Define the data for the bar chart + barData := []pterm.Bar{ {Label: "A", Value: 10}, {Label: "B", Value: 20}, {Label: "C", Value: 30}, @@ -518,7 +621,12 @@ func main() { {Label: "G", Value: 30}, {Label: "H", Value: 20}, {Label: "I", Value: 10}, - }).WithHorizontal().WithShowValue().Render() + } + + // Create a bar chart with the defined data + // The chart is horizontal and displays the value of each bar + // The Render() function is called to display the chart + pterm.DefaultBarChart.WithBars(barData).WithHorizontal().WithShowValue().Render() } ``` @@ -541,32 +649,29 @@ import ( ) func main() { - mixedBars := pterm.Bars{ - pterm.Bar{ - Label: "Bar 1", - Value: 2, - }, - pterm.Bar{ - Label: "Bar 2", - Value: -3, - }, - pterm.Bar{ - Label: "Bar 3", - Value: -2, - }, - pterm.Bar{ - Label: "Bar 4", - Value: 5, - }, - pterm.Bar{ - Label: "Longer Label", - Value: 7, - }, + // Define a set of bars for the chart. + // Each bar has a label and a value. + bars := []pterm.Bar{ + {Label: "Bar 1", Value: 2}, + {Label: "Bar 2", Value: -3}, + {Label: "Bar 3", Value: -2}, + {Label: "Bar 4", Value: 5}, + {Label: "Longer Label", Value: 7}, } + // Print a section header. + // This is useful for separating different parts of the output. 
pterm.DefaultSection.Println("Chart example with mixed values (note screen space usage in case when ABSOLUTE values of negative and positive parts are differ too much)") - _ = pterm.DefaultBarChart.WithBars(mixedBars).WithShowValue().Render() - _ = pterm.DefaultBarChart.WithHorizontal().WithBars(mixedBars).WithShowValue().Render() + + // Create a bar chart with the defined bars. + // The chart will display the value of each bar. + // The Render() function is called to display the chart. + pterm.DefaultBarChart.WithBars(bars).WithShowValue().Render() + + // Create a horizontal bar chart with the same bars. + // The chart will display the value of each bar. + // The Render() function is called to display the chart. + pterm.DefaultBarChart.WithHorizontal().WithBars(bars).WithShowValue().Render() } ``` @@ -589,23 +694,25 @@ import ( ) func main() { + // Define a set of bars with negative values. + // Each bar is represented by a struct with a label and a value. negativeBars := pterm.Bars{ - pterm.Bar{ - Label: "Bar 1", - Value: -5, - }, - pterm.Bar{ - Label: "Bar 2", - Value: -3, - }, - pterm.Bar{ - Label: "Longer Label", - Value: -7, - }, + {Label: "Bar 1", Value: -5}, + {Label: "Bar 2", Value: -3}, + {Label: "Longer Label", Value: -7}, } + // Print an informational message to the console. pterm.Info.Println("Chart example with negative only values (bars use 100% of chart area)") + + // Create a vertical bar chart with the defined bars. + // The WithShowValue() option is used to display the value of each bar in the chart. + // The Render() method is called to draw the chart. _ = pterm.DefaultBarChart.WithBars(negativeBars).WithShowValue().Render() + + // Create a horizontal bar chart with the same bars. + // The WithHorizontal() option is used to orient the chart horizontally. + // The WithShowValue() option and Render() method are used in the same way as before. _ = pterm.DefaultBarChart.WithHorizontal().WithBars(negativeBars).WithShowValue().Render() } @@ -627,7 +734,10 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBarChart.WithBars([]pterm.Bar{ + // Define a slice of bars for the bar chart. Each bar is represented by a struct + // with a Label and a Value. The Label is a string that represents the name of the bar, + // and the Value is an integer that represents the height of the bar. + bars := []pterm.Bar{ {Label: "A", Value: 10}, {Label: "B", Value: 20}, {Label: "C", Value: 30}, @@ -637,7 +747,13 @@ func main() { {Label: "G", Value: 30}, {Label: "H", Value: 20}, {Label: "I", Value: 10}, - }).WithShowValue().Render() + } + + // Create a bar chart with the defined bars using the DefaultBarChart object from PTerm. + // Chain the WithBars method to set the bars of the chart. + // Chain the WithShowValue method to display the value of each bar on the chart. + // Finally, call the Render method to display the chart. + pterm.DefaultBarChart.WithBars(bars).WithShowValue().Render() } ``` @@ -658,22 +774,24 @@ package main import "github.com/pterm/pterm" func main() { - // A BasicText printer is used to print text, without special formatting. - // As it implements the TextPrinter interface, you can use it in combination with other printers. + // The DefaultBasicText is a basic text printer provided by PTerm. + // It is used to print text without any special formatting. pterm.DefaultBasicText.Println("Default basic text printer.") + + // The DefaultBasicText can be used in any context that requires a TextPrinter. 
+ // Here, we're using it with the LightMagenta function to color a portion of the text. pterm.DefaultBasicText.Println("Can be used in any" + pterm.LightMagenta(" TextPrinter ") + "context.") - pterm.DefaultBasicText.Println("For example to resolve progressbars and spinners.") - // If you just want to print text, you should use this instead: - // pterm.Println("Hello, World!") + + // The DefaultBasicText is also useful for resolving progress bars and spinners. } ```
-### bigtext/colored
+### bigtext/demo

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/colored/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/demo/animation.svg)
@@ -688,19 +806,33 @@ import ( ) func main() { + // Create a large text with the LetterStyle from the standard theme. + // This is useful for creating title screens. + pterm.DefaultBigText.WithLetters(putils.LettersFromString("PTerm")).Render() + + // Create a large text with differently colored letters. + // Here, the first letter 'P' is colored cyan and the rest 'Term' is colored light magenta. + // This can be used to highlight specific parts of the text. pterm.DefaultBigText.WithLetters( putils.LettersFromStringWithStyle("P", pterm.FgCyan.ToStyle()), - putils.LettersFromStringWithStyle("Term", pterm.FgLightMagenta.ToStyle())). - Render() + putils.LettersFromStringWithStyle("Term", pterm.FgLightMagenta.ToStyle()), + ).Render() + + // Create a large text with a specific RGB color. + // This can be used when you need a specific color that is not available in the standard colors. + // Here, the color is gold (RGB: 255, 215, 0). + pterm.DefaultBigText.WithLetters( + putils.LettersFromStringWithRGB("PTerm", pterm.NewRGB(255, 215, 0)), + ).Render() } ```
-### bigtext/default
+### bigtext/colored

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/default/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/colored/animation.svg)
@@ -715,16 +847,21 @@ import ( ) func main() { - pterm.DefaultBigText.WithLetters(putils.LettersFromString("PTerm")).Render() + // Initialize a big text display with the letters "P" and "Term" + // "P" is displayed in cyan and "Term" is displayed in light magenta + pterm.DefaultBigText.WithLetters( + putils.LettersFromStringWithStyle("P", pterm.FgCyan.ToStyle()), + putils.LettersFromStringWithStyle("Term", pterm.FgLightMagenta.ToStyle())). + Render() // Render the big text to the terminal } ```
-### bigtext/demo
+### bigtext/default

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/demo/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bigtext/default/animation.svg)
@@ -739,29 +876,23 @@ import ( ) func main() { - // Print a large text with the LetterStyle from the standard theme. - // Useful for title screens. - pterm.DefaultBigText.WithLetters(putils.LettersFromString("PTerm")).Render() + // Define the text to be rendered + var text = "PTerm" - // Print a large text with differently colored letters. - pterm.DefaultBigText.WithLetters( - putils.LettersFromStringWithStyle("P", pterm.FgCyan.ToStyle()), - putils.LettersFromStringWithStyle("Term", pterm.FgLightMagenta.ToStyle())). - Render() + // Convert the text into a format suitable for PTerm + var letters = putils.LettersFromString(text) - // LettersFromStringWithRGB can be used to create a large text with a specific RGB color. - pterm.DefaultBigText.WithLetters( - putils.LettersFromStringWithRGB("PTerm", pterm.NewRGB(255, 215, 0))). - Render() + // Render the text using PTerm's default big text style + pterm.DefaultBigText.WithLetters(letters).Render() } ```
-### box/custom-padding
+### box/demo

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/custom-padding/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/demo/animation.svg)
@@ -773,21 +904,35 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBox. - WithRightPadding(10). - WithLeftPadding(10). - WithTopPadding(2). - WithBottomPadding(2). - Println("Hello, World!") + // Print an informational message. + pterm.Info.Println("This might not be rendered correctly on GitHub,\nbut it will work in a real terminal.\nThis is because GitHub does not use a monospaced font by default for SVGs") + + // Create three panels with text, some of them with titles. + // The panels are created using the DefaultBox style. + panel1 := pterm.DefaultBox.Sprint("Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit,\nsed do eiusmod tempor incididunt\nut labore et dolore\nmagna aliqua.") + panel2 := pterm.DefaultBox.WithTitle("title").Sprint("Ut enim ad minim veniam,\nquis nostrud exercitation\nullamco laboris\nnisi ut aliquip\nex ea commodo\nconsequat.") + panel3 := pterm.DefaultBox.WithTitle("bottom center title").WithTitleBottomCenter().Sprint("Duis aute irure\ndolor in reprehenderit\nin voluptate velit esse cillum\ndolore eu fugiat\nnulla pariatur.") + + // Combine the panels into a layout using the DefaultPanel style. + // The layout is a 2D grid, with each row being an array of panels. + // In this case, the first row contains panel1 and panel2, and the second row contains only panel3. + panels, _ := pterm.DefaultPanel.WithPanels(pterm.Panels{ + {{Data: panel1}, {Data: panel2}}, + {{Data: panel3}}, + }).Srender() + + // Print the panels layout inside a box with a title. + // The box is created using the DefaultBox style, with the title positioned at the bottom right. + pterm.DefaultBox.WithTitle("Lorem Ipsum").WithTitleBottomRight().WithRightPadding(0).WithBottomPadding(0).Println(panels) } ```
-### box/default
+### box/custom-padding

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/default/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/custom-padding/animation.svg)
@@ -799,16 +944,17 @@ package main import "github.com/pterm/pterm" func main() { - pterm.DefaultBox.Println("Hello, World!") + // Create a default box with custom padding options and print "Hello, World!" inside it. + pterm.DefaultBox.WithRightPadding(10).WithLeftPadding(10).WithTopPadding(2).WithBottomPadding(2).Println("Hello, World!") } ```
-### box/demo
+### box/default

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/demo/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/box/default/animation.svg)
@@ -820,18 +966,9 @@ package main import "github.com/pterm/pterm" func main() { - pterm.Info.Println("This might not be rendered correctly on GitHub,\nbut it will work in a real terminal.\nThis is because GitHub does not use a monospaced font by default for SVGs") - - panel1 := pterm.DefaultBox.Sprint("Lorem ipsum dolor sit amet,\nconsectetur adipiscing elit,\nsed do eiusmod tempor incididunt\nut labore et dolore\nmagna aliqua.") - panel2 := pterm.DefaultBox.WithTitle("title").Sprint("Ut enim ad minim veniam,\nquis nostrud exercitation\nullamco laboris\nnisi ut aliquip\nex ea commodo\nconsequat.") - panel3 := pterm.DefaultBox.WithTitle("bottom center title").WithTitleBottomCenter().Sprint("Duis aute irure\ndolor in reprehenderit\nin voluptate velit esse cillum\ndolore eu fugiat\nnulla pariatur.") - - panels, _ := pterm.DefaultPanel.WithPanels(pterm.Panels{ - {{Data: panel1}, {Data: panel2}}, - {{Data: panel3}}, - }).Srender() - - pterm.DefaultBox.WithTitle("Lorem Ipsum").WithTitleBottomRight().WithRightPadding(0).WithBottomPadding(0).Println(panels) + // Create a default box with PTerm and print a message in it. + // The DefaultBox.Println method automatically starts, prints the message, and stops the box. + pterm.DefaultBox.Println("Hello, World!") } ``` @@ -852,19 +989,22 @@ package main import "github.com/pterm/pterm" func main() { - // Default titled bpx + // Create a default box with specified padding paddedBox := pterm.DefaultBox.WithLeftPadding(4).WithRightPadding(4).WithTopPadding(1).WithBottomPadding(1) + // Define a title for the box title := pterm.LightRed("I'm a box!") - box1 := paddedBox.WithTitle(title).Sprint("Hello, World!\n 1") - box2 := paddedBox.WithTitle(title).WithTitleTopCenter().Sprint("Hello, World!\n 2") - box3 := paddedBox.WithTitle(title).WithTitleTopRight().Sprint("Hello, World!\n 3") - box4 := paddedBox.WithTitle(title).WithTitleBottomRight().Sprint("Hello, World!\n 4") - box5 := paddedBox.WithTitle(title).WithTitleBottomCenter().Sprint("Hello, World!\n 5") - box6 := paddedBox.WithTitle(title).WithTitleBottomLeft().Sprint("Hello, World!\n 6") - box7 := paddedBox.WithTitle(title).WithTitleTopLeft().Sprint("Hello, World!\n 7") + // Create boxes with the title positioned differently and containing different content + box1 := paddedBox.WithTitle(title).Sprint("Hello, World!\n 1") // Title at default position (top left) + box2 := paddedBox.WithTitle(title).WithTitleTopCenter().Sprint("Hello, World!\n 2") // Title at top center + box3 := paddedBox.WithTitle(title).WithTitleTopRight().Sprint("Hello, World!\n 3") // Title at top right + box4 := paddedBox.WithTitle(title).WithTitleBottomRight().Sprint("Hello, World!\n 4") // Title at bottom right + box5 := paddedBox.WithTitle(title).WithTitleBottomCenter().Sprint("Hello, World!\n 5") // Title at bottom center + box6 := paddedBox.WithTitle(title).WithTitleBottomLeft().Sprint("Hello, World!\n 6") // Title at bottom left + box7 := paddedBox.WithTitle(title).WithTitleTopLeft().Sprint("Hello, World!\n 7") // Title at top left + // Render the boxes in a panel layout pterm.DefaultPanel.WithPanels([][]pterm.Panel{ {{box1}, {box2}, {box3}}, {{box4}, {box5}, {box6}}, @@ -876,9 +1016,9 @@ func main() {
-### bulletlist/customized
+### bulletlist/demo

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bulletlist/customized/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bulletlist/demo/animation.svg)
@@ -889,24 +1029,37 @@ package main import ( "github.com/pterm/pterm" + "github.com/pterm/pterm/putils" ) func main() { - // Print a customized list with different styles and levels. - pterm.DefaultBulletList.WithItems([]pterm.BulletListItem{ - {Level: 0, Text: "Blue", TextStyle: pterm.NewStyle(pterm.FgBlue), BulletStyle: pterm.NewStyle(pterm.FgRed)}, - {Level: 1, Text: "Green", TextStyle: pterm.NewStyle(pterm.FgGreen), Bullet: "-", BulletStyle: pterm.NewStyle(pterm.FgLightWhite)}, - {Level: 2, Text: "Cyan", TextStyle: pterm.NewStyle(pterm.FgCyan), Bullet: ">", BulletStyle: pterm.NewStyle(pterm.FgYellow)}, - }).Render() + // Define a list of bullet list items with different levels. + bulletListItems := []pterm.BulletListItem{ + {Level: 0, Text: "Level 0"}, // Level 0 item + {Level: 1, Text: "Level 1"}, // Level 1 item + {Level: 2, Text: "Level 2"}, // Level 2 item + } + + // Use the default bullet list style to render the list items. + pterm.DefaultBulletList.WithItems(bulletListItems).Render() + + // Define a string with different levels of indentation. + text := `0 + 1 + 2 + 3` + + // Convert the indented string to a bullet list and render it. + putils.BulletListFromString(text, " ").Render() } ```
-### bulletlist/demo
+### bulletlist/customized

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bulletlist/demo/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/bulletlist/customized/animation.svg)
@@ -917,23 +1070,35 @@ package main import ( "github.com/pterm/pterm" - "github.com/pterm/pterm/putils" ) func main() { - // Print a list with different levels. - // Useful to generate lists automatically from data. - pterm.DefaultBulletList.WithItems([]pterm.BulletListItem{ - {Level: 0, Text: "Level 0"}, - {Level: 1, Text: "Level 1"}, - {Level: 2, Text: "Level 2"}, - }).Render() + // Define a list of bullet list items with different styles and levels. + bulletListItems := []pterm.BulletListItem{ + { + Level: 0, // Level 0 (top level) + Text: "Blue", // Text to display + TextStyle: pterm.NewStyle(pterm.FgBlue), // Text color + BulletStyle: pterm.NewStyle(pterm.FgRed), // Bullet color + }, + { + Level: 1, // Level 1 (sub-item) + Text: "Green", // Text to display + TextStyle: pterm.NewStyle(pterm.FgGreen), // Text color + Bullet: "-", // Custom bullet symbol + BulletStyle: pterm.NewStyle(pterm.FgLightWhite), // Bullet color + }, + { + Level: 2, // Level 2 (sub-sub-item) + Text: "Cyan", // Text to display + TextStyle: pterm.NewStyle(pterm.FgCyan), // Text color + Bullet: ">", // Custom bullet symbol + BulletStyle: pterm.NewStyle(pterm.FgYellow), // Bullet color + }, + } - // Convert a text to a list and print it. - putils.BulletListFromString(`0 - 1 - 2 - 3`, " ").Render() + // Create a bullet list with the defined items and render it. + pterm.DefaultBulletList.WithItems(bulletListItems).Render() } ``` @@ -951,15 +1116,22 @@ func main() { ```go package main -import "github.com/pterm/pterm" +import ( + "github.com/pterm/pterm" + "github.com/pterm/pterm/putils" +) func main() { - pterm.DefaultCenter.Println("This text is centered!\nIt centeres the whole block by default.\nIn that way you can do stuff like this:") + // Print a block of text centered in the terminal + pterm.DefaultCenter.Println("This text is centered!\nIt centers the whole block by default.\nIn that way you can do stuff like this:") + + // Generate BigLetters and store in 's' + s, _ := pterm.DefaultBigText.WithLetters(putils.LettersFromString("PTerm")).Srender() - // Generate BigLetters - s, _ := pterm.DefaultBigText.WithLetters(pterm.NewLettersFromString("PTerm")).Srender() - pterm.DefaultCenter.Println(s) // Print BigLetters with the default CenterPrinter + // Print the BigLetters 's' centered in the terminal + pterm.DefaultCenter.Println(s) + // Print each line of the text separately centered in the terminal pterm.DefaultCenter.WithCenterEachLineSeparately().Println("This text is centered!\nBut each line is\ncentered\nseparately") } @@ -981,8 +1153,7 @@ package main import "github.com/pterm/pterm" func main() { - // Print all colors - + // Create a table with different foreground and background colors. 
pterm.DefaultTable.WithData([][]string{ {pterm.FgBlack.Sprint("Black"), pterm.FgRed.Sprint("Red"), pterm.FgGreen.Sprint("Green"), pterm.FgYellow.Sprint("Yellow")}, {"", pterm.FgLightRed.Sprint("Light Red"), pterm.FgLightGreen.Sprint("Light Green"), pterm.FgLightYellow.Sprint("Light Yellow")}, @@ -992,18 +1163,19 @@ func main() { {pterm.FgLightBlue.Sprint("Light Blue"), pterm.FgLightMagenta.Sprint("Light Magenta"), pterm.FgLightCyan.Sprint("Light Cyan"), pterm.FgLightWhite.Sprint("Light White")}, {pterm.BgBlue.Sprint("Blue"), pterm.BgMagenta.Sprint("Magenta"), pterm.BgCyan.Sprint("Cyan"), pterm.BgWhite.Sprint("White")}, {pterm.BgLightBlue.Sprint("Light Blue"), pterm.BgLightMagenta.Sprint("Light Magenta"), pterm.BgLightCyan.Sprint("Light Cyan"), pterm.BgLightWhite.Sprint("Light White")}, - }).Render() + }).Render() // Render the table. pterm.Println() - // Print different colored words. + // Print words in different colors. pterm.Println(pterm.Red("Hello, ") + pterm.Green("World") + pterm.Cyan("!")) pterm.Println(pterm.Red("Even " + pterm.Cyan("nested ") + pterm.Green("colors ") + "are supported!")) pterm.Println() - // Or print colors as a style + // Create a new style with a red background, light green foreground, and bold text. style := pterm.NewStyle(pterm.BgRed, pterm.FgLightGreen, pterm.Bold) + // Print text using the created style. style.Println("This text uses a style and is bold and light green with a red background!") } @@ -1025,16 +1197,20 @@ package main import "github.com/pterm/pterm" func main() { + // Loop from 0 to 14 for i := 0; i < 15; i++ { switch i { case 5: + // At the 5th iteration, print a message and disable the output pterm.Info.Println("Disabled Output!") pterm.DisableOutput() case 10: + // At the 10th iteration, enable the output and print a message pterm.EnableOutput() pterm.Info.Println("Enabled Output!") } + // Print a progress message for each iteration pterm.Printf("Printing something... [%d/%d]\n", i, 15) } } @@ -1059,16 +1235,26 @@ import ( ) func main() { - // Print info. + // Print an informational message. pterm.Info.Println("RGB colors only work in Terminals which support TrueColor.") - from := pterm.NewRGB(0, 255, 255) // This RGB value is used as the gradients start point. - to := pterm.NewRGB(255, 0, 255) // This RGB value is used as the gradients end point. + // Define the start and end points for the color gradient. + startColor := pterm.NewRGB(0, 255, 255) // Cyan + endColor := pterm.NewRGB(255, 0, 255) // Magenta + + // Get the terminal height to determine the gradient range. + terminalHeight := pterm.GetTerminalHeight() + + // Loop over the range of the terminal height to create a color gradient. + for i := 0; i < terminalHeight-2; i++ { + // Calculate the fade factor for the current step in the gradient. + fadeFactor := float32(i) / float32(terminalHeight-2) - // For loop over the range of the terminal height. - for i := 0; i < pterm.GetTerminalHeight()-2; i++ { - // Print string which is colored with the faded RGB value. - from.Fade(0, float32(pterm.GetTerminalHeight()-2), float32(i), to).Println("Hello, World!") + // Create a color that represents the current step in the gradient. + currentColor := startColor.Fade(0, 1, fadeFactor, endColor) + + // Print a string with the current color. + currentColor.Println("Hello, World!") } } @@ -1076,9 +1262,9 @@ func main() {
-### coloring/fade-multiple-colors
+### coloring/fade-colors-rgb-style

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/fade-multiple-colors/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/fade-colors-rgb-style/animation.svg)
@@ -1094,38 +1280,73 @@ import ( ) func main() { - from := pterm.NewRGB(0, 255, 255) // This RGB value is used as the gradients start point. - to := pterm.NewRGB(255, 0, 255) // This RGB value is used as the gradients first point. - to2 := pterm.NewRGB(255, 0, 0) // This RGB value is used as the gradients second point. - to3 := pterm.NewRGB(0, 255, 0) // This RGB value is used as the gradients third point. - to4 := pterm.NewRGB(255, 255, 255) // This RGB value is used as the gradients end point. + // Define RGB colors + white := pterm.NewRGB(255, 255, 255) + grey := pterm.NewRGB(128, 128, 128) + black := pterm.NewRGB(0, 0, 0) + red := pterm.NewRGB(255, 0, 0) + purple := pterm.NewRGB(255, 0, 255) + green := pterm.NewRGB(0, 255, 0) - str := "RGB colors only work in Terminals which support TrueColor." + // Define strings to be printed + str1 := "RGB colors only work in Terminals which support TrueColor." + str2 := "The background and foreground colors can be customized individually." + str3 := "Styles can also be applied. For example: Bold or Italic." + + // Print first string with color fading from white to purple + printFadedString(str1, white, purple, grey, black) + + // Print second string with color fading from purple to red + printFadedString(str2, black, purple, red, red) + + // Print third string with color fading from white to green and style changes + printStyledString(str3, white, green, red, black) +} + +// printFadedString prints a string with color fading effect +func printFadedString(str string, fgStart, fgEnd, bgStart, bgEnd pterm.RGB) { strs := strings.Split(str, "") - var fadeInfo string // String which will be used to print info. - // For loop over the range of the string length. + var result string for i := 0; i < len(str); i++ { - // Append faded letter to info string. - fadeInfo += from.Fade(0, float32(len(str)), float32(i), to).Sprint(strs[i]) + // Create a style with color fading effect + style := pterm.NewRGBStyle(fgStart.Fade(0, float32(len(str)), float32(i), fgEnd), bgStart.Fade(0, float32(len(str)), float32(i), bgEnd)) + // Append styled letter to result string + result += style.Sprint(strs[i]) } + pterm.Println(result) +} - // Print info. - pterm.Info.Println(fadeInfo) - - // For loop over the range of the terminal height. - for i := 0; i < pterm.GetTerminalHeight()-2; i++ { - // Print string which is colored with the faded RGB value. 
- from.Fade(0, float32(pterm.GetTerminalHeight()-2), float32(i), to, to2, to3, to4).Println("Hello, World!") +// printStyledString prints a string with color fading and style changes +func printStyledString(str string, fgStart, fgEnd, bgStart, bgEnd pterm.RGB) { + strs := strings.Split(str, "") + var result string + boldStr := strings.Split("Bold", "") + italicStr := strings.Split("Italic", "") + bold, italic := 0, 0 + for i := 0; i < len(str); i++ { + // Create a style with color fading effect + style := pterm.NewRGBStyle(fgStart.Fade(0, float32(len(str)), float32(i), fgEnd), bgStart.Fade(0, float32(len(str)), float32(i), bgEnd)) + // Check if the next letters are "Bold" or "Italic" and add the corresponding style + if bold < len(boldStr) && i+len(boldStr)-bold <= len(strs) && strings.Join(strs[i:i+len(boldStr)-bold], "") == strings.Join(boldStr[bold:], "") { + style = style.AddOptions(pterm.Bold) + bold++ + } else if italic < len(italicStr) && i+len(italicStr)-italic < len(strs) && strings.Join(strs[i:i+len(italicStr)-italic], "") == strings.Join(italicStr[italic:], "") { + style = style.AddOptions(pterm.Italic) + italic++ + } + // Append styled letter to result string + result += style.Sprint(strs[i]) } + pterm.Println(result) } ```
-### coloring/override-default-printers
+### coloring/fade-multiple-colors

-![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/override-default-printers/animation.svg)
+![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/fade-multiple-colors/animation.svg)
@@ -1134,27 +1355,79 @@ func main() { ```go package main -import "github.com/pterm/pterm" +import ( + "strings" + + "github.com/pterm/pterm" +) func main() { - // Print default error. - pterm.Error.Println("This is the default Error") + // Define RGB values for gradient points. + startColor := pterm.NewRGB(0, 255, 255) + firstPoint := pterm.NewRGB(255, 0, 255) + secondPoint := pterm.NewRGB(255, 0, 0) + thirdPoint := pterm.NewRGB(0, 255, 0) + endColor := pterm.NewRGB(255, 255, 255) + + // Define the string to be printed. + str := "RGB colors only work in Terminals which support TrueColor." + strs := strings.Split(str, "") - // Customize default error. - pterm.Error.Prefix = pterm.Prefix{ - Text: "OVERRIDE", - Style: pterm.NewStyle(pterm.BgCyan, pterm.FgRed), + // Initialize an empty string for the faded info. + var fadeInfo string + + // Loop over the string length to create a gradient effect. + for i := 0; i < len(str); i++ { + // Append each character of the string with a faded color to the info string. + fadeInfo += startColor.Fade(0, float32(len(str)), float32(i), firstPoint).Sprint(strs[i]) } - // Print new default error. - pterm.Error.Println("This is the default Error after the prefix was overridden") + // Print the info string with gradient effect. + pterm.Info.Println(fadeInfo) + + // Get the terminal height. + terminalHeight := pterm.GetTerminalHeight() + + // Loop over the terminal height to print "Hello, World!" with a gradient effect. + for i := 0; i < terminalHeight-2; i++ { + // Print the string with a color that fades from startColor to endColor. + startColor.Fade(0, float32(terminalHeight-2), float32(i), firstPoint, secondPoint, thirdPoint, endColor).Println("Hello, World!") + } } ```
-### coloring/print-color-rgb +### coloring/override-default-printers + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/override-default-printers/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import "github.com/pterm/pterm" + +func main() { + // Print a default error message with PTerm's built-in Error style. + pterm.Error.Println("This is the default Error") + + // Override the default error prefix with a new text and style. + pterm.Error.Prefix = pterm.Prefix{Text: "OVERRIDE", Style: pterm.NewStyle(pterm.BgCyan, pterm.FgRed)} + + // Print the error message again, this time with the overridden prefix. + pterm.Error.Println("This is the default Error after the prefix was overridden") +} + +``` + +
+ +### coloring/print-color-rgb ![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/print-color-rgb/animation.svg) @@ -1168,10 +1441,17 @@ package main import "github.com/pterm/pterm" func main() { - // Print strings with a custom RGB color. - // NOTICE: This only works with terminals which support TrueColor. + // Create a new RGB color with values 178, 44, 199. + // This color will be used for the text. pterm.NewRGB(178, 44, 199).Println("This text is printed with a custom RGB!") + + // Create a new RGB color with values 15, 199, 209. + // This color will be used for the text. pterm.NewRGB(15, 199, 209).Println("This text is printed with a custom RGB!") + + // Create a new RGB color with values 201, 144, 30. + // This color will be used for the background. + // The 'true' argument indicates that the color is for the background. pterm.NewRGB(201, 144, 30, true).Println("This text is printed with a custom RGB background!") } @@ -1179,6 +1459,43 @@ func main() { +### coloring/print-color-rgb-style + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/coloring/print-color-rgb-style/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "github.com/pterm/pterm" +) + +func main() { + // Define RGB colors for foreground and background. + foregroundRGB := pterm.RGB{R: 187, G: 80, B: 0} + backgroundRGB := pterm.RGB{R: 0, G: 50, B: 123} + + // Create a new RGB style with the defined foreground and background colors. + rgbStyle := pterm.NewRGBStyle(foregroundRGB, backgroundRGB) + + // Print a string with the custom RGB style. + rgbStyle.Println("This text is not styled.") + + // Add the 'Bold' option to the RGB style and print a string with this style. + rgbStyle.AddOptions(pterm.Bold).Println("This text is bold.") + + // Add the 'Italic' option to the RGB style and print a string with this style. + rgbStyle.AddOptions(pterm.Italic).Println("This text is italic.") +} + +``` + +
+ ### demo/demo ![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/demo/demo/animation.svg) @@ -1321,7 +1638,7 @@ func main() { pterm.DefaultCenter.WithCenterEachLineSeparately().Println(fadeInfo) }) - showcase("Fully Customizale", 2, func() { + showcase("Fully Customizable", 2, func() { for i := 0; i < 4; i++ { pterm.Println() } @@ -1461,9 +1778,9 @@ func randomInt(min, max int) int { -### header/custom +### header/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/header/custom/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/header/demo/animation.svg)
@@ -1475,35 +1792,25 @@ package main import "github.com/pterm/pterm" func main() { - // All available options: https://pkg.go.dev/github.com/pterm/pterm#HeaderPrinter - - // Build on top of DefaultHeader - pterm.DefaultHeader. // Use DefaultHeader as base - WithMargin(15). - WithBackgroundStyle(pterm.NewStyle(pterm.BgCyan)). - WithTextStyle(pterm.NewStyle(pterm.FgBlack)). - Println("This is a custom header!") - // Instead of printing the header you can set it to a variable. - // You can then reuse your custom header. + // Print a default header. + // This uses the default settings of PTerm to print a header. + pterm.DefaultHeader.Println("This is the default header!") - // Making a completely new HeaderPrinter - newHeader := pterm.HeaderPrinter{ - TextStyle: pterm.NewStyle(pterm.FgBlack), - BackgroundStyle: pterm.NewStyle(pterm.BgRed), - Margin: 20, - } + // Print a spacer line for better readability. + pterm.Println() - // Print header. - newHeader.Println("This is a custom header!") + // Print a full-width header. + // This uses the WithFullWidth() option of PTerm to print a header that spans the full width of the terminal. + pterm.DefaultHeader.WithFullWidth().Println("This is a full-width header.") } ```
-### header/demo +### header/custom -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/header/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/header/custom/animation.svg)
@@ -1515,19 +1822,27 @@ package main import "github.com/pterm/pterm" func main() { - // Print a default header. - pterm.DefaultHeader.Println("This is the default header!") - pterm.Println() // spacer - pterm.DefaultHeader.WithFullWidth().Println("This is a full-width header.") + // Customize the DefaultHeader with a cyan background, black text, and a margin of 15. + pterm.DefaultHeader.WithMargin(15).WithBackgroundStyle(pterm.NewStyle(pterm.BgCyan)).WithTextStyle(pterm.NewStyle(pterm.FgBlack)).Println("This is a custom header!") + + // Define a new HeaderPrinter with a red background, black text, and a margin of 20. + newHeader := pterm.HeaderPrinter{ + TextStyle: pterm.NewStyle(pterm.FgBlack), + BackgroundStyle: pterm.NewStyle(pterm.BgRed), + Margin: 20, + } + + // Print the custom header using the new HeaderPrinter. + newHeader.Println("This is a custom header!") } ```
-### interactive_confirm/demo +### heatmap/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_confirm/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/demo/animation.svg)
@@ -1541,25 +1856,33 @@ import ( ) func main() { - result, _ := pterm.DefaultInteractiveConfirm.Show() - pterm.Println() // Blank line - pterm.Info.Printfln("You answered: %s", boolToText(result)) -} + // Define the data for the heatmap. Each sub-array represents a row in the heatmap. + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, + } -func boolToText(b bool) string { - if b { - return pterm.Green("Yes") + // Define the labels for the X and Y axes of the heatmap. + headerData := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, } - return pterm.Red("No") + + // Create a heatmap with the defined data and axis labels, and enable RGB colors. + // Then render the heatmap. + pterm.DefaultHeatmap.WithAxisData(headerData).WithData(data).WithEnableRGB().Render() } ```
-### interactive_continue/demo +### heatmap/custom_colors -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_continue/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/custom_colors/animation.svg)
@@ -1573,18 +1896,43 @@ import ( ) func main() { - result, _ := pterm.DefaultInteractiveContinue.Show() - pterm.Println() // Blank line - pterm.Info.Printfln("You answered: %s", result) + // Define the data for the heatmap + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, + } + + // Define the axis labels for the heatmap + headerData := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, + } + + // Print an informational message + pterm.Info.Println("The following table has no rgb (supported by every terminal), no axis data and a legend.") + pterm.Println() + + // Create the heatmap with the defined data and options, and render it + pterm.DefaultHeatmap. + WithData(data). + WithBoxed(false). + WithAxisData(headerData). + WithLegend(false). + WithColors(pterm.BgBlue, pterm.BgRed, pterm.BgGreen, pterm.BgYellow). + WithLegend(). + Render() } ```
-### interactive_multiselect/custom-checkmarks +### heatmap/custom_legend -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/custom-checkmarks/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/custom_legend/animation.svg)
@@ -1594,36 +1942,48 @@ func main() { package main import ( - "fmt" - - "atomicgo.dev/keyboard/keys" - "github.com/pterm/pterm" ) func main() { - var options []string + // Define the data for the heatmap + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, + } - for i := 0; i < 5; i++ { - options = append(options, fmt.Sprintf("Option %d", i)) + // Define the header data for the heatmap + headerData := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, } - printer := pterm.DefaultInteractiveMultiselect.WithOptions(options) - printer.Filter = false - printer.KeyConfirm = keys.Enter - printer.KeySelect = keys.Space - printer.Checkmark = &pterm.Checkmark{Checked: pterm.Green("+"), Unchecked: pterm.Red("-")} - selectedOptions, _ := printer.Show() - pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) + // Print an informational message + pterm.Info.Println("The following table has rgb (not supported by every terminal), axis data and a custom legend.") + pterm.Println() + + // Create the heatmap with the defined data and options + // Options are chained in a single line for simplicity + pterm.DefaultHeatmap. + WithData(data). + WithBoxed(false). + WithAxisData(headerData). + WithEnableRGB(). + WithLegendLabel("custom"). + WithLegendOnlyColoredCells(). + Render() // Render the heatmap } ```
-### interactive_multiselect/custom-keys +### heatmap/custom_rgb -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/custom-keys/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/custom_rgb/animation.svg)
@@ -1633,34 +1993,54 @@ func main() { package main import ( - "fmt" - - "atomicgo.dev/keyboard/keys" "github.com/pterm/pterm" ) func main() { - var options []string + // Define the data for the heatmap. + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, + } - for i := 0; i < 5; i++ { - options = append(options, fmt.Sprintf("Option %d", i)) + // Define the axis labels for the heatmap. + axisLabels := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, } - printer := pterm.DefaultInteractiveMultiselect.WithOptions(options) - printer.Filter = false - printer.KeyConfirm = keys.Enter - printer.KeySelect = keys.Space - selectedOptions, _ := printer.Show() - pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) + // Print an informational message. + pterm.Info.Println("The following table has rgb (not supported by every terminal), axis data and a legend.") + pterm.Println() + + // Define the color range for the heatmap. + rgbRange := []pterm.RGB{ + pterm.NewRGB(0, 0, 255), + pterm.NewRGB(255, 0, 0), + pterm.NewRGB(0, 255, 0), + pterm.NewRGB(255, 255, 0), + } + + // Create and render the heatmap. + pterm.DefaultHeatmap. + WithData(data). + WithBoxed(false). + WithAxisData(axisLabels). + WithEnableRGB(). + WithRGBRange(rgbRange...). + Render() } ```
-### interactive_multiselect/demo +### heatmap/no_grid -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/no_grid/animation.svg)
@@ -1670,33 +2050,40 @@ func main() { package main import ( - "fmt" - "github.com/pterm/pterm" ) func main() { - var options []string - - for i := 0; i < 100; i++ { - options = append(options, fmt.Sprintf("Option %d", i)) + // Define the data for the heatmap. + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, } - for i := 0; i < 5; i++ { - options = append(options, fmt.Sprintf("You can use fuzzy searching (%d)", i)) + // Define the axis data for the heatmap. + axisData := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, } - selectedOptions, _ := pterm.DefaultInteractiveMultiselect.WithOptions(options).Show() - pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) + // Print an informational message. + pterm.Info.Println("The following table has rgb (not supported by every terminal), axis data and a legend.") + pterm.Println() + + // Create the heatmap with the defined data and options, then render it. + pterm.DefaultHeatmap.WithData(data).WithBoxed(false).WithAxisData(axisData).WithEnableRGB().WithLegend().WithGrid(false).Render() } ```
-### interactive_select/demo +### heatmap/separated -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_select/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/heatmap/separated/animation.svg)
@@ -1705,34 +2092,39 @@ func main() { ```go package main -import ( - "fmt" - - "github.com/pterm/pterm" -) +import "github.com/pterm/pterm" func main() { - var options []string - - for i := 0; i < 100; i++ { - options = append(options, fmt.Sprintf("Option %d", i)) + // Define the data for the heatmap. + data := [][]float32{ + {0.9, 0.2, -0.7, 0.4, -0.5, 0.6, -0.3, 0.8, -0.1, -1.0, 0.1, -0.8, 0.3}, + {0.2, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.9, -0.9, -0.7, -0.5, -0.3}, + {0.4, 0.4, -0.3, -1.0, 0.3, -0.2, -0.9, 0.5, -0.3, -1.0, 0.6, -0.2, -0.9}, + {0.9, -0.5, -0.1, 0.3, 1, -0.7, -0.3, 0.1, 0.7, -0.9, -0.5, 0.2, 0.6}, + {0.5, 0.6, 0.1, -0.2, -0.7, 0.8, 0.6, 0.1, -0.5, -0.7, 0.7, 0.3, 0.0}, } - for i := 0; i < 5; i++ { - options = append(options, fmt.Sprintf("You can use fuzzy searching (%d)", i)) + // Define the axis labels for the heatmap. + headerData := pterm.HeatmapAxis{ + XAxis: []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"}, + YAxis: []string{"1", "2", "3", "4", "5"}, } - selectedOption, _ := pterm.DefaultInteractiveSelect.WithOptions(options).Show() - pterm.Info.Printfln("Selected option: %s", pterm.Green(selectedOption)) + // Print an informational message. + pterm.Info.Println("The following table has no rgb (supported by every terminal), no axis data and no legend.") + pterm.Println() + + // Create the heatmap with the specified data and options, and render it. + pterm.DefaultHeatmap.WithData(data).WithBoxed(false).WithAxisData(headerData).WithLegend(false).Render() } ```
-### interactive_textinput/demo +### interactive_confirm/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_textinput/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_confirm/demo/animation.svg)
@@ -1746,18 +2138,33 @@ import ( ) func main() { - result, _ := pterm.DefaultInteractiveTextInput.WithMultiLine(false).Show() - pterm.Println() // Blank line - pterm.Info.Printfln("You answered: %s", result) + // Show an interactive confirmation dialog and get the result. + result, _ := pterm.DefaultInteractiveConfirm.Show() + + // Print a blank line for better readability. + pterm.Println() + + // Print the user's answer in a formatted way. + pterm.Info.Printfln("You answered: %s", boolToText(result)) +} + +// boolToText converts a boolean value to a colored text. +// If the value is true, it returns a green "Yes". +// If the value is false, it returns a red "No". +func boolToText(b bool) string { + if b { + return pterm.Green("Yes") + } + return pterm.Red("No") } ```
-### interactive_textinput/multi-line +### interactive_continue/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_textinput/multi-line/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_continue/demo/animation.svg)
@@ -1771,8 +2178,21 @@ package main import ( ) func main() { - result, _ := pterm.DefaultInteractiveTextInput.WithMultiLine().Show() // Text input with multi line enabled - pterm.Println() // Blank line + // Create an interactive continue prompt with default settings + // This will pause the program execution until the user picks one of the predefined options + // By default it asks whether the user wants to continue and lists the available answers + prompt := pterm.DefaultInteractiveContinue + + // Show the prompt and wait for user input + // The returned result is the option the user selected (for example "yes") + // The second return value is an error which is ignored here + result, _ := prompt.Show() + + // Print a blank line for better readability + pterm.Println() + + // Print the user's answer with an info prefix + // The answer is the option that was selected in the prompt pterm.Info.Printfln("You answered: %s", result) }

```

@@ -1780,9 +2200,9 @@ func main() {
-### logger/custom-key-styles +### interactive_multiselect/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/custom-key-styles/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/demo/animation.svg)
@@ -1791,33 +2211,40 @@ func main() { ```go package main -import "github.com/pterm/pterm" +import ( + "fmt" + "github.com/pterm/pterm" +) func main() { - logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace) // Only show logs with a level of Trace or higher. + // Initialize an empty slice to hold the options. + var options []string - // Overwrite all key styles with a new map - logger = logger.WithKeyStyles(map[string]pterm.Style{ - "priority": *pterm.NewStyle(pterm.FgRed), - }) + // Populate the options slice with 100 options. + for i := 0; i < 100; i++ { + options = append(options, fmt.Sprintf("Option %d", i)) + } - // The priority key should now be red - logger.Info("The priority key should now be red", logger.Args("priority", "low", "foo", "bar")) + // Add 5 more options to the slice, indicating the availability of fuzzy searching. + for i := 0; i < 5; i++ { + options = append(options, fmt.Sprintf("You can use fuzzy searching (%d)", i)) + } - // Append a key style to the exisiting ones - logger.AppendKeyStyle("foo", *pterm.NewStyle(pterm.FgBlue)) + // Use PTerm's interactive multiselect to present the options to the user and capture their selections. + // The Show() method displays the options and waits for user input. + selectedOptions, _ := pterm.DefaultInteractiveMultiselect.WithOptions(options).Show() - // The foo key should now be blue - logger.Info("The foo key should now be blue", logger.Args("priority", "low", "foo", "bar")) + // Print the selected options, highlighted in green. + pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) } ```
-### logger/default +### interactive_multiselect/custom-checkmarks -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/default/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/custom-checkmarks/animation.svg)
@@ -1827,38 +2254,40 @@ func main() { package main import ( + "fmt" "github.com/pterm/pterm" - "time" ) func main() { - logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace) // Only show logs with a level of Trace or higher. - - logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) + // Initialize an empty slice to hold the options + var options []string - // You can also use the `ArgsFromMap` function to create a `Args` object from a map. - interstingStuff := map[string]any{ - "when were crayons invented": "1903", - "what is the meaning of life": 42, - "is this interesting": true, + // Populate the options slice with 5 options + for i := 0; i < 5; i++ { + options = append(options, fmt.Sprintf("Option %d", i)) } - logger.Debug("This might be interesting", logger.ArgsFromMap(interstingStuff)) - logger.Info("That was actually interesting", logger.Args("such", "wow")) - logger.Warn("Oh no, I see an error coming to us!", logger.Args("speed", 88, "measures", "mph")) - logger.Error("Damn, here it is!", logger.Args("error", "something went wrong")) - logger.Info("But what's really cool is, that you can print very long logs, and PTerm will automatically wrap them for you! Say goodbye to text, that has weird line breaks!", logger.Args("very", "long")) - time.Sleep(time.Second * 2) - logger.Fatal("Oh no, this process is getting killed!", logger.Args("fatal", true)) + // Create a new interactive multiselect printer with the options + // Disable the filter and define the checkmark symbols + printer := pterm.DefaultInteractiveMultiselect. + WithOptions(options). + WithFilter(false). + WithCheckmark(&pterm.Checkmark{Checked: pterm.Green("+"), Unchecked: pterm.Red("-")}) + + // Show the interactive multiselect and get the selected options + selectedOptions, _ := printer.Show() + + // Print the selected options + pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) } ```
-### logger/demo +### interactive_multiselect/custom-keys -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_multiselect/custom-keys/animation.svg)
@@ -1868,37 +2297,250 @@ func main() { package main import ( + "atomicgo.dev/keyboard/keys" + "fmt" "github.com/pterm/pterm" - "time" ) func main() { - logger := pterm.DefaultLogger. - WithLevel(pterm.LogLevelTrace) + // Initialize an empty slice to hold the options + var options []string - logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) + // Populate the options slice with 5 options + for i := 0; i < 5; i++ { + options = append(options, fmt.Sprintf("Option %d", i)) + } - sleep() + // Create a new interactive multiselect printer with the options + // Disable the filter and set the keys for confirming and selecting options + printer := pterm.DefaultInteractiveMultiselect. + WithOptions(options). + WithFilter(false). + WithKeyConfirm(keys.Enter). + WithKeySelect(keys.Space) - interstingStuff := map[string]any{ - "when were crayons invented": "1903", - "what is the meaning of life": 42, + // Show the interactive multiselect and get the selected options + selectedOptions, _ := printer.Show() + + // Print the selected options + pterm.Info.Printfln("Selected options: %s", pterm.Green(selectedOptions)) +} + +``` + +
+ +### interactive_select/demo + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_select/demo/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "fmt" + "github.com/pterm/pterm" +) + +func main() { + // Initialize an empty slice to hold the options + var options []string + + // Generate 100 options and add them to the options slice + for i := 0; i < 100; i++ { + options = append(options, fmt.Sprintf("Option %d", i)) + } + + // Generate 5 additional options with a specific message and add them to the options slice + for i := 0; i < 5; i++ { + options = append(options, fmt.Sprintf("You can use fuzzy searching (%d)", i)) + } + + // Use PTerm's interactive select feature to present the options to the user and capture their selection + // The Show() method displays the options and waits for the user's input + selectedOption, _ := pterm.DefaultInteractiveSelect.WithOptions(options).Show() + + // Display the selected option to the user with a green color for emphasis + pterm.Info.Printfln("Selected option: %s", pterm.Green(selectedOption)) +} + +``` + +
+ +### interactive_textinput/demo + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_textinput/demo/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "github.com/pterm/pterm" +) + +func main() { + // Create an interactive text input with single line input mode + textInput := pterm.DefaultInteractiveTextInput.WithMultiLine(false) + + // Show the text input and get the result + result, _ := textInput.Show() + + // Print a blank line for better readability + pterm.Println() + + // Print the user's answer with an info prefix + pterm.Info.Printfln("You answered: %s", result) +} + +``` + +
+ +### interactive_textinput/multi-line + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_textinput/multi-line/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "github.com/pterm/pterm" +) + +func main() { + // Create a default interactive text input with multi-line enabled. + // This allows the user to input multiple lines of text. + textInput := pterm.DefaultInteractiveTextInput.WithMultiLine() + + // Show the text input to the user and store the result. + // The second return value (an error) is ignored with '_'. + result, _ := textInput.Show() + + // Print a blank line for better readability in the output. + pterm.Println() + + // Print the user's input prefixed with an informational message. + // The '%s' placeholder is replaced with the user's input. + pterm.Info.Printfln("You answered: %s", result) +} + +``` + +
+ +### interactive_textinput/password + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/interactive_textinput/password/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import "github.com/pterm/pterm" + +func main() { + // Create an interactive text input with a mask for password input + passwordInput := pterm.DefaultInteractiveTextInput.WithMask("*") + + // Show the password input prompt and store the result + result, _ := passwordInput.Show("Enter your password") + + // Get the default logger from PTerm + logger := pterm.DefaultLogger + + // Log the received password (the on-screen input was masked, but the logged value is the plain text) + // Note: In a real-world application, you should never log passwords + logger.Info("Password received", logger.Args("password", result)) +} + +``` + +
+ +### logger/demo + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/demo/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "github.com/pterm/pterm" + "time" +) + +func main() { + // Create a logger with trace level + logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace) + + // Log a trace level message + logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) + + // Pause for 3 seconds + sleep() + + // Define a map with interesting stuff + interstingStuff := map[string]any{ + "when were crayons invented": "1903", + "what is the meaning of life": 42, "is this interesting": true, } + + // Log a debug level message with arguments from the map logger.Debug("This might be interesting", logger.ArgsFromMap(interstingStuff)) + + // Pause for 3 seconds sleep() + // Log an info level message logger.Info("That was actually interesting", logger.Args("such", "wow")) + + // Pause for 3 seconds sleep() + + // Log a warning level message logger.Warn("Oh no, I see an error coming to us!", logger.Args("speed", 88, "measures", "mph")) + + // Pause for 3 seconds sleep() + + // Log an error level message logger.Error("Damn, here it is!", logger.Args("error", "something went wrong")) + + // Pause for 3 seconds sleep() + + // Log an info level message with a long text that will be automatically wrapped logger.Info("But what's really cool is, that you can print very long logs, and PTerm will automatically wrap them for you! Say goodbye to text, that has weird line breaks!", logger.Args("very", "long")) + + // Pause for 3 seconds sleep() + + // Log a fatal level message logger.Fatal("Oh no, this process is getting killed!", logger.Args("fatal", true)) } +// Function to pause the execution for 3 seconds func sleep() { time.Sleep(time.Second * 3) } @@ -1907,9 +2549,9 @@ func sleep() {
-### logger/json +### logger/custom-key-styles -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/json/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/custom-key-styles/animation.svg)
@@ -1921,20 +2563,121 @@ package main import "github.com/pterm/pterm" func main() { - logger := pterm.DefaultLogger. - WithLevel(pterm.LogLevelTrace). // Only show logs with a level of Trace or higher. - WithFormatter(pterm.LogFormatterJSON) // ! Make the logger print JSON logs. + // Create a logger with a level of Trace or higher. + logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace) + + // Define a new style for the "priority" key. + priorityStyle := map[string]pterm.Style{ + "priority": *pterm.NewStyle(pterm.FgRed), + } + + // Overwrite all key styles with the new map. + logger = logger.WithKeyStyles(priorityStyle) + + // Log an info message. The "priority" key will be displayed in red. + logger.Info("The priority key should now be red", logger.Args("priority", "low", "foo", "bar")) + + // Define a new style for the "foo" key. + fooStyle := *pterm.NewStyle(pterm.FgBlue) + + // Append the new style to the existing ones. + logger.AppendKeyStyle("foo", fooStyle) + // Log another info message. The "foo" key will be displayed in blue. + logger.Info("The foo key should now be blue", logger.Args("priority", "low", "foo", "bar")) +} + +``` + +
+ +### logger/default + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/default/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "github.com/pterm/pterm" + "time" +) + +func main() { + // Create a logger with a level of Trace or higher. + logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace) + + // Log a trace message with additional arguments. logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) - // You can also use the `ArgsFromMap` function to create a `Args` object from a map. + // Create a map of interesting stuff. interstingStuff := map[string]any{ "when were crayons invented": "1903", "what is the meaning of life": 42, "is this interesting": true, } + + // Log a debug message with arguments from a map. logger.Debug("This might be interesting", logger.ArgsFromMap(interstingStuff)) + // Log an info message with additional arguments. + logger.Info("That was actually interesting", logger.Args("such", "wow")) + + // Log a warning message with additional arguments. + logger.Warn("Oh no, I see an error coming to us!", logger.Args("speed", 88, "measures", "mph")) + + // Log an error message with additional arguments. + logger.Error("Damn, here it is!", logger.Args("error", "something went wrong")) + + // Log an info message with additional arguments. PTerm will automatically wrap long logs. + logger.Info("But what's really cool is, that you can print very long logs, and PTerm will automatically wrap them for you! Say goodbye to text, that has weird line breaks!", logger.Args("very", "long")) + + // Pause for 2 seconds. + time.Sleep(time.Second * 2) + + // Log a fatal message with additional arguments. This will terminate the process. + logger.Fatal("Oh no, this process is getting killed!", logger.Args("fatal", true)) +} + +``` + +
+ +### logger/json + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/logger/json/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import "github.com/pterm/pterm" + +func main() { + // Create a logger with Trace level and JSON formatter + logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace).WithFormatter(pterm.LogFormatterJSON) + + // Log a Trace level message with additional arguments + logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) + + // Create a map of interesting stuff + interestingStuff := map[string]any{ + "when were crayons invented": "1903", + "what is the meaning of life": 42, + "is this interesting": true, + } + + // Log a Debug level message with arguments from the map + logger.Debug("This might be interesting", logger.ArgsFromMap(interestingStuff)) + + // Log Info, Warn, Error, and Fatal level messages with additional arguments logger.Info("That was actually interesting", logger.Args("such", "wow")) logger.Warn("Oh no, I see an error coming to us!", logger.Args("speed", 88, "measures", "mph")) logger.Error("Damn, here it is!", logger.Args("error", "something went wrong")) @@ -1960,24 +2703,35 @@ package main import "github.com/pterm/pterm" func main() { - logger := pterm.DefaultLogger. - WithLevel(pterm.LogLevelTrace). // Only show logs with a level of Trace or higher. - WithCaller() // ! Show the caller of the log function. + // Create a logger with Trace level and caller information + logger := pterm.DefaultLogger.WithLevel(pterm.LogLevelTrace).WithCaller() + // Log a trace message with additional arguments logger.Trace("Doing not so important stuff", logger.Args("priority", "super low")) - // You can also use the `ArgsFromMap` function to create a `Args` object from a map. - interstingStuff := map[string]any{ + // Create a map of interesting stuff + interestingStuff := map[string]any{ "when were crayons invented": "1903", "what is the meaning of life": 42, "is this interesting": true, } - logger.Debug("This might be interesting", logger.ArgsFromMap(interstingStuff)) + // Log a debug message with arguments from a map + logger.Debug("This might be interesting", logger.ArgsFromMap(interestingStuff)) + + // Log an info message with additional arguments logger.Info("That was actually interesting", logger.Args("such", "wow")) + + // Log a warning message with additional arguments logger.Warn("Oh no, I see an error coming to us!", logger.Args("speed", 88, "measures", "mph")) + + // Log an error message with additional arguments logger.Error("Damn, here it is!", logger.Args("error", "something went wrong")) + + // Log an info message with additional arguments. PTerm will automatically wrap long logs. logger.Info("But what's really cool is, that you can print very long logs, and PTerm will automatically wrap them for you! Say goodbye to text, that has weird line breaks!", logger.Args("very", "long")) + + // Log a fatal message with additional arguments. This will terminate the process. logger.Fatal("Oh no, this process is getting killed!", logger.Args("fatal", true)) } @@ -1985,6 +2739,80 @@ func main() {
+### multiple-live-printers/demo + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/multiple-live-printers/demo/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "time" + + "github.com/pterm/pterm" +) + +func main() { + // Create a multi printer for managing multiple printers + multi := pterm.DefaultMultiPrinter + + // Create two spinners with their own writers + spinner1, _ := pterm.DefaultSpinner.WithWriter(multi.NewWriter()).Start("Spinner 1") + spinner2, _ := pterm.DefaultSpinner.WithWriter(multi.NewWriter()).Start("Spinner 2") + + // Create five progress bars with their own writers and a total of 100 + pb1, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 1") + pb2, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 2") + pb3, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 3") + pb4, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 4") + pb5, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 5") + + // Start the multi printer + multi.Start() + + // Increment progress bars and spinners based on certain conditions + for i := 1; i <= 100; i++ { + pb1.Increment() // Increment progress bar 1 every iteration + + if i%2 == 0 { + pb2.Add(3) // Add 3 to progress bar 2 every even iteration + } + + if i%5 == 0 { + pb3.Increment() // Increment progress bar 3 every 5th iteration + } + + if i%10 == 0 { + pb4.Increment() // Increment progress bar 4 every 10th iteration + } + + if i%3 == 0 { + pb5.Increment() // Increment progress bar 5 every 3rd iteration + } + + if i%50 == 0 { + spinner1.Success("Spinner 1 is done!") // Mark spinner 1 as successful every 50th iteration + } + + if i%60 == 0 { + spinner2.Fail("Spinner 2 failed!") // Mark spinner 2 as failed every 60th iteration + } + + time.Sleep(time.Millisecond * 50) // Sleep for 50 milliseconds between each iteration + } + + // Stop the multi printer + multi.Stop() +} + +``` + +
+ ### panel/demo ![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/panel/demo/animation.svg) @@ -1999,13 +2827,20 @@ package main import "github.com/pterm/pterm" func main() { - // Declare panels in a two dimensional grid system. + // Define panels in a 2D grid system panels := pterm.Panels{ - {{Data: "This is the first panel"}, {Data: pterm.DefaultHeader.Sprint("Hello, World!")}, {Data: "This\npanel\ncontains\nmultiple\nlines"}}, - {{Data: pterm.Red("This is another\npanel line")}, {Data: "This is the second panel\nwith a new line"}}, + { + {Data: "This is the first panel"}, + {Data: pterm.DefaultHeader.Sprint("Hello, World!")}, + {Data: "This\npanel\ncontains\nmultiple\nlines"}, + }, + { + {Data: pterm.Red("This is another\npanel line")}, + {Data: "This is the second panel\nwith a new line"}, + }, } - // Print panels. + // Render the panels with a padding of 5 _ = pterm.DefaultPanel.WithPanels(panels).WithPadding(5).Render() } @@ -2013,9 +2848,9 @@ func main() { -### paragraph/customized +### paragraph/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/paragraph/customized/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/paragraph/demo/animation.svg)
@@ -2027,15 +2862,18 @@ package main import "github.com/pterm/pterm" func main() { - // Print a paragraph with a custom maximal width. - pterm.DefaultParagraph.WithMaxWidth(60).Println("This is a custom paragraph printer. As you can see, no words are separated, " + + // Using the default paragraph printer to print a long text. + // The text is split at the spaces, which is useful for continuous text of all kinds. + // The line width can be manually adjusted if needed. + pterm.DefaultParagraph.Println("This is the default paragraph printer. As you can see, no words are separated, " + "but the text is split at the spaces. This is useful for continuous text of all kinds. You can manually change the line width if you want to." + "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam") - // Print one line space. + // Printing a line space for separation. pterm.Println() - // Print text without a paragraph printer. + // Printing a long text without using the paragraph printer. + // The default Println() function is used here, which does not provide intelligent splitting. pterm.Println("This text is written with the default Println() function. No intelligent splitting here." + "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam") } @@ -2044,9 +2882,9 @@ func main() {
-### paragraph/demo +### paragraph/customized -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/paragraph/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/paragraph/customized/animation.svg)
@@ -2058,17 +2896,23 @@ package main import "github.com/pterm/pterm" func main() { - // Print long text with default paragraph printer. - pterm.DefaultParagraph.Println("This is the default paragraph printer. As you can see, no words are separated, " + + // Define a long text to be printed as a paragraph. + longText := "This is a custom paragraph printer. As you can see, no words are separated, " + "but the text is split at the spaces. This is useful for continuous text of all kinds. You can manually change the line width if you want to." + - "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam") + "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam" - // Print one line space. + // Print the long text as a paragraph with a custom maximal width of 60 characters. + pterm.DefaultParagraph.WithMaxWidth(60).Println(longText) + + // Print a line space to separate the paragraph from the following text. pterm.Println() - // Print long text without paragraph printer. - pterm.Println("This text is written with the default Println() function. No intelligent splitting here." + - "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam") + // Define another long text to be printed without a paragraph printer. + longTextWithoutParagraph := "This text is written with the default Println() function. No intelligent splitting here." + + "Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam" + + // Print the long text without using a paragraph printer. + pterm.Println(longTextWithoutParagraph) } ``` @@ -2089,17 +2933,31 @@ package main import "github.com/pterm/pterm" func main() { - // Enable debug messages. + // Enable debug messages in PTerm. pterm.EnableDebugMessages() - pterm.Debug.Println("Hello, World!") // Print Debug. - pterm.Info.Println("Hello, World!") // Print Info. - pterm.Success.Println("Hello, World!") // Print Success. - pterm.Warning.Println("Hello, World!") // Print Warning. - pterm.Error.Println("Errors show the filename and linenumber inside the terminal!") // Print Error. - pterm.Info.WithShowLineNumber().Println("Other PrefixPrinters can do that too!") // Print Error. + // Print a debug message with PTerm. + pterm.Debug.Println("Hello, World!") + + // Print an informational message with PTerm. + pterm.Info.Println("Hello, World!") + + // Print a success message with PTerm. + pterm.Success.Println("Hello, World!") + + // Print a warning message with PTerm. + pterm.Warning.Println("Hello, World!") + + // Print an error message with PTerm. This will also display the filename and line number in the terminal. + pterm.Error.Println("Errors show the filename and linenumber inside the terminal!") + + // Print an informational message with PTerm, with line number. + // This demonstrates that other PrefixPrinters can also display line numbers. + pterm.Info.WithShowLineNumber().Println("Other PrefixPrinters can do that too!") + // Temporarily set Fatal to false, so that the CI won't crash. - pterm.Fatal.WithFatal(false).Println("Hello, World!") // Print Fatal. + // This will print a fatal message with PTerm, but won't terminate the program. 
+ pterm.Fatal.WithFatal(false).Println("Hello, World!") } ``` @@ -2124,20 +2982,96 @@ import ( "github.com/pterm/pterm" ) -// Slice of strings with placeholder text. +// Slice of strings representing names of pseudo applications to be downloaded. var fakeInstallList = strings.Split("pseudo-excel pseudo-photoshop pseudo-chrome pseudo-outlook pseudo-explorer "+ "pseudo-dops pseudo-git pseudo-vsc pseudo-intellij pseudo-minecraft pseudo-scoop pseudo-chocolatey", " ") func main() { - // Create progressbar as fork from the default progressbar. + // Create a progressbar with the total steps equal to the number of items in fakeInstallList. + // Set the initial title of the progressbar to "Downloading stuff". p, _ := pterm.DefaultProgressbar.WithTotal(len(fakeInstallList)).WithTitle("Downloading stuff").Start() + // Loop over each item in the fakeInstallList. for i := 0; i < p.Total; i++ { - p.UpdateTitle("Downloading " + fakeInstallList[i]) // Update the title of the progressbar. - pterm.Success.Println("Downloading " + fakeInstallList[i]) // If a progressbar is running, each print will be printed above the progressbar. - p.Increment() // Increment the progressbar by one. Use Add(x int) to increment by a custom amount. - time.Sleep(time.Millisecond * 350) // Sleep 350 milliseconds. + // Simulate a slow download for the 7th item. + if i == 6 { + time.Sleep(time.Second * 3) + } + + // Update the title of the progressbar with the current item being downloaded. + p.UpdateTitle("Downloading " + fakeInstallList[i]) + + // Print a success message for the current download. This will be printed above the progressbar. + pterm.Success.Println("Downloading " + fakeInstallList[i]) + + // Increment the progressbar by one to indicate progress. + p.Increment() + + // Pause for 350 milliseconds to simulate the time taken for each download. + time.Sleep(time.Millisecond * 350) + } +} + +``` + +
+ +### progressbar/multiple + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/progressbar/multiple/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "time" + + "github.com/pterm/pterm" +) + +func main() { + // Create a multi printer instance from the default one + multi := pterm.DefaultMultiPrinter + + // Create five progress bars with a total of 100 units each, and assign each a new writer from the multi printer + pb1, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 1") + pb2, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 2") + pb3, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 3") + pb4, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 4") + pb5, _ := pterm.DefaultProgressbar.WithTotal(100).WithWriter(multi.NewWriter()).Start("Progressbar 5") + + // Start the multi printer + multi.Start() + + // Loop to increment progress bars based on certain conditions + for i := 1; i <= 100; i++ { + pb1.Increment() // Increment the first progress bar at each iteration + + if i%2 == 0 { + pb2.Add(3) // Add 3 units to the second progress bar at every even iteration + } + + if i%5 == 0 { + pb3.Increment() // Increment the third progress bar at every fifth iteration + } + + if i%10 == 0 { + pb4.Increment() // Increment the fourth progress bar at every tenth iteration + } + + if i%3 == 0 { + pb5.Increment() // Increment the fifth progress bar at every third iteration + } + + time.Sleep(time.Millisecond * 50) // Pause for 50 milliseconds at each iteration } + + // Stop the multi printer + multi.Stop() } ``` @@ -2158,14 +3092,16 @@ package main import "github.com/pterm/pterm" func main() { - // Print a section with level one. + // Create a section with level one and print it. pterm.DefaultSection.Println("This is a section!") - // Print placeholder. + + // Print an informational message. pterm.Info.Println("And here is some text.\nThis text could be anything.\nBasically it's just a placeholder") - // Print a section with level two. + // Create a section with level two and print it. pterm.DefaultSection.WithLevel(2).Println("This is another section!") - // Print placeholder. + + // Print another informational message. pterm.Info.Println("And this is\nmore placeholder text") } @@ -2173,6 +3109,53 @@ func main() {
+### slog/demo + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/slog/demo/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "log/slog" + + "github.com/pterm/pterm" +) + +func main() { + // Create a new slog handler with the default PTerm logger + handler := pterm.NewSlogHandler(&pterm.DefaultLogger) + + // Create a new slog logger with the handler + logger := slog.New(handler) + + // Log a debug message (won't show by default) + logger.Debug("This is a debug message that won't show") + + // Change the log level to debug to enable debug messages + pterm.DefaultLogger.Level = pterm.LogLevelDebug + + // Log a debug message (will show because debug level is enabled) + logger.Debug("This is a debug message", "changedLevel", true) + + // Log an info message + logger.Info("This is an info message") + + // Log a warning message + logger.Warn("This is a warning message") + + // Log an error message + logger.Error("This is an error message") +} + +``` + +
+ ### spinner/demo ![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/spinner/demo/animation.svg) @@ -2238,6 +3221,68 @@ func main() { +### spinner/multiple + +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/spinner/multiple/animation.svg) + +
+ +SHOW SOURCE + +```go +package main + +import ( + "time" + + "github.com/pterm/pterm" +) + +func main() { + // Create a multi printer. This allows multiple spinners to print simultaneously. + multi := pterm.DefaultMultiPrinter + + // Create and start spinner 1 with a new writer from the multi printer. + // The spinner will display the message "Spinner 1". + spinner1, _ := pterm.DefaultSpinner.WithWriter(multi.NewWriter()).Start("Spinner 1") + + // Create and start spinner 2 with a new writer from the multi printer. + // The spinner will display the message "Spinner 2". + spinner2, _ := pterm.DefaultSpinner.WithWriter(multi.NewWriter()).Start("Spinner 2") + + // Create and start spinner 3 with a new writer from the multi printer. + // The spinner will display the message "Spinner 3". + spinner3, _ := pterm.DefaultSpinner.WithWriter(multi.NewWriter()).Start("Spinner 3") + + // Start the multi printer. This will start printing all the spinners. + multi.Start() + + // Wait for 1 second. + time.Sleep(time.Millisecond * 1000) + + // Stop spinner 1 with a success message. + spinner1.Success("Spinner 1 is done!") + + // Wait for 750 milliseconds. + time.Sleep(time.Millisecond * 750) + + // Stop spinner 2 with a failure message. + spinner2.Fail("Spinner 2 failed!") + + // Wait for 500 milliseconds. + time.Sleep(time.Millisecond * 500) + + // Stop spinner 3 with a warning message. + spinner3.Warning("Spinner 3 has a warning!") + + // Stop the multi printer. This will stop printing all the spinners. + multi.Stop() +} + +``` + +
+ ### style/demo ![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/style/demo/animation.svg) @@ -2252,12 +3297,16 @@ package main import "github.com/pterm/pterm" func main() { - // Create styles as new variables + // Define a primary style with light cyan foreground, gray background, and bold text primary := pterm.NewStyle(pterm.FgLightCyan, pterm.BgGray, pterm.Bold) + + // Define a secondary style with light green foreground, white background, and italic text secondary := pterm.NewStyle(pterm.FgLightGreen, pterm.BgWhite, pterm.Italic) - // Use created styles + // Print "Hello, World!" with the primary style primary.Println("Hello, World!") + + // Print "Hello, World!" with the secondary style secondary.Println("Hello, World!") } @@ -2265,9 +3314,9 @@ func main() { -### table/boxed +### table/demo -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/table/boxed/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/table/demo/animation.svg)
@@ -2279,23 +3328,40 @@ package main import "github.com/pterm/pterm" func main() { - // Create a fork of the default table, fill it with data and print it. - // Data can also be generated and inserted later. - pterm.DefaultTable.WithHasHeader().WithBoxed().WithData(pterm.TableData{ + // Define the data for the first table + tableData1 := pterm.TableData{ {"Firstname", "Lastname", "Email", "Note"}, - {"Paul", "Dean", "nisi.dictum.augue@velitAliquam.co.uk", ""}, - {"Callie", "Mckay", "egestas.nunc.sed@est.com", "这是一个测试, haha!"}, - {"Libby", "Camacho", "aliquet.lobortis@semper.com", "just a test, hey!"}, - }).Render() + {"Paul", "Dean", "augue@velitAliquam.co.uk", ""}, + {"Callie", "Mckay", "nunc.sed@est.com", "这是一个测试, haha!"}, + {"Libby", "Camacho", "lobortis@semper.com", "just a test, hey!"}, + {"张", "小宝", "zhang@example.com", ""}, + } + + // Create a table with a header and the defined data, then render it + pterm.DefaultTable.WithHasHeader().WithData(tableData1).Render() + + pterm.Println() // Blank line + + // Define the data for the second table + tableData2 := pterm.TableData{ + {"Firstname", "Lastname", "Email"}, + {"Paul\n\nNewline", "Dean", "augue@velitAliquam.co.uk"}, + {"Callie", "Mckay", "nunc.sed@est.com\nNewline"}, + {"Libby", "Camacho", "lobortis@semper.com"}, + {"张", "小宝", "zhang@example.com"}, + } + + // Create another table with a header and the defined data, then render it + pterm.DefaultTable.WithHasHeader().WithData(tableData2).Render() } ```
-### table/demo +### table/boxed -![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/table/demo/animation.svg) +![Animation](https://raw.githubusercontent.com/pterm/pterm/master/_examples/table/boxed/animation.svg)
@@ -2307,25 +3373,21 @@ package main import "github.com/pterm/pterm" func main() { - // Create a fork of the default table, fill it with data and print it. - // Data can also be generated and inserted later. - pterm.DefaultTable.WithHasHeader().WithData(pterm.TableData{ + // Define the data for the table. + // Each inner slice represents a row in the table. + // The first row is considered as the header of the table. + tableData := pterm.TableData{ {"Firstname", "Lastname", "Email", "Note"}, - {"Paul", "Dean", "nisi.dictum.augue@velitAliquam.co.uk", ""}, - {"Callie", "Mckay", "egestas.nunc.sed@est.com", "这是一个测试, haha!"}, - {"Libby", "Camacho", "aliquet.lobortis@semper.com", "just a test, hey!"}, - }).Render() - - pterm.Println() // Blank line + {"Paul", "Dean", "augue@velitAliquam.co.uk", ""}, + {"Callie", "Mckay", "nunc.sed@est.com", "这是一个测试, haha!"}, + {"Libby", "Camacho", "lobortis@semper.com", "just a test, hey!"}, + {"张", "小宝", "zhang@example.com", ""}, + } - // Create a table with multiple lines in a row. - pterm.DefaultTable.WithHasHeader().WithData(pterm.TableData{ - {"Firstname", "Lastname", "Email"}, - {"Paul\n\nNewline", "Dean", "nisi.dictum.augue@velitAliquam.co.uk"}, - {"Callie", "Mckay", "egestas.nunc.sed@est.com\nNewline"}, - {"Libby", "Camacho", "aliquet.lobortis@semper.com"}, - {"张", "小宝", "zhang@example.com"}, - }).Render() + // Create a table with the defined data. + // The table has a header and is boxed. + // Finally, render the table to print it. + pterm.DefaultTable.WithHasHeader().WithBoxed().WithData(tableData).Render() } ``` @@ -2346,14 +3408,19 @@ package main import "github.com/pterm/pterm" func main() { - // Create a table with multiple lines in a row and set a row separator. - pterm.DefaultTable.WithHasHeader().WithRowSeparator("-").WithHeaderRowSeparator("-").WithData(pterm.TableData{ + // Define the data for the table. + data := pterm.TableData{ {"Firstname", "Lastname", "Email"}, - {"Paul\n\nNewline", "Dean", "nisi.dictum.augue@velitAliquam.co.uk"}, - {"Callie", "Mckay", "egestas.nunc.sed@est.com\nNewline"}, - {"Libby", "Camacho", "aliquet.lobortis@semper.com"}, + {"Paul\n\nNewline", "Dean", "augue@velitAliquam.co.uk"}, + {"Callie", "Mckay", "nunc.sed@est.com\nNewline"}, + {"Libby", "Camacho", "lobortis@semper.com"}, {"张", "小宝", "zhang@example.com"}, - }).Render() + } + + // Create and render the table. + // The options are chained in a single line for simplicity. + // The table has a header, a row separator, and a header row separator. + pterm.DefaultTable.WithHasHeader().WithRowSeparator("-").WithHeaderRowSeparator("-").WithData(data).Render() } ``` @@ -2374,14 +3441,21 @@ package main import "github.com/pterm/pterm" func main() { - // Create a fork of the default table, fill it with data and print it. - // Data can also be generated and inserted later. - pterm.DefaultTable.WithHasHeader().WithRightAlignment().WithData(pterm.TableData{ + // Define the data for the table. + // Each inner slice represents a row in the table. + // The first row is considered as the header. 
+ tableData := pterm.TableData{ {"Firstname", "Lastname", "Email", "Note"}, - {"Paul", "Dean", "nisi.dictum.augue@velitAliquam.co.uk", ""}, - {"Callie", "Mckay", "egestas.nunc.sed@est.com", "这是一个测试, haha!"}, - {"Libby", "Camacho", "aliquet.lobortis@semper.com", "just a test, hey!"}, - }).Render() + {"Paul", "Dean", "augue@velitAliquam.co.uk", ""}, + {"Callie", "Mckay", "nunc.sed@est.com", "这是一个测试, haha!"}, + {"Libby", "Camacho", "lobortis@semper.com", "just a test, hey!"}, + {"张", "小宝", "zhang@example.com", ""}, + } + + // Create a table with the defined data. + // The table has a header and the text in the cells is right-aligned. + // The Render() method is used to print the table to the console. + pterm.DefaultTable.WithHasHeader().WithRightAlignment().WithData(tableData).Render() } ``` @@ -2406,23 +3480,27 @@ import ( ) func main() { - // Print info. - pterm.Info.Println("These are the default theme styles.\n" + - "You can modify them easily to your personal preference,\n" + - "or create new themes from scratch :)") + // Print an informational message about the default theme styles. + pterm.Info.Println("These are the default theme styles.\nYou can modify them easily to your personal preference,\nor create new themes from scratch :)") - pterm.Println() // Print one line space. + // Print a blank line for better readability. + pterm.Println() - // Print every value of the default theme with its own style. + // Get the value and type of the default theme. v := reflect.ValueOf(pterm.ThemeDefault) typeOfS := v.Type() + // Check if the type of the default theme is 'pterm.Theme'. if typeOfS == reflect.TypeOf(pterm.Theme{}) { + // Iterate over each field in the default theme. for i := 0; i < v.NumField(); i++ { + // Try to convert the field to 'pterm.Style'. field, ok := v.Field(i).Interface().(pterm.Style) if ok { + // Print the field name using its own style. field.Println(typeOfS.Field(i).Name) } + // Pause for a quarter of a second to make the output easier to read. time.Sleep(time.Millisecond * 250) } } @@ -2448,11 +3526,17 @@ import ( ) func main() { + // Define a tree structure using pterm.TreeNode tree := pterm.TreeNode{ + // The top node of the tree Text: "Top node", + // The children of the top node Children: []pterm.TreeNode{{ + // A child node Text: "Child node", + // The children of the child node Children: []pterm.TreeNode{ + // Grandchildren nodes {Text: "Grandchild node"}, {Text: "Grandchild node"}, {Text: "Grandchild node"}, @@ -2460,6 +3544,7 @@ func main() { }}, } + // Render the tree with the defined structure as the root pterm.DefaultTree.WithRoot(tree).Render() } @@ -2484,37 +3569,37 @@ import ( ) func main() { - // You can use a LeveledList here, for easy generation. + // Define a leveled list to represent the structure of the directories. 
leveledList := pterm.LeveledList{ - pterm.LeveledListItem{Level: 0, Text: "C:"}, - pterm.LeveledListItem{Level: 1, Text: "Users"}, - pterm.LeveledListItem{Level: 1, Text: "Windows"}, - pterm.LeveledListItem{Level: 1, Text: "Programs"}, - pterm.LeveledListItem{Level: 1, Text: "Programs(x86)"}, - pterm.LeveledListItem{Level: 1, Text: "dev"}, - pterm.LeveledListItem{Level: 0, Text: "D:"}, - pterm.LeveledListItem{Level: 0, Text: "E:"}, - pterm.LeveledListItem{Level: 1, Text: "Movies"}, - pterm.LeveledListItem{Level: 1, Text: "Music"}, - pterm.LeveledListItem{Level: 2, Text: "LinkinPark"}, - pterm.LeveledListItem{Level: 1, Text: "Games"}, - pterm.LeveledListItem{Level: 2, Text: "Shooter"}, - pterm.LeveledListItem{Level: 3, Text: "CallOfDuty"}, - pterm.LeveledListItem{Level: 3, Text: "CS:GO"}, - pterm.LeveledListItem{Level: 3, Text: "Battlefield"}, - pterm.LeveledListItem{Level: 4, Text: "Battlefield 1"}, - pterm.LeveledListItem{Level: 4, Text: "Battlefield 2"}, - pterm.LeveledListItem{Level: 0, Text: "F:"}, - pterm.LeveledListItem{Level: 1, Text: "dev"}, - pterm.LeveledListItem{Level: 2, Text: "dops"}, - pterm.LeveledListItem{Level: 2, Text: "PTerm"}, - } - - // Generate tree from LeveledList. + {Level: 0, Text: "C:"}, + {Level: 1, Text: "Users"}, + {Level: 1, Text: "Windows"}, + {Level: 1, Text: "Programs"}, + {Level: 1, Text: "Programs(x86)"}, + {Level: 1, Text: "dev"}, + {Level: 0, Text: "D:"}, + {Level: 0, Text: "E:"}, + {Level: 1, Text: "Movies"}, + {Level: 1, Text: "Music"}, + {Level: 2, Text: "LinkinPark"}, + {Level: 1, Text: "Games"}, + {Level: 2, Text: "Shooter"}, + {Level: 3, Text: "CallOfDuty"}, + {Level: 3, Text: "CS:GO"}, + {Level: 3, Text: "Battlefield"}, + {Level: 4, Text: "Battlefield 1"}, + {Level: 4, Text: "Battlefield 2"}, + {Level: 0, Text: "F:"}, + {Level: 1, Text: "dev"}, + {Level: 2, Text: "dops"}, + {Level: 2, Text: "PTerm"}, + } + + // Convert the leveled list into a tree structure. root := putils.TreeFromLeveledList(leveledList) - root.Text = "Computer" + root.Text = "Computer" // Set the root node text. - // Render TreePrinter + // Render the tree structure using the default tree printer. pterm.DefaultTree.WithRoot(root).Render() } diff --git a/vendor/github.com/pterm/pterm/SECURITY.md b/vendor/github.com/pterm/pterm/SECURITY.md new file mode 100644 index 00000000..3e4653f8 --- /dev/null +++ b/vendor/github.com/pterm/pterm/SECURITY.md @@ -0,0 +1,24 @@ +# PTerm Security Policy +This security policy applies to the PTerm GitHub repository and outlines the process for reporting security issues and handling security incidents. The primary goal of this policy is to ensure the safety and integrity of the PTerm codebase and to minimize the impact of security incidents on our users. + +## 1. Overview +PTerm is a command-line interface (CLI) tool library, and we believe the security risks associated with it are minimal. However, we recognize that vulnerabilities can still arise, and we are committed to addressing them promptly and transparently. + +## 2. Reporting Security Issues +If you discover a security issue in PTerm, please follow these steps: + +Open a new issue in the PTerm GitHub repository, describing the security problem in detail. + +## 3. Vulnerable Dependencies +If a dependency of PTerm is found to be vulnerable or infected and requires immediate updates, please follow these steps: + +1. Open a new issue in the PTerm GitHub repository, describing the vulnerable dependency and the need for an update. +2. 
*Optional: Contact @MarvinJWendt directly via Twitter or Discord to alert them to the issue.* + +## 4. Incident Response +Upon receiving a security report, the PTerm team will: + +1. Acknowledge receipt of the report and review the issue. +2. Investigate the issue and determine the severity and impact. +3. Develop and implement a fix or mitigation plan, as necessary. +4. Update the PTerm repository and notify users, if applicable. diff --git a/vendor/github.com/pterm/pterm/area_printer.go b/vendor/github.com/pterm/pterm/area_printer.go index 5f3e5b04..ed4b0c22 100644 --- a/vendor/github.com/pterm/pterm/area_printer.go +++ b/vendor/github.com/pterm/pterm/area_printer.go @@ -1,6 +1,7 @@ package pterm import ( + "io" "strings" "atomicgo.dev/cursor" @@ -47,6 +48,11 @@ func (p AreaPrinter) WithCenter(b ...bool) *AreaPrinter { return &p } +// SetWriter sets the writer for the AreaPrinter. +func (p *AreaPrinter) SetWriter(writer io.Writer) { + +} + // Update overwrites the content of the AreaPrinter. // Can be used live. func (p *AreaPrinter) Update(text ...interface{}) { diff --git a/vendor/github.com/pterm/pterm/box_printer.go b/vendor/github.com/pterm/pterm/box_printer.go index 8bff4671..3185e024 100644 --- a/vendor/github.com/pterm/pterm/box_printer.go +++ b/vendor/github.com/pterm/pterm/box_printer.go @@ -240,8 +240,8 @@ func (p BoxPrinter) Sprint(a ...interface{}) string { maxWidth+p.LeftPadding+p.RightPadding) + p.BoxStyle.Sprint(p.TopLeftCornerString) } else { p.Title = strings.ReplaceAll(p.Title, "\n", " ") - if (maxWidth + p.RightPadding + p.LeftPadding - 4) < len(RemoveColorFromString(p.Title)) { - p.RightPadding = len(RemoveColorFromString(p.Title)) - (maxWidth + p.RightPadding + p.LeftPadding - 5) + if (maxWidth + p.RightPadding + p.LeftPadding - 4) < internal.GetStringMaxWidth(p.Title) { + p.RightPadding = internal.GetStringMaxWidth(p.Title) - (maxWidth + p.RightPadding + p.LeftPadding - 5) } if p.TitleTopLeft { topLine = p.BoxStyle.Sprint(p.BottomRightCornerString) + internal.AddTitleToLine(p.Title, p.BoxStyle.Sprint(p.HorizontalString), maxWidth+p.LeftPadding+p.RightPadding, true) + p.BoxStyle.Sprint(p.BottomLeftCornerString) diff --git a/vendor/github.com/pterm/pterm/bulletlist_printer.go b/vendor/github.com/pterm/pterm/bulletlist_printer.go index ed997aa5..8e6afcb0 100644 --- a/vendor/github.com/pterm/pterm/bulletlist_printer.go +++ b/vendor/github.com/pterm/pterm/bulletlist_printer.go @@ -116,10 +116,18 @@ func (l BulletListPrinter) Srender() (string, error) { item.BulletStyle = l.BulletStyle } } - if item.Bullet == "" { - ret += strings.Repeat(" ", item.Level) + item.BulletStyle.Sprint(l.Bullet) + " " + item.TextStyle.Sprint(item.Text) + "\n" - } else { - ret += strings.Repeat(" ", item.Level) + item.BulletStyle.Sprint(item.Bullet) + " " + item.TextStyle.Sprint(item.Text) + "\n" + + split := strings.Split(item.Text, "\n") + for i, line := range split { + if i == 0 { + if item.Bullet == "" { + ret += strings.Repeat(" ", item.Level) + item.BulletStyle.Sprint(l.Bullet) + " " + item.TextStyle.Sprint(line) + "\n" + } else { + ret += strings.Repeat(" ", item.Level) + item.BulletStyle.Sprint(item.Bullet) + " " + item.TextStyle.Sprint(line) + "\n" + } + } else { + ret += strings.Repeat(" ", item.Level) + strings.Repeat(" ", len(item.Bullet)) + " " + item.TextStyle.Sprint(line) + "\n" + } } } return ret, nil diff --git a/vendor/github.com/pterm/pterm/heatmap_printer.go b/vendor/github.com/pterm/pterm/heatmap_printer.go new file mode 100644 index 00000000..4fac71e3 --- /dev/null +++ 
b/vendor/github.com/pterm/pterm/heatmap_printer.go @@ -0,0 +1,744 @@ +package pterm + +import ( + "bytes" + "errors" + "io" + "math" + "strings" + + "github.com/pterm/pterm/internal" +) + +// DefaultHeatmap contains standards, which can be used to print a HeatmapPrinter. +var DefaultHeatmap = HeatmapPrinter{ + AxisStyle: &ThemeDefault.HeatmapHeaderStyle, + SeparatorStyle: &ThemeDefault.HeatmapSeparatorStyle, + VerticalSeparator: "│", + TopRightCornerSeparator: "└", + TopLeftCornerSeparator: "┘", + BottomLeftCornerSeparator: "┐", + BottomRightCornerSeparator: "┌", + HorizontalSeparator: "─", + TSeparator: "┬", + TReverseSeparator: "┴", + LSeparator: "├", + LReverseSeparator: "┤", + TCrossSeparator: "┼", + LegendLabel: "Legend", + Boxed: true, + Grid: true, + Legend: true, + TextRGB: RGB{0, 0, 0, false}, + RGBRange: []RGB{{R: 255, G: 0, B: 0, Background: true}, {R: 255, G: 165, B: 0, Background: true}, {R: 0, G: 255, B: 0, Background: true}}, + TextColor: FgBlack, + Colors: []Color{BgRed, BgLightRed, BgYellow, BgLightYellow, BgLightGreen, BgGreen}, + + EnableRGB: false, +} + +// HeatmapData is the type that contains the data of a HeatmapPrinter. +type HeatmapData [][]float32 + +type HeatmapAxis struct { + XAxis []string + YAxis []string +} + +// HeatmapPrinter is able to render tables. +type HeatmapPrinter struct { + HasHeader bool + AxisStyle *Style + VerticalSeparator string + TopRightCornerSeparator string + TopLeftCornerSeparator string + BottomLeftCornerSeparator string + BottomRightCornerSeparator string + HorizontalSeparator string + TSeparator string + TReverseSeparator string + LSeparator string + LReverseSeparator string + TCrossSeparator string + LegendLabel string + SeparatorStyle *Style + Data HeatmapData + Axis HeatmapAxis + Boxed bool + Grid bool + OnlyColoredCells bool + LegendOnlyColoredCells bool + EnableComplementaryColor bool + Legend bool + CellSize int + Colors []Color + TextColor Color + EnableRGB bool + RGBRange []RGB + TextRGB RGB + Writer io.Writer + + minValue float32 + maxValue float32 + + rgbLegendValue int +} + +var complementaryColors = map[Color]Color{ + BgBlack: FgLightWhite, + BgRed: FgCyan, + BgGreen: FgMagenta, + BgYellow: FgBlue, + BgBlue: FgYellow, + BgMagenta: FgGreen, + BgCyan: FgRed, + BgWhite: FgBlack, + BgDefault: FgBlack, + BgDarkGray: FgLightWhite, + BgLightRed: FgLightCyan, + BgLightGreen: FgLightMagenta, + BgLightYellow: FgLightBlue, + BgLightBlue: FgLightYellow, + BgLightMagenta: FgLightGreen, + BgLightCyan: FgLightRed, + BgLightWhite: FgBlack, +} + +// WithAxisData returns a new HeatmapPrinter, where the first line and row are headers. +func (p HeatmapPrinter) WithAxisData(hd HeatmapAxis) *HeatmapPrinter { + p.HasHeader = true + p.Axis = hd + return &p +} + +// WithAxisStyle returns a new HeatmapPrinter with a specific AxisStyle. +func (p HeatmapPrinter) WithAxisStyle(style *Style) *HeatmapPrinter { + p.AxisStyle = style + return &p +} + +// WithSeparatorStyle returns a new HeatmapPrinter with a specific SeparatorStyle. +func (p HeatmapPrinter) WithSeparatorStyle(style *Style) *HeatmapPrinter { + p.SeparatorStyle = style + return &p +} + +// WithData returns a new HeatmapPrinter with specific Data. +func (p HeatmapPrinter) WithData(data [][]float32) *HeatmapPrinter { + p.Data = data + return &p +} + +// WithTextColor returns a new HeatmapPrinter with a specific TextColor. +// This sets EnableComplementaryColor to false. 
+func (p HeatmapPrinter) WithTextColor(color Color) *HeatmapPrinter { + p.TextColor = color + p.EnableComplementaryColor = false + return &p +} + +// WithTextRGB returns a new HeatmapPrinter with a specific TextRGB. +// This sets EnableComplementaryColor to false. +func (p HeatmapPrinter) WithTextRGB(rgb RGB) *HeatmapPrinter { + p.TextRGB = rgb + p.EnableComplementaryColor = false + return &p +} + +// WithBoxed returns a new HeatmapPrinter with a box around the table. +// If set to true, Grid will be set to true too. +func (p HeatmapPrinter) WithBoxed(b ...bool) *HeatmapPrinter { + p.Boxed = internal.WithBoolean(b) + if p.Boxed && !p.Grid { + p.Grid = true + } + return &p +} + +// WithGrid returns a new HeatmapPrinter with a grid. +// If set to false, Boxed will be set to false too. +func (p HeatmapPrinter) WithGrid(b ...bool) *HeatmapPrinter { + b2 := internal.WithBoolean(b) + p.Grid = b2 + if !b2 && p.Boxed { + p.Boxed = false + } + return &p +} + +// WithEnableRGB returns a new HeatmapPrinter with RGB colors. +func (p HeatmapPrinter) WithEnableRGB(b ...bool) *HeatmapPrinter { + p.EnableRGB = internal.WithBoolean(b) + return &p +} + +// WithOnlyColoredCells returns a new HeatmapPrinter with only colored cells. +func (p HeatmapPrinter) WithOnlyColoredCells(b ...bool) *HeatmapPrinter { + b2 := internal.WithBoolean(b) + p.OnlyColoredCells = b2 + return &p +} + +// WithLegendOnlyColoredCells returns a new HeatmapPrinter with legend with only colored cells. +// This sets the Legend to true. +func (p HeatmapPrinter) WithLegendOnlyColoredCells(b ...bool) *HeatmapPrinter { + b2 := internal.WithBoolean(b) + p.LegendOnlyColoredCells = b2 + if b2 { + p.Legend = true + } + return &p +} + +// WithEnableComplementaryColor returns a new HeatmapPrinter with complement color. +func (p HeatmapPrinter) WithEnableComplementaryColor(b ...bool) *HeatmapPrinter { + p.EnableComplementaryColor = internal.WithBoolean(b) + return &p +} + +// WithLegend returns a new HeatmapPrinter with a legend. +func (p HeatmapPrinter) WithLegend(b ...bool) *HeatmapPrinter { + p.Legend = internal.WithBoolean(b) + return &p +} + +// WithCellSize returns a new HeatmapPrinter with a specific cell size. +// This only works if there is no header and OnlyColoredCells == true! +func (p HeatmapPrinter) WithCellSize(i int) *HeatmapPrinter { + p.CellSize = i + return &p +} + +// WithLegendLabel returns a new HeatmapPrinter with a specific legend tag. +// This sets the Legend to true. +func (p HeatmapPrinter) WithLegendLabel(s string) *HeatmapPrinter { + p.LegendLabel = s + p.Legend = true + return &p +} + +// WithRGBRange returns a new HeatmapPrinter with a specific RGBRange. +func (p HeatmapPrinter) WithRGBRange(rgb ...RGB) *HeatmapPrinter { + p.RGBRange = rgb + return &p +} + +// WithColors returns a new HeatmapPrinter with a specific Colors. +func (p HeatmapPrinter) WithColors(colors ...Color) *HeatmapPrinter { + p.Colors = colors + return &p +} + +// WithWriter sets the Writer. +func (p HeatmapPrinter) WithWriter(writer io.Writer) *HeatmapPrinter { + p.Writer = writer + return &p +} + +// Srender renders the HeatmapPrinter as a string. 
+func (p HeatmapPrinter) Srender() (string, error) { + if err := p.errCheck(); err != nil { + return "", err + } + + if p.SeparatorStyle == nil { + p.SeparatorStyle = DefaultHeatmap.SeparatorStyle + } + if p.AxisStyle == nil { + p.AxisStyle = DefaultHeatmap.AxisStyle + } + + if RawOutput { + p.Legend = false + } + + buffer := bytes.NewBufferString("") + xAmount := len(p.Data[0]) - 1 + yAmount := len(p.Data) - 1 + p.minValue, p.maxValue = minMaxFloat32(p.Data) + + var data string + for _, datum := range p.Data { + for _, f := range datum { + data += Sprintf("%v\n", f) + } + } + + if p.HasHeader { + data, xAmount, yAmount = p.computeAxisData(data, xAmount, yAmount) + } + + colWidth := internal.GetStringMaxWidth(data) + legendColWidth := colWidth + 2 + + if p.OnlyColoredCells && (p.CellSize > colWidth || !p.HasHeader) { + colWidth = p.CellSize + } + + if p.Boxed { + p.renderSeparatorRow(buffer, colWidth, xAmount, true) + } + + p.renderData(buffer, colWidth, xAmount, yAmount) + + if p.HasHeader { + p.renderHeader(buffer, colWidth, xAmount) + } + + if p.Boxed { + p.renderSeparatorRow(buffer, colWidth, xAmount, false) + } + + if p.Legend { + p.renderLegend(buffer, legendColWidth) + } + + buffer.WriteString("\n") + + return buffer.String(), nil +} + +func (p HeatmapPrinter) computeAxisData(data string, xAmount, yAmount int) (string, int, int) { + var header string + for _, h := range p.Axis.XAxis { + header += h + "\n" + } + for _, h := range p.Axis.YAxis { + header += h + "\n" + } + + if p.OnlyColoredCells { + data = header + } else { + data += header + } + xAmount++ + yAmount++ + + p.Axis.YAxis = append(p.Axis.YAxis, "") + + return data, xAmount, yAmount +} + +func (p HeatmapPrinter) renderSeparatorRow(buffer *bytes.Buffer, colWidth, xAmount int, top bool) { + tSep := p.TReverseSeparator + rightSep := p.TopRightCornerSeparator + leftSep := p.TopLeftCornerSeparator + + if top { + tSep = p.TSeparator + rightSep = p.BottomRightCornerSeparator + leftSep = p.BottomLeftCornerSeparator + } else { + buffer.WriteString("\n") + } + buffer.WriteString(p.SeparatorStyle.Sprint(rightSep)) + for i := 0; i < xAmount+1; i++ { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), colWidth)) + if i < xAmount { + buffer.WriteString(p.SeparatorStyle.Sprint(tSep)) + } + } + buffer.WriteString(p.SeparatorStyle.Sprint(leftSep)) + + if top { + buffer.WriteString("\n") + } +} + +func (p HeatmapPrinter) renderLegend(buffer *bytes.Buffer, legendColWidth int) { + buffer.WriteString("\n") + buffer.WriteString("\n") + if p.Boxed { + p.boxLegend(buffer, p.LegendLabel, legendColWidth) + } else { + p.generateLegend(buffer, p.LegendLabel, legendColWidth) + } +} + +func (p HeatmapPrinter) renderHeader(buffer *bytes.Buffer, colWidth int, xAmount int) { + buffer.WriteString("\n") + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.LSeparator)) + } + if p.Grid { + for i := 0; i < xAmount+1; i++ { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), colWidth)) + if i < xAmount { + buffer.WriteString(p.SeparatorStyle.Sprint(p.TCrossSeparator)) + } + } + } + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.LReverseSeparator)) + } + if p.Grid { + buffer.WriteString("\n") + } + for j, f := range p.Axis.XAxis { + if j == 0 { + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + ct := internal.CenterText(" ", colWidth) + if len(ct) < colWidth { + ct += strings.Repeat(" ", colWidth-len(ct)) + } + 
buffer.WriteString(p.AxisStyle.Sprint(ct)) + if p.Grid { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + } + var ct string + ct = internal.CenterText(Sprintf("%v", f), colWidth) + if len(ct) < colWidth { + ct += strings.Repeat(" ", colWidth-len(ct)) + } + buffer.WriteString(p.AxisStyle.Sprint(ct)) + + if j < xAmount { + if !p.Boxed && j == xAmount-1 { + continue + } + if p.Grid { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + } + } +} + +func (p HeatmapPrinter) renderData(buffer *bytes.Buffer, colWidth int, xAmount int, yAmount int) { + for i, datum := range p.Data { + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + for j, f := range datum { + if j == 0 && p.HasHeader { + ct := internal.CenterText(p.Axis.YAxis[i], colWidth) + if len(ct) < colWidth { + ct += strings.Repeat(" ", colWidth-len(ct)) + } + buffer.WriteString(p.AxisStyle.Sprint(ct)) + if p.Grid { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + } + var ct string + if p.OnlyColoredCells { + ct = internal.CenterText(" ", colWidth) + } else { + ct = internal.CenterText(Sprintf("%v", f), colWidth) + } + if len(ct) < colWidth { + if len(Sprintf("%v", f)) == 1 { + ct += strings.Repeat(" ", colWidth-len(ct)) + } else { + ct = strings.Repeat(" ", colWidth-len(ct)) + ct + } + } + if p.EnableRGB { + rgb := p.RGBRange[0].Fade(p.minValue, p.maxValue, f, p.RGBRange[1:]...) + rgbStyle := NewRGBStyle(p.TextRGB, rgb) + if p.EnableComplementaryColor { + complimentary := NewRGB(internal.Complementary(rgb.R, rgb.G, rgb.B)) + rgbStyle = NewRGBStyle(complimentary, rgb) + } + buffer.WriteString(rgbStyle.Sprint(ct)) + } else { + color := getColor(p.minValue, p.maxValue, f, p.Colors...) + fgColor := p.TextColor + if p.EnableComplementaryColor { + fgColor = complementaryColors[color] + } + buffer.WriteString(fgColor.Sprint(color.Sprintf(ct))) + } + if j < xAmount { + if !p.Boxed && p.HasHeader && j == xAmount-1 { + continue + } + if p.Grid { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + } + if p.Boxed && !p.HasHeader && j == xAmount { + buffer.WriteString(p.SeparatorStyle.Sprint(p.VerticalSeparator)) + } + } + + if i < yAmount { + if p.HasHeader && i == yAmount-1 { + continue + } + buffer.WriteString("\n") + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.LSeparator)) + } + if p.Grid { + for i := 0; i < xAmount+1; i++ { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), colWidth)) + if i < xAmount { + buffer.WriteString(p.SeparatorStyle.Sprint(p.TCrossSeparator)) + } + } + } + if p.Boxed { + buffer.WriteString(p.SeparatorStyle.Sprint(p.LReverseSeparator)) + } + if p.Grid { + buffer.WriteString("\n") + } + } + } +} + +func (p HeatmapPrinter) generateLegend(buffer *bytes.Buffer, legend string, legendColWidth int) { + buffer.WriteString(p.AxisStyle.Sprint(legend)) + if p.Grid { + buffer.WriteString(p.SeparatorStyle.Sprintf("%s", p.VerticalSeparator)) + } else { + buffer.WriteString(" ") + } + if p.EnableRGB { + p.generateRGBLegend(buffer, legendColWidth) + } else { + p.generateColorLegend(buffer, legendColWidth) + } +} + +func (p HeatmapPrinter) generateColorLegend(buffer *bytes.Buffer, legendColWidth int) { + for i, color := range p.Colors { + // the first color is the min value and the last color is the max value + var f float32 + if i == 0 { + f = p.minValue + } else if i == len(p.Colors)-1 { + f = p.maxValue + } else { + f = p.minValue + 
(p.maxValue-p.minValue)*float32(i)/float32(len(p.Colors)-1) + } + fgColor := p.TextColor + if p.EnableComplementaryColor { + fgColor = complementaryColors[color] + } + buffer.WriteString(fgColor.Sprint(color.Sprint(centerAndShorten(f, legendColWidth, p.LegendOnlyColoredCells)))) + if p.Grid && i < len(p.Colors)-1 && !p.LegendOnlyColoredCells { + buffer.WriteString(p.SeparatorStyle.Sprintf("%s", p.VerticalSeparator)) + } + } +} + +func (p HeatmapPrinter) generateRGBLegend(buffer *bytes.Buffer, legendColWidth int) { + p.rgbLegendValue = 10 + steps := len(p.RGBRange) + if steps < p.rgbLegendValue { + steps = p.rgbLegendValue + } + if p.LegendOnlyColoredCells { + steps *= 3 + } + for i := 0; i < steps; i++ { + // the first color is the min value and the last color is the max value + var f float32 + if i == 0 { + f = p.minValue + } else if i == steps-1 { + f = p.maxValue + } else { + f = p.minValue + (p.maxValue-p.minValue)*float32(i)/float32(steps-1) + } + rgb := p.RGBRange[0].Fade(p.minValue, p.maxValue, f, p.RGBRange[1:]...) + rgbStyle := NewRGBStyle(p.TextRGB, rgb) + if p.EnableComplementaryColor { + complimentary := NewRGB(internal.Complementary(rgb.R, rgb.G, rgb.B)) + rgbStyle = NewRGBStyle(complimentary, rgb) + } + if p.LegendOnlyColoredCells { + buffer.WriteString(rgbStyle.Sprint(centerAndShorten(f, 1, p.LegendOnlyColoredCells))) + } else { + buffer.WriteString(rgbStyle.Sprint(centerAndShorten(f, legendColWidth, p.LegendOnlyColoredCells))) + } + if p.Grid && i < steps-1 && !p.LegendOnlyColoredCells { + buffer.WriteString(p.SeparatorStyle.Sprintf("%s", p.VerticalSeparator)) + } + } +} + +func (p HeatmapPrinter) boxLegend(buffer *bytes.Buffer, legend string, legendColWidth int) { + buffer.WriteString(p.SeparatorStyle.Sprint(p.BottomRightCornerSeparator)) + + p.generateSeparatorRow(buffer, legend, legendColWidth, true) + + buffer.WriteString(p.SeparatorStyle.Sprint(p.BottomLeftCornerSeparator)) + buffer.WriteString("\n") + buffer.WriteString(p.SeparatorStyle.Sprintf("%s", p.VerticalSeparator)) + + p.generateLegend(buffer, legend, legendColWidth) + + buffer.WriteString(p.SeparatorStyle.Sprintf("%s", p.VerticalSeparator)) + buffer.WriteString("\n") + + buffer.WriteString(p.SeparatorStyle.Sprint(p.TopRightCornerSeparator)) + + p.generateSeparatorRow(buffer, legend, legendColWidth, false) + + buffer.WriteString(p.SeparatorStyle.Sprint(p.TopLeftCornerSeparator)) +} + +func (p HeatmapPrinter) generateSeparatorRow(buffer *bytes.Buffer, legend string, legendColWidth int, top bool) { + p.rgbLegendValue = 10 + steps := len(p.RGBRange) + if steps < p.rgbLegendValue { + steps = p.rgbLegendValue + } + if p.LegendOnlyColoredCells { + steps *= 3 + } + + var xValue int + if p.EnableRGB { + xValue = len(p.RGBRange) + if xValue < p.rgbLegendValue { + xValue = p.rgbLegendValue + } + } else { + xValue = len(p.Colors) + } + + for i := 0; i < xValue+1; i++ { + if i == 0 { + firstLength := len(legend) + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), firstLength)) + } else { + if p.LegendOnlyColoredCells { + if p.EnableRGB { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), steps/(xValue))) + } else { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), legendColWidth)) + } + } else { + buffer.WriteString(strings.Repeat(p.SeparatorStyle.Sprint(p.HorizontalSeparator), legendColWidth)) + } + } + if i < xValue && !p.LegendOnlyColoredCells || i == 0 { + if top { + 
buffer.WriteString(p.SeparatorStyle.Sprint(p.TSeparator)) + } else { + buffer.WriteString(p.SeparatorStyle.Sprint(p.TReverseSeparator)) + } + } + } +} + +func centerAndShorten(f float32, lineLength int, onlyColor bool) string { + value := "" + if !onlyColor { + value = Sprintf("%.2v", f) + } + if len(value) > lineLength { + value = value[:lineLength] + if strings.HasSuffix(value, ".") { + value = Sprintf("%.1v", f) + lineLength = len(value) + } + } + ct := internal.CenterText(value, lineLength) + if len(ct) < lineLength { + if len(Sprintf("%v", f)) == 1 { + ct += strings.Repeat(" ", lineLength-len(ct)) + } else { + ct = strings.Repeat(" ", lineLength-len(ct)) + ct + } + } + + return ct +} + +func getColor(min float32, max float32, current float32, colors ...Color) Color { + // split the range into equal parts + // and assign a color to each part + // the last color is assigned to the max value + // and the first color to the min value + // the rest of the colors are assigned to the + // middle values + step := (max - min) / float32(len(colors)) + for i := range colors { + if current >= min+float32(i)*step && current < min+float32(i+1)*step { + return colors[i] + } + } + return colors[len(colors)-1] +} + +// Render prints the HeatmapPrinter to the terminal. +func (p HeatmapPrinter) Render() error { + s, err := p.Srender() + if err != nil { + return err + } + Fprintln(p.Writer, s) + + return nil +} + +func (p HeatmapPrinter) errCheck() error { + if p.HasHeader { + if p.Axis.XAxis == nil { + return errors.New("x axis is nil") + } + if p.Axis.YAxis == nil { + return errors.New("y axis is nil") + } + + if len(p.Axis.XAxis) == 0 { + return errors.New("x axis is empty") + } + if len(p.Axis.YAxis) == 0 { + return errors.New("y axis is empty") + } + + for i := 1; i < len(p.Data); i++ { + if len(p.Data[i]) != len(p.Axis.XAxis) { + return errors.New("x axis length does not match data") + } + } + if len(p.Axis.YAxis) != len(p.Data) { + return errors.New("y axis length does not match data") + } + } + + if p.Data == nil { + return errors.New("data is nil") + } + + if len(p.Data) == 0 { + return errors.New("data is empty") + } + + // check if p.Data[n] has the same length + for i := 1; i < len(p.Data); i++ { + if len(p.Data[i]) != len(p.Data[0]) { + return errors.New("data is not rectangular") + } + } + + return nil +} + +// return min and max value of a slice +func minMaxFloat32(s [][]float32) (float32, float32) { + var min, max float32 + min = math.MaxFloat32 + max = -math.MaxFloat32 + + for _, r := range s { + for _, c := range r { + if c < min { + min = c + } + if c > max { + max = c + } + } + } + return min, max +} diff --git a/vendor/github.com/pterm/pterm/interactive_confirm_printer.go b/vendor/github.com/pterm/pterm/interactive_confirm_printer.go index 6760ce69..a94cc5dc 100644 --- a/vendor/github.com/pterm/pterm/interactive_confirm_printer.go +++ b/vendor/github.com/pterm/pterm/interactive_confirm_printer.go @@ -7,35 +7,37 @@ import ( "atomicgo.dev/cursor" "atomicgo.dev/keyboard" "atomicgo.dev/keyboard/keys" + "github.com/pterm/pterm/internal" ) -var ( - // DefaultInteractiveConfirm is the default InteractiveConfirm printer. - // Pressing "y" will return true, "n" will return false. - // Pressing enter without typing "y" or "n" will return the configured default value (by default set to "no"). 
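The `HeatmapPrinter` added above is easiest to read alongside a usage sketch. The snippet below is illustrative only (it is not part of the vendored diff) and uses nothing beyond the API shown in this file: `HeatmapData`, `HeatmapAxis`, `WithAxisData`, `WithData`, `WithBoxed`, `WithLegend`, and `Render`.

```go
package main

import "github.com/pterm/pterm"

func main() {
	// Two rows of three values; with axis data set, errCheck requires every
	// row to match the X-axis length and the Y-axis to match the row count.
	data := pterm.HeatmapData{
		{0.2, 1.5, 3.0},
		{2.5, 0.5, 1.0},
	}
	axis := pterm.HeatmapAxis{
		XAxis: []string{"low", "mid", "high"},
		YAxis: []string{"run 1", "run 2"},
	}

	// Boxed output with axis headers and a legend, using the default color ramp.
	pterm.DefaultHeatmap.WithAxisData(axis).WithData(data).WithBoxed().WithLegend().Render()
}
```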
- DefaultInteractiveConfirm = InteractiveConfirmPrinter{ - DefaultValue: false, - DefaultText: "Please confirm", - TextStyle: &ThemeDefault.PrimaryStyle, - ConfirmText: "Yes", - ConfirmStyle: &ThemeDefault.SuccessMessageStyle, - RejectText: "No", - RejectStyle: &ThemeDefault.ErrorMessageStyle, - SuffixStyle: &ThemeDefault.SecondaryStyle, - } -) +// DefaultInteractiveConfirm is the default InteractiveConfirm printer. +// Pressing "y" will return true, "n" will return false. +// Pressing enter without typing "y" or "n" will return the configured default value (by default set to "no"). +var DefaultInteractiveConfirm = InteractiveConfirmPrinter{ + DefaultValue: false, + DefaultText: "Please confirm", + TextStyle: &ThemeDefault.PrimaryStyle, + ConfirmText: "Yes", + ConfirmStyle: &ThemeDefault.SuccessMessageStyle, + RejectText: "No", + RejectStyle: &ThemeDefault.ErrorMessageStyle, + SuffixStyle: &ThemeDefault.SecondaryStyle, + Delimiter: ": ", +} // InteractiveConfirmPrinter is a printer for interactive confirm prompts. type InteractiveConfirmPrinter struct { - DefaultValue bool - DefaultText string - TextStyle *Style - ConfirmText string - ConfirmStyle *Style - RejectText string - RejectStyle *Style - SuffixStyle *Style + DefaultValue bool + DefaultText string + Delimiter string + TextStyle *Style + ConfirmText string + ConfirmStyle *Style + RejectText string + RejectStyle *Style + SuffixStyle *Style + OnInterruptFunc func() } // WithDefaultText sets the default text. @@ -86,6 +88,18 @@ func (p InteractiveConfirmPrinter) WithSuffixStyle(style *Style) *InteractiveCon return &p } +// OnInterrupt sets the function to execute on exit of the input reader +func (p InteractiveConfirmPrinter) WithOnInterruptFunc(exitFunc func()) *InteractiveConfirmPrinter { + p.OnInterruptFunc = exitFunc + return &p +} + +// WithDelimiter sets the delimiter between the message and the input. +func (p InteractiveConfirmPrinter) WithDelimiter(delimiter string) *InteractiveConfirmPrinter { + p.Delimiter = delimiter + return &p +} + // Show shows the confirm prompt. // // Example: @@ -95,7 +109,7 @@ func (p InteractiveConfirmPrinter) WithSuffixStyle(style *Style) *InteractiveCon func (p InteractiveConfirmPrinter) Show(text ...string) (bool, error) { // should be the first defer statement to make sure it is executed last // and all the needed cleanup can be done before - cancel, exit := internal.NewCancelationSignal() + cancel, exit := internal.NewCancelationSignal(p.OnInterruptFunc) defer exit() var result bool @@ -104,7 +118,7 @@ func (p InteractiveConfirmPrinter) Show(text ...string) (bool, error) { text = []string{p.DefaultText} } - p.TextStyle.Print(text[0] + " " + p.getSuffix() + ": ") + p.TextStyle.Print(text[0] + " " + p.getSuffix() + p.Delimiter) y, n := p.getShortHandles() var interrupted bool diff --git a/vendor/github.com/pterm/pterm/interactive_continue_printer.go b/vendor/github.com/pterm/pterm/interactive_continue_printer.go index d0f91f13..4049cde9 100644 --- a/vendor/github.com/pterm/pterm/interactive_continue_printer.go +++ b/vendor/github.com/pterm/pterm/interactive_continue_printer.go @@ -2,7 +2,6 @@ package pterm import ( "fmt" - "os" "strings" "atomicgo.dev/cursor" @@ -14,24 +13,24 @@ import ( "github.com/pterm/pterm/internal" ) -var ( - // DefaultInteractiveContinue is the default InteractiveContinue printer. - // Pressing "y" will return yes, "n" will return no, "a" returns all and "s" returns stop. 
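A minimal sketch of the two options added to `InteractiveConfirmPrinter` above, `WithDelimiter` and `WithOnInterruptFunc` (illustrative, not part of the vendored diff); per the `NewCancelationSignal` change later in this diff, the interrupt callback runs in place of the previous hard exit on Ctrl+C.

```go
package main

import "github.com/pterm/pterm"

func main() {
	// " » " replaces the default ": " between the prompt and the input.
	ok, _ := pterm.DefaultInteractiveConfirm.
		WithDelimiter(" » ").
		WithOnInterruptFunc(func() { pterm.Warning.Println("confirmation aborted") }).
		Show("Apply the rendered manifests")
	if ok {
		pterm.Success.Println("confirmed")
	}
}
```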
- // Pressing enter without typing any letter will return the configured default value (by default set to "yes", the fisrt option). - DefaultInteractiveContinue = InteractiveContinuePrinter{ - DefaultValueIndex: 0, - DefaultText: "Do you want to continue", - TextStyle: &ThemeDefault.PrimaryStyle, - Options: []string{"yes", "no", "all", "cancel"}, - OptionsStyle: &ThemeDefault.SuccessMessageStyle, - SuffixStyle: &ThemeDefault.SecondaryStyle, - } -) +// DefaultInteractiveContinue is the default InteractiveContinue printer. +// Pressing "y" will return yes, "n" will return no, "a" returns all and "s" returns stop. +// Pressing enter without typing any letter will return the configured default value (by default set to "yes", the fisrt option). +var DefaultInteractiveContinue = InteractiveContinuePrinter{ + DefaultValueIndex: 0, + DefaultText: "Do you want to continue", + TextStyle: &ThemeDefault.PrimaryStyle, + Options: []string{"yes", "no", "all", "cancel"}, + OptionsStyle: &ThemeDefault.SuccessMessageStyle, + SuffixStyle: &ThemeDefault.SecondaryStyle, + Delimiter: ": ", +} // InteractiveContinuePrinter is a printer for interactive continue prompts. type InteractiveContinuePrinter struct { DefaultValueIndex int DefaultText string + Delimiter string TextStyle *Style Options []string OptionsStyle *Style @@ -108,6 +107,12 @@ func (p InteractiveContinuePrinter) WithSuffixStyle(style *Style) *InteractiveCo return &p } +// WithDelimiter sets the delimiter between the message and the input. +func (p InteractiveContinuePrinter) WithDelimiter(delimiter string) *InteractiveContinuePrinter { + p.Delimiter = delimiter + return &p +} + // Show shows the continue prompt. // // Example: @@ -121,7 +126,7 @@ func (p InteractiveContinuePrinter) Show(text ...string) (string, error) { text = []string{p.DefaultText} } - p.TextStyle.Print(text[0] + " " + p.getSuffix() + ": ") + p.TextStyle.Print(text[0] + " " + p.getSuffix() + p.Delimiter) err := keyboard.Listen(func(keyInfo keys.Key) (stop bool, err error) { if err != nil { @@ -149,7 +154,7 @@ func (p InteractiveContinuePrinter) Show(text ...string) (string, error) { result = p.Options[p.DefaultValueIndex] return true, nil case keys.CtrlC: - os.Exit(1) + internal.Exit(1) return true, nil } return false, nil diff --git a/vendor/github.com/pterm/pterm/interactive_multiselect_printer.go b/vendor/github.com/pterm/pterm/interactive_multiselect_printer.go index c80f9355..754ba98a 100644 --- a/vendor/github.com/pterm/pterm/interactive_multiselect_printer.go +++ b/vendor/github.com/pterm/pterm/interactive_multiselect_printer.go @@ -32,16 +32,17 @@ var ( // InteractiveMultiselectPrinter is a printer for interactive multiselect menus. type InteractiveMultiselectPrinter struct { - DefaultText string - TextStyle *Style - Options []string - OptionStyle *Style - DefaultOptions []string - MaxHeight int - Selector string - SelectorStyle *Style - Filter bool - Checkmark *Checkmark + DefaultText string + TextStyle *Style + Options []string + OptionStyle *Style + DefaultOptions []string + MaxHeight int + Selector string + SelectorStyle *Style + Filter bool + Checkmark *Checkmark + OnInterruptFunc func() selectedOption int selectedOptions []int @@ -52,7 +53,10 @@ type InteractiveMultiselectPrinter struct { displayedOptionsStart int displayedOptionsEnd int - KeySelect keys.KeyCode + // KeySelect is the select key. It cannot be keys.Space when Filter is enabled. + KeySelect keys.KeyCode + + // KeyConfirm is the confirm key. It cannot be keys.Space when Filter is enabled. 
KeyConfirm keys.KeyCode } @@ -81,18 +85,20 @@ func (p InteractiveMultiselectPrinter) WithMaxHeight(maxHeight int) *Interactive } // WithFilter sets the Filter option -func (p InteractiveMultiselectPrinter) WithFilter(filter bool) *InteractiveMultiselectPrinter { - p.Filter = filter +func (p InteractiveMultiselectPrinter) WithFilter(b ...bool) *InteractiveMultiselectPrinter { + p.Filter = internal.WithBoolean(b) return &p } // WithKeySelect sets the confirm key +// It cannot be keys.Space when Filter is enabled. func (p InteractiveMultiselectPrinter) WithKeySelect(keySelect keys.KeyCode) *InteractiveMultiselectPrinter { p.KeySelect = keySelect return &p } // WithKeyConfirm sets the confirm key +// It cannot be keys.Space when Filter is enabled. func (p InteractiveMultiselectPrinter) WithKeyConfirm(keyConfirm keys.KeyCode) *InteractiveMultiselectPrinter { p.KeyConfirm = keyConfirm return &p @@ -104,11 +110,17 @@ func (p InteractiveMultiselectPrinter) WithCheckmark(checkmark *Checkmark) *Inte return &p } +// OnInterrupt sets the function to execute on exit of the input reader +func (p InteractiveMultiselectPrinter) WithOnInterruptFunc(exitFunc func()) *InteractiveMultiselectPrinter { + p.OnInterruptFunc = exitFunc + return &p +} + // Show shows the interactive multiselect menu and returns the selected entry. func (p *InteractiveMultiselectPrinter) Show(text ...string) ([]string, error) { // should be the first defer statement to make sure it is executed last // and all the needed cleanup can be done before - cancel, exit := internal.NewCancelationSignal() + cancel, exit := internal.NewCancelationSignal(p.OnInterruptFunc) defer exit() if len(text) == 0 || Sprint(text[0]) == "" { @@ -228,7 +240,7 @@ func (p *InteractiveMultiselectPrinter) Show(text ...string) ([]string, error) { p.selectedOptions = append(p.selectedOptions, i) } area.Update(p.renderSelectMenu()) - case keys.Up: + case keys.Up, keys.CtrlP: if len(p.fuzzySearchMatches) == 0 { return false, nil } @@ -251,7 +263,7 @@ func (p *InteractiveMultiselectPrinter) Show(text ...string) ([]string, error) { } area.Update(p.renderSelectMenu()) - case keys.Down: + case keys.Down, keys.CtrlN: if len(p.fuzzySearchMatches) == 0 { return false, nil } diff --git a/vendor/github.com/pterm/pterm/interactive_select_printer.go b/vendor/github.com/pterm/pterm/interactive_select_printer.go index 0af1a0d6..0c4bc995 100644 --- a/vendor/github.com/pterm/pterm/interactive_select_printer.go +++ b/vendor/github.com/pterm/pterm/interactive_select_printer.go @@ -23,19 +23,22 @@ var ( MaxHeight: 5, Selector: ">", SelectorStyle: &ThemeDefault.SecondaryStyle, + Filter: true, } ) // InteractiveSelectPrinter is a printer for interactive select menus. 
type InteractiveSelectPrinter struct { - TextStyle *Style - DefaultText string - Options []string - OptionStyle *Style - DefaultOption string - MaxHeight int - Selector string - SelectorStyle *Style + TextStyle *Style + DefaultText string + Options []string + OptionStyle *Style + DefaultOption string + MaxHeight int + Selector string + SelectorStyle *Style + OnInterruptFunc func() + Filter bool selectedOption int result string @@ -71,11 +74,23 @@ func (p InteractiveSelectPrinter) WithMaxHeight(maxHeight int) *InteractiveSelec return &p } +// OnInterrupt sets the function to execute on exit of the input reader +func (p InteractiveSelectPrinter) WithOnInterruptFunc(exitFunc func()) *InteractiveSelectPrinter { + p.OnInterruptFunc = exitFunc + return &p +} + +// WithFilter sets the Filter option +func (p InteractiveSelectPrinter) WithFilter(b ...bool) *InteractiveSelectPrinter { + p.Filter = internal.WithBoolean(b) + return &p +} + // Show shows the interactive select menu and returns the selected entry. func (p *InteractiveSelectPrinter) Show(text ...string) (string, error) { // should be the first defer statement to make sure it is executed last // and all the needed cleanup can be done before - cancel, exit := internal.NewCancelationSignal() + cancel, exit := internal.NewCancelationSignal(p.OnInterruptFunc) defer exit() if len(text) == 0 || Sprint(text[0]) == "" { @@ -142,14 +157,16 @@ func (p *InteractiveSelectPrinter) Show(text ...string) (string, error) { switch key { case keys.RuneKey: - // Fuzzy search for options - // append to fuzzy search string - p.fuzzySearchString += keyInfo.String() - p.selectedOption = 0 - p.displayedOptionsStart = 0 - p.displayedOptionsEnd = maxHeight - p.displayedOptions = append([]string{}, p.fuzzySearchMatches[:maxHeight]...) - area.Update(p.renderSelectMenu()) + if p.Filter { + // Fuzzy search for options + // append to fuzzy search string + p.fuzzySearchString += keyInfo.String() + p.selectedOption = 0 + p.displayedOptionsStart = 0 + p.displayedOptionsEnd = maxHeight + p.displayedOptions = append([]string{}, p.fuzzySearchMatches[:maxHeight]...) + area.Update(p.renderSelectMenu()) + } case keys.Space: p.fuzzySearchString += " " p.selectedOption = 0 @@ -179,7 +196,7 @@ func (p *InteractiveSelectPrinter) Show(text ...string) (string, error) { p.displayedOptions = append([]string{}, p.fuzzySearchMatches[p.displayedOptionsStart:p.displayedOptionsEnd]...) 
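In this diff the select and multiselect printers gain a `WithFilter` toggle, Ctrl+P/Ctrl+N navigation, and a documented restriction that `keys.Space` may only be bound as a key while the filter is disabled. A hedged usage sketch follows; the `WithOptions` setters are assumed from the printers' existing API and are not part of this hunk.

```go
package main

import (
	"atomicgo.dev/keyboard/keys"

	"github.com/pterm/pterm"
)

func main() {
	// With the new WithFilter option disabled, typing no longer fuzzy-filters the menu.
	env, _ := pterm.DefaultInteractiveSelect.
		WithOptions([]string{"dev", "staging", "prod"}).
		WithFilter(false).
		Show("Target environment")

	// Space may only be bound as the select key because the filter is off.
	stages, _ := pterm.DefaultInteractiveMultiselect.
		WithOptions([]string{"lint", "test", "build"}).
		WithFilter(false).
		WithKeySelect(keys.Space).
		Show("Stages to run")

	pterm.Info.Println(env, stages)
}
```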
area.Update(p.renderSelectMenu()) - case keys.Up: + case keys.Up, keys.CtrlP: if len(p.fuzzySearchMatches) == 0 { return false, nil } @@ -202,7 +219,7 @@ func (p *InteractiveSelectPrinter) Show(text ...string) (string, error) { } area.Update(p.renderSelectMenu()) - case keys.Down: + case keys.Down, keys.CtrlN: if len(p.fuzzySearchMatches) == 0 { return false, nil } @@ -245,7 +262,11 @@ func (p *InteractiveSelectPrinter) Show(text ...string) (string, error) { func (p *InteractiveSelectPrinter) renderSelectMenu() string { var content string - content += Sprintf("%s %s: %s\n", p.text, p.SelectorStyle.Sprint("[type to search]"), p.fuzzySearchString) + if p.Filter { + content += Sprintf("%s %s: %s\n", p.text, p.SelectorStyle.Sprint("[type to search]"), p.fuzzySearchString) + } else { + content += Sprintf("%s:\n", p.text) + } // find options that match fuzzy search string rankedResults := fuzzy.RankFindFold(p.fuzzySearchString, p.Options) diff --git a/vendor/github.com/pterm/pterm/interactive_textinput_printer.go b/vendor/github.com/pterm/pterm/interactive_textinput_printer.go index e7174555..88535c51 100644 --- a/vendor/github.com/pterm/pterm/interactive_textinput_printer.go +++ b/vendor/github.com/pterm/pterm/interactive_textinput_printer.go @@ -10,19 +10,23 @@ import ( "github.com/pterm/pterm/internal" ) -var ( - // DefaultInteractiveTextInput is the default InteractiveTextInput printer. - DefaultInteractiveTextInput = InteractiveTextInputPrinter{ - DefaultText: "Input text", - TextStyle: &ThemeDefault.PrimaryStyle, - } -) +// DefaultInteractiveTextInput is the default InteractiveTextInput printer. +var DefaultInteractiveTextInput = InteractiveTextInputPrinter{ + DefaultText: "Input text", + Delimiter: ": ", + TextStyle: &ThemeDefault.PrimaryStyle, + Mask: "", +} // InteractiveTextInputPrinter is a printer for interactive select menus. type InteractiveTextInputPrinter struct { - TextStyle *Style - DefaultText string - MultiLine bool + TextStyle *Style + DefaultText string + DefaultValue string + Delimiter string + MultiLine bool + Mask string + OnInterruptFunc func() input []string cursorXPos int @@ -36,6 +40,12 @@ func (p InteractiveTextInputPrinter) WithDefaultText(text string) *InteractiveTe return &p } +// WithDefaultValue sets the default value. +func (p InteractiveTextInputPrinter) WithDefaultValue(value string) *InteractiveTextInputPrinter { + p.DefaultValue = value + return &p +} + // WithTextStyle sets the text style. func (p InteractiveTextInputPrinter) WithTextStyle(style *Style) *InteractiveTextInputPrinter { p.TextStyle = style @@ -48,11 +58,29 @@ func (p InteractiveTextInputPrinter) WithMultiLine(multiLine ...bool) *Interacti return &p } +// WithMask sets the mask. +func (p InteractiveTextInputPrinter) WithMask(mask string) *InteractiveTextInputPrinter { + p.Mask = mask + return &p +} + +// WithOnInterruptFunc sets the function to execute on exit of the input reader +func (p InteractiveTextInputPrinter) WithOnInterruptFunc(exitFunc func()) *InteractiveTextInputPrinter { + p.OnInterruptFunc = exitFunc + return &p +} + +// WithDelimiter sets the delimiter between the message and the input. +func (p InteractiveTextInputPrinter) WithDelimiter(delimiter string) *InteractiveTextInputPrinter { + p.Delimiter = delimiter + return &p +} + // Show shows the interactive select menu and returns the selected entry. 
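The text-input printer gains `WithDefaultValue`, `WithMask`, and `WithDelimiter` above. An illustrative sketch, not part of the vendored diff:

```go
package main

import "github.com/pterm/pterm"

func main() {
	// WithDefaultValue pre-fills the prompt; the user can edit or simply accept it.
	name, _ := pterm.DefaultInteractiveTextInput.
		WithDefaultValue("dsv-sidecar").
		WithDelimiter(" > ").
		Show("Release name")

	// WithMask echoes every typed rune as "*", which is useful for secrets.
	secret, _ := pterm.DefaultInteractiveTextInput.
		WithMask("*").
		Show("Client secret")

	pterm.Info.Printfln("deploying %s (%d-character secret)", name, len(secret))
}
```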
func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { // should be the first defer statement to make sure it is executed last // and all the needed cleanup can be done before - cancel, exit := internal.NewCancelationSignal() + cancel, exit := internal.NewCancelationSignal(p.OnInterruptFunc) defer exit() var areaText string @@ -62,24 +90,26 @@ func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { } if p.MultiLine { - areaText = p.TextStyle.Sprintfln("%s %s :", text[0], ThemeDefault.SecondaryStyle.Sprint("[Press tab to submit]")) + areaText = p.TextStyle.Sprintfln("%s %s %s", text[0], ThemeDefault.SecondaryStyle.Sprint("[Press tab to submit]"), p.Delimiter) } else { - areaText = p.TextStyle.Sprintf("%s: ", text[0]) + areaText = p.TextStyle.Sprintf("%s%s", text[0], p.Delimiter) } + p.text = areaText - area, err := DefaultArea.Start(areaText) - defer area.Stop() - if err != nil { - return "", err - } + area := cursor.NewArea() + area.Update(areaText) + area.StartOfLine() - cursor.Up(1) - cursor.StartOfLine() if !p.MultiLine { cursor.Right(len(RemoveColorFromString(areaText))) } - err = keyboard.Listen(func(key keys.Key) (stop bool, err error) { + if p.DefaultValue != "" { + p.input = append(p.input, p.DefaultValue) + p.updateArea(&area) + } + + err := keyboard.Listen(func(key keys.Key) (stop bool, err error) { if !p.MultiLine { p.cursorYPos = 0 } @@ -90,6 +120,7 @@ func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { switch key.Code { case keys.Tab: if p.MultiLine { + area.Bottom() return true, nil } case keys.Enter: @@ -104,7 +135,6 @@ func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { p.input = append(p.input, appendAfterY...) p.cursorYPos++ p.cursorXPos = -internal.GetStringMaxWidth(p.input[p.cursorYPos]) - cursor.Down(1) cursor.StartOfLine() } else { return true, nil @@ -173,7 +203,7 @@ func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { } } - p.updateArea(area) + p.updateArea(&area) return false, nil }) @@ -195,11 +225,12 @@ func (p InteractiveTextInputPrinter) Show(text ...string) (string, error) { return strings.ReplaceAll(areaText, p.text, ""), nil } -func (p InteractiveTextInputPrinter) updateArea(area *AreaPrinter) string { +func (p InteractiveTextInputPrinter) updateArea(area *cursor.Area) string { if !p.MultiLine { p.cursorYPos = 0 } areaText := p.text + for i, s := range p.input { if i < len(p.input)-1 { areaText += s + "\n" @@ -207,14 +238,19 @@ func (p InteractiveTextInputPrinter) updateArea(area *AreaPrinter) string { areaText += s } } + + if p.Mask != "" { + areaText = p.text + strings.Repeat(p.Mask, internal.GetStringMaxWidth(areaText)-internal.GetStringMaxWidth(p.text)) + } + if p.cursorXPos+internal.GetStringMaxWidth(p.input[p.cursorYPos]) < 1 { p.cursorXPos = -internal.GetStringMaxWidth(p.input[p.cursorYPos]) } - cursor.StartOfLine() area.Update(areaText) - cursor.Up(len(p.input) - p.cursorYPos) - cursor.StartOfLine() + area.Top() + area.Down(p.cursorYPos + 1) + area.StartOfLine() if p.MultiLine { cursor.Right(internal.GetStringMaxWidth(p.input[p.cursorYPos]) + p.cursorXPos) } else { diff --git a/vendor/github.com/pterm/pterm/interface_live_printer.go b/vendor/github.com/pterm/pterm/interface_live_printer.go index b8d3dbe3..69dce345 100644 --- a/vendor/github.com/pterm/pterm/interface_live_printer.go +++ b/vendor/github.com/pterm/pterm/interface_live_printer.go @@ -1,5 +1,7 @@ package pterm +import "io" + // LivePrinter is a printer which can update it's 
output live. type LivePrinter interface { // GenericStart runs Start, but returns a LivePrinter. @@ -11,4 +13,6 @@ type LivePrinter interface { // This is used for the interface LivePrinter. // You most likely want to use Stop instead of this in your program. GenericStop() (*LivePrinter, error) + + SetWriter(writer io.Writer) } diff --git a/vendor/github.com/pterm/pterm/internal/cancelation_signal.go b/vendor/github.com/pterm/pterm/internal/cancelation_signal.go index d6f79b0a..33f17cc1 100644 --- a/vendor/github.com/pterm/pterm/internal/cancelation_signal.go +++ b/vendor/github.com/pterm/pterm/internal/cancelation_signal.go @@ -1,19 +1,20 @@ package internal -import ( - "os" -) - // NewCancelationSignal for keeping track of a cancelation -func NewCancelationSignal() (func(), func()) { +func NewCancelationSignal(interruptFunc func()) (func(), func()) { canceled := false cancel := func() { canceled = true } + exit := func() { if canceled { - os.Exit(1) + if interruptFunc != nil { + interruptFunc() + } else { + Exit(1) + } } } diff --git a/vendor/github.com/pterm/pterm/internal/exit.go b/vendor/github.com/pterm/pterm/internal/exit.go new file mode 100644 index 00000000..f07a0045 --- /dev/null +++ b/vendor/github.com/pterm/pterm/internal/exit.go @@ -0,0 +1,14 @@ +package internal + +import "os" + +// ExitFuncType is the type of function used to exit the program. +type ExitFuncType func(int) + +// DefaultExitFunc is the default function used to exit the program. +var DefaultExitFunc ExitFuncType = os.Exit + +// Exit calls the current exit function. +func Exit(code int) { + DefaultExitFunc(code) +} diff --git a/vendor/github.com/pterm/pterm/internal/rgb_complementary.go b/vendor/github.com/pterm/pterm/internal/rgb_complementary.go new file mode 100644 index 00000000..71520b33 --- /dev/null +++ b/vendor/github.com/pterm/pterm/internal/rgb_complementary.go @@ -0,0 +1,5 @@ +package internal + +func Complementary(r, g, b uint8) (uint8, uint8, uint8) { + return 255 - r, 255 - g, 255 - b +} diff --git a/vendor/github.com/pterm/pterm/internal/title_in_line.go b/vendor/github.com/pterm/pterm/internal/title_in_line.go index 3aae546b..68e28ec6 100644 --- a/vendor/github.com/pterm/pterm/internal/title_in_line.go +++ b/vendor/github.com/pterm/pterm/internal/title_in_line.go @@ -2,17 +2,15 @@ package internal import ( "strings" - - "github.com/gookit/color" ) // AddTitleToLine adds a title to a site of a line ex: "─ This is the title ──────" func AddTitleToLine(title, line string, length int, left bool) string { var ret string if left { - ret += line + " " + title + " " + line + strings.Repeat(line, length-(4+len(color.ClearCode(title)))) + ret += line + " " + title + " " + line + strings.Repeat(line, length-(4+GetStringMaxWidth(title))) } else { - ret += strings.Repeat(line, length-(4+len(color.ClearCode(title)))) + line + " " + title + " " + line + ret += strings.Repeat(line, length-(4+GetStringMaxWidth(title))) + line + " " + title + " " + line } return ret @@ -21,7 +19,7 @@ func AddTitleToLine(title, line string, length int, left bool) string { // AddTitleToLineCenter adds a title to the center of a line ex: "─ This is the title ──────" func AddTitleToLineCenter(title, line string, length int) string { var ret string - repeatString := length - (4 + len(color.ClearCode(title))) + repeatString := length - (4 + GetStringMaxWidth(title)) unevenRepeatString := repeatString % 2 ret += strings.Repeat(line, repeatString/2) + line + " " + title + " " + line + strings.Repeat(line, 
repeatString/2+unevenRepeatString) diff --git a/vendor/github.com/pterm/pterm/logger.go b/vendor/github.com/pterm/pterm/logger.go index 4fed9283..26eea5b0 100644 --- a/vendor/github.com/pterm/pterm/logger.go +++ b/vendor/github.com/pterm/pterm/logger.go @@ -200,6 +200,9 @@ func (l Logger) AppendKeyStyle(key string, style Style) *Logger { // CanPrint checks if the logger can print a specific log level. func (l Logger) CanPrint(level LogLevel) bool { + if l.Level == LogLevelDisabled { + return false + } return l.Level <= level } @@ -261,7 +264,7 @@ func (l Logger) combineArgs(args ...[]LoggerArgument) []LoggerArgument { } func (l Logger) print(level LogLevel, msg string, args []LoggerArgument) { - if l.Level > level { + if !l.CanPrint(level) { return } diff --git a/vendor/github.com/pterm/pterm/multi_live_printer.go b/vendor/github.com/pterm/pterm/multi_live_printer.go new file mode 100644 index 00000000..b9e0d0e8 --- /dev/null +++ b/vendor/github.com/pterm/pterm/multi_live_printer.go @@ -0,0 +1,124 @@ +package pterm + +import ( + "atomicgo.dev/schedule" + "bytes" + "io" + "os" + "strings" + "time" +) + +var DefaultMultiPrinter = MultiPrinter{ + printers: []LivePrinter{}, + Writer: os.Stdout, + UpdateDelay: time.Millisecond * 200, + + buffers: []*bytes.Buffer{}, + area: DefaultArea, +} + +type MultiPrinter struct { + IsActive bool + Writer io.Writer + UpdateDelay time.Duration + + printers []LivePrinter + buffers []*bytes.Buffer + area AreaPrinter +} + +// SetWriter sets the writer for the AreaPrinter. +func (p *MultiPrinter) SetWriter(writer io.Writer) { + p.Writer = writer +} + +// WithWriter returns a fork of the MultiPrinter with a new writer. +func (p MultiPrinter) WithWriter(writer io.Writer) *MultiPrinter { + p.Writer = writer + return &p +} + +// WithUpdateDelay returns a fork of the MultiPrinter with a new update delay. +func (p MultiPrinter) WithUpdateDelay(delay time.Duration) *MultiPrinter { + p.UpdateDelay = delay + return &p +} + +func (p *MultiPrinter) NewWriter() io.Writer { + buf := bytes.NewBufferString("") + p.buffers = append(p.buffers, buf) + return buf +} + +// getString returns all buffers appended and separated by a newline. +func (p *MultiPrinter) getString() string { + var buffer bytes.Buffer + for _, b := range p.buffers { + s := b.String() + s = strings.Trim(s, "\n") + + parts := strings.Split(s, "\r") // only get the last override + s = parts[len(parts)-1] + + // check if s is empty, if so get one part before, repeat until not empty + for s == "" { + parts = parts[:len(parts)-1] + s = parts[len(parts)-1] + } + + s = strings.Trim(s, "\n\r") + buffer.WriteString(s) + buffer.WriteString("\n") + } + return buffer.String() +} + +func (p *MultiPrinter) Start() (*MultiPrinter, error) { + p.IsActive = true + for _, printer := range p.printers { + printer.GenericStart() + } + + schedule.Every(p.UpdateDelay, func() bool { + if !p.IsActive { + return false + } + + p.area.Update(p.getString()) + + return true + }) + + return p, nil +} + +func (p *MultiPrinter) Stop() (*MultiPrinter, error) { + p.IsActive = false + for _, printer := range p.printers { + printer.GenericStop() + } + time.Sleep(time.Millisecond * 20) + p.area.Update(p.getString()) + p.area.Stop() + + return p, nil +} + +// GenericStart runs Start, but returns a LivePrinter. +// This is used for the interface LivePrinter. +// You most likely want to use Start instead of this in your program. 
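The new `MultiPrinter` in this diff multiplexes several live printers into one terminal area: each printer writes into a buffer obtained from `NewWriter`, and the area is redrawn every `UpdateDelay`. A usage sketch follows; the progressbar's `WithTotal` option is assumed from its existing API and is not shown in this hunk.

```go
package main

import (
	"time"

	"github.com/pterm/pterm"
)

func main() {
	multi := pterm.DefaultMultiPrinter

	// Each bar renders into its own buffer; the MultiPrinter merges them into one area.
	download, _ := pterm.DefaultProgressbar.WithTotal(20).WithWriter(multi.NewWriter()).Start("download")
	verify, _ := pterm.DefaultProgressbar.WithTotal(20).WithWriter(multi.NewWriter()).Start("verify")

	multi.Start()
	for i := 0; i < 20; i++ {
		download.Increment()
		verify.Increment()
		time.Sleep(50 * time.Millisecond)
	}
	multi.Stop()
}
```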
+func (p MultiPrinter) GenericStart() (*LivePrinter, error) { + p2, _ := p.Start() + lp := LivePrinter(p2) + return &lp, nil +} + +// GenericStop runs Stop, but returns a LivePrinter. +// This is used for the interface LivePrinter. +// You most likely want to use Stop instead of this in your program. +func (p MultiPrinter) GenericStop() (*LivePrinter, error) { + p2, _ := p.Stop() + lp := LivePrinter(p2) + return &lp, nil +} diff --git a/vendor/github.com/pterm/pterm/progressbar_printer.go b/vendor/github.com/pterm/pterm/progressbar_printer.go index 2fb3f910..1cf4a0c2 100644 --- a/vendor/github.com/pterm/pterm/progressbar_printer.go +++ b/vendor/github.com/pterm/pterm/progressbar_printer.go @@ -1,8 +1,11 @@ package pterm import ( + "atomicgo.dev/cursor" + "atomicgo.dev/schedule" + "fmt" "io" - "strconv" + "math" "strings" "time" @@ -15,23 +18,21 @@ import ( // Generally, there should only be one active ProgressbarPrinter at a time. var ActiveProgressBarPrinters []*ProgressbarPrinter -var ( - // DefaultProgressbar is the default ProgressbarPrinter. - DefaultProgressbar = ProgressbarPrinter{ - Total: 100, - BarCharacter: "█", - LastCharacter: "█", - ElapsedTimeRoundingFactor: time.Second, - BarStyle: &ThemeDefault.ProgressbarBarStyle, - TitleStyle: &ThemeDefault.ProgressbarTitleStyle, - ShowTitle: true, - ShowCount: true, - ShowPercentage: true, - ShowElapsedTime: true, - BarFiller: " ", - MaxWidth: 80, - } -) +// DefaultProgressbar is the default ProgressbarPrinter. +var DefaultProgressbar = ProgressbarPrinter{ + Total: 100, + BarCharacter: "█", + LastCharacter: "█", + ElapsedTimeRoundingFactor: time.Second, + BarStyle: &ThemeDefault.ProgressbarBarStyle, + TitleStyle: &ThemeDefault.ProgressbarTitleStyle, + ShowTitle: true, + ShowCount: true, + ShowPercentage: true, + ShowElapsedTime: true, + BarFiller: Gray("█"), + MaxWidth: 80, +} // ProgressbarPrinter shows a progress animation in the terminal. type ProgressbarPrinter struct { @@ -55,7 +56,8 @@ type ProgressbarPrinter struct { IsActive bool - startedAt time.Time + startedAt time.Time + rerenderTask *schedule.Task Writer io.Writer } @@ -158,6 +160,11 @@ func (p ProgressbarPrinter) WithWriter(writer io.Writer) *ProgressbarPrinter { return &p } +// SetWriter sets the custom Writer. +func (p *ProgressbarPrinter) SetWriter(writer io.Writer) { + p.Writer = writer +} + // Increment current value by one. func (p *ProgressbarPrinter) Increment() *ProgressbarPrinter { p.Add(1) @@ -173,6 +180,14 @@ func (p *ProgressbarPrinter) UpdateTitle(title string) *ProgressbarPrinter { // This is the update logic, renders the progressbar func (p *ProgressbarPrinter) updateProgress() *ProgressbarPrinter { + Fprinto(p.Writer, p.getString()) + return p +} + +func (p *ProgressbarPrinter) getString() string { + if !p.IsActive { + return "" + } if p.TitleStyle == nil { p.TitleStyle = NewStyle() } @@ -180,7 +195,7 @@ func (p *ProgressbarPrinter) updateProgress() *ProgressbarPrinter { p.BarStyle = NewStyle() } if p.Total == 0 { - return nil + return "" } var before string @@ -195,25 +210,20 @@ func (p *ProgressbarPrinter) updateProgress() *ProgressbarPrinter { width = p.MaxWidth } - currentPercentage := int(internal.PercentageRound(float64(int64(p.Total)), float64(int64(p.Current)))) - - decoratorCount := Gray("[") + LightWhite(p.Current) + Gray("/") + LightWhite(p.Total) + Gray("]") - - decoratorCurrentPercentage := color.RGB(NewRGB(255, 0, 0).Fade(0, float32(p.Total), float32(p.Current), NewRGB(0, 255, 0)).GetValues()). 
- Sprint(strconv.Itoa(currentPercentage) + "%") - - decoratorTitle := p.TitleStyle.Sprint(p.Title) - if p.ShowTitle { - before += decoratorTitle + " " + before += p.TitleStyle.Sprint(p.Title) + " " } if p.ShowCount { - before += decoratorCount + " " + padding := 1 + int(math.Log10(float64(p.Total))) + before += Gray("[") + LightWhite(fmt.Sprintf("%0*d", padding, p.Current)) + Gray("/") + LightWhite(p.Total) + Gray("]") + " " } after += " " if p.ShowPercentage { + currentPercentage := int(internal.PercentageRound(float64(int64(p.Total)), float64(int64(p.Current)))) + decoratorCurrentPercentage := color.RGB(NewRGB(255, 0, 0).Fade(0, float32(p.Total), float32(p.Current), NewRGB(0, 255, 0)).GetValues()). + Sprintf("%3d%%", currentPercentage) after += decoratorCurrentPercentage + " " } if p.ShowElapsedTime { @@ -228,17 +238,12 @@ func (p *ProgressbarPrinter) updateProgress() *ProgressbarPrinter { barFiller = strings.Repeat(p.BarFiller, barMaxLength-barCurrentLength) } - var bar string + bar := barFiller if barCurrentLength > 0 { - bar = p.BarStyle.Sprint(strings.Repeat(p.BarCharacter, barCurrentLength)+p.LastCharacter) + barFiller - } else { - bar = "" + bar = p.BarStyle.Sprint(strings.Repeat(p.BarCharacter, barCurrentLength)+p.LastCharacter) + bar } - if !RawOutput { - Fprinto(p.Writer, before+bar+after) - } - return p + return before + bar + after } // Add to current value. @@ -251,6 +256,8 @@ func (p *ProgressbarPrinter) Add(count int) *ProgressbarPrinter { p.updateProgress() if p.Current >= p.Total { + p.Total = p.Current + p.updateProgress() p.Stop() } return p @@ -258,6 +265,7 @@ func (p *ProgressbarPrinter) Add(count int) *ProgressbarPrinter { // Start the ProgressbarPrinter. func (p ProgressbarPrinter) Start(title ...interface{}) (*ProgressbarPrinter, error) { + cursor.Hide() if RawOutput && p.ShowTitle { Fprintln(p.Writer, p.Title) } @@ -270,11 +278,23 @@ func (p ProgressbarPrinter) Start(title ...interface{}) (*ProgressbarPrinter, er p.updateProgress() + if p.ShowElapsedTime { + p.rerenderTask = schedule.Every(time.Second, func() bool { + p.updateProgress() + return true + }) + } + return &p, nil } // Stop the ProgressbarPrinter. func (p *ProgressbarPrinter) Stop() (*ProgressbarPrinter, error) { + if p.rerenderTask != nil && p.rerenderTask.IsActive() { + p.rerenderTask.Stop() + } + cursor.Show() + if !p.IsActive { return p, nil } @@ -291,7 +311,7 @@ func (p *ProgressbarPrinter) Stop() (*ProgressbarPrinter, error) { // GenericStart runs Start, but returns a LivePrinter. // This is used for the interface LivePrinter. // You most likely want to use Start instead of this in your program. -func (p ProgressbarPrinter) GenericStart() (*LivePrinter, error) { +func (p *ProgressbarPrinter) GenericStart() (*LivePrinter, error) { p2, _ := p.Start() lp := LivePrinter(p2) return &lp, nil @@ -300,7 +320,7 @@ func (p ProgressbarPrinter) GenericStart() (*LivePrinter, error) { // GenericStop runs Stop, but returns a LivePrinter. // This is used for the interface LivePrinter. // You most likely want to use Stop instead of this in your program. 
-func (p ProgressbarPrinter) GenericStop() (*LivePrinter, error) { +func (p *ProgressbarPrinter) GenericStop() (*LivePrinter, error) { p2, _ := p.Stop() lp := LivePrinter(p2) return &lp, nil diff --git a/vendor/github.com/pterm/pterm/pterm.go b/vendor/github.com/pterm/pterm/pterm.go index b42a1140..f9d76ca3 100644 --- a/vendor/github.com/pterm/pterm/pterm.go +++ b/vendor/github.com/pterm/pterm/pterm.go @@ -63,7 +63,7 @@ func DisableStyling() { DisableColor() } -// RecalculateTerminalSize updates already initialized terminal dimensions. Has to be called after a termina resize to guarantee proper rendering. Applies only to new instances. +// RecalculateTerminalSize updates already initialized terminal dimensions. Has to be called after a terminal resize to guarantee proper rendering. Applies only to new instances. func RecalculateTerminalSize() { // keep in sync with DefaultBarChart DefaultBarChart.Width = GetTerminalWidth() * 2 / 3 diff --git a/vendor/github.com/pterm/pterm/rgb.go b/vendor/github.com/pterm/pterm/rgb.go index 23b2fa48..a35dd1a0 100644 --- a/vendor/github.com/pterm/pterm/rgb.go +++ b/vendor/github.com/pterm/pterm/rgb.go @@ -18,6 +18,133 @@ type RGB struct { Background bool } +type RGBStyle struct { + Options []Color + Foreground, Background RGB + + hasBg bool +} + +// NewRGBStyle returns a new RGBStyle. +// The foreground color is required, the background color is optional. +// The colors will be set as is, ignoring the RGB.Background property. +func NewRGBStyle(foreground RGB, background ...RGB) RGBStyle { + var s RGBStyle + s.Foreground = foreground + if len(background) > 0 { + s.Background = background[0] + s.hasBg = true + } + return s +} + +// AddOptions adds options to the RGBStyle. +func (p RGBStyle) AddOptions(opts ...Color) RGBStyle { + p.Options = append(p.Options, opts...) + return p +} + +// Print formats using the default formats for its operands and writes to standard output. +// Spaces are added between operands when neither is a string. +// It returns the number of bytes written and any write error encountered. +func (p RGBStyle) Print(a ...interface{}) *TextPrinter { + Print(p.Sprint(a...)) + tp := TextPrinter(p) + return &tp +} + +// Println formats using the default formats for its operands and writes to standard output. +// Spaces are always added between operands and a newline is appended. +// It returns the number of bytes written and any write error encountered. +func (p RGBStyle) Println(a ...interface{}) *TextPrinter { + Println(p.Sprint(a...)) + tp := TextPrinter(p) + return &tp +} + +// Printf formats according to a format specifier and writes to standard output. +// It returns the number of bytes written and any write error encountered. +func (p RGBStyle) Printf(format string, a ...interface{}) *TextPrinter { + Printf(format, p.Sprint(a...)) + tp := TextPrinter(p) + return &tp +} + +// Printfln formats according to a format specifier and writes to standard output. +// Spaces are always added between operands and a newline is appended. +// It returns the number of bytes written and any write error encountered. +func (p RGBStyle) Printfln(format string, a ...interface{}) *TextPrinter { + Printf(format, p.Sprint(a...)) + tp := TextPrinter(p) + return &tp +} + +// PrintOnError prints every error which is not nil. +// If every error is nil, nothing will be printed. +// This can be used for simple error checking. 
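The RGBStyle type added above pairs a foreground and optional background RGB with extra color options. A short sketch of how it might be used — pterm.NewRGB and the pterm.Bold option come from the existing API, not this hunk:

```go
package main

import "github.com/pterm/pterm"

func main() {
	// Foreground-only style.
	pterm.NewRGBStyle(pterm.NewRGB(0, 200, 100)).Println("foreground only")

	// Foreground plus background, with bold added via AddOptions.
	style := pterm.NewRGBStyle(pterm.NewRGB(255, 255, 255), pterm.NewRGB(20, 20, 120)).
		AddOptions(pterm.Bold)
	style.Println("white on dark blue, bold")
}
```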
+func (p RGBStyle) PrintOnError(a ...interface{}) *TextPrinter { + for _, arg := range a { + if err, ok := arg.(error); ok { + if err != nil { + p.Println(err) + } + } + } + + tp := TextPrinter(p) + return &tp +} + +// PrintOnErrorf wraps every error which is not nil and prints it. +// If every error is nil, nothing will be printed. +// This can be used for simple error checking. +func (p RGBStyle) PrintOnErrorf(format string, a ...interface{}) *TextPrinter { + for _, arg := range a { + if err, ok := arg.(error); ok { + if err != nil { + p.Println(fmt.Errorf(format, err)) + } + } + } + + tp := TextPrinter(p) + return &tp +} + +// Sprint formats using the default formats for its operands and returns the resulting string. +// Spaces are added between operands when neither is a string. +func (p RGBStyle) Sprint(a ...interface{}) string { + var rgbStyle *color.RGBStyle + if !p.hasBg { + rgbStyle = color.NewRGBStyle(color.RGB(p.Foreground.R, p.Foreground.G, p.Foreground.B)) + } else { + rgbStyle = color.NewRGBStyle(color.RGB(p.Foreground.R, p.Foreground.G, p.Foreground.B), color.RGB(p.Background.R, p.Background.G, p.Background.B)) + } + if len(p.Options) > 0 { + for _, opt := range p.Options { + rgbStyle.AddOpts(color.Color(opt)) + } + } + return rgbStyle.Sprint(a...) +} + +// Sprintln formats using the default formats for its operands and returns the resulting string. +// Spaces are always added between operands and a newline is appended. +func (p RGBStyle) Sprintln(a ...interface{}) string { + return p.Sprint(a...) + "\n" +} + +// Sprintf formats according to a format specifier and returns the resulting string. +func (p RGBStyle) Sprintf(format string, a ...interface{}) string { + return fmt.Sprintf(format, p.Sprint(a...)) +} + +// Sprintfln formats according to a format specifier and returns the resulting string. +// Spaces are always added between operands and a newline is appended. +func (p RGBStyle) Sprintfln(format string, a ...interface{}) string { + return fmt.Sprintf(format, p.Sprint(a...)) + "\n" +} + // GetValues returns the RGB values separately. func (p RGB) GetValues() (r, g, b uint8) { return p.R, p.G, p.B @@ -160,3 +287,11 @@ func (p RGB) PrintOnErrorf(format string, a ...interface{}) *TextPrinter { tp := TextPrinter(p) return &tp } + +func (p RGB) ToRGBStyle() RGBStyle { + if p.Background { + return RGBStyle{Background: p} + } + + return RGBStyle{Foreground: p} +} diff --git a/vendor/github.com/pterm/pterm/slog_handler.go b/vendor/github.com/pterm/pterm/slog_handler.go new file mode 100644 index 00000000..a2b2acfe --- /dev/null +++ b/vendor/github.com/pterm/pterm/slog_handler.go @@ -0,0 +1,90 @@ +package pterm + +import ( + "context" + + "log/slog" +) + +type SlogHandler struct { + logger *Logger + attrs []slog.Attr +} + +// Enabled returns true if the given level is enabled. +func (s *SlogHandler) Enabled(ctx context.Context, level slog.Level) bool { + switch level { + case slog.LevelDebug: + return s.logger.CanPrint(LogLevelDebug) + case slog.LevelInfo: + return s.logger.CanPrint(LogLevelInfo) + case slog.LevelWarn: + return s.logger.CanPrint(LogLevelWarn) + case slog.LevelError: + return s.logger.CanPrint(LogLevelError) + } + return false +} + +// Handle handles the given record. +func (s *SlogHandler) Handle(ctx context.Context, record slog.Record) error { + level := record.Level + message := record.Message + + // Convert slog Attrs to a map. 
+ keyValsMap := make(map[string]interface{}) + + record.Attrs(func(attr slog.Attr) bool { + keyValsMap[attr.Key] = attr.Value + return true + }) + + for _, attr := range s.attrs { + keyValsMap[attr.Key] = attr.Value + } + + args := s.logger.ArgsFromMap(keyValsMap) + + // Wrapping args inside another slice to match [][]LoggerArgument + argsWrapped := [][]LoggerArgument{args} + + logger := s.logger + + // Must be done here, see https://github.com/pterm/pterm/issues/608#issuecomment-1876001650 + if logger.CallerOffset == 0 { + logger = logger.WithCallerOffset(3) + } + + switch level { + case slog.LevelDebug: + logger.Debug(message, argsWrapped...) + case slog.LevelInfo: + logger.Info(message, argsWrapped...) + case slog.LevelWarn: + logger.Warn(message, argsWrapped...) + case slog.LevelError: + logger.Error(message, argsWrapped...) + default: + logger.Print(message, argsWrapped...) + } + + return nil +} + +// WithAttrs returns a new handler with the given attributes. +func (s *SlogHandler) WithAttrs(attrs []slog.Attr) slog.Handler { + newS := *s + newS.attrs = attrs + return &newS +} + +// WithGroup is not yet supported. +func (s *SlogHandler) WithGroup(name string) slog.Handler { + // Grouping is not yet supported by pterm. + return s +} + +// NewSlogHandler returns a new logging handler that can be intrgrated with log/slog. +func NewSlogHandler(logger *Logger) *SlogHandler { + return &SlogHandler{logger: logger} +} diff --git a/vendor/github.com/pterm/pterm/spinner_printer.go b/vendor/github.com/pterm/pterm/spinner_printer.go index 82b84af7..044b6976 100644 --- a/vendor/github.com/pterm/pterm/spinner_printer.go +++ b/vendor/github.com/pterm/pterm/spinner_printer.go @@ -110,15 +110,18 @@ func (p SpinnerPrinter) WithWriter(writer io.Writer) *SpinnerPrinter { return &p } +// SetWriter sets the custom Writer. +func (p *SpinnerPrinter) SetWriter(writer io.Writer) { + p.Writer = writer +} + // UpdateText updates the message of the active SpinnerPrinter. // Can be used live. func (s *SpinnerPrinter) UpdateText(text string) { s.Text = text if !RawOutput { - fClearLine(s.Writer) Fprinto(s.Writer, s.Style.Sprint(s.currentSequence)+" "+s.MessageStyle.Sprint(s.Text)) - } - if RawOutput { + } else { Fprintln(s.Writer, s.Text) } } @@ -152,7 +155,6 @@ func (s SpinnerPrinter) Start(text ...interface{}) (*SpinnerPrinter, error) { if s.ShowTimer { timer = " (" + time.Since(s.startedAt).Round(s.TimerRoundingFactor).String() + ")" } - fClearLine(s.Writer) Fprinto(s.Writer, s.Style.Sprint(seq)+" "+s.MessageStyle.Sprint(s.Text)+s.TimerStyle.Sprint(timer)) s.currentSequence = seq time.Sleep(s.Delay) @@ -182,8 +184,8 @@ func (s *SpinnerPrinter) Stop() error { // This is used for the interface LivePrinter. // You most likely want to use Start instead of this in your program. func (s *SpinnerPrinter) GenericStart() (*LivePrinter, error) { - _, _ = s.Start() - lp := LivePrinter(s) + p2, _ := s.Start() + lp := LivePrinter(p2) return &lp, nil } diff --git a/vendor/github.com/pterm/pterm/table_printer.go b/vendor/github.com/pterm/pterm/table_printer.go index cbef5019..54e8965a 100644 --- a/vendor/github.com/pterm/pterm/table_printer.go +++ b/vendor/github.com/pterm/pterm/table_printer.go @@ -2,9 +2,10 @@ package pterm import ( "encoding/csv" - "github.com/pterm/pterm/internal" "io" "strings" + + "github.com/pterm/pterm/internal" ) // DefaultTable contains standards, which can be used to print a TablePrinter. 
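The new slog_handler.go above lets pterm's Logger act as a backend for the standard library's log/slog, which fits the go 1.21 toolchain bump in this change. A minimal sketch, assuming pterm's existing DefaultLogger:

```go
package main

import (
	"log/slog"

	"github.com/pterm/pterm"
)

func main() {
	// Wrap the pterm logger in the new slog handler.
	handler := pterm.NewSlogHandler(&pterm.DefaultLogger)

	// Use it as a drop-in slog backend.
	logger := slog.New(handler)
	logger.Info("secret synced", "namespace", "default", "count", 3)
	logger.Debug("only shown when the pterm log level allows it")
}
```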
@@ -183,8 +184,8 @@ func (p TablePrinter) Srender() (string, error) { c.lines = strings.Split(cRaw, "\n") c.height = len(c.lines) for _, l := range c.lines { - if internal.GetStringMaxWidth(l) > c.width { - c.width = internal.GetStringMaxWidth(l) + if maxWidth := internal.GetStringMaxWidth(l); maxWidth > c.width { + c.width = maxWidth } } r.cells = append(r.cells, c) @@ -262,7 +263,7 @@ func (p TablePrinter) renderRow(t table, r row) string { } if i < len(c.lines) { - s += strings.TrimSpace(c.lines[i]) + s += c.lines[i] } if j < len(r.cells)-1 { diff --git a/vendor/github.com/pterm/pterm/theme.go b/vendor/github.com/pterm/pterm/theme.go index 22466ea7..91594c8c 100644 --- a/vendor/github.com/pterm/pterm/theme.go +++ b/vendor/github.com/pterm/pterm/theme.go @@ -30,6 +30,9 @@ var ( TableStyle: Style{FgDefault}, TableHeaderStyle: Style{FgLightCyan}, TableSeparatorStyle: Style{FgGray}, + HeatmapStyle: Style{FgDefault}, + HeatmapHeaderStyle: Style{FgLightCyan}, + HeatmapSeparatorStyle: Style{FgDefault}, SectionStyle: Style{Bold, FgYellow}, BulletListTextStyle: Style{FgDefault}, BulletListBulletStyle: Style{FgGray}, @@ -81,6 +84,9 @@ type Theme struct { TableStyle Style TableHeaderStyle Style TableSeparatorStyle Style + HeatmapStyle Style + HeatmapHeaderStyle Style + HeatmapSeparatorStyle Style SectionStyle Style BulletListTextStyle Style BulletListBulletStyle Style diff --git a/vendor/github.com/pterm/pterm/tree_printer.go b/vendor/github.com/pterm/pterm/tree_printer.go index fe10c133..113f9202 100644 --- a/vendor/github.com/pterm/pterm/tree_printer.go +++ b/vendor/github.com/pterm/pterm/tree_printer.go @@ -124,7 +124,7 @@ func (p TreePrinter) Srender() (string, error) { var result string if p.Root.Text != "" { - result += p.Root.Text + "\n" + result += p.TextStyle.Sprint(p.Root.Text) + "\n" } result += walkOverTree(p.Root.Children, p, "") return result, nil diff --git a/vendor/github.com/sheldonhull/magetools/ci/ci.go b/vendor/github.com/sheldonhull/magetools/ci/ci.go index 670dadcb..162c3f79 100644 --- a/vendor/github.com/sheldonhull/magetools/ci/ci.go +++ b/vendor/github.com/sheldonhull/magetools/ci/ci.go @@ -16,19 +16,35 @@ import ( // IsCI will set the global variable for IsCI based on lookup of the environment variable. func IsCI() bool { magetoolsutils.CheckPtermDebug() + _, exists := os.LookupEnv("AGENT_ID") if exists { pterm.Info.Println("Azure DevOps match based on AGENT_ID. Setting IS_CI = 1") return true } + _, exists = os.LookupEnv("GITLAB_CI") + if exists { + pterm.Info.Println("Gitlab Runner match based on [GITLAB_CI] env variable. Setting IS_CI = 1") + + return true + } + _, exists = os.LookupEnv("NETLIFY") if exists { pterm.Info.Println("Netlify match based on [NETLIFY] environment. Setting IS_CI = 1") return true } + _, exists = os.LookupEnv("GITHUB_ACTIONS") + if exists { + pterm.Info.Println("GitHub Actions match based on [GITHUB_ACTIONS] environment. Setting IS_CI = 1") + + return true + } + // CI is also set for Netlify, so it's important to run the NETFLIFY check before the CI check. + // It might be used by others since it's so common so let's leave this check to the very end. _, exists = os.LookupEnv("CI") if exists { pterm.Info.Println("GitHub actions match based on [CI] env variable. 
Setting IS_CI = 1") diff --git a/vendor/github.com/sheldonhull/magetools/gotools/gotools.go b/vendor/github.com/sheldonhull/magetools/gotools/gotools.go index f952722b..3bf5cc17 100644 --- a/vendor/github.com/sheldonhull/magetools/gotools/gotools.go +++ b/vendor/github.com/sheldonhull/magetools/gotools/gotools.go @@ -270,7 +270,7 @@ func (Go) Lint() error { Printfln("unable to find %s: %v", appgolangcilint, err) return err } - pterm.Info.Printfln("gotestsum found: %s", golangcilint) + pterm.Info.Printfln("golangci-lint found: %s", golangcilint) if err := sh.RunV(golangcilint, "run"); err != nil { pterm.Error.WithShowLineNumber(true).WithLineNumberOffset(1).Println("golangci-lint failure") diff --git a/vendor/github.com/sheldonhull/magetools/pkg/magetoolsutils/magetoolsutils.go b/vendor/github.com/sheldonhull/magetools/pkg/magetoolsutils/magetoolsutils.go index 20b6762b..ca60bfe7 100644 --- a/vendor/github.com/sheldonhull/magetools/pkg/magetoolsutils/magetoolsutils.go +++ b/vendor/github.com/sheldonhull/magetools/pkg/magetoolsutils/magetoolsutils.go @@ -32,7 +32,7 @@ func CheckPtermDebug() { //nolint:cyclop,funlen // cyclop,funlen: i'm sure there } if debug { pterm.Debug.Println("strconv.ParseBool(\"DEBUG\") true, enabling debug output and exiting") - pterm.Info.Println("DEBUG env var detected, setting tasks to debug level output") + pterm.Debug.Println("DEBUG env var detected, setting tasks to debug level output") pterm.EnableDebugMessages() return } @@ -50,7 +50,7 @@ func CheckPtermDebug() { //nolint:cyclop,funlen // cyclop,funlen: i'm sure there if debug { pterm.Debug.Println("strconv.ParseBool(\"SYSTEM_DEBUG\") true, enabling debug output and exiting") - pterm.Info.Println("SYSTEM_DEBUG env var detected, setting tasks to debug level output") + pterm.Debug.Println("SYSTEM_DEBUG env var detected, setting tasks to debug level output") pterm.EnableDebugMessages() return } @@ -74,7 +74,7 @@ func CheckPtermDebug() { //nolint:cyclop,funlen // cyclop,funlen: i'm sure there } if mg.Verbose() { pterm.Debug.Printfln("mg.Verbose() true, setting pterm.EnableDebugMessages()") - pterm.Info.Println("mg.Verbose() true (-v or MAGEFILE_VERBOSE env var), setting tasks to debug level output") + pterm.Debug.Println("mg.Verbose() true (-v or MAGEFILE_VERBOSE env var), setting tasks to debug level output") pterm.EnableDebugMessages() return } diff --git a/vendor/github.com/sirupsen/logrus/.gitignore b/vendor/github.com/sirupsen/logrus/.gitignore index 6b7d7d1e..1fb13abe 100644 --- a/vendor/github.com/sirupsen/logrus/.gitignore +++ b/vendor/github.com/sirupsen/logrus/.gitignore @@ -1,2 +1,4 @@ logrus vendor + +.idea/ diff --git a/vendor/github.com/sirupsen/logrus/.golangci.yml b/vendor/github.com/sirupsen/logrus/.golangci.yml new file mode 100644 index 00000000..65dc2850 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/.golangci.yml @@ -0,0 +1,40 @@ +run: + # do not run on test files yet + tests: false + +# all available settings of specific linters +linters-settings: + errcheck: + # report about not checking of errors in type assetions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. 
+ check-blank: false + + lll: + line-length: 100 + tab-width: 4 + + prealloc: + simple: false + range-loops: false + for-loops: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + +linters: + enable: + - megacheck + - govet + disable: + - maligned + - prealloc + disable-all: false + presets: + - bugs + - unused + fast: false diff --git a/vendor/github.com/sirupsen/logrus/.travis.yml b/vendor/github.com/sirupsen/logrus/.travis.yml index 1f953beb..c1dbd5a3 100644 --- a/vendor/github.com/sirupsen/logrus/.travis.yml +++ b/vendor/github.com/sirupsen/logrus/.travis.yml @@ -1,51 +1,15 @@ language: go +go_import_path: github.com/sirupsen/logrus +git: + depth: 1 env: - - GOMAXPROCS=4 GORACE=halt_on_error=1 -matrix: - include: - - go: 1.10.x - install: - - go get github.com/stretchr/testify/assert - - go get golang.org/x/crypto/ssh/terminal - - go get golang.org/x/sys/unix - - go get golang.org/x/sys/windows - script: - - go test -race -v ./... - - go: 1.11.x - env: GO111MODULE=on - install: - - go mod download - script: - - go test -race -v ./... - - go: 1.11.x - env: GO111MODULE=off - install: - - go get github.com/stretchr/testify/assert - - go get golang.org/x/crypto/ssh/terminal - - go get golang.org/x/sys/unix - - go get golang.org/x/sys/windows - script: - - go test -race -v ./... - - go: 1.10.x - install: - - go get github.com/stretchr/testify/assert - - go get golang.org/x/crypto/ssh/terminal - - go get golang.org/x/sys/unix - - go get golang.org/x/sys/windows - script: - - go test -race -v -tags appengine ./... - - go: 1.11.x - env: GO111MODULE=on - install: - - go mod download - script: - - go test -race -v -tags appengine ./... - - go: 1.11.x - env: GO111MODULE=off - install: - - go get github.com/stretchr/testify/assert - - go get golang.org/x/crypto/ssh/terminal - - go get golang.org/x/sys/unix - - go get golang.org/x/sys/windows - script: - - go test -race -v -tags appengine ./... + - GO111MODULE=on +go: 1.15.x +os: linux +install: + - ./travis/install.sh +script: + - cd ci + - go run mage.go -v -w ../ crossBuild + - go run mage.go -v -w ../ lint + - go run mage.go -v -w ../ test diff --git a/vendor/github.com/sirupsen/logrus/CHANGELOG.md b/vendor/github.com/sirupsen/logrus/CHANGELOG.md index cb85d9f9..7567f612 100644 --- a/vendor/github.com/sirupsen/logrus/CHANGELOG.md +++ b/vendor/github.com/sirupsen/logrus/CHANGELOG.md @@ -1,3 +1,97 @@ +# 1.8.1 +Code quality: + * move magefile in its own subdir/submodule to remove magefile dependency on logrus consumer + * improve timestamp format documentation + +Fixes: + * fix race condition on logger hooks + + +# 1.8.0 + +Correct versioning number replacing v1.7.1. + +# 1.7.1 + +Beware this release has introduced a new public API and its semver is therefore incorrect. 
+ +Code quality: + * use go 1.15 in travis + * use magefile as task runner + +Fixes: + * small fixes about new go 1.13 error formatting system + * Fix for long time race condiction with mutating data hooks + +Features: + * build support for zos + +# 1.7.0 +Fixes: + * the dependency toward a windows terminal library has been removed + +Features: + * a new buffer pool management API has been added + * a set of `Fn()` functions have been added + +# 1.6.0 +Fixes: + * end of line cleanup + * revert the entry concurrency bug fix whic leads to deadlock under some circumstances + * update dependency on go-windows-terminal-sequences to fix a crash with go 1.14 + +Features: + * add an option to the `TextFormatter` to completely disable fields quoting + +# 1.5.0 +Code quality: + * add golangci linter run on travis + +Fixes: + * add mutex for hooks concurrent access on `Entry` data + * caller function field for go1.14 + * fix build issue for gopherjs target + +Feature: + * add an hooks/writer sub-package whose goal is to split output on different stream depending on the trace level + * add a `DisableHTMLEscape` option in the `JSONFormatter` + * add `ForceQuote` and `PadLevelText` options in the `TextFormatter` + +# 1.4.2 + * Fixes build break for plan9, nacl, solaris +# 1.4.1 +This new release introduces: + * Enhance TextFormatter to not print caller information when they are empty (#944) + * Remove dependency on golang.org/x/crypto (#932, #943) + +Fixes: + * Fix Entry.WithContext method to return a copy of the initial entry (#941) + +# 1.4.0 +This new release introduces: + * Add `DeferExitHandler`, similar to `RegisterExitHandler` but prepending the handler to the list of handlers (semantically like `defer`) (#848). + * Add `CallerPrettyfier` to `JSONFormatter` and `TextFormatter` (#909, #911) + * Add `Entry.WithContext()` and `Entry.Context`, to set a context on entries to be used e.g. in hooks (#919). + +Fixes: + * Fix wrong method calls `Logger.Print` and `Logger.Warningln` (#893). + * Update `Entry.Logf` to not do string formatting unless the log level is enabled (#903) + * Fix infinite recursion on unknown `Level.String()` (#907) + * Fix race condition in `getCaller` (#916). 
+ + +# 1.3.0 +This new release introduces: + * Log, Logf, Logln functions for Logger and Entry that take a Level + +Fixes: + * Building prometheus node_exporter on AIX (#840) + * Race condition in TextFormatter (#468) + * Travis CI import path (#868) + * Remove coloured output on Windows (#862) + * Pointer to func as field in JSONFormatter (#870) + * Properly marshal Levels (#873) + # 1.2.0 This new release introduces: * A new method `SetReportCaller` in the `Logger` to enable the file, line and calling function from which the trace has been issued diff --git a/vendor/github.com/sirupsen/logrus/README.md b/vendor/github.com/sirupsen/logrus/README.md index 093bb13f..d1d4a85f 100644 --- a/vendor/github.com/sirupsen/logrus/README.md +++ b/vendor/github.com/sirupsen/logrus/README.md @@ -1,8 +1,28 @@ -# Logrus :walrus: [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![GoDoc](https://godoc.org/github.com/sirupsen/logrus?status.svg)](https://godoc.org/github.com/sirupsen/logrus) +# Logrus :walrus: [![Build Status](https://github.com/sirupsen/logrus/workflows/CI/badge.svg)](https://github.com/sirupsen/logrus/actions?query=workflow%3ACI) [![Build Status](https://travis-ci.org/sirupsen/logrus.svg?branch=master)](https://travis-ci.org/sirupsen/logrus) [![Go Reference](https://pkg.go.dev/badge/github.com/sirupsen/logrus.svg)](https://pkg.go.dev/github.com/sirupsen/logrus) Logrus is a structured logger for Go (golang), completely API compatible with the standard library logger. +**Logrus is in maintenance-mode.** We will not be introducing new features. It's +simply too hard to do in a way that won't break many people's projects, which is +the last thing you want from your Logging library (again...). + +This does not mean Logrus is dead. Logrus will continue to be maintained for +security, (backwards compatible) bug fixes, and performance (where we are +limited by the interface). + +I believe Logrus' biggest contribution is to have played a part in today's +widespread use of structured logging in Golang. There doesn't seem to be a +reason to do a major, breaking iteration into Logrus V2, since the fantastic Go +community has built those independently. Many fantastic alternatives have sprung +up. Logrus would look like those, had it been re-designed with what we know +about structured logging in Go today. Check out, for example, +[Zerolog][zerolog], [Zap][zap], and [Apex][apex]. + +[zerolog]: https://github.com/rs/zerolog +[zap]: https://github.com/uber-go/zap +[apex]: https://github.com/apex/log + **Seeing weird case-sensitive problems?** It's in the past been possible to import Logrus as both upper- and lower-case. Due to the Go package environment, this caused issues in the community and we needed a standard. Some environments @@ -15,11 +35,6 @@ comments](https://github.com/sirupsen/logrus/issues/553#issuecomment-306591437). For an in-depth explanation of the casing issue, see [this comment](https://github.com/sirupsen/logrus/issues/570#issuecomment-313933276). -**Are you interested in assisting in maintaining Logrus?** Currently I have a -lot of obligations, and I am unable to provide Logrus with the maintainership it -needs. If you'd like to help, please reach out to me at `simon at author's -username dot com`. 
- Nicely color-coded in development (when a TTY is attached, otherwise just plain text): @@ -28,7 +43,7 @@ plain text): With `log.SetFormatter(&log.JSONFormatter{})`, for easy parsing by logstash or Splunk: -```json +```text {"animal":"walrus","level":"info","msg":"A group of walrus emerges from the ocean","size":10,"time":"2014-03-10 19:57:38.562264131 -0400 EDT"} @@ -84,7 +99,7 @@ time="2015-03-26T01:27:38-04:00" level=fatal method=github.com/sirupsen/arcticcr ``` Note that this does add measurable overhead - the cost will depend on the version of Go, but is between 20 and 40% in recent tests with 1.6 and 1.7. You can validate this in your -environment via benchmarks: +environment via benchmarks: ``` go test -bench=.*CallerTracing ``` @@ -187,7 +202,7 @@ func main() { log.Out = os.Stdout // You could set this to any `io.Writer` such as a file - // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY, 0666) + // file, err := os.OpenFile("logrus.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666) // if err == nil { // log.Out = file // } else { @@ -272,7 +287,7 @@ func init() { ``` Note: Syslog hook also support connecting to local syslog (Ex. "/dev/log" or "/var/run/syslog" or "/var/run/log"). For the detail, please check the [syslog hook README](hooks/syslog/README.md). -A list of currently known of service hook can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) +A list of currently known service hooks can be found in this wiki [page](https://github.com/sirupsen/logrus/wiki/Hooks) #### Level logging @@ -302,6 +317,8 @@ log.SetLevel(log.InfoLevel) It may be useful to set `log.Level = logrus.DebugLevel` in a debug or verbose environment if your application has that. +Note: If you want different log levels for global (`log.SetLevel(...)`) and syslog logging, please check the [syslog hook README](hooks/syslog/README.md#different-log-levels-for-local-and-remote-logging). + #### Entries Besides the fields added with `WithField` or `WithFields` some fields are @@ -326,7 +343,7 @@ import ( log "github.com/sirupsen/logrus" ) -init() { +func init() { // do something here to set environment depending on an environment variable // or command-line flag if Environment == "production" { @@ -354,6 +371,7 @@ The built-in logging formatters are: [github.com/mattn/go-colorable](https://github.com/mattn/go-colorable). * When colors are enabled, levels are truncated to 4 characters by default. To disable truncation set the `DisableLevelTruncation` field to `true`. + * When outputting to a TTY, it's often helpful to visually scan down a column where all the levels are the same width. Setting the `PadLevelText` field to `true` enables this behavior, by adding padding to the level text. * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#TextFormatter). * `logrus.JSONFormatter`. Logs fields as JSON. * All options are listed in the [generated docs](https://godoc.org/github.com/sirupsen/logrus#JSONFormatter). @@ -361,9 +379,13 @@ The built-in logging formatters are: Third party logging formatters: * [`FluentdFormatter`](https://github.com/joonix/log). Formats entries that can be parsed by Kubernetes and Google Container Engine. +* [`GELF`](https://github.com/fabienm/go-logrus-formatters). Formats entries so they comply to Graylog's [GELF 1.1 specification](http://docs.graylog.org/en/2.4/pages/gelf.html). * [`logstash`](https://github.com/bshuster-repo/logrus-logstash-hook). Logs fields as [Logstash](http://logstash.net) Events. 
* [`prefixed`](https://github.com/x-cray/logrus-prefixed-formatter). Displays log entry source along with alternative layout. -* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the P͉̫o̳̼̊w̖͈̰͎e̬͔̭͂r͚̼̹̲ ̫͓͉̳͈ō̠͕͖̚f̝͍̠ ͕̲̞͖͑Z̖̫̤̫ͪa͉̬͈̗l͖͎g̳̥o̰̥̅!̣͔̲̻͊̄ ̙̘̦̹̦. +* [`zalgo`](https://github.com/aybabtme/logzalgo). Invoking the Power of Zalgo. +* [`nested-logrus-formatter`](https://github.com/antonfisher/nested-logrus-formatter). Converts logrus fields to a nested structure. +* [`powerful-logrus-formatter`](https://github.com/zput/zxcTool). get fileName, log's line number and the latest function's name when print log; Sava log to files. +* [`caption-json-formatter`](https://github.com/nolleh/caption_json_formatter). logrus's message json formatter with human-readable caption added. You can define your formatter by implementing the `Formatter` interface, requiring a `Format` method. `Format` takes an `*Entry`. `entry.Data` is a @@ -382,7 +404,7 @@ func (f *MyJSONFormatter) Format(entry *Entry) ([]byte, error) { // source of the official loggers. serialized, err := json.Marshal(entry.Data) if err != nil { - return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) + return nil, fmt.Errorf("Failed to marshal fields to JSON, %w", err) } return append(serialized, '\n'), nil } @@ -428,14 +450,14 @@ entries. It should not be a feature of the application-level logger. | Tool | Description | | ---- | ----------- | -|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will generated with different config at different environment.| +|[Logrus Mate](https://github.com/gogap/logrus_mate)|Logrus mate is a tool for Logrus to manage loggers, you can initial logger's level, hook and formatter by config file, the logger will be generated with different configs in different environments.| |[Logrus Viper Helper](https://github.com/heirko/go-contrib/tree/master/logrusHelper)|An Helper around Logrus to wrap with spf13/Viper to load configuration with fangs! And to simplify Logrus configuration use some behavior of [Logrus Mate](https://github.com/gogap/logrus_mate). [sample](https://github.com/heirko/iris-contrib/blob/master/middleware/logrus-logger/example) | #### Testing Logrus has a built in facility for asserting the presence of log messages. This is implemented through the `test` hook and provides: -* decorators for existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just add the `test` hook +* decorators for existing logger (`test.NewLocal` and `test.NewGlobal`) which basically just adds the `test` hook * a test logger (`test.NewNullLogger`) that just records log messages (and does not output any): ```go @@ -463,7 +485,7 @@ func TestSomething(t*testing.T){ Logrus can register one or more functions that will be called when any `fatal` level message is logged. The registered handlers will be executed before -logrus performs a `os.Exit(1)`. This behavior may be helpful if callers need +logrus performs an `os.Exit(1)`. This behavior may be helpful if callers need to gracefully shutdown. Unlike a `panic("Something went wrong...")` call which can be intercepted with a deferred `recover` a call to `os.Exit(1)` can not be intercepted. ``` @@ -488,6 +510,6 @@ Situation when locking is not needed includes: 1) logger.Out is protected by locks. - 2) logger.Out is a os.File handler opened with `O_APPEND` flag, and every write is smaller than 4k. 
(This allow multi-thread/multi-process writing) + 2) logger.Out is an os.File handler opened with `O_APPEND` flag, and every write is smaller than 4k. (This allows multi-thread/multi-process writing) (Refer to http://www.notthewizard.com/2014/06/17/are-files-appends-really-atomic/) diff --git a/vendor/github.com/sirupsen/logrus/alt_exit.go b/vendor/github.com/sirupsen/logrus/alt_exit.go index 8af90637..8fd189e1 100644 --- a/vendor/github.com/sirupsen/logrus/alt_exit.go +++ b/vendor/github.com/sirupsen/logrus/alt_exit.go @@ -51,9 +51,9 @@ func Exit(code int) { os.Exit(code) } -// RegisterExitHandler adds a Logrus Exit handler, call logrus.Exit to invoke -// all handlers. The handlers will also be invoked when any Fatal log entry is -// made. +// RegisterExitHandler appends a Logrus Exit handler to the list of handlers, +// call logrus.Exit to invoke all handlers. The handlers will also be invoked when +// any Fatal log entry is made. // // This method is useful when a caller wishes to use logrus to log a fatal // message but also needs to gracefully shutdown. An example usecase could be @@ -62,3 +62,15 @@ func Exit(code int) { func RegisterExitHandler(handler func()) { handlers = append(handlers, handler) } + +// DeferExitHandler prepends a Logrus Exit handler to the list of handlers, +// call logrus.Exit to invoke all handlers. The handlers will also be invoked when +// any Fatal log entry is made. +// +// This method is useful when a caller wishes to use logrus to log a fatal +// message but also needs to gracefully shutdown. An example usecase could be +// closing database connections, or sending a alert that the application is +// closing. +func DeferExitHandler(handler func()) { + handlers = append([]func(){handler}, handlers...) +} diff --git a/vendor/github.com/sirupsen/logrus/appveyor.yml b/vendor/github.com/sirupsen/logrus/appveyor.yml index b4ffca27..df9d65c3 100644 --- a/vendor/github.com/sirupsen/logrus/appveyor.yml +++ b/vendor/github.com/sirupsen/logrus/appveyor.yml @@ -1,14 +1,14 @@ version: "{build}" platform: x64 clone_folder: c:\gopath\src\github.com\sirupsen\logrus -environment: +environment: GOPATH: c:\gopath -branches: +branches: only: - master -install: +install: - set PATH=%GOPATH%\bin;c:\go\bin;%PATH% - go version -build_script: +build_script: - go get -t - go test diff --git a/vendor/github.com/sirupsen/logrus/buffer_pool.go b/vendor/github.com/sirupsen/logrus/buffer_pool.go new file mode 100644 index 00000000..c7787f77 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/buffer_pool.go @@ -0,0 +1,43 @@ +package logrus + +import ( + "bytes" + "sync" +) + +var ( + bufferPool BufferPool +) + +type BufferPool interface { + Put(*bytes.Buffer) + Get() *bytes.Buffer +} + +type defaultPool struct { + pool *sync.Pool +} + +func (p *defaultPool) Put(buf *bytes.Buffer) { + p.pool.Put(buf) +} + +func (p *defaultPool) Get() *bytes.Buffer { + return p.pool.Get().(*bytes.Buffer) +} + +// SetBufferPool allows to replace the default logrus buffer pool +// to better meets the specific needs of an application. 
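The buffer-pool API being added here (the BufferPool interface, SetBufferPool, and the per-Logger BufferPool field later in this diff) lets callers replace logrus's default sync.Pool-backed allocator. A hedged sketch of a caller-side pool that drops oversized buffers instead of recycling them:

```go
package main

import (
	"bytes"
	"sync"

	"github.com/sirupsen/logrus"
)

// cappedPool recycles formatting buffers but lets very large ones be garbage collected.
type cappedPool struct {
	pool sync.Pool
}

func (p *cappedPool) Get() *bytes.Buffer {
	return p.pool.Get().(*bytes.Buffer)
}

func (p *cappedPool) Put(buf *bytes.Buffer) {
	if buf.Cap() > 1<<16 {
		return // do not keep unusually large buffers alive
	}
	p.pool.Put(buf)
}

func main() {
	logrus.SetBufferPool(&cappedPool{
		pool: sync.Pool{New: func() interface{} { return new(bytes.Buffer) }},
	})
	logrus.Info("using the custom buffer pool")
}
```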
+func SetBufferPool(bp BufferPool) { + bufferPool = bp +} + +func init() { + SetBufferPool(&defaultPool{ + pool: &sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, + }, + }) +} diff --git a/vendor/github.com/sirupsen/logrus/entry.go b/vendor/github.com/sirupsen/logrus/entry.go index cc85d3aa..71cdbbc3 100644 --- a/vendor/github.com/sirupsen/logrus/entry.go +++ b/vendor/github.com/sirupsen/logrus/entry.go @@ -2,6 +2,7 @@ package logrus import ( "bytes" + "context" "fmt" "os" "reflect" @@ -12,7 +13,6 @@ import ( ) var ( - bufferPool *sync.Pool // qualified package name, cached at first use logrusPackage string @@ -30,12 +30,6 @@ const ( ) func init() { - bufferPool = &sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, - } - // start at the bottom of the stack before the package-name cache is primed minimumCallerDepth = 1 } @@ -69,6 +63,9 @@ type Entry struct { // When formatter is called in entry.log(), a Buffer may be set to entry Buffer *bytes.Buffer + // Contains the context set by the user. Useful for hook processing etc. + Context context.Context + // err may contain a field formatting error err string } @@ -81,10 +78,23 @@ func NewEntry(logger *Logger) *Entry { } } +func (entry *Entry) Dup() *Entry { + data := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + data[k] = v + } + return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, Context: entry.Context, err: entry.err} +} + +// Returns the bytes representation of this entry from the formatter. +func (entry *Entry) Bytes() ([]byte, error) { + return entry.Logger.Formatter.Format(entry) +} + // Returns the string representation from the reader and ultimately the // formatter. func (entry *Entry) String() (string, error) { - serialized, err := entry.Logger.Formatter.Format(entry) + serialized, err := entry.Bytes() if err != nil { return "", err } @@ -97,6 +107,15 @@ func (entry *Entry) WithError(err error) *Entry { return entry.WithField(ErrorKey, err) } +// Add a context to the Entry. +func (entry *Entry) WithContext(ctx context.Context) *Entry { + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: entry.Time, err: entry.err, Context: ctx} +} + // Add a single field to the Entry. func (entry *Entry) WithField(key string, value interface{}) *Entry { return entry.WithFields(Fields{key: value}) @@ -108,23 +127,36 @@ func (entry *Entry) WithFields(fields Fields) *Entry { for k, v := range entry.Data { data[k] = v } - var field_err string + fieldErr := entry.err for k, v := range fields { - if t := reflect.TypeOf(v); t != nil && t.Kind() == reflect.Func { - field_err = fmt.Sprintf("can not add field %q", k) - if entry.err != "" { - field_err = entry.err + ", " + field_err + isErrField := false + if t := reflect.TypeOf(v); t != nil { + switch { + case t.Kind() == reflect.Func, t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Func: + isErrField = true + } + } + if isErrField { + tmp := fmt.Sprintf("can not add field %q", k) + if fieldErr != "" { + fieldErr = entry.err + ", " + tmp + } else { + fieldErr = tmp } } else { data[k] = v } } - return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, err: field_err} + return &Entry{Logger: entry.Logger, Data: data, Time: entry.Time, err: fieldErr, Context: entry.Context} } // Overrides the time of the Entry. 
func (entry *Entry) WithTime(t time.Time) *Entry { - return &Entry{Logger: entry.Logger, Data: entry.Data, Time: t} + dataCopy := make(Fields, len(entry.Data)) + for k, v := range entry.Data { + dataCopy[k] = v + } + return &Entry{Logger: entry.Logger, Data: dataCopy, Time: t, err: entry.err, Context: entry.Context} } // getPackageName reduces a fully qualified function name to the package name @@ -145,26 +177,34 @@ func getPackageName(f string) string { // getCaller retrieves the name of the first non-logrus calling function func getCaller() *runtime.Frame { - // Restrict the lookback frames to avoid runaway lookups - pcs := make([]uintptr, maximumCallerDepth) - depth := runtime.Callers(minimumCallerDepth, pcs) - frames := runtime.CallersFrames(pcs[:depth]) - // cache this package's fully-qualified name callerInitOnce.Do(func() { - logrusPackage = getPackageName(runtime.FuncForPC(pcs[0]).Name()) + pcs := make([]uintptr, maximumCallerDepth) + _ = runtime.Callers(0, pcs) + + // dynamic get the package name and the minimum caller depth + for i := 0; i < maximumCallerDepth; i++ { + funcName := runtime.FuncForPC(pcs[i]).Name() + if strings.Contains(funcName, "getCaller") { + logrusPackage = getPackageName(funcName) + break + } + } - // now that we have the cache, we can skip a minimum count of known-logrus functions - // XXX this is dubious, the number of frames may vary store an entry in a logger interface minimumCallerDepth = knownLogrusFrames }) + // Restrict the lookback frames to avoid runaway lookups + pcs := make([]uintptr, maximumCallerDepth) + depth := runtime.Callers(minimumCallerDepth, pcs) + frames := runtime.CallersFrames(pcs[:depth]) + for f, again := frames.Next(); again; f, again = frames.Next() { pkg := getPackageName(f.Function) // If the caller isn't part of this package, we're done if pkg != logrusPackage { - return &f + return &f //nolint:scopelint } } @@ -178,49 +218,66 @@ func (entry Entry) HasCaller() (has bool) { entry.Caller != nil } -// This function is not declared with a pointer value because otherwise -// race conditions will occur when using multiple goroutines -func (entry Entry) log(level Level, msg string) { +func (entry *Entry) log(level Level, msg string) { var buffer *bytes.Buffer - // Default to now, but allow users to override if they want. - // - // We don't have to worry about polluting future calls to Entry#log() - // with this assignment because this function is declared with a - // non-pointer receiver. - if entry.Time.IsZero() { - entry.Time = time.Now() - } + newEntry := entry.Dup() - entry.Level = level - entry.Message = msg - if entry.Logger.ReportCaller { - entry.Caller = getCaller() + if newEntry.Time.IsZero() { + newEntry.Time = time.Now() } - entry.fireHooks() + newEntry.Level = level + newEntry.Message = msg + + newEntry.Logger.mu.Lock() + reportCaller := newEntry.Logger.ReportCaller + bufPool := newEntry.getBufferPool() + newEntry.Logger.mu.Unlock() - buffer = bufferPool.Get().(*bytes.Buffer) + if reportCaller { + newEntry.Caller = getCaller() + } + + newEntry.fireHooks() + buffer = bufPool.Get() + defer func() { + newEntry.Buffer = nil + buffer.Reset() + bufPool.Put(buffer) + }() buffer.Reset() - defer bufferPool.Put(buffer) - entry.Buffer = buffer + newEntry.Buffer = buffer - entry.write() + newEntry.write() - entry.Buffer = nil + newEntry.Buffer = nil // To avoid Entry#log() returning a value that only would make sense for // panic() to use in Entry#Panic(), we avoid the allocation by checking // directly here. 
if level <= PanicLevel { - panic(&entry) + panic(newEntry) } } +func (entry *Entry) getBufferPool() (pool BufferPool) { + if entry.Logger.BufferPool != nil { + return entry.Logger.BufferPool + } + return bufferPool +} + func (entry *Entry) fireHooks() { + var tmpHooks LevelHooks entry.Logger.mu.Lock() - defer entry.Logger.mu.Unlock() - err := entry.Logger.Hooks.Fire(entry.Level, entry) + tmpHooks = make(LevelHooks, len(entry.Logger.Hooks)) + for k, v := range entry.Logger.Hooks { + tmpHooks[k] = v + } + entry.Logger.mu.Unlock() + + err := tmpHooks.Fire(entry.Level, entry) if err != nil { fmt.Fprintf(os.Stderr, "Failed to fire hook: %v\n", err) } @@ -232,24 +289,28 @@ func (entry *Entry) write() { serialized, err := entry.Logger.Formatter.Format(entry) if err != nil { fmt.Fprintf(os.Stderr, "Failed to obtain reader, %v\n", err) - } else { - _, err = entry.Logger.Out.Write(serialized) - if err != nil { - fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) - } + return + } + if _, err := entry.Logger.Out.Write(serialized); err != nil { + fmt.Fprintf(os.Stderr, "Failed to write to log, %v\n", err) } } -func (entry *Entry) Trace(args ...interface{}) { - if entry.Logger.IsLevelEnabled(TraceLevel) { - entry.log(TraceLevel, fmt.Sprint(args...)) +// Log will log a message at the level given as parameter. +// Warning: using Log at Panic or Fatal level will not respectively Panic nor Exit. +// For this behaviour Entry.Panic or Entry.Fatal should be used instead. +func (entry *Entry) Log(level Level, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.log(level, fmt.Sprint(args...)) } } +func (entry *Entry) Trace(args ...interface{}) { + entry.Log(TraceLevel, args...) +} + func (entry *Entry) Debug(args ...interface{}) { - if entry.Logger.IsLevelEnabled(DebugLevel) { - entry.log(DebugLevel, fmt.Sprint(args...)) - } + entry.Log(DebugLevel, args...) } func (entry *Entry) Print(args ...interface{}) { @@ -257,15 +318,11 @@ func (entry *Entry) Print(args ...interface{}) { } func (entry *Entry) Info(args ...interface{}) { - if entry.Logger.IsLevelEnabled(InfoLevel) { - entry.log(InfoLevel, fmt.Sprint(args...)) - } + entry.Log(InfoLevel, args...) } func (entry *Entry) Warn(args ...interface{}) { - if entry.Logger.IsLevelEnabled(WarnLevel) { - entry.log(WarnLevel, fmt.Sprint(args...)) - } + entry.Log(WarnLevel, args...) } func (entry *Entry) Warning(args ...interface{}) { @@ -273,43 +330,36 @@ func (entry *Entry) Warning(args ...interface{}) { } func (entry *Entry) Error(args ...interface{}) { - if entry.Logger.IsLevelEnabled(ErrorLevel) { - entry.log(ErrorLevel, fmt.Sprint(args...)) - } + entry.Log(ErrorLevel, args...) } func (entry *Entry) Fatal(args ...interface{}) { - if entry.Logger.IsLevelEnabled(FatalLevel) { - entry.log(FatalLevel, fmt.Sprint(args...)) - } + entry.Log(FatalLevel, args...) entry.Logger.Exit(1) } func (entry *Entry) Panic(args ...interface{}) { - if entry.Logger.IsLevelEnabled(PanicLevel) { - entry.log(PanicLevel, fmt.Sprint(args...)) - } - panic(fmt.Sprint(args...)) + entry.Log(PanicLevel, args...) 
} // Entry Printf family functions -func (entry *Entry) Tracef(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(TraceLevel) { - entry.Trace(fmt.Sprintf(format, args...)) +func (entry *Entry) Logf(level Level, format string, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.Log(level, fmt.Sprintf(format, args...)) } } +func (entry *Entry) Tracef(format string, args ...interface{}) { + entry.Logf(TraceLevel, format, args...) +} + func (entry *Entry) Debugf(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(DebugLevel) { - entry.Debug(fmt.Sprintf(format, args...)) - } + entry.Logf(DebugLevel, format, args...) } func (entry *Entry) Infof(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(InfoLevel) { - entry.Info(fmt.Sprintf(format, args...)) - } + entry.Logf(InfoLevel, format, args...) } func (entry *Entry) Printf(format string, args ...interface{}) { @@ -317,9 +367,7 @@ func (entry *Entry) Printf(format string, args ...interface{}) { } func (entry *Entry) Warnf(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(WarnLevel) { - entry.Warn(fmt.Sprintf(format, args...)) - } + entry.Logf(WarnLevel, format, args...) } func (entry *Entry) Warningf(format string, args ...interface{}) { @@ -327,42 +375,36 @@ func (entry *Entry) Warningf(format string, args ...interface{}) { } func (entry *Entry) Errorf(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(ErrorLevel) { - entry.Error(fmt.Sprintf(format, args...)) - } + entry.Logf(ErrorLevel, format, args...) } func (entry *Entry) Fatalf(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(FatalLevel) { - entry.Fatal(fmt.Sprintf(format, args...)) - } + entry.Logf(FatalLevel, format, args...) entry.Logger.Exit(1) } func (entry *Entry) Panicf(format string, args ...interface{}) { - if entry.Logger.IsLevelEnabled(PanicLevel) { - entry.Panic(fmt.Sprintf(format, args...)) - } + entry.Logf(PanicLevel, format, args...) } // Entry Println family functions -func (entry *Entry) Traceln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(TraceLevel) { - entry.Trace(entry.sprintlnn(args...)) +func (entry *Entry) Logln(level Level, args ...interface{}) { + if entry.Logger.IsLevelEnabled(level) { + entry.Log(level, entry.sprintlnn(args...)) } } +func (entry *Entry) Traceln(args ...interface{}) { + entry.Logln(TraceLevel, args...) +} + func (entry *Entry) Debugln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(DebugLevel) { - entry.Debug(entry.sprintlnn(args...)) - } + entry.Logln(DebugLevel, args...) } func (entry *Entry) Infoln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(InfoLevel) { - entry.Info(entry.sprintlnn(args...)) - } + entry.Logln(InfoLevel, args...) } func (entry *Entry) Println(args ...interface{}) { @@ -370,9 +412,7 @@ func (entry *Entry) Println(args ...interface{}) { } func (entry *Entry) Warnln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(WarnLevel) { - entry.Warn(entry.sprintlnn(args...)) - } + entry.Logln(WarnLevel, args...) } func (entry *Entry) Warningln(args ...interface{}) { @@ -380,22 +420,16 @@ func (entry *Entry) Warningln(args ...interface{}) { } func (entry *Entry) Errorln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(ErrorLevel) { - entry.Error(entry.sprintlnn(args...)) - } + entry.Logln(ErrorLevel, args...) 
} func (entry *Entry) Fatalln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(FatalLevel) { - entry.Fatal(entry.sprintlnn(args...)) - } + entry.Logln(FatalLevel, args...) entry.Logger.Exit(1) } func (entry *Entry) Panicln(args ...interface{}) { - if entry.Logger.IsLevelEnabled(PanicLevel) { - entry.Panic(entry.sprintlnn(args...)) - } + entry.Logln(PanicLevel, args...) } // Sprintlnn => Sprint no newline. This is to get the behavior of how diff --git a/vendor/github.com/sirupsen/logrus/exported.go b/vendor/github.com/sirupsen/logrus/exported.go index 7342613c..017c30ce 100644 --- a/vendor/github.com/sirupsen/logrus/exported.go +++ b/vendor/github.com/sirupsen/logrus/exported.go @@ -1,6 +1,7 @@ package logrus import ( + "context" "io" "time" ) @@ -55,6 +56,11 @@ func WithError(err error) *Entry { return std.WithField(ErrorKey, err) } +// WithContext creates an entry from the standard logger and adds a context to it. +func WithContext(ctx context.Context) *Entry { + return std.WithContext(ctx) +} + // WithField creates an entry from the standard logger and adds a field to // it. If you want multiple fields, use `WithFields`. // @@ -74,7 +80,7 @@ func WithFields(fields Fields) *Entry { return std.WithFields(fields) } -// WithTime creats an entry from the standard logger and overrides the time of +// WithTime creates an entry from the standard logger and overrides the time of // logs generated with it. // // Note that it doesn't log until you call Debug, Print, Info, Warn, Fatal @@ -128,6 +134,51 @@ func Fatal(args ...interface{}) { std.Fatal(args...) } +// TraceFn logs a message from a func at level Trace on the standard logger. +func TraceFn(fn LogFunction) { + std.TraceFn(fn) +} + +// DebugFn logs a message from a func at level Debug on the standard logger. +func DebugFn(fn LogFunction) { + std.DebugFn(fn) +} + +// PrintFn logs a message from a func at level Info on the standard logger. +func PrintFn(fn LogFunction) { + std.PrintFn(fn) +} + +// InfoFn logs a message from a func at level Info on the standard logger. +func InfoFn(fn LogFunction) { + std.InfoFn(fn) +} + +// WarnFn logs a message from a func at level Warn on the standard logger. +func WarnFn(fn LogFunction) { + std.WarnFn(fn) +} + +// WarningFn logs a message from a func at level Warn on the standard logger. +func WarningFn(fn LogFunction) { + std.WarningFn(fn) +} + +// ErrorFn logs a message from a func at level Error on the standard logger. +func ErrorFn(fn LogFunction) { + std.ErrorFn(fn) +} + +// PanicFn logs a message from a func at level Panic on the standard logger. +func PanicFn(fn LogFunction) { + std.PanicFn(fn) +} + +// FatalFn logs a message from a func at level Fatal on the standard logger then the process will exit with status set to 1. +func FatalFn(fn LogFunction) { + std.FatalFn(fn) +} + // Tracef logs a message at level Trace on the standard logger. func Tracef(format string, args ...interface{}) { std.Tracef(format, args...) diff --git a/vendor/github.com/sirupsen/logrus/json_formatter.go b/vendor/github.com/sirupsen/logrus/json_formatter.go index 26057535..c96dc563 100644 --- a/vendor/github.com/sirupsen/logrus/json_formatter.go +++ b/vendor/github.com/sirupsen/logrus/json_formatter.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/json" "fmt" + "runtime" ) type fieldKey string @@ -22,11 +23,17 @@ func (f FieldMap) resolve(key fieldKey) string { // JSONFormatter formats logs into parsable json type JSONFormatter struct { // TimestampFormat sets the format used for marshaling timestamps. 
+ // The format to use is the same than for time.Format or time.Parse from the standard + // library. + // The standard Library already provides a set of predefined format. TimestampFormat string // DisableTimestamp allows disabling automatic timestamps in output DisableTimestamp bool + // DisableHTMLEscape allows disabling html escaping in output + DisableHTMLEscape bool + // DataKey allows users to put all the log entry parameters into a nested dictionary at a given key. DataKey string @@ -42,6 +49,12 @@ type JSONFormatter struct { // } FieldMap FieldMap + // CallerPrettyfier can be set by the user to modify the content + // of the function and file keys in the json data when ReportCaller is + // activated. If any of the returned value is the empty string the + // corresponding key will be removed from json fields. + CallerPrettyfier func(*runtime.Frame) (function string, file string) + // PrettyPrint will indent all json logs PrettyPrint bool } @@ -82,8 +95,17 @@ func (f *JSONFormatter) Format(entry *Entry) ([]byte, error) { data[f.FieldMap.resolve(FieldKeyMsg)] = entry.Message data[f.FieldMap.resolve(FieldKeyLevel)] = entry.Level.String() if entry.HasCaller() { - data[f.FieldMap.resolve(FieldKeyFunc)] = entry.Caller.Function - data[f.FieldMap.resolve(FieldKeyFile)] = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + funcVal := entry.Caller.Function + fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + if f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } + if funcVal != "" { + data[f.FieldMap.resolve(FieldKeyFunc)] = funcVal + } + if fileVal != "" { + data[f.FieldMap.resolve(FieldKeyFile)] = fileVal + } } var b *bytes.Buffer @@ -94,11 +116,12 @@ func (f *JSONFormatter) Format(entry *Entry) ([]byte, error) { } encoder := json.NewEncoder(b) + encoder.SetEscapeHTML(!f.DisableHTMLEscape) if f.PrettyPrint { encoder.SetIndent("", " ") } if err := encoder.Encode(data); err != nil { - return nil, fmt.Errorf("Failed to marshal fields to JSON, %v", err) + return nil, fmt.Errorf("failed to marshal fields to JSON, %w", err) } return b.Bytes(), nil diff --git a/vendor/github.com/sirupsen/logrus/logger.go b/vendor/github.com/sirupsen/logrus/logger.go index 5ceca0ea..5ff0aef6 100644 --- a/vendor/github.com/sirupsen/logrus/logger.go +++ b/vendor/github.com/sirupsen/logrus/logger.go @@ -1,6 +1,7 @@ package logrus import ( + "context" "io" "os" "sync" @@ -8,6 +9,11 @@ import ( "time" ) +// LogFunction For big messages, it can be more efficient to pass a function +// and only call it if the log level is actually enables rather than +// generating the log message and then checking if the level is enabled +type LogFunction func() []interface{} + type Logger struct { // The logs are `io.Copy`'d to this in a mutex. It's common to set this to a // file, or leave it default which is `os.Stderr`. You can also set this to @@ -38,6 +44,9 @@ type Logger struct { entryPool sync.Pool // Function to exit the application, defaults to `os.Exit()` ExitFunc exitFunc + // The buffer pool used to format the log. If it is nil, the default global + // buffer pool will be used. + BufferPool BufferPool } type exitFunc func(int) @@ -67,10 +76,10 @@ func (mw *MutexWrap) Disable() { // `Out` and `Hooks` directly on the default logger instance. 
You can also just // instantiate your own: // -// var log = &Logger{ +// var log = &logrus.Logger{ // Out: os.Stderr, -// Formatter: new(JSONFormatter), -// Hooks: make(LevelHooks), +// Formatter: new(logrus.TextFormatter), +// Hooks: make(logrus.LevelHooks), // Level: logrus.DebugLevel, // } // @@ -99,8 +108,9 @@ func (logger *Logger) releaseEntry(entry *Entry) { logger.entryPool.Put(entry) } -// Adds a field to the log entry, note that it doesn't log until you call -// Debug, Print, Info, Warn, Error, Fatal or Panic. It only creates a log entry. +// WithField allocates a new entry and adds a field to it. +// Debug, Print, Info, Warn, Error, Fatal or Panic must be then applied to +// this new returned entry. // If you want multiple fields, use `WithFields`. func (logger *Logger) WithField(key string, value interface{}) *Entry { entry := logger.newEntry() @@ -124,6 +134,13 @@ func (logger *Logger) WithError(err error) *Entry { return entry.WithError(err) } +// Add a context to the log entry. +func (logger *Logger) WithContext(ctx context.Context) *Entry { + entry := logger.newEntry() + defer logger.releaseEntry(entry) + return entry.WithContext(ctx) +} + // Overrides the time of the log entry. func (logger *Logger) WithTime(t time.Time) *Entry { entry := logger.newEntry() @@ -131,28 +148,24 @@ func (logger *Logger) WithTime(t time.Time) *Entry { return entry.WithTime(t) } -func (logger *Logger) Tracef(format string, args ...interface{}) { - if logger.IsLevelEnabled(TraceLevel) { +func (logger *Logger) Logf(level Level, format string, args ...interface{}) { + if logger.IsLevelEnabled(level) { entry := logger.newEntry() - entry.Tracef(format, args...) + entry.Logf(level, format, args...) logger.releaseEntry(entry) } } +func (logger *Logger) Tracef(format string, args ...interface{}) { + logger.Logf(TraceLevel, format, args...) +} + func (logger *Logger) Debugf(format string, args ...interface{}) { - if logger.IsLevelEnabled(DebugLevel) { - entry := logger.newEntry() - entry.Debugf(format, args...) - logger.releaseEntry(entry) - } + logger.Logf(DebugLevel, format, args...) } func (logger *Logger) Infof(format string, args ...interface{}) { - if logger.IsLevelEnabled(InfoLevel) { - entry := logger.newEntry() - entry.Infof(format, args...) - logger.releaseEntry(entry) - } + logger.Logf(InfoLevel, format, args...) } func (logger *Logger) Printf(format string, args ...interface{}) { @@ -162,139 +175,141 @@ func (logger *Logger) Printf(format string, args ...interface{}) { } func (logger *Logger) Warnf(format string, args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warnf(format, args...) - logger.releaseEntry(entry) - } + logger.Logf(WarnLevel, format, args...) } func (logger *Logger) Warningf(format string, args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warnf(format, args...) - logger.releaseEntry(entry) - } + logger.Warnf(format, args...) } func (logger *Logger) Errorf(format string, args ...interface{}) { - if logger.IsLevelEnabled(ErrorLevel) { - entry := logger.newEntry() - entry.Errorf(format, args...) - logger.releaseEntry(entry) - } + logger.Logf(ErrorLevel, format, args...) } func (logger *Logger) Fatalf(format string, args ...interface{}) { - if logger.IsLevelEnabled(FatalLevel) { - entry := logger.newEntry() - entry.Fatalf(format, args...) - logger.releaseEntry(entry) - } + logger.Logf(FatalLevel, format, args...) 
logger.Exit(1) } func (logger *Logger) Panicf(format string, args ...interface{}) { - if logger.IsLevelEnabled(PanicLevel) { + logger.Logf(PanicLevel, format, args...) +} + +// Log will log a message at the level given as parameter. +// Warning: using Log at Panic or Fatal level will not respectively Panic nor Exit. +// For this behaviour Logger.Panic or Logger.Fatal should be used instead. +func (logger *Logger) Log(level Level, args ...interface{}) { + if logger.IsLevelEnabled(level) { entry := logger.newEntry() - entry.Panicf(format, args...) + entry.Log(level, args...) logger.releaseEntry(entry) } } -func (logger *Logger) Trace(args ...interface{}) { - if logger.IsLevelEnabled(TraceLevel) { +func (logger *Logger) LogFn(level Level, fn LogFunction) { + if logger.IsLevelEnabled(level) { entry := logger.newEntry() - entry.Trace(args...) + entry.Log(level, fn()...) logger.releaseEntry(entry) } } +func (logger *Logger) Trace(args ...interface{}) { + logger.Log(TraceLevel, args...) +} + func (logger *Logger) Debug(args ...interface{}) { - if logger.IsLevelEnabled(DebugLevel) { - entry := logger.newEntry() - entry.Debug(args...) - logger.releaseEntry(entry) - } + logger.Log(DebugLevel, args...) } func (logger *Logger) Info(args ...interface{}) { - if logger.IsLevelEnabled(InfoLevel) { - entry := logger.newEntry() - entry.Info(args...) - logger.releaseEntry(entry) - } + logger.Log(InfoLevel, args...) } func (logger *Logger) Print(args ...interface{}) { entry := logger.newEntry() - entry.Info(args...) + entry.Print(args...) logger.releaseEntry(entry) } func (logger *Logger) Warn(args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warn(args...) - logger.releaseEntry(entry) - } + logger.Log(WarnLevel, args...) } func (logger *Logger) Warning(args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warn(args...) - logger.releaseEntry(entry) - } + logger.Warn(args...) } func (logger *Logger) Error(args ...interface{}) { - if logger.IsLevelEnabled(ErrorLevel) { - entry := logger.newEntry() - entry.Error(args...) - logger.releaseEntry(entry) - } + logger.Log(ErrorLevel, args...) } func (logger *Logger) Fatal(args ...interface{}) { - if logger.IsLevelEnabled(FatalLevel) { - entry := logger.newEntry() - entry.Fatal(args...) - logger.releaseEntry(entry) - } + logger.Log(FatalLevel, args...) logger.Exit(1) } func (logger *Logger) Panic(args ...interface{}) { - if logger.IsLevelEnabled(PanicLevel) { + logger.Log(PanicLevel, args...) +} + +func (logger *Logger) TraceFn(fn LogFunction) { + logger.LogFn(TraceLevel, fn) +} + +func (logger *Logger) DebugFn(fn LogFunction) { + logger.LogFn(DebugLevel, fn) +} + +func (logger *Logger) InfoFn(fn LogFunction) { + logger.LogFn(InfoLevel, fn) +} + +func (logger *Logger) PrintFn(fn LogFunction) { + entry := logger.newEntry() + entry.Print(fn()...) + logger.releaseEntry(entry) +} + +func (logger *Logger) WarnFn(fn LogFunction) { + logger.LogFn(WarnLevel, fn) +} + +func (logger *Logger) WarningFn(fn LogFunction) { + logger.WarnFn(fn) +} + +func (logger *Logger) ErrorFn(fn LogFunction) { + logger.LogFn(ErrorLevel, fn) +} + +func (logger *Logger) FatalFn(fn LogFunction) { + logger.LogFn(FatalLevel, fn) + logger.Exit(1) +} + +func (logger *Logger) PanicFn(fn LogFunction) { + logger.LogFn(PanicLevel, fn) +} + +func (logger *Logger) Logln(level Level, args ...interface{}) { + if logger.IsLevelEnabled(level) { entry := logger.newEntry() - entry.Panic(args...) 
+ entry.Logln(level, args...) logger.releaseEntry(entry) } } func (logger *Logger) Traceln(args ...interface{}) { - if logger.IsLevelEnabled(TraceLevel) { - entry := logger.newEntry() - entry.Traceln(args...) - logger.releaseEntry(entry) - } + logger.Logln(TraceLevel, args...) } func (logger *Logger) Debugln(args ...interface{}) { - if logger.IsLevelEnabled(DebugLevel) { - entry := logger.newEntry() - entry.Debugln(args...) - logger.releaseEntry(entry) - } + logger.Logln(DebugLevel, args...) } func (logger *Logger) Infoln(args ...interface{}) { - if logger.IsLevelEnabled(InfoLevel) { - entry := logger.newEntry() - entry.Infoln(args...) - logger.releaseEntry(entry) - } + logger.Logln(InfoLevel, args...) } func (logger *Logger) Println(args ...interface{}) { @@ -304,44 +319,24 @@ func (logger *Logger) Println(args ...interface{}) { } func (logger *Logger) Warnln(args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warnln(args...) - logger.releaseEntry(entry) - } + logger.Logln(WarnLevel, args...) } func (logger *Logger) Warningln(args ...interface{}) { - if logger.IsLevelEnabled(WarnLevel) { - entry := logger.newEntry() - entry.Warnln(args...) - logger.releaseEntry(entry) - } + logger.Warnln(args...) } func (logger *Logger) Errorln(args ...interface{}) { - if logger.IsLevelEnabled(ErrorLevel) { - entry := logger.newEntry() - entry.Errorln(args...) - logger.releaseEntry(entry) - } + logger.Logln(ErrorLevel, args...) } func (logger *Logger) Fatalln(args ...interface{}) { - if logger.IsLevelEnabled(FatalLevel) { - entry := logger.newEntry() - entry.Fatalln(args...) - logger.releaseEntry(entry) - } + logger.Logln(FatalLevel, args...) logger.Exit(1) } func (logger *Logger) Panicln(args ...interface{}) { - if logger.IsLevelEnabled(PanicLevel) { - entry := logger.newEntry() - entry.Panicln(args...) - logger.releaseEntry(entry) - } + logger.Logln(PanicLevel, args...) } func (logger *Logger) Exit(code int) { @@ -413,3 +408,10 @@ func (logger *Logger) ReplaceHooks(hooks LevelHooks) LevelHooks { logger.mu.Unlock() return oldHooks } + +// SetBufferPool sets the logger buffer pool. +func (logger *Logger) SetBufferPool(pool BufferPool) { + logger.mu.Lock() + defer logger.mu.Unlock() + logger.BufferPool = pool +} diff --git a/vendor/github.com/sirupsen/logrus/logrus.go b/vendor/github.com/sirupsen/logrus/logrus.go index 4ef45186..2f16224c 100644 --- a/vendor/github.com/sirupsen/logrus/logrus.go +++ b/vendor/github.com/sirupsen/logrus/logrus.go @@ -14,24 +14,11 @@ type Level uint32 // Convert the Level to a string. E.g. PanicLevel becomes "panic". func (level Level) String() string { - switch level { - case TraceLevel: - return "trace" - case DebugLevel: - return "debug" - case InfoLevel: - return "info" - case WarnLevel: - return "warning" - case ErrorLevel: - return "error" - case FatalLevel: - return "fatal" - case PanicLevel: - return "panic" + if b, err := level.MarshalText(); err == nil { + return string(b) + } else { + return "unknown" } - - return "unknown" } // ParseLevel takes a string level and returns the Logrus log level constant. 
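
The logger.go hunks above introduce `LogFunction` and the `*Fn` variants, so that an expensive message is only built when the target level is enabled, plus generic `Log`, `Logf`, and `Logln` entry points that the per-level methods now delegate to. A minimal usage sketch, assuming only the API shown in this diff (the `buildHugeState` helper is a hypothetical stand-in for any costly call):

```go
package main

import (
	"fmt"

	log "github.com/sirupsen/logrus"
)

func main() {
	logger := log.New()
	logger.SetLevel(log.InfoLevel)

	// DebugFn only invokes the closure when DebugLevel is enabled, so the
	// expensive formatting below is skipped entirely at InfoLevel.
	logger.DebugFn(func() []interface{} {
		return []interface{}{"state dump: ", fmt.Sprintf("%+v", buildHugeState())}
	})

	// The new generic entry points take the level as a parameter.
	logger.Log(log.InfoLevel, "service started")
	logger.Logf(log.WarnLevel, "disk usage at %d%%", 91)
}

// buildHugeState is a hypothetical helper standing in for an expensive call.
func buildHugeState() map[string]int {
	return map[string]int{"connections": 42}
}
```
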
@@ -64,11 +51,32 @@ func (level *Level) UnmarshalText(text []byte) error { return err } - *level = Level(l) + *level = l return nil } +func (level Level) MarshalText() ([]byte, error) { + switch level { + case TraceLevel: + return []byte("trace"), nil + case DebugLevel: + return []byte("debug"), nil + case InfoLevel: + return []byte("info"), nil + case WarnLevel: + return []byte("warning"), nil + case ErrorLevel: + return []byte("error"), nil + case FatalLevel: + return []byte("fatal"), nil + case PanicLevel: + return []byte("panic"), nil + } + + return nil, fmt.Errorf("not a valid logrus level %d", level) +} + // A constant exposing all logging levels var AllLevels = []Level{ PanicLevel, diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go new file mode 100644 index 00000000..49978998 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_bsd.go @@ -0,0 +1,13 @@ +// +build darwin dragonfly freebsd netbsd openbsd +// +build !js + +package logrus + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TIOCGETA + +func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_js.go b/vendor/github.com/sirupsen/logrus/terminal_check_js.go index 0c209750..ebdae3ec 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_js.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_js.go @@ -2,10 +2,6 @@ package logrus -import ( - "io" -) - -func checkIfTerminal(w io.Writer) bool { +func isTerminal(fd int) bool { return false } diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go b/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go new file mode 100644 index 00000000..97af92c6 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_no_terminal.go @@ -0,0 +1,11 @@ +// +build js nacl plan9 + +package logrus + +import ( + "io" +) + +func checkIfTerminal(w io.Writer) bool { + return false +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go index cf309d6f..3293fb3c 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_notappengine.go @@ -1,18 +1,16 @@ -// +build !appengine,!js,!windows +// +build !appengine,!js,!windows,!nacl,!plan9 package logrus import ( "io" "os" - - "golang.org/x/crypto/ssh/terminal" ) func checkIfTerminal(w io.Writer) bool { switch v := w.(type) { case *os.File: - return terminal.IsTerminal(int(v.Fd())) + return isTerminal(int(v.Fd())) default: return false } diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go b/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go new file mode 100644 index 00000000..f6710b3b --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_solaris.go @@ -0,0 +1,11 @@ +package logrus + +import ( + "golang.org/x/sys/unix" +) + +// IsTerminal returns true if the given file descriptor is a terminal. 
+func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermio(fd, unix.TCGETA) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_unix.go b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go new file mode 100644 index 00000000..04748b85 --- /dev/null +++ b/vendor/github.com/sirupsen/logrus/terminal_check_unix.go @@ -0,0 +1,13 @@ +// +build linux aix zos +// +build !js + +package logrus + +import "golang.org/x/sys/unix" + +const ioctlReadTermios = unix.TCGETS + +func isTerminal(fd int) bool { + _, err := unix.IoctlGetTermios(fd, ioctlReadTermios) + return err == nil +} diff --git a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go index 3b9d2864..2879eb50 100644 --- a/vendor/github.com/sirupsen/logrus/terminal_check_windows.go +++ b/vendor/github.com/sirupsen/logrus/terminal_check_windows.go @@ -5,16 +5,23 @@ package logrus import ( "io" "os" - "syscall" + + "golang.org/x/sys/windows" ) func checkIfTerminal(w io.Writer) bool { switch v := w.(type) { case *os.File: + handle := windows.Handle(v.Fd()) var mode uint32 - err := syscall.GetConsoleMode(syscall.Handle(v.Fd()), &mode) - return err == nil - default: - return false + if err := windows.GetConsoleMode(handle, &mode); err != nil { + return false + } + mode |= windows.ENABLE_VIRTUAL_TERMINAL_PROCESSING + if err := windows.SetConsoleMode(handle, mode); err != nil { + return false + } + return true } + return false } diff --git a/vendor/github.com/sirupsen/logrus/terminal_notwindows.go b/vendor/github.com/sirupsen/logrus/terminal_notwindows.go deleted file mode 100644 index 3dbd2372..00000000 --- a/vendor/github.com/sirupsen/logrus/terminal_notwindows.go +++ /dev/null @@ -1,8 +0,0 @@ -// +build !windows - -package logrus - -import "io" - -func initTerminal(w io.Writer) { -} diff --git a/vendor/github.com/sirupsen/logrus/terminal_windows.go b/vendor/github.com/sirupsen/logrus/terminal_windows.go deleted file mode 100644 index b4ef5286..00000000 --- a/vendor/github.com/sirupsen/logrus/terminal_windows.go +++ /dev/null @@ -1,18 +0,0 @@ -// +build !appengine,!js,windows - -package logrus - -import ( - "io" - "os" - "syscall" - - sequences "github.com/konsorten/go-windows-terminal-sequences" -) - -func initTerminal(w io.Writer) { - switch v := w.(type) { - case *os.File: - sequences.EnableVirtualTerminalProcessing(syscall.Handle(v.Fd()), true) - } -} diff --git a/vendor/github.com/sirupsen/logrus/text_formatter.go b/vendor/github.com/sirupsen/logrus/text_formatter.go index 49ec92f1..be2c6efe 100644 --- a/vendor/github.com/sirupsen/logrus/text_formatter.go +++ b/vendor/github.com/sirupsen/logrus/text_formatter.go @@ -4,25 +4,23 @@ import ( "bytes" "fmt" "os" + "runtime" "sort" + "strconv" "strings" "sync" "time" + "unicode/utf8" ) const ( - nocolor = 0 - red = 31 - green = 32 - yellow = 33 - blue = 36 - gray = 37 + red = 31 + yellow = 33 + blue = 36 + gray = 37 ) -var ( - baseTimestamp time.Time - emptyFieldMap FieldMap -) +var baseTimestamp time.Time func init() { baseTimestamp = time.Now() @@ -36,6 +34,14 @@ type TextFormatter struct { // Force disabling colors. DisableColors bool + // Force quoting of all values + ForceQuote bool + + // DisableQuote disables quoting for all values. + // DisableQuote will have a lower priority than ForceQuote. + // If both of them are set to true, quote will be forced on all values. + DisableQuote bool + // Override coloring based on CLICOLOR and CLICOLOR_FORCE. 
- https://bixense.com/clicolors/ EnvironmentOverrideColors bool @@ -47,7 +53,10 @@ type TextFormatter struct { // the time passed since beginning of execution. FullTimestamp bool - // TimestampFormat to use for display when a full timestamp is printed + // TimestampFormat to use for display when a full timestamp is printed. + // The format to use is the same than for time.Format or time.Parse from the standard + // library. + // The standard Library already provides a set of predefined format. TimestampFormat string // The fields are sorted by default for a consistent output. For applications @@ -61,6 +70,10 @@ type TextFormatter struct { // Disables the truncation of the level text to 4 characters. DisableLevelTruncation bool + // PadLevelText Adds padding the level text so that all the levels output at the same length + // PadLevelText is a superset of the DisableLevelTruncation option + PadLevelText bool + // QuoteEmptyFields will wrap empty fields in quotes if true QuoteEmptyFields bool @@ -76,28 +89,39 @@ type TextFormatter struct { // FieldKeyMsg: "@message"}} FieldMap FieldMap + // CallerPrettyfier can be set by the user to modify the content + // of the function and file keys in the data when ReportCaller is + // activated. If any of the returned value is the empty string the + // corresponding key will be removed from fields. + CallerPrettyfier func(*runtime.Frame) (function string, file string) + terminalInitOnce sync.Once + + // The max length of the level text, generated dynamically on init + levelTextMaxLength int } func (f *TextFormatter) init(entry *Entry) { if entry.Logger != nil { f.isTerminal = checkIfTerminal(entry.Logger.Out) - - if f.isTerminal { - initTerminal(entry.Logger.Out) + } + // Get the max length of the level text + for _, level := range AllLevels { + levelTextLength := utf8.RuneCount([]byte(level.String())) + if levelTextLength > f.levelTextMaxLength { + f.levelTextMaxLength = levelTextLength } } } func (f *TextFormatter) isColored() bool { - isColored := f.ForceColors || f.isTerminal + isColored := f.ForceColors || (f.isTerminal && (runtime.GOOS != "windows")) if f.EnvironmentOverrideColors { - if force, ok := os.LookupEnv("CLICOLOR_FORCE"); ok && force != "0" { + switch force, ok := os.LookupEnv("CLICOLOR_FORCE"); { + case ok && force != "0": isColored = true - } else if ok && force == "0" { - isColored = false - } else if os.Getenv("CLICOLOR") == "0" { + case ok && force == "0", os.Getenv("CLICOLOR") == "0": isColored = false } } @@ -107,14 +131,19 @@ func (f *TextFormatter) isColored() bool { // Format renders a single log entry func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { - prefixFieldClashes(entry.Data, f.FieldMap, entry.HasCaller()) - - keys := make([]string, 0, len(entry.Data)) - for k := range entry.Data { + data := make(Fields) + for k, v := range entry.Data { + data[k] = v + } + prefixFieldClashes(data, f.FieldMap, entry.HasCaller()) + keys := make([]string, 0, len(data)) + for k := range data { keys = append(keys, k) } - fixedKeys := make([]string, 0, 4+len(entry.Data)) + var funcVal, fileVal string + + fixedKeys := make([]string, 0, 4+len(data)) if !f.DisableTimestamp { fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyTime)) } @@ -126,8 +155,19 @@ func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLogrusError)) } if entry.HasCaller() { - fixedKeys = append(fixedKeys, - f.FieldMap.resolve(FieldKeyFunc), f.FieldMap.resolve(FieldKeyFile)) + if 
f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } else { + funcVal = entry.Caller.Function + fileVal = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + } + + if funcVal != "" { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFunc)) + } + if fileVal != "" { + fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFile)) + } } if !f.DisableSorting { @@ -160,8 +200,9 @@ func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { timestampFormat = defaultTimestampFormat } if f.isColored() { - f.printColored(b, entry, keys, timestampFormat) + f.printColored(b, entry, keys, data, timestampFormat) } else { + for _, key := range fixedKeys { var value interface{} switch { @@ -174,11 +215,11 @@ func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { case key == f.FieldMap.resolve(FieldKeyLogrusError): value = entry.err case key == f.FieldMap.resolve(FieldKeyFunc) && entry.HasCaller(): - value = entry.Caller.Function + value = funcVal case key == f.FieldMap.resolve(FieldKeyFile) && entry.HasCaller(): - value = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + value = fileVal default: - value = entry.Data[key] + value = data[key] } f.appendKeyValue(b, key, value) } @@ -188,7 +229,7 @@ func (f *TextFormatter) Format(entry *Entry) ([]byte, error) { return b.Bytes(), nil } -func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, timestampFormat string) { +func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, data Fields, timestampFormat string) { var levelColor int switch entry.Level { case DebugLevel, TraceLevel: @@ -197,44 +238,73 @@ func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []strin levelColor = yellow case ErrorLevel, FatalLevel, PanicLevel: levelColor = red + case InfoLevel: + levelColor = blue default: levelColor = blue } levelText := strings.ToUpper(entry.Level.String()) - if !f.DisableLevelTruncation { + if !f.DisableLevelTruncation && !f.PadLevelText { levelText = levelText[0:4] } + if f.PadLevelText { + // Generates the format string used in the next line, for example "%-6s" or "%-7s". + // Based on the max level text length. 
+ formatString := "%-" + strconv.Itoa(f.levelTextMaxLength) + "s" + // Formats the level text by appending spaces up to the max length, for example: + // - "INFO " + // - "WARNING" + levelText = fmt.Sprintf(formatString, levelText) + } // Remove a single newline if it already exists in the message to keep // the behavior of logrus text_formatter the same as the stdlib log package entry.Message = strings.TrimSuffix(entry.Message, "\n") caller := "" - if entry.HasCaller() { - caller = fmt.Sprintf("%s:%d %s()", - entry.Caller.File, entry.Caller.Line, entry.Caller.Function) + funcVal := fmt.Sprintf("%s()", entry.Caller.Function) + fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line) + + if f.CallerPrettyfier != nil { + funcVal, fileVal = f.CallerPrettyfier(entry.Caller) + } + + if fileVal == "" { + caller = funcVal + } else if funcVal == "" { + caller = fileVal + } else { + caller = fileVal + " " + funcVal + } } - if f.DisableTimestamp { + switch { + case f.DisableTimestamp: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m%s %-44s ", levelColor, levelText, caller, entry.Message) - } else if !f.FullTimestamp { + case !f.FullTimestamp: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d]%s %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), caller, entry.Message) - } else { + default: fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s]%s %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), caller, entry.Message) } for _, k := range keys { - v := entry.Data[k] + v := data[k] fmt.Fprintf(b, " \x1b[%dm%s\x1b[0m=", levelColor, k) f.appendValue(b, v) } } func (f *TextFormatter) needsQuoting(text string) bool { + if f.ForceQuote { + return true + } if f.QuoteEmptyFields && len(text) == 0 { return true } + if f.DisableQuote { + return false + } for _, ch := range text { if !((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || diff --git a/vendor/github.com/sirupsen/logrus/writer.go b/vendor/github.com/sirupsen/logrus/writer.go index 9e1f7513..074fd4b8 100644 --- a/vendor/github.com/sirupsen/logrus/writer.go +++ b/vendor/github.com/sirupsen/logrus/writer.go @@ -4,25 +4,35 @@ import ( "bufio" "io" "runtime" + "strings" ) +// Writer at INFO level. See WriterLevel for details. func (logger *Logger) Writer() *io.PipeWriter { return logger.WriterLevel(InfoLevel) } +// WriterLevel returns an io.Writer that can be used to write arbitrary text to +// the logger at the given log level. Each line written to the writer will be +// printed in the usual way using formatters and hooks. The writer is part of an +// io.Pipe and it is the callers responsibility to close the writer when done. +// This can be used to override the standard library logger easily. func (logger *Logger) WriterLevel(level Level) *io.PipeWriter { return NewEntry(logger).WriterLevel(level) } +// Writer returns an io.Writer that writes to the logger at the info log level func (entry *Entry) Writer() *io.PipeWriter { return entry.WriterLevel(InfoLevel) } +// WriterLevel returns an io.Writer that writes to the logger at the given log level func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { reader, writer := io.Pipe() var printFunc func(args ...interface{}) + // Determine which log function to use based on the specified log level switch level { case TraceLevel: printFunc = entry.Trace @@ -42,23 +52,51 @@ func (entry *Entry) WriterLevel(level Level) *io.PipeWriter { printFunc = entry.Print } + // Start a new goroutine to scan the input and write it to the logger using the specified print function. 
+ // It splits the input into chunks of up to 64KB to avoid buffer overflows. go entry.writerScanner(reader, printFunc) + + // Set a finalizer function to close the writer when it is garbage collected runtime.SetFinalizer(writer, writerFinalizer) return writer } +// writerScanner scans the input from the reader and writes it to the logger func (entry *Entry) writerScanner(reader *io.PipeReader, printFunc func(args ...interface{})) { scanner := bufio.NewScanner(reader) + + // Set the buffer size to the maximum token size to avoid buffer overflows + scanner.Buffer(make([]byte, bufio.MaxScanTokenSize), bufio.MaxScanTokenSize) + + // Define a split function to split the input into chunks of up to 64KB + chunkSize := bufio.MaxScanTokenSize // 64KB + splitFunc := func(data []byte, atEOF bool) (int, []byte, error) { + if len(data) >= chunkSize { + return chunkSize, data[:chunkSize], nil + } + + return bufio.ScanLines(data, atEOF) + } + + // Use the custom split function to split the input + scanner.Split(splitFunc) + + // Scan the input and write it to the logger using the specified print function for scanner.Scan() { - printFunc(scanner.Text()) + printFunc(strings.TrimRight(scanner.Text(), "\r\n")) } + + // If there was an error while scanning the input, log an error if err := scanner.Err(); err != nil { entry.Errorf("Error while reading from Writer: %s", err) } + + // Close the reader when we are done reader.Close() } +// WriterFinalizer is a finalizer function that closes then given writer when it is garbage collected func writerFinalizer(writer *io.PipeWriter) { writer.Close() } diff --git a/vendor/github.com/stretchr/testify/assert/assertion_compare.go b/vendor/github.com/stretchr/testify/assert/assertion_compare.go index 95d8e59d..b774da88 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_compare.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_compare.go @@ -352,9 +352,9 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) { // Greater asserts that the first element is greater than the second // -// assert.Greater(t, 2, 1) -// assert.Greater(t, float64(2), float64(1)) -// assert.Greater(t, "b", "a") +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -364,10 +364,10 @@ func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqual(t, 2, 1) -// assert.GreaterOrEqual(t, 2, 2) -// assert.GreaterOrEqual(t, "b", "a") -// assert.GreaterOrEqual(t, "b", "b") +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -377,9 +377,9 @@ func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...in // Less asserts that the first element is less than the second // -// assert.Less(t, 1, 2) -// assert.Less(t, float64(1), float64(2)) -// assert.Less(t, "a", "b") +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ 
-389,10 +389,10 @@ func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) // LessOrEqual asserts that the first element is less than or equal to the second // -// assert.LessOrEqual(t, 1, 2) -// assert.LessOrEqual(t, 2, 2) -// assert.LessOrEqual(t, "a", "b") -// assert.LessOrEqual(t, "b", "b") +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -402,8 +402,8 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter // Positive asserts that the specified element is positive // -// assert.Positive(t, 1) -// assert.Positive(t, 1.23) +// assert.Positive(t, 1) +// assert.Positive(t, 1.23) func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -414,8 +414,8 @@ func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { // Negative asserts that the specified element is negative // -// assert.Negative(t, -1) -// assert.Negative(t, -1.23) +// assert.Negative(t, -1) +// assert.Negative(t, -1.23) func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_format.go b/vendor/github.com/stretchr/testify/assert/assertion_format.go index 7880b8f9..84dbd6c7 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_format.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_format.go @@ -22,9 +22,9 @@ func Conditionf(t TestingT, comp Comparison, msg string, args ...interface{}) bo // Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") -// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") -// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -56,7 +56,7 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// assert.Emptyf(t, obj, "error message %s", "formatted") +// assert.Emptyf(t, obj, "error message %s", "formatted") func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -66,7 +66,7 @@ func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) boo // Equalf asserts that two objects are equal. // -// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// assert.Equalf(t, 123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). 
Function equality @@ -81,8 +81,8 @@ func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, ar // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -90,10 +90,27 @@ func EqualErrorf(t TestingT, theError error, errString string, msg string, args return EqualError(t, theError, errString, append([]interface{}{msg}, args...)...) } +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValuesf(t, S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// assert.EqualExportedValuesf(t, S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return EqualExportedValues(t, expected, actual, append([]interface{}{msg}, args...)...) +} + // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") +// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -103,10 +120,10 @@ func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg stri // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if assert.Errorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func Errorf(t TestingT, err error, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -126,8 +143,8 @@ func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...int // ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") func ErrorContainsf(t TestingT, theError error, contains string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -147,7 +164,7 @@ func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. 
// -// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -155,9 +172,34 @@ func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick return Eventually(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) } +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithTf(t, func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithTf(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + return EventuallyWithT(t, condition, waitFor, tick, append([]interface{}{msg}, args...)...) +} + // Exactlyf asserts that two objects are equal in value and type. // -// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") +// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -183,7 +225,7 @@ func FailNowf(t TestingT, failureMessage string, msg string, args ...interface{} // Falsef asserts that the specified value is false. 
// -// assert.Falsef(t, myBool, "error message %s", "formatted") +// assert.Falsef(t, myBool, "error message %s", "formatted") func Falsef(t TestingT, value bool, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -202,9 +244,9 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) bool // Greaterf asserts that the first element is greater than the second // -// assert.Greaterf(t, 2, 1, "error message %s", "formatted") -// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") -// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -214,10 +256,10 @@ func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...in // GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -228,7 +270,7 @@ func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, arg // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -241,7 +283,7 @@ func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -253,7 +295,7 @@ func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, u // HTTPErrorf asserts that a specified handler returns an error status code. 
// -// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -265,7 +307,7 @@ func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -277,7 +319,7 @@ func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url stri // HTTPStatusCodef asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { @@ -289,7 +331,7 @@ func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url st // HTTPSuccessf asserts that a specified handler returns a success status code. // -// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -301,7 +343,7 @@ func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url strin // Implementsf asserts that an object is implemented by the specified interface. // -// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -311,7 +353,7 @@ func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, ms // InDeltaf asserts that the two numerals are within delta of each other. 
// -// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -353,9 +395,9 @@ func InEpsilonSlicef(t TestingT, expected interface{}, actual interface{}, epsil // IsDecreasingf asserts that the collection is decreasing // -// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -365,9 +407,9 @@ func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface // IsIncreasingf asserts that the collection is increasing // -// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -377,9 +419,9 @@ func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface // IsNonDecreasingf asserts that the collection is not decreasing // -// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -389,9 +431,9 @@ func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interf // IsNonIncreasingf asserts that the collection is not increasing // -// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -409,7 +451,7 @@ func IsTypef(t TestingT, expectedType interface{}, object interface{}, msg strin // JSONEqf asserts 
that two JSON strings are equivalent. // -// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -420,7 +462,7 @@ func JSONEqf(t TestingT, expected string, actual string, msg string, args ...int // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // -// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -430,9 +472,9 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf // Lessf asserts that the first element is less than the second // -// assert.Lessf(t, 1, 2, "error message %s", "formatted") -// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") -// assert.Lessf(t, "a", "b", "error message %s", "formatted") +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") +// assert.Lessf(t, "a", "b", "error message %s", "formatted") func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -442,10 +484,10 @@ func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...inter // LessOrEqualf asserts that the first element is less than or equal to the second // -// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") -// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -455,8 +497,8 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args . // Negativef asserts that the specified element is negative // -// assert.Negativef(t, -1, "error message %s", "formatted") -// assert.Negativef(t, -1.23, "error message %s", "formatted") +// assert.Negativef(t, -1, "error message %s", "formatted") +// assert.Negativef(t, -1.23, "error message %s", "formatted") func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -467,7 +509,7 @@ func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) bool // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. 
// -// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -477,7 +519,7 @@ func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time. // Nilf asserts that the specified object is nil. // -// assert.Nilf(t, err, "error message %s", "formatted") +// assert.Nilf(t, err, "error message %s", "formatted") func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -496,10 +538,10 @@ func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) bool // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoErrorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func NoErrorf(t TestingT, err error, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -519,9 +561,9 @@ func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) boo // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -532,9 +574,9 @@ func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, a // NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -544,7 +586,7 @@ func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) // NotEqualf asserts that the specified values are NOT equal. // -// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). 
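
The assertion_format.go hunks above also add `EqualExportedValuesf` and `EventuallyWithTf`. A minimal test sketch of both, assuming only the signatures shown in this diff; the `user` type, test name, and timings are illustrative:

```go
package example_test

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

// user mixes exported and unexported fields; EqualExportedValuesf compares
// only the exported ones, so differing internal state does not fail the test.
type user struct {
	Name    string
	retries int // unexported; ignored by EqualExportedValues*
}

func TestNewAssertions(t *testing.T) {
	got := user{Name: "ada", retries: 3}
	want := user{Name: "ada"}
	assert.EqualExportedValuesf(t, want, got, "user %s should match on exported fields", "ada")

	// EventuallyWithTf re-runs the condition each tick; only the failures
	// collected in the final tick are reported if the deadline expires.
	var ready atomic.Bool
	go func() {
		time.Sleep(50 * time.Millisecond)
		ready.Store(true)
	}()
	assert.EventuallyWithTf(t, func(c *assert.CollectT) {
		assert.Truef(c, ready.Load(), "worker %s not ready yet", "background")
	}, time.Second, 10*time.Millisecond, "worker %s never became ready", "background")
}
```
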
@@ -557,7 +599,7 @@ func NotEqualf(t TestingT, expected interface{}, actual interface{}, msg string, // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -576,7 +618,7 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf // NotNilf asserts that the specified object is not nil. // -// assert.NotNilf(t, err, "error message %s", "formatted") +// assert.NotNilf(t, err, "error message %s", "formatted") func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -586,7 +628,7 @@ func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) bo // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -596,8 +638,8 @@ func NotPanicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bo // NotRegexpf asserts that a specified regexp does not match a string. // -// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -607,7 +649,7 @@ func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args .. // NotSamef asserts that two pointers do not reference the same object. // -// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -621,7 +663,7 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, // NotSubsetf asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -639,7 +681,7 @@ func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) bool { // Panicsf asserts that the code inside the specified PanicTestFunc panics. 
// -// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -651,7 +693,7 @@ func Panicsf(t TestingT, f PanicTestFunc, msg string, args ...interface{}) bool // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -662,7 +704,7 @@ func PanicsWithErrorf(t TestingT, errString string, f PanicTestFunc, msg string, // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -672,8 +714,8 @@ func PanicsWithValuef(t TestingT, expected interface{}, f PanicTestFunc, msg str // Positivef asserts that the specified element is positive // -// assert.Positivef(t, 1, "error message %s", "formatted") -// assert.Positivef(t, 1.23, "error message %s", "formatted") +// assert.Positivef(t, 1, "error message %s", "formatted") +// assert.Positivef(t, 1.23, "error message %s", "formatted") func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -683,8 +725,8 @@ func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) bool // Regexpf asserts that a specified regexp matches a string. // -// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -694,7 +736,7 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in // Samef asserts that two pointers reference the same object. // -// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -708,7 +750,7 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
// -// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -718,7 +760,7 @@ func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args // Truef asserts that the specified value is true. // -// assert.Truef(t, myBool, "error message %s", "formatted") +// assert.Truef(t, myBool, "error message %s", "formatted") func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -728,7 +770,7 @@ func Truef(t TestingT, value bool, msg string, args ...interface{}) bool { // WithinDurationf asserts that the two times are within duration delta of each other. // -// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -738,7 +780,7 @@ func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta tim // WithinRangef asserts that a time is within a time range (inclusive). // -// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func WithinRangef(t TestingT, actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_forward.go b/vendor/github.com/stretchr/testify/assert/assertion_forward.go index 339515b8..b1d94aec 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_forward.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_forward.go @@ -30,9 +30,9 @@ func (a *Assertions) Conditionf(comp Comparison, msg string, args ...interface{} // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// a.Contains("Hello World", "World") -// a.Contains(["Hello", "World"], "World") -// a.Contains({"Hello": "World"}, "Hello") +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -43,9 +43,9 @@ func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs .. // Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. 
// -// a.Containsf("Hello World", "World", "error message %s", "formatted") -// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") -// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -98,7 +98,7 @@ func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg st // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Empty(obj) +// a.Empty(obj) func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -109,7 +109,7 @@ func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool { // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Emptyf(obj, "error message %s", "formatted") +// a.Emptyf(obj, "error message %s", "formatted") func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -119,7 +119,7 @@ func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) // Equal asserts that two objects are equal. // -// a.Equal(123, 123) +// a.Equal(123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -134,8 +134,8 @@ func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualError(err, expectedErrorString) +// actualObj, err := SomeFunction() +// a.EqualError(err, expectedErrorString) func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -146,8 +146,8 @@ func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ... // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -155,10 +155,44 @@ func (a *Assertions) EqualErrorf(theError error, errString string, msg string, a return EqualErrorf(a.t, theError, errString, msg, args...) } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. 
+// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValues(S{1, 2}, S{1, 3}) => true +// a.EqualExportedValues(S{1, 2}, S{2, 3}) => false +func (a *Assertions) EqualExportedValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EqualExportedValues(a.t, expected, actual, msgAndArgs...) +} + +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValuesf(S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// a.EqualExportedValuesf(S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EqualExportedValuesf(a.t, expected, actual, msg, args...) +} + // EqualValues asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValues(uint32(123), int32(123)) +// a.EqualValues(uint32(123), int32(123)) func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -169,7 +203,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") +// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -179,7 +213,7 @@ func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg // Equalf asserts that two objects are equal. // -// a.Equalf(123, 123, "error message %s", "formatted") +// a.Equalf(123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -193,10 +227,10 @@ func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Error(err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -225,8 +259,8 @@ func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args .. // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContains(err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// a.ErrorContains(err, expectedErrorSubString) func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -237,8 +271,8 @@ func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs . 
// ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") func (a *Assertions) ErrorContainsf(theError error, contains string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -266,10 +300,10 @@ func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...inter // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Errorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -280,7 +314,7 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) bool { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -288,10 +322,60 @@ func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, ti return Eventually(a.t, condition, waitFor, tick, msgAndArgs...) } +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithT(func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithT(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EventuallyWithT(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). 
+// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithTf(func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithTf(condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + return EventuallyWithTf(a.t, condition, waitFor, tick, msg, args...) +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -301,7 +385,7 @@ func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, t // Exactly asserts that two objects are equal in value and type. // -// a.Exactly(int32(123), int64(123)) +// a.Exactly(int32(123), int64(123)) func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -311,7 +395,7 @@ func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArg // Exactlyf asserts that two objects are equal in value and type. // -// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") +// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -353,7 +437,7 @@ func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{ // False asserts that the specified value is false. // -// a.False(myBool) +// a.False(myBool) func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -363,7 +447,7 @@ func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool { // Falsef asserts that the specified value is false. 
// -// a.Falsef(myBool, "error message %s", "formatted") +// a.Falsef(myBool, "error message %s", "formatted") func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -391,9 +475,9 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) b // Greater asserts that the first element is greater than the second // -// a.Greater(2, 1) -// a.Greater(float64(2), float64(1)) -// a.Greater("b", "a") +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -403,10 +487,10 @@ func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...inter // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqual(2, 1) -// a.GreaterOrEqual(2, 2) -// a.GreaterOrEqual("b", "a") -// a.GreaterOrEqual("b", "b") +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -416,10 +500,10 @@ func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs . // GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") -// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") -// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") -// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -429,9 +513,9 @@ func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, // Greaterf asserts that the first element is greater than the second // -// a.Greaterf(2, 1, "error message %s", "formatted") -// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") -// a.Greaterf("b", "a", "error message %s", "formatted") +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") +// a.Greaterf("b", "a", "error message %s", "formatted") func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -442,7 +526,7 @@ func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args . // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). 
func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -455,7 +539,7 @@ func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, u // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -468,7 +552,7 @@ func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -481,7 +565,7 @@ func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. // -// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) bool { @@ -493,7 +577,7 @@ func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method strin // HTTPError asserts that a specified handler returns an error status code. // -// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -505,7 +589,7 @@ func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url stri // HTTPErrorf asserts that a specified handler returns an error status code. // -// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -517,7 +601,7 @@ func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url str // HTTPRedirect asserts that a specified handler returns a redirect status code. 
// -// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -529,7 +613,7 @@ func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url s // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -541,7 +625,7 @@ func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) +// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { @@ -553,7 +637,7 @@ func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url // HTTPStatusCodef asserts that a specified handler returns a specified status code. // -// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) bool { @@ -565,7 +649,7 @@ func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, ur // HTTPSuccess asserts that a specified handler returns a success status code. // -// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) +// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -577,7 +661,7 @@ func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url st // HTTPSuccessf asserts that a specified handler returns a success status code. // -// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) bool { @@ -589,7 +673,7 @@ func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url s // Implements asserts that an object is implemented by the specified interface. 
// -// a.Implements((*MyInterface)(nil), new(MyObject)) +// a.Implements((*MyInterface)(nil), new(MyObject)) func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -599,7 +683,7 @@ func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, // Implementsf asserts that an object is implemented by the specified interface. // -// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -609,7 +693,7 @@ func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{} // InDelta asserts that the two numerals are within delta of each other. // -// a.InDelta(math.Pi, 22/7.0, 0.01) +// a.InDelta(math.Pi, 22/7.0, 0.01) func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -651,7 +735,7 @@ func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, del // InDeltaf asserts that the two numerals are within delta of each other. // -// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -693,9 +777,9 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo // IsDecreasing asserts that the collection is decreasing // -// a.IsDecreasing([]int{2, 1, 0}) -// a.IsDecreasing([]float{2, 1}) -// a.IsDecreasing([]string{"b", "a"}) +// a.IsDecreasing([]int{2, 1, 0}) +// a.IsDecreasing([]float{2, 1}) +// a.IsDecreasing([]string{"b", "a"}) func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -705,9 +789,9 @@ func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) // IsDecreasingf asserts that the collection is decreasing // -// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") -// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") +// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -717,9 +801,9 @@ func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...inter // IsIncreasing asserts that the collection is increasing // -// a.IsIncreasing([]int{1, 2, 3}) -// a.IsIncreasing([]float{1, 2}) -// a.IsIncreasing([]string{"a", "b"}) +// a.IsIncreasing([]int{1, 2, 3}) +// a.IsIncreasing([]float{1, 2}) +// a.IsIncreasing([]string{"a", "b"}) func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -729,9 +813,9 @@ func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) // IsIncreasingf 
asserts that the collection is increasing // -// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") -// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") +// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -741,9 +825,9 @@ func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...inter // IsNonDecreasing asserts that the collection is not decreasing // -// a.IsNonDecreasing([]int{1, 1, 2}) -// a.IsNonDecreasing([]float{1, 2}) -// a.IsNonDecreasing([]string{"a", "b"}) +// a.IsNonDecreasing([]int{1, 1, 2}) +// a.IsNonDecreasing([]float{1, 2}) +// a.IsNonDecreasing([]string{"a", "b"}) func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -753,9 +837,9 @@ func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface // IsNonDecreasingf asserts that the collection is not decreasing // -// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -765,9 +849,9 @@ func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...in // IsNonIncreasing asserts that the collection is not increasing // -// a.IsNonIncreasing([]int{2, 1, 1}) -// a.IsNonIncreasing([]float{2, 1}) -// a.IsNonIncreasing([]string{"b", "a"}) +// a.IsNonIncreasing([]int{2, 1, 1}) +// a.IsNonIncreasing([]float{2, 1}) +// a.IsNonIncreasing([]string{"b", "a"}) func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -777,9 +861,9 @@ func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface // IsNonIncreasingf asserts that the collection is not increasing // -// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -805,7 +889,7 @@ func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg s // JSONEq asserts that two JSON strings are equivalent. 
// -// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -815,7 +899,7 @@ func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interf // JSONEqf asserts that two JSON strings are equivalent. // -// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -826,7 +910,7 @@ func (a *Assertions) JSONEqf(expected string, actual string, msg string, args .. // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// a.Len(mySlice, 3) +// a.Len(mySlice, 3) func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -837,7 +921,7 @@ func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // -// a.Lenf(mySlice, 3, "error message %s", "formatted") +// a.Lenf(mySlice, 3, "error message %s", "formatted") func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -847,9 +931,9 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in // Less asserts that the first element is less than the second // -// a.Less(1, 2) -// a.Less(float64(1), float64(2)) -// a.Less("a", "b") +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -859,10 +943,10 @@ func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interfac // LessOrEqual asserts that the first element is less than or equal to the second // -// a.LessOrEqual(1, 2) -// a.LessOrEqual(2, 2) -// a.LessOrEqual("a", "b") -// a.LessOrEqual("b", "b") +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -872,10 +956,10 @@ func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...i // LessOrEqualf asserts that the first element is less than or equal to the second // -// a.LessOrEqualf(1, 2, "error message %s", "formatted") -// a.LessOrEqualf(2, 2, "error message %s", "formatted") -// a.LessOrEqualf("a", "b", "error message %s", "formatted") -// a.LessOrEqualf("b", "b", "error message %s", "formatted") +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() 
@@ -885,9 +969,9 @@ func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, ar // Lessf asserts that the first element is less than the second // -// a.Lessf(1, 2, "error message %s", "formatted") -// a.Lessf(float64(1), float64(2), "error message %s", "formatted") -// a.Lessf("a", "b", "error message %s", "formatted") +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1), float64(2), "error message %s", "formatted") +// a.Lessf("a", "b", "error message %s", "formatted") func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -897,8 +981,8 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i // Negative asserts that the specified element is negative // -// a.Negative(-1) -// a.Negative(-1.23) +// a.Negative(-1) +// a.Negative(-1.23) func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -908,8 +992,8 @@ func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) bool { // Negativef asserts that the specified element is negative // -// a.Negativef(-1, "error message %s", "formatted") -// a.Negativef(-1.23, "error message %s", "formatted") +// a.Negativef(-1, "error message %s", "formatted") +// a.Negativef(-1.23, "error message %s", "formatted") func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -920,7 +1004,7 @@ func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) b // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) +// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -931,7 +1015,7 @@ func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick ti // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -941,7 +1025,7 @@ func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick t // Nil asserts that the specified object is nil. // -// a.Nil(err) +// a.Nil(err) func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -951,7 +1035,7 @@ func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool { // Nilf asserts that the specified object is nil. 
// -// a.Nilf(err, "error message %s", "formatted") +// a.Nilf(err, "error message %s", "formatted") func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -979,10 +1063,10 @@ func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoError(err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoError(err) { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -992,10 +1076,10 @@ func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool { // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoErrorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1024,9 +1108,9 @@ func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// a.NotContains("Hello World", "Earth") -// a.NotContains(["Hello", "World"], "Earth") -// a.NotContains({"Hello": "World"}, "Earth") +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1037,9 +1121,9 @@ func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") -// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") -// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1050,9 +1134,9 @@ func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg strin // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmpty(obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmpty(obj) { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1063,9 +1147,9 @@ func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) boo // NotEmptyf asserts that the specified object is NOT empty. I.e. 
not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmptyf(obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1075,7 +1159,7 @@ func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface // NotEqual asserts that the specified values are NOT equal. // -// a.NotEqual(obj1, obj2) +// a.NotEqual(obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1088,7 +1172,7 @@ func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndAr // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValues(obj1, obj2) +// a.NotEqualValues(obj1, obj2) func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1098,7 +1182,7 @@ func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, ms // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1108,7 +1192,7 @@ func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, m // NotEqualf asserts that the specified values are NOT equal. // -// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1139,7 +1223,7 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in // NotNil asserts that the specified object is not nil. // -// a.NotNil(err) +// a.NotNil(err) func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1149,7 +1233,7 @@ func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool // NotNilf asserts that the specified object is not nil. // -// a.NotNilf(err, "error message %s", "formatted") +// a.NotNilf(err, "error message %s", "formatted") func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1159,7 +1243,7 @@ func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// a.NotPanics(func(){ RemainCalm() }) +// a.NotPanics(func(){ RemainCalm() }) func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1169,7 +1253,7 @@ func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. 
// -// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1179,8 +1263,8 @@ func (a *Assertions) NotPanicsf(f PanicTestFunc, msg string, args ...interface{} // NotRegexp asserts that a specified regexp does not match a string. // -// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") -// a.NotRegexp("^start", "it's not starting") +// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") +// a.NotRegexp("^start", "it's not starting") func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1190,8 +1274,8 @@ func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...in // NotRegexpf asserts that a specified regexp does not match a string. // -// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1201,7 +1285,7 @@ func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, arg // NotSame asserts that two pointers do not reference the same object. // -// a.NotSame(ptr1, ptr2) +// a.NotSame(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1214,7 +1298,7 @@ func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArg // NotSamef asserts that two pointers do not reference the same object. // -// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") +// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1228,7 +1312,7 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1239,7 +1323,7 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs // NotSubsetf asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). 
// -// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1265,7 +1349,7 @@ func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) bo // Panics asserts that the code inside the specified PanicTestFunc panics. // -// a.Panics(func(){ GoCrazy() }) +// a.Panics(func(){ GoCrazy() }) func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1277,7 +1361,7 @@ func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool { // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithError("crazy error", func(){ GoCrazy() }) +// a.PanicsWithError("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1289,7 +1373,7 @@ func (a *Assertions) PanicsWithError(errString string, f PanicTestFunc, msgAndAr // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1300,7 +1384,7 @@ func (a *Assertions) PanicsWithErrorf(errString string, f PanicTestFunc, msg str // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1311,7 +1395,7 @@ func (a *Assertions) PanicsWithValue(expected interface{}, f PanicTestFunc, msgA // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1321,7 +1405,7 @@ func (a *Assertions) PanicsWithValuef(expected interface{}, f PanicTestFunc, msg // Panicsf asserts that the code inside the specified PanicTestFunc panics. 
// -// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1331,8 +1415,8 @@ func (a *Assertions) Panicsf(f PanicTestFunc, msg string, args ...interface{}) b // Positive asserts that the specified element is positive // -// a.Positive(1) -// a.Positive(1.23) +// a.Positive(1) +// a.Positive(1.23) func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1342,8 +1426,8 @@ func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) bool { // Positivef asserts that the specified element is positive // -// a.Positivef(1, "error message %s", "formatted") -// a.Positivef(1.23, "error message %s", "formatted") +// a.Positivef(1, "error message %s", "formatted") +// a.Positivef(1.23, "error message %s", "formatted") func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1353,8 +1437,8 @@ func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) b // Regexp asserts that a specified regexp matches a string. // -// a.Regexp(regexp.MustCompile("start"), "it's starting") -// a.Regexp("start...$", "it's not starting") +// a.Regexp(regexp.MustCompile("start"), "it's starting") +// a.Regexp("start...$", "it's not starting") func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1364,8 +1448,8 @@ func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...inter // Regexpf asserts that a specified regexp matches a string. // -// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1375,7 +1459,7 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . // Same asserts that two pointers reference the same object. // -// a.Same(ptr1, ptr2) +// a.Same(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1388,7 +1472,7 @@ func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs . // Samef asserts that two pointers reference the same object. // -// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// a.Samef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1402,7 +1486,7 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
// -// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1413,7 +1497,7 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ... // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1423,7 +1507,7 @@ func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, a // True asserts that the specified value is true. // -// a.True(myBool) +// a.True(myBool) func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1433,7 +1517,7 @@ func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool { // Truef asserts that the specified value is true. // -// a.Truef(myBool, "error message %s", "formatted") +// a.Truef(myBool, "error message %s", "formatted") func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1443,7 +1527,7 @@ func (a *Assertions) Truef(value bool, msg string, args ...interface{}) bool { // WithinDuration asserts that the two times are within duration delta of each other. // -// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1453,7 +1537,7 @@ func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta // WithinDurationf asserts that the two times are within duration delta of each other. // -// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1463,7 +1547,7 @@ func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta // WithinRange asserts that a time is within a time range (inclusive). // -// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Time, msgAndArgs ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1473,7 +1557,7 @@ func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Tim // WithinRangef asserts that a time is within a time range (inclusive). 
// -// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func (a *Assertions) WithinRangef(actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) bool { if h, ok := a.t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/assertion_order.go b/vendor/github.com/stretchr/testify/assert/assertion_order.go index 75944878..00df62a0 100644 --- a/vendor/github.com/stretchr/testify/assert/assertion_order.go +++ b/vendor/github.com/stretchr/testify/assert/assertion_order.go @@ -46,36 +46,36 @@ func isOrdered(t TestingT, object interface{}, allowedComparesResults []CompareT // IsIncreasing asserts that the collection is increasing // -// assert.IsIncreasing(t, []int{1, 2, 3}) -// assert.IsIncreasing(t, []float{1, 2}) -// assert.IsIncreasing(t, []string{"a", "b"}) +// assert.IsIncreasing(t, []int{1, 2, 3}) +// assert.IsIncreasing(t, []float{1, 2}) +// assert.IsIncreasing(t, []string{"a", "b"}) func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareLess}, "\"%v\" is not less than \"%v\"", msgAndArgs...) } // IsNonIncreasing asserts that the collection is not increasing // -// assert.IsNonIncreasing(t, []int{2, 1, 1}) -// assert.IsNonIncreasing(t, []float{2, 1}) -// assert.IsNonIncreasing(t, []string{"b", "a"}) +// assert.IsNonIncreasing(t, []int{2, 1, 1}) +// assert.IsNonIncreasing(t, []float{2, 1}) +// assert.IsNonIncreasing(t, []string{"b", "a"}) func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareEqual, compareGreater}, "\"%v\" is not greater than or equal to \"%v\"", msgAndArgs...) } // IsDecreasing asserts that the collection is decreasing // -// assert.IsDecreasing(t, []int{2, 1, 0}) -// assert.IsDecreasing(t, []float{2, 1}) -// assert.IsDecreasing(t, []string{"b", "a"}) +// assert.IsDecreasing(t, []int{2, 1, 0}) +// assert.IsDecreasing(t, []float{2, 1}) +// assert.IsDecreasing(t, []string{"b", "a"}) func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareGreater}, "\"%v\" is not greater than \"%v\"", msgAndArgs...) } // IsNonDecreasing asserts that the collection is not decreasing // -// assert.IsNonDecreasing(t, []int{1, 1, 2}) -// assert.IsNonDecreasing(t, []float{1, 2}) -// assert.IsNonDecreasing(t, []string{"a", "b"}) +// assert.IsNonDecreasing(t, []int{1, 1, 2}) +// assert.IsNonDecreasing(t, []float{1, 2}) +// assert.IsNonDecreasing(t, []string{"a", "b"}) func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { return isOrdered(t, object, []CompareType{compareLess, compareEqual}, "\"%v\" is not less than or equal to \"%v\"", msgAndArgs...) } diff --git a/vendor/github.com/stretchr/testify/assert/assertions.go b/vendor/github.com/stretchr/testify/assert/assertions.go index 2924cf3a..a55d1bba 100644 --- a/vendor/github.com/stretchr/testify/assert/assertions.go +++ b/vendor/github.com/stretchr/testify/assert/assertions.go @@ -75,6 +75,77 @@ func ObjectsAreEqual(expected, actual interface{}) bool { return bytes.Equal(exp, act) } +// copyExportedFields iterates downward through nested data structures and creates a copy +// that only contains the exported struct fields. 
+func copyExportedFields(expected interface{}) interface{} { + if isNil(expected) { + return expected + } + + expectedType := reflect.TypeOf(expected) + expectedKind := expectedType.Kind() + expectedValue := reflect.ValueOf(expected) + + switch expectedKind { + case reflect.Struct: + result := reflect.New(expectedType).Elem() + for i := 0; i < expectedType.NumField(); i++ { + field := expectedType.Field(i) + isExported := field.IsExported() + if isExported { + fieldValue := expectedValue.Field(i) + if isNil(fieldValue) || isNil(fieldValue.Interface()) { + continue + } + newValue := copyExportedFields(fieldValue.Interface()) + result.Field(i).Set(reflect.ValueOf(newValue)) + } + } + return result.Interface() + + case reflect.Ptr: + result := reflect.New(expectedType.Elem()) + unexportedRemoved := copyExportedFields(expectedValue.Elem().Interface()) + result.Elem().Set(reflect.ValueOf(unexportedRemoved)) + return result.Interface() + + case reflect.Array, reflect.Slice: + result := reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len()) + for i := 0; i < expectedValue.Len(); i++ { + index := expectedValue.Index(i) + if isNil(index) { + continue + } + unexportedRemoved := copyExportedFields(index.Interface()) + result.Index(i).Set(reflect.ValueOf(unexportedRemoved)) + } + return result.Interface() + + case reflect.Map: + result := reflect.MakeMap(expectedType) + for _, k := range expectedValue.MapKeys() { + index := expectedValue.MapIndex(k) + unexportedRemoved := copyExportedFields(index.Interface()) + result.SetMapIndex(k, reflect.ValueOf(unexportedRemoved)) + } + return result.Interface() + + default: + return expected + } +} + +// ObjectsExportedFieldsAreEqual determines if the exported (public) fields of two objects are +// considered equal. This comparison of only exported fields is applied recursively to nested data +// structures. +// +// This function does no assertion of any kind. +func ObjectsExportedFieldsAreEqual(expected, actual interface{}) bool { + expectedCleaned := copyExportedFields(expected) + actualCleaned := copyExportedFields(actual) + return ObjectsAreEqualValues(expectedCleaned, actualCleaned) +} + // ObjectsAreEqualValues gets whether two objects are equal, or if their // values are equal. func ObjectsAreEqualValues(expected, actual interface{}) bool { @@ -271,7 +342,7 @@ type labeledContent struct { // labeledOutput returns a string consisting of the provided labeledContent. Each labeled output is appended in the following manner: // -// \t{{label}}:{{align_spaces}}\t{{content}}\n +// \t{{label}}:{{align_spaces}}\t{{content}}\n // // The initial carriage return is required to undo/erase any padding added by testing.T.Errorf. The "\t{{label}}:" is for the label. // If a label is shorter than the longest label provided, padding spaces are added to make all the labels match in length. Once this @@ -294,7 +365,7 @@ func labeledOutput(content ...labeledContent) string { // Implements asserts that an object is implemented by the specified interface. // -// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -326,7 +397,7 @@ func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs // Equal asserts that two objects are equal. 
// -// assert.Equal(t, 123, 123) +// assert.Equal(t, 123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -367,7 +438,7 @@ func validateEqualArgs(expected, actual interface{}) error { // Same asserts that two pointers reference the same object. // -// assert.Same(t, ptr1, ptr2) +// assert.Same(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -387,7 +458,7 @@ func Same(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) b // NotSame asserts that two pointers do not reference the same object. // -// assert.NotSame(t, ptr1, ptr2) +// assert.NotSame(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -455,7 +526,7 @@ func truncatingFormat(data interface{}) string { // EqualValues asserts that two objects are equal or convertable to the same types // and equal. // -// assert.EqualValues(t, uint32(123), int32(123)) +// assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -473,9 +544,53 @@ func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interfa } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValues(t, S{1, 2}, S{1, 3}) => true +// assert.EqualExportedValues(t, S{1, 2}, S{2, 3}) => false +func EqualExportedValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + aType := reflect.TypeOf(expected) + bType := reflect.TypeOf(actual) + + if aType != bType { + return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...) + } + + if aType.Kind() != reflect.Struct { + return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...) + } + + if bType.Kind() != reflect.Struct { + return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...) + } + + expected = copyExportedFields(expected) + actual = copyExportedFields(actual) + + if !ObjectsAreEqualValues(expected, actual) { + diff := diff(expected, actual) + expected, actual = formatUnequalValues(expected, actual) + return Fail(t, fmt.Sprintf("Not equal (comparing only exported fields): \n"+ + "expected: %s\n"+ + "actual : %s%s", expected, actual, diff), msgAndArgs...) + } + + return true +} + // Exactly asserts that two objects are equal in value and type. // -// assert.Exactly(t, int32(123), int64(123)) +// assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -494,7 +609,7 @@ func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{} // NotNil asserts that the specified object is not nil. 
// -// assert.NotNil(t, err) +// assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if !isNil(object) { return true @@ -540,7 +655,7 @@ func isNil(object interface{}) bool { // Nil asserts that the specified object is nil. // -// assert.Nil(t, err) +// assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { if isNil(object) { return true @@ -583,7 +698,7 @@ func isEmpty(object interface{}) bool { // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// assert.Empty(t, obj) +// assert.Empty(t, obj) func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := isEmpty(object) if !pass { @@ -600,9 +715,9 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if assert.NotEmpty(t, obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmpty(t, obj) { +// assert.Equal(t, "two", obj[1]) +// } func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool { pass := !isEmpty(object) if !pass { @@ -631,7 +746,7 @@ func getLen(x interface{}) (ok bool, length int) { // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// assert.Len(t, mySlice, 3) +// assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -649,7 +764,7 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) // True asserts that the specified value is true. // -// assert.True(t, myBool) +// assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { if !value { if h, ok := t.(tHelper); ok { @@ -664,7 +779,7 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) bool { // False asserts that the specified value is false. // -// assert.False(t, myBool) +// assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { if value { if h, ok := t.(tHelper); ok { @@ -679,7 +794,7 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) bool { // NotEqual asserts that the specified values are NOT equal. // -// assert.NotEqual(t, obj1, obj2) +// assert.NotEqual(t, obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -702,7 +817,7 @@ func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{ // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValues(t, obj1, obj2) +// assert.NotEqualValues(t, obj1, obj2) func NotEqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -761,9 +876,9 @@ func containsElement(list interface{}, element interface{}) (ok, found bool) { // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. 
// -// assert.Contains(t, "Hello World", "World") -// assert.Contains(t, ["Hello", "World"], "World") -// assert.Contains(t, {"Hello": "World"}, "Hello") +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -784,9 +899,9 @@ func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bo // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContains(t, "Hello World", "Earth") -// assert.NotContains(t, ["Hello", "World"], "Earth") -// assert.NotContains(t, {"Hello": "World"}, "Earth") +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -794,10 +909,10 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) ok, found := containsElement(s, contains) if !ok { - return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v could not be applied builtin len()", s), msgAndArgs...) } if found { - return Fail(t, fmt.Sprintf("\"%s\" should not contain \"%s\"", s, contains), msgAndArgs...) + return Fail(t, fmt.Sprintf("%#v should not contain %#v", s, contains), msgAndArgs...) } return true @@ -807,7 +922,7 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -863,7 +978,7 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1048,7 +1163,7 @@ func didPanic(f PanicTestFunc) (didPanic bool, message interface{}, stack string // Panics asserts that the code inside the specified PanicTestFunc panics. // -// assert.Panics(t, func(){ GoCrazy() }) +// assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1064,7 +1179,7 @@ func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. 
// -// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1085,7 +1200,7 @@ func PanicsWithValue(t TestingT, expected interface{}, f PanicTestFunc, msgAndAr // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1105,7 +1220,7 @@ func PanicsWithError(t TestingT, errString string, f PanicTestFunc, msgAndArgs . // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanics(t, func(){ RemainCalm() }) +// assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1120,7 +1235,7 @@ func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool { // WithinDuration asserts that the two times are within duration delta of each other. // -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1136,7 +1251,7 @@ func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, // WithinRange asserts that a time is within a time range (inclusive). // -// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func WithinRange(t TestingT, actual, start, end time.Time, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1195,7 +1310,7 @@ func toFloat(x interface{}) (float64, bool) { // InDelta asserts that the two numerals are within delta of each other. // -// assert.InDelta(t, math.Pi, 22/7.0, 0.01) +// assert.InDelta(t, math.Pi, 22/7.0, 0.01) func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1368,10 +1483,10 @@ func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, m // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoError(t, err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoError(t, err) { +// assert.Equal(t, expectedObj, actualObj) +// } func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { if err != nil { if h, ok := t.(tHelper); ok { @@ -1385,10 +1500,10 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool { // Error asserts that a function returned an error (i.e. not `nil`). 
// -// actualObj, err := SomeFunction() -// if assert.Error(t, err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) +// } func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { if err == nil { if h, ok := t.(tHelper); ok { @@ -1403,8 +1518,8 @@ func Error(t TestingT, err error, msgAndArgs ...interface{}) bool { // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// assert.EqualError(t, err, expectedErrorString) +// actualObj, err := SomeFunction() +// assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1426,8 +1541,8 @@ func EqualError(t TestingT, theError error, errString string, msgAndArgs ...inte // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContains(t, err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// assert.ErrorContains(t, err, expectedErrorSubString) func ErrorContains(t TestingT, theError error, contains string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1460,8 +1575,8 @@ func matchRegexp(rx interface{}, str interface{}) bool { // Regexp asserts that a specified regexp matches a string. // -// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") -// assert.Regexp(t, "start...$", "it's not starting") +// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") +// assert.Regexp(t, "start...$", "it's not starting") func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1478,8 +1593,8 @@ func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface // NotRegexp asserts that a specified regexp does not match a string. // -// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") -// assert.NotRegexp(t, "^start", "it's not starting") +// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") +// assert.NotRegexp(t, "^start", "it's not starting") func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1591,7 +1706,7 @@ func NoDirExists(t TestingT, path string, msgAndArgs ...interface{}) bool { // JSONEq asserts that two JSON strings are equivalent. // -// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1714,7 +1829,7 @@ type tHelper interface { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. 
// -// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() @@ -1744,10 +1859,93 @@ func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick t } } +// CollectT implements the TestingT interface and collects all errors. +type CollectT struct { + errors []error +} + +// Errorf collects the error. +func (c *CollectT) Errorf(format string, args ...interface{}) { + c.errors = append(c.errors, fmt.Errorf(format, args...)) +} + +// FailNow panics. +func (c *CollectT) FailNow() { + panic("Assertion failed") +} + +// Reset clears the collected errors. +func (c *CollectT) Reset() { + c.errors = nil +} + +// Copy copies the collected errors to the supplied t. +func (c *CollectT) Copy(t TestingT) { + if tt, ok := t.(tHelper); ok { + tt.Helper() + } + for _, err := range c.errors { + t.Errorf("%v", err) + } +} + +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithT(t, func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { + if h, ok := t.(tHelper); ok { + h.Helper() + } + + collect := new(CollectT) + ch := make(chan bool, 1) + + timer := time.NewTimer(waitFor) + defer timer.Stop() + + ticker := time.NewTicker(tick) + defer ticker.Stop() + + for tick := ticker.C; ; { + select { + case <-timer.C: + collect.Copy(t) + return Fail(t, "Condition never satisfied", msgAndArgs...) + case <-tick: + tick = nil + collect.Reset() + go func() { + condition(collect) + ch <- len(collect.errors) == 0 + }() + case v := <-ch: + if v { + return true + } + tick = ticker.C + } + } +} + // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. 
// -// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) +// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) bool { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/assert/doc.go b/vendor/github.com/stretchr/testify/assert/doc.go index c9dccc4d..4953981d 100644 --- a/vendor/github.com/stretchr/testify/assert/doc.go +++ b/vendor/github.com/stretchr/testify/assert/doc.go @@ -1,39 +1,40 @@ // Package assert provides a set of comprehensive testing tools for use with the normal Go testing system. // -// Example Usage +// # Example Usage // // The following is a complete example using assert in a standard test function: -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) // -// func TestSomething(t *testing.T) { +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) // -// var a string = "Hello" -// var b string = "Hello" +// func TestSomething(t *testing.T) { // -// assert.Equal(t, a, b, "The two words should be the same.") +// var a string = "Hello" +// var b string = "Hello" // -// } +// assert.Equal(t, a, b, "The two words should be the same.") +// +// } // // if you assert many times, use the format below: // -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// ) +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// ) // -// func TestSomething(t *testing.T) { -// assert := assert.New(t) +// func TestSomething(t *testing.T) { +// assert := assert.New(t) // -// var a string = "Hello" -// var b string = "Hello" +// var a string = "Hello" +// var b string = "Hello" // -// assert.Equal(a, b, "The two words should be the same.") -// } +// assert.Equal(a, b, "The two words should be the same.") +// } // -// Assertions +// # Assertions // // Assertions allow you to easily write test code, and are global funcs in the `assert` package. // All assertion functions take, as the first argument, the `*testing.T` object provided by the diff --git a/vendor/github.com/stretchr/testify/assert/http_assertions.go b/vendor/github.com/stretchr/testify/assert/http_assertions.go index 4ed341dd..d8038c28 100644 --- a/vendor/github.com/stretchr/testify/assert/http_assertions.go +++ b/vendor/github.com/stretchr/testify/assert/http_assertions.go @@ -23,7 +23,7 @@ func httpCode(handler http.HandlerFunc, method, url string, values url.Values) ( // HTTPSuccess asserts that a specified handler returns a success status code. // -// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) +// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -45,7 +45,7 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, value // HTTPRedirect asserts that a specified handler returns a redirect status code. // -// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). 
func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -67,7 +67,7 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, valu // HTTPError asserts that a specified handler returns an error status code. // -// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, msgAndArgs ...interface{}) bool { @@ -89,7 +89,7 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) +// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) bool { @@ -124,7 +124,7 @@ func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) s // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { @@ -144,7 +144,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) bool { diff --git a/vendor/github.com/stretchr/testify/require/doc.go b/vendor/github.com/stretchr/testify/require/doc.go index 169de392..96843472 100644 --- a/vendor/github.com/stretchr/testify/require/doc.go +++ b/vendor/github.com/stretchr/testify/require/doc.go @@ -1,24 +1,25 @@ // Package require implements the same assertions as the `assert` package but // stops test execution when a test fails. 
// -// Example Usage +// # Example Usage // // The following is a complete example using require in a standard test function: -// import ( -// "testing" -// "github.com/stretchr/testify/require" -// ) // -// func TestSomething(t *testing.T) { +// import ( +// "testing" +// "github.com/stretchr/testify/require" +// ) // -// var a string = "Hello" -// var b string = "Hello" +// func TestSomething(t *testing.T) { // -// require.Equal(t, a, b, "The two words should be the same.") +// var a string = "Hello" +// var b string = "Hello" // -// } +// require.Equal(t, a, b, "The two words should be the same.") // -// Assertions +// } +// +// # Assertions // // The `require` package have same global functions as in the `assert` package, // but instead of returning a boolean result they call `t.FailNow()`. diff --git a/vendor/github.com/stretchr/testify/require/require.go b/vendor/github.com/stretchr/testify/require/require.go index 880853f5..63f85214 100644 --- a/vendor/github.com/stretchr/testify/require/require.go +++ b/vendor/github.com/stretchr/testify/require/require.go @@ -37,9 +37,9 @@ func Conditionf(t TestingT, comp assert.Comparison, msg string, args ...interfac // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Contains(t, "Hello World", "World") -// assert.Contains(t, ["Hello", "World"], "World") -// assert.Contains(t, {"Hello": "World"}, "Hello") +// assert.Contains(t, "Hello World", "World") +// assert.Contains(t, ["Hello", "World"], "World") +// assert.Contains(t, {"Hello": "World"}, "Hello") func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -53,9 +53,9 @@ func Contains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...int // Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") -// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") -// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") +// assert.Containsf(t, "Hello World", "World", "error message %s", "formatted") +// assert.Containsf(t, ["Hello", "World"], "World", "error message %s", "formatted") +// assert.Containsf(t, {"Hello": "World"}, "Hello", "error message %s", "formatted") func Containsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -123,7 +123,7 @@ func ElementsMatchf(t TestingT, listA interface{}, listB interface{}, msg string // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// assert.Empty(t, obj) +// assert.Empty(t, obj) func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -137,7 +137,7 @@ func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) { // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. 
// -// assert.Emptyf(t, obj, "error message %s", "formatted") +// assert.Emptyf(t, obj, "error message %s", "formatted") func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -150,7 +150,7 @@ func Emptyf(t TestingT, object interface{}, msg string, args ...interface{}) { // Equal asserts that two objects are equal. // -// assert.Equal(t, 123, 123) +// assert.Equal(t, 123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -168,8 +168,8 @@ func Equal(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...i // EqualError asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// assert.EqualError(t, err, expectedErrorString) +// actualObj, err := SomeFunction() +// assert.EqualError(t, err, expectedErrorString) func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -183,8 +183,8 @@ func EqualError(t TestingT, theError error, errString string, msgAndArgs ...inte // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.EqualErrorf(t, err, expectedErrorString, "error message %s", "formatted") func EqualErrorf(t TestingT, theError error, errString string, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -195,10 +195,50 @@ func EqualErrorf(t TestingT, theError error, errString string, msg string, args t.FailNow() } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValues(t, S{1, 2}, S{1, 3}) => true +// assert.EqualExportedValues(t, S{1, 2}, S{2, 3}) => false +func EqualExportedValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.EqualExportedValues(t, expected, actual, msgAndArgs...) { + return + } + t.FailNow() +} + +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// assert.EqualExportedValuesf(t, S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// assert.EqualExportedValuesf(t, S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.EqualExportedValuesf(t, expected, actual, msg, args...) { + return + } + t.FailNow() +} + // EqualValues asserts that two objects are equal or convertable to the same types // and equal. 
// -// assert.EqualValues(t, uint32(123), int32(123)) +// assert.EqualValues(t, uint32(123), int32(123)) func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -212,7 +252,7 @@ func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArg // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") +// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted") func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -225,7 +265,7 @@ func EqualValuesf(t TestingT, expected interface{}, actual interface{}, msg stri // Equalf asserts that two objects are equal. // -// assert.Equalf(t, 123, 123, "error message %s", "formatted") +// assert.Equalf(t, 123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -242,10 +282,10 @@ func Equalf(t TestingT, expected interface{}, actual interface{}, msg string, ar // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if assert.Error(t, err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if assert.Error(t, err) { +// assert.Equal(t, expectedError, err) +// } func Error(t TestingT, err error, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -283,8 +323,8 @@ func ErrorAsf(t TestingT, err error, target interface{}, msg string, args ...int // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContains(t, err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// assert.ErrorContains(t, err, expectedErrorSubString) func ErrorContains(t TestingT, theError error, contains string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -298,8 +338,8 @@ func ErrorContains(t TestingT, theError error, contains string, msgAndArgs ...in // ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// assert.ErrorContainsf(t, err, expectedErrorSubString, "error message %s", "formatted") func ErrorContainsf(t TestingT, theError error, contains string, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -336,10 +376,10 @@ func ErrorIsf(t TestingT, err error, target error, msg string, args ...interface // Errorf asserts that a function returned an error (i.e. not `nil`). 
// -// actualObj, err := SomeFunction() -// if assert.Errorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if assert.Errorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func Errorf(t TestingT, err error, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -353,7 +393,7 @@ func Errorf(t TestingT, err error, msg string, args ...interface{}) { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) +// assert.Eventually(t, func() bool { return true; }, time.Second, 10*time.Millisecond) func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -364,10 +404,66 @@ func Eventually(t TestingT, condition func() bool, waitFor time.Duration, tick t t.FailNow() } +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithT(t, func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithT(t TestingT, condition func(collect *assert.CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.EventuallyWithT(t, condition, waitFor, tick, msgAndArgs...) { + return + } + t.FailNow() +} + +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. 
+// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// assert.EventuallyWithTf(t, func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func EventuallyWithTf(t TestingT, condition func(collect *assert.CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { + if h, ok := t.(tHelper); ok { + h.Helper() + } + if assert.EventuallyWithTf(t, condition, waitFor, tick, msg, args...) { + return + } + t.FailNow() +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Eventuallyf(t, func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -380,7 +476,7 @@ func Eventuallyf(t TestingT, condition func() bool, waitFor time.Duration, tick // Exactly asserts that two objects are equal in value and type. // -// assert.Exactly(t, int32(123), int64(123)) +// assert.Exactly(t, int32(123), int64(123)) func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -393,7 +489,7 @@ func Exactly(t TestingT, expected interface{}, actual interface{}, msgAndArgs .. // Exactlyf asserts that two objects are equal in value and type. // -// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") +// assert.Exactlyf(t, int32(123), int64(123), "error message %s", "formatted") func Exactlyf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -450,7 +546,7 @@ func Failf(t TestingT, failureMessage string, msg string, args ...interface{}) { // False asserts that the specified value is false. // -// assert.False(t, myBool) +// assert.False(t, myBool) func False(t TestingT, value bool, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -463,7 +559,7 @@ func False(t TestingT, value bool, msgAndArgs ...interface{}) { // Falsef asserts that the specified value is false. 
// -// assert.Falsef(t, myBool, "error message %s", "formatted") +// assert.Falsef(t, myBool, "error message %s", "formatted") func Falsef(t TestingT, value bool, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -500,9 +596,9 @@ func FileExistsf(t TestingT, path string, msg string, args ...interface{}) { // Greater asserts that the first element is greater than the second // -// assert.Greater(t, 2, 1) -// assert.Greater(t, float64(2), float64(1)) -// assert.Greater(t, "b", "a") +// assert.Greater(t, 2, 1) +// assert.Greater(t, float64(2), float64(1)) +// assert.Greater(t, "b", "a") func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -515,10 +611,10 @@ func Greater(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqual(t, 2, 1) -// assert.GreaterOrEqual(t, 2, 2) -// assert.GreaterOrEqual(t, "b", "a") -// assert.GreaterOrEqual(t, "b", "b") +// assert.GreaterOrEqual(t, 2, 1) +// assert.GreaterOrEqual(t, 2, 2) +// assert.GreaterOrEqual(t, "b", "a") +// assert.GreaterOrEqual(t, "b", "b") func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -531,10 +627,10 @@ func GreaterOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...in // GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") -// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 1, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, 2, 2, "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "a", "error message %s", "formatted") +// assert.GreaterOrEqualf(t, "b", "b", "error message %s", "formatted") func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -547,9 +643,9 @@ func GreaterOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, arg // Greaterf asserts that the first element is greater than the second // -// assert.Greaterf(t, 2, 1, "error message %s", "formatted") -// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") -// assert.Greaterf(t, "b", "a", "error message %s", "formatted") +// assert.Greaterf(t, 2, 1, "error message %s", "formatted") +// assert.Greaterf(t, float64(2), float64(1), "error message %s", "formatted") +// assert.Greaterf(t, "b", "a", "error message %s", "formatted") func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -563,7 +659,7 @@ func Greaterf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...in // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). 
func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { @@ -579,7 +675,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method string, url s // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // -// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { @@ -595,7 +691,7 @@ func HTTPBodyContainsf(t TestingT, handler http.HandlerFunc, method string, url // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// assert.HTTPBodyNotContains(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { @@ -611,7 +707,7 @@ func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method string, ur // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. // -// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// assert.HTTPBodyNotContainsf(t, myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { @@ -626,7 +722,7 @@ func HTTPBodyNotContainsf(t TestingT, handler http.HandlerFunc, method string, u // HTTPError asserts that a specified handler returns an error status code. // -// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -641,7 +737,7 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method string, url string, // HTTPErrorf asserts that a specified handler returns an error status code. // -// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPErrorf(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -656,7 +752,7 @@ func HTTPErrorf(t TestingT, handler http.HandlerFunc, method string, url string, // HTTPRedirect asserts that a specified handler returns a redirect status code. 
// -// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -671,7 +767,7 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method string, url strin // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// assert.HTTPRedirectf(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -686,7 +782,7 @@ func HTTPRedirectf(t TestingT, handler http.HandlerFunc, method string, url stri // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) +// assert.HTTPStatusCode(t, myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) { @@ -701,7 +797,7 @@ func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method string, url str // HTTPStatusCodef asserts that a specified handler returns a specified status code. // -// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// assert.HTTPStatusCodef(t, myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) { @@ -716,7 +812,7 @@ func HTTPStatusCodef(t TestingT, handler http.HandlerFunc, method string, url st // HTTPSuccess asserts that a specified handler returns a success status code. // -// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) +// assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -731,7 +827,7 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method string, url string // HTTPSuccessf asserts that a specified handler returns a success status code. // -// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// assert.HTTPSuccessf(t, myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -746,7 +842,7 @@ func HTTPSuccessf(t TestingT, handler http.HandlerFunc, method string, url strin // Implements asserts that an object is implemented by the specified interface. 
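The HTTP helper hunks above only re-indent the doc-comment examples; behaviour is unchanged. A minimal test sketch using the require-package forms shown in these hunks (the handler and route names are illustrative, not from this repository):

package example_test

import (
	"fmt"
	"net/http"
	"testing"

	"github.com/stretchr/testify/require"
)

// healthHandler is a stand-in handler used only for this sketch.
func healthHandler(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
	fmt.Fprint(w, "ok")
}

func TestHealthEndpoint(t *testing.T) {
	// The helpers invoke the handler directly; no server is started.
	require.HTTPStatusCode(t, healthHandler, "GET", "/health", nil, http.StatusOK)
	require.HTTPBodyContains(t, healthHandler, "GET", "/health", nil, "ok")
}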
// -// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) +// assert.Implements(t, (*MyInterface)(nil), new(MyObject)) func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -759,7 +855,7 @@ func Implements(t TestingT, interfaceObject interface{}, object interface{}, msg // Implementsf asserts that an object is implemented by the specified interface. // -// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// assert.Implementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -772,7 +868,7 @@ func Implementsf(t TestingT, interfaceObject interface{}, object interface{}, ms // InDelta asserts that the two numerals are within delta of each other. // -// assert.InDelta(t, math.Pi, 22/7.0, 0.01) +// assert.InDelta(t, math.Pi, 22/7.0, 0.01) func InDelta(t TestingT, expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -829,7 +925,7 @@ func InDeltaSlicef(t TestingT, expected interface{}, actual interface{}, delta f // InDeltaf asserts that the two numerals are within delta of each other. // -// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// assert.InDeltaf(t, math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func InDeltaf(t TestingT, expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -886,9 +982,9 @@ func InEpsilonf(t TestingT, expected interface{}, actual interface{}, epsilon fl // IsDecreasing asserts that the collection is decreasing // -// assert.IsDecreasing(t, []int{2, 1, 0}) -// assert.IsDecreasing(t, []float{2, 1}) -// assert.IsDecreasing(t, []string{"b", "a"}) +// assert.IsDecreasing(t, []int{2, 1, 0}) +// assert.IsDecreasing(t, []float{2, 1}) +// assert.IsDecreasing(t, []string{"b", "a"}) func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -901,9 +997,9 @@ func IsDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { // IsDecreasingf asserts that the collection is decreasing // -// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []int{2, 1, 0}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsDecreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -916,9 +1012,9 @@ func IsDecreasingf(t TestingT, object interface{}, msg string, args ...interface // IsIncreasing asserts that the collection is increasing // -// assert.IsIncreasing(t, []int{1, 2, 3}) -// assert.IsIncreasing(t, []float{1, 2}) -// assert.IsIncreasing(t, []string{"a", "b"}) +// assert.IsIncreasing(t, []int{1, 2, 3}) +// assert.IsIncreasing(t, []float{1, 2}) +// assert.IsIncreasing(t, []string{"a", "b"}) func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { 
h.Helper() @@ -931,9 +1027,9 @@ func IsIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { // IsIncreasingf asserts that the collection is increasing // -// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []int{1, 2, 3}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsIncreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -946,9 +1042,9 @@ func IsIncreasingf(t TestingT, object interface{}, msg string, args ...interface // IsNonDecreasing asserts that the collection is not decreasing // -// assert.IsNonDecreasing(t, []int{1, 1, 2}) -// assert.IsNonDecreasing(t, []float{1, 2}) -// assert.IsNonDecreasing(t, []string{"a", "b"}) +// assert.IsNonDecreasing(t, []int{1, 1, 2}) +// assert.IsNonDecreasing(t, []float{1, 2}) +// assert.IsNonDecreasing(t, []string{"a", "b"}) func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -961,9 +1057,9 @@ func IsNonDecreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) // IsNonDecreasingf asserts that the collection is not decreasing // -// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") -// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []int{1, 1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []float{1, 2}, "error message %s", "formatted") +// assert.IsNonDecreasingf(t, []string{"a", "b"}, "error message %s", "formatted") func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -976,9 +1072,9 @@ func IsNonDecreasingf(t TestingT, object interface{}, msg string, args ...interf // IsNonIncreasing asserts that the collection is not increasing // -// assert.IsNonIncreasing(t, []int{2, 1, 1}) -// assert.IsNonIncreasing(t, []float{2, 1}) -// assert.IsNonIncreasing(t, []string{"b", "a"}) +// assert.IsNonIncreasing(t, []int{2, 1, 1}) +// assert.IsNonIncreasing(t, []float{2, 1}) +// assert.IsNonIncreasing(t, []string{"b", "a"}) func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -991,9 +1087,9 @@ func IsNonIncreasing(t TestingT, object interface{}, msgAndArgs ...interface{}) // IsNonIncreasingf asserts that the collection is not increasing // -// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") -// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []int{2, 1, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []float{2, 1}, "error message %s", "formatted") +// assert.IsNonIncreasingf(t, []string{"b", "a"}, "error message %s", "formatted") func IsNonIncreasingf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1028,7 +1124,7 @@ func IsTypef(t TestingT, expectedType 
interface{}, object interface{}, msg strin // JSONEq asserts that two JSON strings are equivalent. // -// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1041,7 +1137,7 @@ func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{ // JSONEqf asserts that two JSON strings are equivalent. // -// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// assert.JSONEqf(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func JSONEqf(t TestingT, expected string, actual string, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1055,7 +1151,7 @@ func JSONEqf(t TestingT, expected string, actual string, msg string, args ...int // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// assert.Len(t, mySlice, 3) +// assert.Len(t, mySlice, 3) func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1069,7 +1165,7 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. // -// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") +// assert.Lenf(t, mySlice, 3, "error message %s", "formatted") func Lenf(t TestingT, object interface{}, length int, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1082,9 +1178,9 @@ func Lenf(t TestingT, object interface{}, length int, msg string, args ...interf // Less asserts that the first element is less than the second // -// assert.Less(t, 1, 2) -// assert.Less(t, float64(1), float64(2)) -// assert.Less(t, "a", "b") +// assert.Less(t, 1, 2) +// assert.Less(t, float64(1), float64(2)) +// assert.Less(t, "a", "b") func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1097,10 +1193,10 @@ func Less(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) // LessOrEqual asserts that the first element is less than or equal to the second // -// assert.LessOrEqual(t, 1, 2) -// assert.LessOrEqual(t, 2, 2) -// assert.LessOrEqual(t, "a", "b") -// assert.LessOrEqual(t, "b", "b") +// assert.LessOrEqual(t, 1, 2) +// assert.LessOrEqual(t, 2, 2) +// assert.LessOrEqual(t, "a", "b") +// assert.LessOrEqual(t, "b", "b") func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1113,10 +1209,10 @@ func LessOrEqual(t TestingT, e1 interface{}, e2 interface{}, msgAndArgs ...inter // LessOrEqualf asserts that the first element is less than or equal to the second // -// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") -// assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") -// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, 1, 2, "error message %s", "formatted") +// assert.LessOrEqualf(t, 2, 2, "error message %s", "formatted") +// 
assert.LessOrEqualf(t, "a", "b", "error message %s", "formatted") +// assert.LessOrEqualf(t, "b", "b", "error message %s", "formatted") func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1129,9 +1225,9 @@ func LessOrEqualf(t TestingT, e1 interface{}, e2 interface{}, msg string, args . // Lessf asserts that the first element is less than the second // -// assert.Lessf(t, 1, 2, "error message %s", "formatted") -// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") -// assert.Lessf(t, "a", "b", "error message %s", "formatted") +// assert.Lessf(t, 1, 2, "error message %s", "formatted") +// assert.Lessf(t, float64(1), float64(2), "error message %s", "formatted") +// assert.Lessf(t, "a", "b", "error message %s", "formatted") func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1144,8 +1240,8 @@ func Lessf(t TestingT, e1 interface{}, e2 interface{}, msg string, args ...inter // Negative asserts that the specified element is negative // -// assert.Negative(t, -1) -// assert.Negative(t, -1.23) +// assert.Negative(t, -1) +// assert.Negative(t, -1.23) func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1158,8 +1254,8 @@ func Negative(t TestingT, e interface{}, msgAndArgs ...interface{}) { // Negativef asserts that the specified element is negative // -// assert.Negativef(t, -1, "error message %s", "formatted") -// assert.Negativef(t, -1.23, "error message %s", "formatted") +// assert.Negativef(t, -1, "error message %s", "formatted") +// assert.Negativef(t, -1.23, "error message %s", "formatted") func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1173,7 +1269,7 @@ func Negativef(t TestingT, e interface{}, msg string, args ...interface{}) { // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) +// assert.Never(t, func() bool { return false; }, time.Second, 10*time.Millisecond) func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1187,7 +1283,7 @@ func Never(t TestingT, condition func() bool, waitFor time.Duration, tick time.D // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// assert.Neverf(t, func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1200,7 +1296,7 @@ func Neverf(t TestingT, condition func() bool, waitFor time.Duration, tick time. // Nil asserts that the specified object is nil. // -// assert.Nil(t, err) +// assert.Nil(t, err) func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1213,7 +1309,7 @@ func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) { // Nilf asserts that the specified object is nil. 
// -// assert.Nilf(t, err, "error message %s", "formatted") +// assert.Nilf(t, err, "error message %s", "formatted") func Nilf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1250,10 +1346,10 @@ func NoDirExistsf(t TestingT, path string, msg string, args ...interface{}) { // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoError(t, err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoError(t, err) { +// assert.Equal(t, expectedObj, actualObj) +// } func NoError(t TestingT, err error, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1266,10 +1362,10 @@ func NoError(t TestingT, err error, msgAndArgs ...interface{}) { // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if assert.NoErrorf(t, err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if assert.NoErrorf(t, err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func NoErrorf(t TestingT, err error, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1307,9 +1403,9 @@ func NoFileExistsf(t TestingT, path string, msg string, args ...interface{}) { // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContains(t, "Hello World", "Earth") -// assert.NotContains(t, ["Hello", "World"], "Earth") -// assert.NotContains(t, {"Hello": "World"}, "Earth") +// assert.NotContains(t, "Hello World", "Earth") +// assert.NotContains(t, ["Hello", "World"], "Earth") +// assert.NotContains(t, {"Hello": "World"}, "Earth") func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1323,9 +1419,9 @@ func NotContains(t TestingT, s interface{}, contains interface{}, msgAndArgs ... // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") -// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, "Hello World", "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, ["Hello", "World"], "Earth", "error message %s", "formatted") +// assert.NotContainsf(t, {"Hello": "World"}, "Earth", "error message %s", "formatted") func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1339,9 +1435,9 @@ func NotContainsf(t TestingT, s interface{}, contains interface{}, msg string, a // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. 
// -// if assert.NotEmpty(t, obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmpty(t, obj) { +// assert.Equal(t, "two", obj[1]) +// } func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1355,9 +1451,9 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) { // NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if assert.NotEmptyf(t, obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1370,7 +1466,7 @@ func NotEmptyf(t TestingT, object interface{}, msg string, args ...interface{}) // NotEqual asserts that the specified values are NOT equal. // -// assert.NotEqual(t, obj1, obj2) +// assert.NotEqual(t, obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1386,7 +1482,7 @@ func NotEqual(t TestingT, expected interface{}, actual interface{}, msgAndArgs . // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValues(t, obj1, obj2) +// assert.NotEqualValues(t, obj1, obj2) func NotEqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1399,7 +1495,7 @@ func NotEqualValues(t TestingT, expected interface{}, actual interface{}, msgAnd // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualValuesf(t, obj1, obj2, "error message %s", "formatted") func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1412,7 +1508,7 @@ func NotEqualValuesf(t TestingT, expected interface{}, actual interface{}, msg s // NotEqualf asserts that the specified values are NOT equal. // -// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") +// assert.NotEqualf(t, obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1452,7 +1548,7 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf // NotNil asserts that the specified object is not nil. // -// assert.NotNil(t, err) +// assert.NotNil(t, err) func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1465,7 +1561,7 @@ func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) { // NotNilf asserts that the specified object is not nil. // -// assert.NotNilf(t, err, "error message %s", "formatted") +// assert.NotNilf(t, err, "error message %s", "formatted") func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1478,7 +1574,7 @@ func NotNilf(t TestingT, object interface{}, msg string, args ...interface{}) { // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. 
// -// assert.NotPanics(t, func(){ RemainCalm() }) +// assert.NotPanics(t, func(){ RemainCalm() }) func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1491,7 +1587,7 @@ func NotPanics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. // -// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") +// assert.NotPanicsf(t, func(){ RemainCalm() }, "error message %s", "formatted") func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1504,8 +1600,8 @@ func NotPanicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interfac // NotRegexp asserts that a specified regexp does not match a string. // -// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") -// assert.NotRegexp(t, "^start", "it's not starting") +// assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting") +// assert.NotRegexp(t, "^start", "it's not starting") func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1518,8 +1614,8 @@ func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interf // NotRegexpf asserts that a specified regexp does not match a string. // -// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") +// assert.NotRegexpf(t, regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// assert.NotRegexpf(t, "^start", "it's not starting", "error message %s", "formatted") func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1532,7 +1628,7 @@ func NotRegexpf(t TestingT, rx interface{}, str interface{}, msg string, args .. // NotSame asserts that two pointers do not reference the same object. // -// assert.NotSame(t, ptr1, ptr2) +// assert.NotSame(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1548,7 +1644,7 @@ func NotSame(t TestingT, expected interface{}, actual interface{}, msgAndArgs .. // NotSamef asserts that two pointers do not reference the same object. // -// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.NotSamef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1565,7 +1661,7 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string, // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1579,7 +1675,7 @@ func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...i // NotSubsetf asserts that the specified list(array, slice...) 
contains not all // elements given in the specified subset(array, slice...). // -// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1614,7 +1710,7 @@ func NotZerof(t TestingT, i interface{}, msg string, args ...interface{}) { // Panics asserts that the code inside the specified PanicTestFunc panics. // -// assert.Panics(t, func(){ GoCrazy() }) +// assert.Panics(t, func(){ GoCrazy() }) func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1629,7 +1725,7 @@ func Panics(t TestingT, f assert.PanicTestFunc, msgAndArgs ...interface{}) { // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithError(t, "crazy error", func(){ GoCrazy() }) func PanicsWithError(t TestingT, errString string, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1644,7 +1740,7 @@ func PanicsWithError(t TestingT, errString string, f assert.PanicTestFunc, msgAn // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithErrorf(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithErrorf(t TestingT, errString string, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1658,7 +1754,7 @@ func PanicsWithErrorf(t TestingT, errString string, f assert.PanicTestFunc, msg // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) +// assert.PanicsWithValue(t, "crazy error", func(){ GoCrazy() }) func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1672,7 +1768,7 @@ func PanicsWithValue(t TestingT, expected interface{}, f assert.PanicTestFunc, m // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// assert.PanicsWithValuef(t, "crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1685,7 +1781,7 @@ func PanicsWithValuef(t TestingT, expected interface{}, f assert.PanicTestFunc, // Panicsf asserts that the code inside the specified PanicTestFunc panics. 
// -// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") +// assert.Panicsf(t, func(){ GoCrazy() }, "error message %s", "formatted") func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1698,8 +1794,8 @@ func Panicsf(t TestingT, f assert.PanicTestFunc, msg string, args ...interface{} // Positive asserts that the specified element is positive // -// assert.Positive(t, 1) -// assert.Positive(t, 1.23) +// assert.Positive(t, 1) +// assert.Positive(t, 1.23) func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1712,8 +1808,8 @@ func Positive(t TestingT, e interface{}, msgAndArgs ...interface{}) { // Positivef asserts that the specified element is positive // -// assert.Positivef(t, 1, "error message %s", "formatted") -// assert.Positivef(t, 1.23, "error message %s", "formatted") +// assert.Positivef(t, 1, "error message %s", "formatted") +// assert.Positivef(t, 1.23, "error message %s", "formatted") func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1726,8 +1822,8 @@ func Positivef(t TestingT, e interface{}, msg string, args ...interface{}) { // Regexp asserts that a specified regexp matches a string. // -// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") -// assert.Regexp(t, "start...$", "it's not starting") +// assert.Regexp(t, regexp.MustCompile("start"), "it's starting") +// assert.Regexp(t, "start...$", "it's not starting") func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1740,8 +1836,8 @@ func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface // Regexpf asserts that a specified regexp matches a string. // -// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") +// assert.Regexpf(t, regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// assert.Regexpf(t, "start...$", "it's not starting", "error message %s", "formatted") func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1754,7 +1850,7 @@ func Regexpf(t TestingT, rx interface{}, str interface{}, msg string, args ...in // Same asserts that two pointers reference the same object. // -// assert.Same(t, ptr1, ptr2) +// assert.Same(t, ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1770,7 +1866,7 @@ func Same(t TestingT, expected interface{}, actual interface{}, msgAndArgs ...in // Samef asserts that two pointers reference the same object. // -// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") +// assert.Samef(t, ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1787,7 +1883,7 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
// -// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1801,7 +1897,7 @@ func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...inte // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1814,7 +1910,7 @@ func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args // True asserts that the specified value is true. // -// assert.True(t, myBool) +// assert.True(t, myBool) func True(t TestingT, value bool, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1827,7 +1923,7 @@ func True(t TestingT, value bool, msgAndArgs ...interface{}) { // Truef asserts that the specified value is true. // -// assert.Truef(t, myBool, "error message %s", "formatted") +// assert.Truef(t, myBool, "error message %s", "formatted") func Truef(t TestingT, value bool, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1840,7 +1936,7 @@ func Truef(t TestingT, value bool, msg string, args ...interface{}) { // WithinDuration asserts that the two times are within duration delta of each other. // -// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) +// assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second) func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1853,7 +1949,7 @@ func WithinDuration(t TestingT, expected time.Time, actual time.Time, delta time // WithinDurationf asserts that the two times are within duration delta of each other. // -// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// assert.WithinDurationf(t, time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1866,7 +1962,7 @@ func WithinDurationf(t TestingT, expected time.Time, actual time.Time, delta tim // WithinRange asserts that a time is within a time range (inclusive). // -// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// assert.WithinRange(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func WithinRange(t TestingT, actual time.Time, start time.Time, end time.Time, msgAndArgs ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() @@ -1879,7 +1975,7 @@ func WithinRange(t TestingT, actual time.Time, start time.Time, end time.Time, m // WithinRangef asserts that a time is within a time range (inclusive). 
// -// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// assert.WithinRangef(t, time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func WithinRangef(t TestingT, actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) { if h, ok := t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/require/require_forward.go b/vendor/github.com/stretchr/testify/require/require_forward.go index 960bf6f2..3b5b0933 100644 --- a/vendor/github.com/stretchr/testify/require/require_forward.go +++ b/vendor/github.com/stretchr/testify/require/require_forward.go @@ -31,9 +31,9 @@ func (a *Assertions) Conditionf(comp assert.Comparison, msg string, args ...inte // Contains asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// a.Contains("Hello World", "World") -// a.Contains(["Hello", "World"], "World") -// a.Contains({"Hello": "World"}, "Hello") +// a.Contains("Hello World", "World") +// a.Contains(["Hello", "World"], "World") +// a.Contains({"Hello": "World"}, "Hello") func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -44,9 +44,9 @@ func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs .. // Containsf asserts that the specified string, list(array, slice...) or map contains the // specified substring or element. // -// a.Containsf("Hello World", "World", "error message %s", "formatted") -// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") -// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") +// a.Containsf("Hello World", "World", "error message %s", "formatted") +// a.Containsf(["Hello", "World"], "World", "error message %s", "formatted") +// a.Containsf({"Hello": "World"}, "Hello", "error message %s", "formatted") func (a *Assertions) Containsf(s interface{}, contains interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -99,7 +99,7 @@ func (a *Assertions) ElementsMatchf(listA interface{}, listB interface{}, msg st // Empty asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Empty(obj) +// a.Empty(obj) func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -110,7 +110,7 @@ func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) { // Emptyf asserts that the specified object is empty. I.e. nil, "", false, 0 or either // a slice or a channel with len == 0. // -// a.Emptyf(obj, "error message %s", "formatted") +// a.Emptyf(obj, "error message %s", "formatted") func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -120,7 +120,7 @@ func (a *Assertions) Emptyf(object interface{}, msg string, args ...interface{}) // Equal asserts that two objects are equal. // -// a.Equal(123, 123) +// a.Equal(123, 123) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -135,8 +135,8 @@ func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs // EqualError asserts that a function returned an error (i.e. 
not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualError(err, expectedErrorString) +// actualObj, err := SomeFunction() +// a.EqualError(err, expectedErrorString) func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -147,8 +147,8 @@ func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ... // EqualErrorf asserts that a function returned an error (i.e. not `nil`) // and that it is equal to the provided error. // -// actualObj, err := SomeFunction() -// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.EqualErrorf(err, expectedErrorString, "error message %s", "formatted") func (a *Assertions) EqualErrorf(theError error, errString string, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -156,10 +156,44 @@ func (a *Assertions) EqualErrorf(theError error, errString string, msg string, a EqualErrorf(a.t, theError, errString, msg, args...) } +// EqualExportedValues asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValues(S{1, 2}, S{1, 3}) => true +// a.EqualExportedValues(S{1, 2}, S{2, 3}) => false +func (a *Assertions) EqualExportedValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + EqualExportedValues(a.t, expected, actual, msgAndArgs...) +} + +// EqualExportedValuesf asserts that the types of two objects are equal and their public +// fields are also equal. This is useful for comparing structs that have private fields +// that could potentially differ. +// +// type S struct { +// Exported int +// notExported int +// } +// a.EqualExportedValuesf(S{1, 2}, S{1, 3}, "error message %s", "formatted") => true +// a.EqualExportedValuesf(S{1, 2}, S{2, 3}, "error message %s", "formatted") => false +func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + EqualExportedValuesf(a.t, expected, actual, msg, args...) +} + // EqualValues asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValues(uint32(123), int32(123)) +// a.EqualValues(uint32(123), int32(123)) func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -170,7 +204,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn // EqualValuesf asserts that two objects are equal or convertable to the same types // and equal. // -// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") +// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted") func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -180,7 +214,7 @@ func (a *Assertions) EqualValuesf(expected interface{}, actual interface{}, msg // Equalf asserts that two objects are equal. 
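This hunk adds EqualExportedValues/EqualExportedValuesf, which compare only exported struct fields. A minimal sketch of the package-level require form, using a hypothetical Config struct (not from this repository):

package example_test

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// Config is a hypothetical struct whose unexported field may differ
// between the two values being compared.
type Config struct {
	Host string
	Port int

	cachedDSN string // unexported; ignored by EqualExportedValues
}

func TestConfigExportedFields(t *testing.T) {
	want := Config{Host: "localhost", Port: 8080}
	got := Config{Host: "localhost", Port: 8080, cachedDSN: "host=localhost port=8080"}

	// Passes: only Host and Port are compared; cachedDSN is skipped.
	require.EqualExportedValues(t, want, got)
}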
// -// a.Equalf(123, 123, "error message %s", "formatted") +// a.Equalf(123, 123, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). Function equality @@ -194,10 +228,10 @@ func (a *Assertions) Equalf(expected interface{}, actual interface{}, msg string // Error asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Error(err) { -// assert.Equal(t, expectedError, err) -// } +// actualObj, err := SomeFunction() +// if a.Error(err) { +// assert.Equal(t, expectedError, err) +// } func (a *Assertions) Error(err error, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -226,8 +260,8 @@ func (a *Assertions) ErrorAsf(err error, target interface{}, msg string, args .. // ErrorContains asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContains(err, expectedErrorSubString) +// actualObj, err := SomeFunction() +// a.ErrorContains(err, expectedErrorSubString) func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -238,8 +272,8 @@ func (a *Assertions) ErrorContains(theError error, contains string, msgAndArgs . // ErrorContainsf asserts that a function returned an error (i.e. not `nil`) // and that the error contains the specified substring. // -// actualObj, err := SomeFunction() -// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") +// actualObj, err := SomeFunction() +// a.ErrorContainsf(err, expectedErrorSubString, "error message %s", "formatted") func (a *Assertions) ErrorContainsf(theError error, contains string, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -267,10 +301,10 @@ func (a *Assertions) ErrorIsf(err error, target error, msg string, args ...inter // Errorf asserts that a function returned an error (i.e. not `nil`). // -// actualObj, err := SomeFunction() -// if a.Errorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedErrorf, err) -// } +// actualObj, err := SomeFunction() +// if a.Errorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedErrorf, err) +// } func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -281,7 +315,7 @@ func (a *Assertions) Errorf(err error, msg string, args ...interface{}) { // Eventually asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) +// a.Eventually(func() bool { return true; }, time.Second, 10*time.Millisecond) func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -289,10 +323,60 @@ func (a *Assertions) Eventually(condition func() bool, waitFor time.Duration, ti Eventually(a.t, condition, waitFor, tick, msgAndArgs...) } +// EventuallyWithT asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. 
+// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithT(func(c *assert.CollectT) { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithT(condition func(collect *assert.CollectT), waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + EventuallyWithT(a.t, condition, waitFor, tick, msgAndArgs...) +} + +// EventuallyWithTf asserts that given condition will be met in waitFor time, +// periodically checking target function each tick. In contrast to Eventually, +// it supplies a CollectT to the condition function, so that the condition +// function can use the CollectT to call other assertions. +// The condition is considered "met" if no errors are raised in a tick. +// The supplied CollectT collects all errors from one tick (if there are any). +// If the condition is not met before waitFor, the collected errors of +// the last tick are copied to t. +// +// externalValue := false +// go func() { +// time.Sleep(8*time.Second) +// externalValue = true +// }() +// a.EventuallyWithTf(func(c *assert.CollectT, "error message %s", "formatted") { +// // add assertions as needed; any assertion failure will fail the current tick +// assert.True(c, externalValue, "expected 'externalValue' to be true") +// }, 1*time.Second, 10*time.Second, "external state has not changed to 'true'; still false") +func (a *Assertions) EventuallyWithTf(condition func(collect *assert.CollectT), waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { + if h, ok := a.t.(tHelper); ok { + h.Helper() + } + EventuallyWithTf(a.t, condition, waitFor, tick, msg, args...) +} + // Eventuallyf asserts that given condition will be met in waitFor time, // periodically checking target function each tick. // -// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Eventuallyf(func() bool { return true; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -302,7 +386,7 @@ func (a *Assertions) Eventuallyf(condition func() bool, waitFor time.Duration, t // Exactly asserts that two objects are equal in value and type. // -// a.Exactly(int32(123), int64(123)) +// a.Exactly(int32(123), int64(123)) func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -312,7 +396,7 @@ func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArg // Exactlyf asserts that two objects are equal in value and type. 
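EventuallyWithT/EventuallyWithTf are also new in this testify bump. A minimal sketch of the package-level require form, polling a hypothetical readiness flag (names are illustrative):

package example_test

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestBecomesReady(t *testing.T) {
	var ready atomic.Bool

	// Simulate an external state change the test has to wait for.
	go func() {
		time.Sleep(200 * time.Millisecond)
		ready.Store(true)
	}()

	// Each tick runs the condition with a *assert.CollectT; a tick with no
	// failed assertions means the condition is met. On timeout, the failures
	// from the last tick are reported on t.
	require.EventuallyWithT(t, func(c *assert.CollectT) {
		assert.True(c, ready.Load(), "expected service to become ready")
	}, 2*time.Second, 50*time.Millisecond)
}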
// -// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") +// a.Exactlyf(int32(123), int64(123), "error message %s", "formatted") func (a *Assertions) Exactlyf(expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -354,7 +438,7 @@ func (a *Assertions) Failf(failureMessage string, msg string, args ...interface{ // False asserts that the specified value is false. // -// a.False(myBool) +// a.False(myBool) func (a *Assertions) False(value bool, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -364,7 +448,7 @@ func (a *Assertions) False(value bool, msgAndArgs ...interface{}) { // Falsef asserts that the specified value is false. // -// a.Falsef(myBool, "error message %s", "formatted") +// a.Falsef(myBool, "error message %s", "formatted") func (a *Assertions) Falsef(value bool, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -392,9 +476,9 @@ func (a *Assertions) FileExistsf(path string, msg string, args ...interface{}) { // Greater asserts that the first element is greater than the second // -// a.Greater(2, 1) -// a.Greater(float64(2), float64(1)) -// a.Greater("b", "a") +// a.Greater(2, 1) +// a.Greater(float64(2), float64(1)) +// a.Greater("b", "a") func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -404,10 +488,10 @@ func (a *Assertions) Greater(e1 interface{}, e2 interface{}, msgAndArgs ...inter // GreaterOrEqual asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqual(2, 1) -// a.GreaterOrEqual(2, 2) -// a.GreaterOrEqual("b", "a") -// a.GreaterOrEqual("b", "b") +// a.GreaterOrEqual(2, 1) +// a.GreaterOrEqual(2, 2) +// a.GreaterOrEqual("b", "a") +// a.GreaterOrEqual("b", "b") func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -417,10 +501,10 @@ func (a *Assertions) GreaterOrEqual(e1 interface{}, e2 interface{}, msgAndArgs . 
// GreaterOrEqualf asserts that the first element is greater than or equal to the second // -// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") -// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") -// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") -// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") +// a.GreaterOrEqualf(2, 1, "error message %s", "formatted") +// a.GreaterOrEqualf(2, 2, "error message %s", "formatted") +// a.GreaterOrEqualf("b", "a", "error message %s", "formatted") +// a.GreaterOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -430,9 +514,9 @@ func (a *Assertions) GreaterOrEqualf(e1 interface{}, e2 interface{}, msg string, // Greaterf asserts that the first element is greater than the second // -// a.Greaterf(2, 1, "error message %s", "formatted") -// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") -// a.Greaterf("b", "a", "error message %s", "formatted") +// a.Greaterf(2, 1, "error message %s", "formatted") +// a.Greaterf(float64(2), float64(1), "error message %s", "formatted") +// a.Greaterf("b", "a", "error message %s", "formatted") func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -443,7 +527,7 @@ func (a *Assertions) Greaterf(e1 interface{}, e2 interface{}, msg string, args . // HTTPBodyContains asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { @@ -456,7 +540,7 @@ func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, u // HTTPBodyContainsf asserts that a specified handler returns a // body that contains a string. // -// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { @@ -469,7 +553,7 @@ func (a *Assertions) HTTPBodyContainsf(handler http.HandlerFunc, method string, // HTTPBodyNotContains asserts that a specified handler returns a // body that does not contain a string. // -// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") +// a.HTTPBodyNotContains(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msgAndArgs ...interface{}) { @@ -482,7 +566,7 @@ func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string // HTTPBodyNotContainsf asserts that a specified handler returns a // body that does not contain a string. 
// -// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") +// a.HTTPBodyNotContainsf(myHandler, "GET", "www.google.com", nil, "I'm Feeling Lucky", "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}, msg string, args ...interface{}) { @@ -494,7 +578,7 @@ func (a *Assertions) HTTPBodyNotContainsf(handler http.HandlerFunc, method strin // HTTPError asserts that a specified handler returns an error status code. // -// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -506,7 +590,7 @@ func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url stri // HTTPErrorf asserts that a specified handler returns an error status code. // -// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPErrorf(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -518,7 +602,7 @@ func (a *Assertions) HTTPErrorf(handler http.HandlerFunc, method string, url str // HTTPRedirect asserts that a specified handler returns a redirect status code. // -// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -530,7 +614,7 @@ func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url s // HTTPRedirectf asserts that a specified handler returns a redirect status code. // -// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} +// a.HTTPRedirectf(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}} // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -542,7 +626,7 @@ func (a *Assertions) HTTPRedirectf(handler http.HandlerFunc, method string, url // HTTPStatusCode asserts that a specified handler returns a specified status code. // -// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) +// a.HTTPStatusCode(myHandler, "GET", "/notImplemented", nil, 501) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msgAndArgs ...interface{}) { @@ -554,7 +638,7 @@ func (a *Assertions) HTTPStatusCode(handler http.HandlerFunc, method string, url // HTTPStatusCodef asserts that a specified handler returns a specified status code. 
// -// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") +// a.HTTPStatusCodef(myHandler, "GET", "/notImplemented", nil, 501, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, url string, values url.Values, statuscode int, msg string, args ...interface{}) { @@ -566,7 +650,7 @@ func (a *Assertions) HTTPStatusCodef(handler http.HandlerFunc, method string, ur // HTTPSuccess asserts that a specified handler returns a success status code. // -// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) +// a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil) // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values, msgAndArgs ...interface{}) { @@ -578,7 +662,7 @@ func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url st // HTTPSuccessf asserts that a specified handler returns a success status code. // -// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") +// a.HTTPSuccessf(myHandler, "POST", "http://www.google.com", nil, "error message %s", "formatted") // // Returns whether the assertion was successful (true) or not (false). func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url string, values url.Values, msg string, args ...interface{}) { @@ -590,7 +674,7 @@ func (a *Assertions) HTTPSuccessf(handler http.HandlerFunc, method string, url s // Implements asserts that an object is implemented by the specified interface. // -// a.Implements((*MyInterface)(nil), new(MyObject)) +// a.Implements((*MyInterface)(nil), new(MyObject)) func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -600,7 +684,7 @@ func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, // Implementsf asserts that an object is implemented by the specified interface. // -// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") +// a.Implementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted") func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -610,7 +694,7 @@ func (a *Assertions) Implementsf(interfaceObject interface{}, object interface{} // InDelta asserts that the two numerals are within delta of each other. // -// a.InDelta(math.Pi, 22/7.0, 0.01) +// a.InDelta(math.Pi, 22/7.0, 0.01) func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -652,7 +736,7 @@ func (a *Assertions) InDeltaSlicef(expected interface{}, actual interface{}, del // InDeltaf asserts that the two numerals are within delta of each other. 
// -// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") +// a.InDeltaf(math.Pi, 22/7.0, 0.01, "error message %s", "formatted") func (a *Assertions) InDeltaf(expected interface{}, actual interface{}, delta float64, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -694,9 +778,9 @@ func (a *Assertions) InEpsilonf(expected interface{}, actual interface{}, epsilo // IsDecreasing asserts that the collection is decreasing // -// a.IsDecreasing([]int{2, 1, 0}) -// a.IsDecreasing([]float{2, 1}) -// a.IsDecreasing([]string{"b", "a"}) +// a.IsDecreasing([]int{2, 1, 0}) +// a.IsDecreasing([]float{2, 1}) +// a.IsDecreasing([]string{"b", "a"}) func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -706,9 +790,9 @@ func (a *Assertions) IsDecreasing(object interface{}, msgAndArgs ...interface{}) // IsDecreasingf asserts that the collection is decreasing // -// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") -// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsDecreasingf([]int{2, 1, 0}, "error message %s", "formatted") +// a.IsDecreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsDecreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -718,9 +802,9 @@ func (a *Assertions) IsDecreasingf(object interface{}, msg string, args ...inter // IsIncreasing asserts that the collection is increasing // -// a.IsIncreasing([]int{1, 2, 3}) -// a.IsIncreasing([]float{1, 2}) -// a.IsIncreasing([]string{"a", "b"}) +// a.IsIncreasing([]int{1, 2, 3}) +// a.IsIncreasing([]float{1, 2}) +// a.IsIncreasing([]string{"a", "b"}) func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -730,9 +814,9 @@ func (a *Assertions) IsIncreasing(object interface{}, msgAndArgs ...interface{}) // IsIncreasingf asserts that the collection is increasing // -// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") -// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsIncreasingf([]int{1, 2, 3}, "error message %s", "formatted") +// a.IsIncreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsIncreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -742,9 +826,9 @@ func (a *Assertions) IsIncreasingf(object interface{}, msg string, args ...inter // IsNonDecreasing asserts that the collection is not decreasing // -// a.IsNonDecreasing([]int{1, 1, 2}) -// a.IsNonDecreasing([]float{1, 2}) -// a.IsNonDecreasing([]string{"a", "b"}) +// a.IsNonDecreasing([]int{1, 1, 2}) +// a.IsNonDecreasing([]float{1, 2}) +// a.IsNonDecreasing([]string{"a", "b"}) func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -754,9 +838,9 @@ func (a *Assertions) IsNonDecreasing(object interface{}, msgAndArgs ...interface // IsNonDecreasingf asserts that the collection is not decreasing // -// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") -// 
a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") -// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") +// a.IsNonDecreasingf([]int{1, 1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]float{1, 2}, "error message %s", "formatted") +// a.IsNonDecreasingf([]string{"a", "b"}, "error message %s", "formatted") func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -766,9 +850,9 @@ func (a *Assertions) IsNonDecreasingf(object interface{}, msg string, args ...in // IsNonIncreasing asserts that the collection is not increasing // -// a.IsNonIncreasing([]int{2, 1, 1}) -// a.IsNonIncreasing([]float{2, 1}) -// a.IsNonIncreasing([]string{"b", "a"}) +// a.IsNonIncreasing([]int{2, 1, 1}) +// a.IsNonIncreasing([]float{2, 1}) +// a.IsNonIncreasing([]string{"b", "a"}) func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -778,9 +862,9 @@ func (a *Assertions) IsNonIncreasing(object interface{}, msgAndArgs ...interface // IsNonIncreasingf asserts that the collection is not increasing // -// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") -// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") +// a.IsNonIncreasingf([]int{2, 1, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]float{2, 1}, "error message %s", "formatted") +// a.IsNonIncreasingf([]string{"b", "a"}, "error message %s", "formatted") func (a *Assertions) IsNonIncreasingf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -806,7 +890,7 @@ func (a *Assertions) IsTypef(expectedType interface{}, object interface{}, msg s // JSONEq asserts that two JSON strings are equivalent. // -// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) +// a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`) func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -816,7 +900,7 @@ func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interf // JSONEqf asserts that two JSON strings are equivalent. // -// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") +// a.JSONEqf(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`, "error message %s", "formatted") func (a *Assertions) JSONEqf(expected string, actual string, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -827,7 +911,7 @@ func (a *Assertions) JSONEqf(expected string, actual string, msg string, args .. // Len asserts that the specified object has specific length. // Len also fails if the object has a type that len() not accept. // -// a.Len(mySlice, 3) +// a.Len(mySlice, 3) func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -838,7 +922,7 @@ func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface // Lenf asserts that the specified object has specific length. // Lenf also fails if the object has a type that len() not accept. 
// -// a.Lenf(mySlice, 3, "error message %s", "formatted") +// a.Lenf(mySlice, 3, "error message %s", "formatted") func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -848,9 +932,9 @@ func (a *Assertions) Lenf(object interface{}, length int, msg string, args ...in // Less asserts that the first element is less than the second // -// a.Less(1, 2) -// a.Less(float64(1), float64(2)) -// a.Less("a", "b") +// a.Less(1, 2) +// a.Less(float64(1), float64(2)) +// a.Less("a", "b") func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -860,10 +944,10 @@ func (a *Assertions) Less(e1 interface{}, e2 interface{}, msgAndArgs ...interfac // LessOrEqual asserts that the first element is less than or equal to the second // -// a.LessOrEqual(1, 2) -// a.LessOrEqual(2, 2) -// a.LessOrEqual("a", "b") -// a.LessOrEqual("b", "b") +// a.LessOrEqual(1, 2) +// a.LessOrEqual(2, 2) +// a.LessOrEqual("a", "b") +// a.LessOrEqual("b", "b") func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -873,10 +957,10 @@ func (a *Assertions) LessOrEqual(e1 interface{}, e2 interface{}, msgAndArgs ...i // LessOrEqualf asserts that the first element is less than or equal to the second // -// a.LessOrEqualf(1, 2, "error message %s", "formatted") -// a.LessOrEqualf(2, 2, "error message %s", "formatted") -// a.LessOrEqualf("a", "b", "error message %s", "formatted") -// a.LessOrEqualf("b", "b", "error message %s", "formatted") +// a.LessOrEqualf(1, 2, "error message %s", "formatted") +// a.LessOrEqualf(2, 2, "error message %s", "formatted") +// a.LessOrEqualf("a", "b", "error message %s", "formatted") +// a.LessOrEqualf("b", "b", "error message %s", "formatted") func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -886,9 +970,9 @@ func (a *Assertions) LessOrEqualf(e1 interface{}, e2 interface{}, msg string, ar // Lessf asserts that the first element is less than the second // -// a.Lessf(1, 2, "error message %s", "formatted") -// a.Lessf(float64(1), float64(2), "error message %s", "formatted") -// a.Lessf("a", "b", "error message %s", "formatted") +// a.Lessf(1, 2, "error message %s", "formatted") +// a.Lessf(float64(1), float64(2), "error message %s", "formatted") +// a.Lessf("a", "b", "error message %s", "formatted") func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -898,8 +982,8 @@ func (a *Assertions) Lessf(e1 interface{}, e2 interface{}, msg string, args ...i // Negative asserts that the specified element is negative // -// a.Negative(-1) -// a.Negative(-1.23) +// a.Negative(-1) +// a.Negative(-1.23) func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -909,8 +993,8 @@ func (a *Assertions) Negative(e interface{}, msgAndArgs ...interface{}) { // Negativef asserts that the specified element is negative // -// a.Negativef(-1, "error message %s", "formatted") -// a.Negativef(-1.23, "error message %s", "formatted") +// a.Negativef(-1, "error message %s", "formatted") +// a.Negativef(-1.23, "error message %s", "formatted") func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { 
h.Helper() @@ -921,7 +1005,7 @@ func (a *Assertions) Negativef(e interface{}, msg string, args ...interface{}) { // Never asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) +// a.Never(func() bool { return false; }, time.Second, 10*time.Millisecond) func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -932,7 +1016,7 @@ func (a *Assertions) Never(condition func() bool, waitFor time.Duration, tick ti // Neverf asserts that the given condition doesn't satisfy in waitFor time, // periodically checking the target function each tick. // -// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") +// a.Neverf(func() bool { return false; }, time.Second, 10*time.Millisecond, "error message %s", "formatted") func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick time.Duration, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -942,7 +1026,7 @@ func (a *Assertions) Neverf(condition func() bool, waitFor time.Duration, tick t // Nil asserts that the specified object is nil. // -// a.Nil(err) +// a.Nil(err) func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -952,7 +1036,7 @@ func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) { // Nilf asserts that the specified object is nil. // -// a.Nilf(err, "error message %s", "formatted") +// a.Nilf(err, "error message %s", "formatted") func (a *Assertions) Nilf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -980,10 +1064,10 @@ func (a *Assertions) NoDirExistsf(path string, msg string, args ...interface{}) // NoError asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoError(err) { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoError(err) { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -993,10 +1077,10 @@ func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) { // NoErrorf asserts that a function returned no error (i.e. `nil`). // -// actualObj, err := SomeFunction() -// if a.NoErrorf(err, "error message %s", "formatted") { -// assert.Equal(t, expectedObj, actualObj) -// } +// actualObj, err := SomeFunction() +// if a.NoErrorf(err, "error message %s", "formatted") { +// assert.Equal(t, expectedObj, actualObj) +// } func (a *Assertions) NoErrorf(err error, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1025,9 +1109,9 @@ func (a *Assertions) NoFileExistsf(path string, msg string, args ...interface{}) // NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. 
// -// a.NotContains("Hello World", "Earth") -// a.NotContains(["Hello", "World"], "Earth") -// a.NotContains({"Hello": "World"}, "Earth") +// a.NotContains("Hello World", "Earth") +// a.NotContains(["Hello", "World"], "Earth") +// a.NotContains({"Hello": "World"}, "Earth") func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1038,9 +1122,9 @@ func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs // NotContainsf asserts that the specified string, list(array, slice...) or map does NOT contain the // specified substring or element. // -// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") -// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") -// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") +// a.NotContainsf("Hello World", "Earth", "error message %s", "formatted") +// a.NotContainsf(["Hello", "World"], "Earth", "error message %s", "formatted") +// a.NotContainsf({"Hello": "World"}, "Earth", "error message %s", "formatted") func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1051,9 +1135,9 @@ func (a *Assertions) NotContainsf(s interface{}, contains interface{}, msg strin // NotEmpty asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmpty(obj) { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmpty(obj) { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1064,9 +1148,9 @@ func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) { // NotEmptyf asserts that the specified object is NOT empty. I.e. not nil, "", false, 0 or either // a slice or a channel with len == 0. // -// if a.NotEmptyf(obj, "error message %s", "formatted") { -// assert.Equal(t, "two", obj[1]) -// } +// if a.NotEmptyf(obj, "error message %s", "formatted") { +// assert.Equal(t, "two", obj[1]) +// } func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1076,7 +1160,7 @@ func (a *Assertions) NotEmptyf(object interface{}, msg string, args ...interface // NotEqual asserts that the specified values are NOT equal. // -// a.NotEqual(obj1, obj2) +// a.NotEqual(obj1, obj2) // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). 
@@ -1089,7 +1173,7 @@ func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndAr // NotEqualValues asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValues(obj1, obj2) +// a.NotEqualValues(obj1, obj2) func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1099,7 +1183,7 @@ func (a *Assertions) NotEqualValues(expected interface{}, actual interface{}, ms // NotEqualValuesf asserts that two objects are not equal even when converted to the same type // -// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualValuesf(obj1, obj2, "error message %s", "formatted") func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1109,7 +1193,7 @@ func (a *Assertions) NotEqualValuesf(expected interface{}, actual interface{}, m // NotEqualf asserts that the specified values are NOT equal. // -// a.NotEqualf(obj1, obj2, "error message %s", "formatted") +// a.NotEqualf(obj1, obj2, "error message %s", "formatted") // // Pointer variable equality is determined based on the equality of the // referenced values (as opposed to the memory addresses). @@ -1140,7 +1224,7 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in // NotNil asserts that the specified object is not nil. // -// a.NotNil(err) +// a.NotNil(err) func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1150,7 +1234,7 @@ func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) { // NotNilf asserts that the specified object is not nil. // -// a.NotNilf(err, "error message %s", "formatted") +// a.NotNilf(err, "error message %s", "formatted") func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1160,7 +1244,7 @@ func (a *Assertions) NotNilf(object interface{}, msg string, args ...interface{} // NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic. // -// a.NotPanics(func(){ RemainCalm() }) +// a.NotPanics(func(){ RemainCalm() }) func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1170,7 +1254,7 @@ func (a *Assertions) NotPanics(f assert.PanicTestFunc, msgAndArgs ...interface{} // NotPanicsf asserts that the code inside the specified PanicTestFunc does NOT panic. // -// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") +// a.NotPanicsf(func(){ RemainCalm() }, "error message %s", "formatted") func (a *Assertions) NotPanicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1180,8 +1264,8 @@ func (a *Assertions) NotPanicsf(f assert.PanicTestFunc, msg string, args ...inte // NotRegexp asserts that a specified regexp does not match a string. 
// -// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") -// a.NotRegexp("^start", "it's not starting") +// a.NotRegexp(regexp.MustCompile("starts"), "it's starting") +// a.NotRegexp("^start", "it's not starting") func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1191,8 +1275,8 @@ func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...in // NotRegexpf asserts that a specified regexp does not match a string. // -// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") -// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") +// a.NotRegexpf(regexp.MustCompile("starts"), "it's starting", "error message %s", "formatted") +// a.NotRegexpf("^start", "it's not starting", "error message %s", "formatted") func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1202,7 +1286,7 @@ func (a *Assertions) NotRegexpf(rx interface{}, str interface{}, msg string, arg // NotSame asserts that two pointers do not reference the same object. // -// a.NotSame(ptr1, ptr2) +// a.NotSame(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1215,7 +1299,7 @@ func (a *Assertions) NotSame(expected interface{}, actual interface{}, msgAndArg // NotSamef asserts that two pointers do not reference the same object. // -// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") +// a.NotSamef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1229,7 +1313,7 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri // NotSubset asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") +// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]") func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1240,7 +1324,7 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs // NotSubsetf asserts that the specified list(array, slice...) contains not all // elements given in the specified subset(array, slice...). // -// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") +// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted") func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1266,7 +1350,7 @@ func (a *Assertions) NotZerof(i interface{}, msg string, args ...interface{}) { // Panics asserts that the code inside the specified PanicTestFunc panics. 
// -// a.Panics(func(){ GoCrazy() }) +// a.Panics(func(){ GoCrazy() }) func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1278,7 +1362,7 @@ func (a *Assertions) Panics(f assert.PanicTestFunc, msgAndArgs ...interface{}) { // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithError("crazy error", func(){ GoCrazy() }) +// a.PanicsWithError("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithError(errString string, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1290,7 +1374,7 @@ func (a *Assertions) PanicsWithError(errString string, f assert.PanicTestFunc, m // panics, and that the recovered panic value is an error that satisfies the // EqualError comparison. // -// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithErrorf("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithErrorf(errString string, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1301,7 +1385,7 @@ func (a *Assertions) PanicsWithErrorf(errString string, f assert.PanicTestFunc, // PanicsWithValue asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) +// a.PanicsWithValue("crazy error", func(){ GoCrazy() }) func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFunc, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1312,7 +1396,7 @@ func (a *Assertions) PanicsWithValue(expected interface{}, f assert.PanicTestFun // PanicsWithValuef asserts that the code inside the specified PanicTestFunc panics, and that // the recovered panic value equals the expected panic value. // -// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") +// a.PanicsWithValuef("crazy error", func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) PanicsWithValuef(expected interface{}, f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1322,7 +1406,7 @@ func (a *Assertions) PanicsWithValuef(expected interface{}, f assert.PanicTestFu // Panicsf asserts that the code inside the specified PanicTestFunc panics. 
// -// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") +// a.Panicsf(func(){ GoCrazy() }, "error message %s", "formatted") func (a *Assertions) Panicsf(f assert.PanicTestFunc, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1332,8 +1416,8 @@ func (a *Assertions) Panicsf(f assert.PanicTestFunc, msg string, args ...interfa // Positive asserts that the specified element is positive // -// a.Positive(1) -// a.Positive(1.23) +// a.Positive(1) +// a.Positive(1.23) func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1343,8 +1427,8 @@ func (a *Assertions) Positive(e interface{}, msgAndArgs ...interface{}) { // Positivef asserts that the specified element is positive // -// a.Positivef(1, "error message %s", "formatted") -// a.Positivef(1.23, "error message %s", "formatted") +// a.Positivef(1, "error message %s", "formatted") +// a.Positivef(1.23, "error message %s", "formatted") func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1354,8 +1438,8 @@ func (a *Assertions) Positivef(e interface{}, msg string, args ...interface{}) { // Regexp asserts that a specified regexp matches a string. // -// a.Regexp(regexp.MustCompile("start"), "it's starting") -// a.Regexp("start...$", "it's not starting") +// a.Regexp(regexp.MustCompile("start"), "it's starting") +// a.Regexp("start...$", "it's not starting") func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1365,8 +1449,8 @@ func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...inter // Regexpf asserts that a specified regexp matches a string. // -// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") -// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") +// a.Regexpf(regexp.MustCompile("start"), "it's starting", "error message %s", "formatted") +// a.Regexpf("start...$", "it's not starting", "error message %s", "formatted") func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1376,7 +1460,7 @@ func (a *Assertions) Regexpf(rx interface{}, str interface{}, msg string, args . // Same asserts that two pointers reference the same object. // -// a.Same(ptr1, ptr2) +// a.Same(ptr1, ptr2) // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1389,7 +1473,7 @@ func (a *Assertions) Same(expected interface{}, actual interface{}, msgAndArgs . // Samef asserts that two pointers reference the same object. // -// a.Samef(ptr1, ptr2, "error message %s", "formatted") +// a.Samef(ptr1, ptr2, "error message %s", "formatted") // // Both arguments must be pointer variables. Pointer variable sameness is // determined based on the equality of both type and value. @@ -1403,7 +1487,7 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string, // Subset asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). 
// -// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") +// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]") func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1414,7 +1498,7 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ... // Subsetf asserts that the specified list(array, slice...) contains all // elements given in the specified subset(array, slice...). // -// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") +// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted") func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1424,7 +1508,7 @@ func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, a // True asserts that the specified value is true. // -// a.True(myBool) +// a.True(myBool) func (a *Assertions) True(value bool, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1434,7 +1518,7 @@ func (a *Assertions) True(value bool, msgAndArgs ...interface{}) { // Truef asserts that the specified value is true. // -// a.Truef(myBool, "error message %s", "formatted") +// a.Truef(myBool, "error message %s", "formatted") func (a *Assertions) Truef(value bool, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1444,7 +1528,7 @@ func (a *Assertions) Truef(value bool, msg string, args ...interface{}) { // WithinDuration asserts that the two times are within duration delta of each other. // -// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) +// a.WithinDuration(time.Now(), time.Now(), 10*time.Second) func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1454,7 +1538,7 @@ func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta // WithinDurationf asserts that the two times are within duration delta of each other. // -// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") +// a.WithinDurationf(time.Now(), time.Now(), 10*time.Second, "error message %s", "formatted") func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta time.Duration, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1464,7 +1548,7 @@ func (a *Assertions) WithinDurationf(expected time.Time, actual time.Time, delta // WithinRange asserts that a time is within a time range (inclusive). // -// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) +// a.WithinRange(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second)) func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Time, msgAndArgs ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() @@ -1474,7 +1558,7 @@ func (a *Assertions) WithinRange(actual time.Time, start time.Time, end time.Tim // WithinRangef asserts that a time is within a time range (inclusive). 
// -// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") +// a.WithinRangef(time.Now(), time.Now().Add(-time.Second), time.Now().Add(time.Second), "error message %s", "formatted") func (a *Assertions) WithinRangef(actual time.Time, start time.Time, end time.Time, msg string, args ...interface{}) { if h, ok := a.t.(tHelper); ok { h.Helper() diff --git a/vendor/github.com/stretchr/testify/suite/doc.go b/vendor/github.com/stretchr/testify/suite/doc.go index f91a245d..8d55a3aa 100644 --- a/vendor/github.com/stretchr/testify/suite/doc.go +++ b/vendor/github.com/stretchr/testify/suite/doc.go @@ -29,37 +29,38 @@ // Suite object has assertion methods. // // A crude example: -// // Basic imports -// import ( -// "testing" -// "github.com/stretchr/testify/assert" -// "github.com/stretchr/testify/suite" -// ) // -// // Define the suite, and absorb the built-in basic suite -// // functionality from testify - including a T() method which -// // returns the current testing context -// type ExampleTestSuite struct { -// suite.Suite -// VariableThatShouldStartAtFive int -// } +// // Basic imports +// import ( +// "testing" +// "github.com/stretchr/testify/assert" +// "github.com/stretchr/testify/suite" +// ) // -// // Make sure that VariableThatShouldStartAtFive is set to five -// // before each test -// func (suite *ExampleTestSuite) SetupTest() { -// suite.VariableThatShouldStartAtFive = 5 -// } +// // Define the suite, and absorb the built-in basic suite +// // functionality from testify - including a T() method which +// // returns the current testing context +// type ExampleTestSuite struct { +// suite.Suite +// VariableThatShouldStartAtFive int +// } // -// // All methods that begin with "Test" are run as tests within a -// // suite. -// func (suite *ExampleTestSuite) TestExample() { -// assert.Equal(suite.T(), 5, suite.VariableThatShouldStartAtFive) -// suite.Equal(5, suite.VariableThatShouldStartAtFive) -// } +// // Make sure that VariableThatShouldStartAtFive is set to five +// // before each test +// func (suite *ExampleTestSuite) SetupTest() { +// suite.VariableThatShouldStartAtFive = 5 +// } // -// // In order for 'go test' to run this suite, we need to create -// // a normal test function and pass our suite to suite.Run -// func TestExampleTestSuite(t *testing.T) { -// suite.Run(t, new(ExampleTestSuite)) -// } +// // All methods that begin with "Test" are run as tests within a +// // suite. 
+// func (suite *ExampleTestSuite) TestExample() { +// assert.Equal(suite.T(), 5, suite.VariableThatShouldStartAtFive) +// suite.Equal(5, suite.VariableThatShouldStartAtFive) +// } +// +// // In order for 'go test' to run this suite, we need to create +// // a normal test function and pass our suite to suite.Run +// func TestExampleTestSuite(t *testing.T) { +// suite.Run(t, new(ExampleTestSuite)) +// } package suite diff --git a/vendor/github.com/ztrue/tracerr/.travis.yml b/vendor/github.com/ztrue/tracerr/.travis.yml index 6fa4d856..6e20d5cd 100644 --- a/vendor/github.com/ztrue/tracerr/.travis.yml +++ b/vendor/github.com/ztrue/tracerr/.travis.yml @@ -1,7 +1,6 @@ language: go before_install: - - go get github.com/logrusorgru/aurora - go get github.com/mattn/goveralls go: diff --git a/vendor/github.com/ztrue/tracerr/CHANGELOG.md b/vendor/github.com/ztrue/tracerr/CHANGELOG.md index b8560c97..7a8a2a84 100644 --- a/vendor/github.com/ztrue/tracerr/CHANGELOG.md +++ b/vendor/github.com/ztrue/tracerr/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.4.0] - 2023-05-21 + +### Changed + +- `github.com/logrusorgru/aurora` dependency removed. +- Deprecated `ioutil.ReadFile` replaced with `os.ReadFile`. + ## [0.3.0] - 2019-03-15 ### Added diff --git a/vendor/github.com/ztrue/tracerr/colors.go b/vendor/github.com/ztrue/tracerr/colors.go new file mode 100644 index 00000000..c96999e9 --- /dev/null +++ b/vendor/github.com/ztrue/tracerr/colors.go @@ -0,0 +1,27 @@ +package tracerr + +import ( + "fmt" +) + +// Colorize outputs using [ANSI Escape Codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + +func color(code int, in string) string { + return fmt.Sprintf("\x1b[%dm%s\x1b[0m", code, in) +} + +func bold(in string) string { + return color(1, in) +} + +func black(in string) string { + return color(30, in) +} + +func red(in string) string { + return color(31, in) +} + +func yellow(in string) string { + return color(33, in) +} diff --git a/vendor/github.com/ztrue/tracerr/print.go b/vendor/github.com/ztrue/tracerr/print.go index 2249ace6..c6d36406 100644 --- a/vendor/github.com/ztrue/tracerr/print.go +++ b/vendor/github.com/ztrue/tracerr/print.go @@ -2,11 +2,10 @@ package tracerr import ( "fmt" - "io/ioutil" + "os" + "strconv" "strings" "sync" - - "github.com/logrusorgru/aurora" ) // DefaultLinesAfter is number of source lines after traced line to display. @@ -26,7 +25,7 @@ func Print(err error) { // PrintSource prints error message with stack trace and source fragments. // -// By default 6 lines of source code will be printed, +// By default, 6 lines of source code will be printed, // see DefaultLinesAfter and DefaultLinesBefore. // // Pass a single number to specify a total number of source lines. 
@@ -95,7 +94,7 @@ func readLines(path string) ([]string, error) { return lines, nil } - b, err := ioutil.ReadFile(path) + b, err := os.ReadFile(path) if err != nil { return nil, fmt.Errorf("tracerr: file %s not found", path) } @@ -111,7 +110,7 @@ func sourceRows(rows []string, frame Frame, before, after int, colorized bool) [ if err != nil { message := err.Error() if colorized { - message = aurora.Brown(message).String() + message = yellow(message) } return append(rows, message, "") } @@ -121,7 +120,7 @@ func sourceRows(rows []string, frame Frame, before, after int, colorized bool) [ len(lines), frame.Line, ) if colorized { - message = aurora.Brown(message).String() + message = yellow(message) } return append(rows, message, "") } @@ -136,14 +135,14 @@ func sourceRows(rows []string, frame Frame, before, after int, colorized bool) [ var message string // TODO Pad to the same length. if i == frame.Line-1 { - message = fmt.Sprintf("%d\t%s", i+1, string(line)) + message = fmt.Sprintf("%d\t%s", i+1, line) if colorized { - message = aurora.Red(message).String() + message = red(message) } } else if colorized { - message = aurora.Sprintf("%d\t%s", aurora.Black(i+1), string(line)) + message = fmt.Sprintf("%s\t%s", black(strconv.Itoa(i+1)), line) } else { - message = fmt.Sprintf("%d\t%s", i+1, string(line)) + message = fmt.Sprintf("%d\t%s", i+1, line) } rows = append(rows, message) } @@ -172,7 +171,7 @@ func sprint(err error, nums []int, colorized bool) string { for _, frame := range frames { message := frame.String() if colorized { - message = aurora.Bold(message).String() + message = bold(message) } rows = append(rows, message) if withSource { diff --git a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go deleted file mode 100644 index a4d1919a..00000000 --- a/vendor/golang.org/x/crypto/ssh/terminal/terminal.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package terminal provides support functions for dealing with terminals, as -// commonly found on UNIX systems. -// -// Deprecated: this package moved to golang.org/x/term. -package terminal - -import ( - "io" - - "golang.org/x/term" -) - -// EscapeCodes contains escape sequences that can be written to the terminal in -// order to achieve different styles of text. -type EscapeCodes = term.EscapeCodes - -// Terminal contains the state for running a VT100 terminal that is capable of -// reading lines of input. -type Terminal = term.Terminal - -// NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is -// a local terminal, that terminal must first have been put into raw mode. -// prompt is a string that is written at the start of each input line (i.e. -// "> "). -func NewTerminal(c io.ReadWriter, prompt string) *Terminal { - return term.NewTerminal(c, prompt) -} - -// ErrPasteIndicator may be returned from ReadLine as the error, in addition -// to valid line data. It indicates that bracketed paste mode is enabled and -// that the returned line consists only of pasted data. Programs may wish to -// interpret pasted data more literally than typed data. -var ErrPasteIndicator = term.ErrPasteIndicator - -// State contains the state of a terminal. -type State = term.State - -// IsTerminal returns whether the given file descriptor is a terminal. 
-func IsTerminal(fd int) bool { - return term.IsTerminal(fd) -} - -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { - return term.ReadPassword(fd) -} - -// MakeRaw puts the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. -func MakeRaw(fd int) (*State, error) { - return term.MakeRaw(fd) -} - -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, oldState *State) error { - return term.Restore(fd, oldState) -} - -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. -func GetState(fd int) (*State, error) { - return term.GetState(fd) -} - -// GetSize returns the dimensions of the given terminal. -func GetSize(fd int) (width, height int, err error) { - return term.GetSize(fd) -} diff --git a/vendor/google.golang.org/protobuf/AUTHORS b/vendor/google.golang.org/protobuf/AUTHORS new file mode 100644 index 00000000..2b00ddba --- /dev/null +++ b/vendor/google.golang.org/protobuf/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at https://tip.golang.org/AUTHORS. diff --git a/vendor/google.golang.org/protobuf/CONTRIBUTORS b/vendor/google.golang.org/protobuf/CONTRIBUTORS new file mode 100644 index 00000000..1fbd3e97 --- /dev/null +++ b/vendor/google.golang.org/protobuf/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at https://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/crypto/LICENSE b/vendor/google.golang.org/protobuf/LICENSE similarity index 96% rename from vendor/golang.org/x/crypto/LICENSE rename to vendor/google.golang.org/protobuf/LICENSE index 6a66aea5..49ea0f92 100644 --- a/vendor/golang.org/x/crypto/LICENSE +++ b/vendor/google.golang.org/protobuf/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. +Copyright (c) 2018 The Go Authors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/vendor/golang.org/x/crypto/PATENTS b/vendor/google.golang.org/protobuf/PATENTS similarity index 100% rename from vendor/golang.org/x/crypto/PATENTS rename to vendor/google.golang.org/protobuf/PATENTS diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/decode.go b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go new file mode 100644 index 00000000..8fb1d9e0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/encoding/prototext/decode.go @@ -0,0 +1,773 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package prototext + +import ( + "fmt" + "unicode/utf8" + + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/encoding/text" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/internal/set" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + pref "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +// Unmarshal reads the given []byte into the given proto.Message. +// The provided message must be mutable (e.g., a non-nil pointer to a message). +func Unmarshal(b []byte, m proto.Message) error { + return UnmarshalOptions{}.Unmarshal(b, m) +} + +// UnmarshalOptions is a configurable textproto format unmarshaler. +type UnmarshalOptions struct { + pragma.NoUnkeyedLiterals + + // AllowPartial accepts input for messages that will result in missing + // required fields. If AllowPartial is false (the default), Unmarshal will + // return error if there are any missing required fields. + AllowPartial bool + + // DiscardUnknown specifies whether to ignore unknown fields when parsing. + // An unknown field is any field whose field name or field number does not + // resolve to any known or extension field in the message. + // By default, unmarshal rejects unknown fields as an error. + DiscardUnknown bool + + // Resolver is used for looking up types when unmarshaling + // google.protobuf.Any messages or extension fields. + // If nil, this defaults to using protoregistry.GlobalTypes. + Resolver interface { + protoregistry.MessageTypeResolver + protoregistry.ExtensionTypeResolver + } +} + +// Unmarshal reads the given []byte and populates the given proto.Message +// using options in the UnmarshalOptions object. +// The provided message must be mutable (e.g., a non-nil pointer to a message). +func (o UnmarshalOptions) Unmarshal(b []byte, m proto.Message) error { + return o.unmarshal(b, m) +} + +// unmarshal is a centralized function that all unmarshal operations go through. +// For profiling purposes, avoid changing the name of this function or +// introducing other code paths for unmarshal that do not go through this. +func (o UnmarshalOptions) unmarshal(b []byte, m proto.Message) error { + proto.Reset(m) + + if o.Resolver == nil { + o.Resolver = protoregistry.GlobalTypes + } + + dec := decoder{text.NewDecoder(b), o} + if err := dec.unmarshalMessage(m.ProtoReflect(), false); err != nil { + return err + } + if o.AllowPartial { + return nil + } + return proto.CheckInitialized(m) +} + +type decoder struct { + *text.Decoder + opts UnmarshalOptions +} + +// newError returns an error object with position info. +func (d decoder) newError(pos int, f string, x ...interface{}) error { + line, column := d.Position(pos) + head := fmt.Sprintf("(line %d:%d): ", line, column) + return errors.New(head+f, x...) +} + +// unexpectedTokenError returns a syntax error for the given unexpected token. +func (d decoder) unexpectedTokenError(tok text.Token) error { + return d.syntaxError(tok.Pos(), "unexpected token: %s", tok.RawString()) +} + +// syntaxError returns a syntax error for given position. +func (d decoder) syntaxError(pos int, f string, x ...interface{}) error { + line, column := d.Position(pos) + head := fmt.Sprintf("syntax error (line %d:%d): ", line, column) + return errors.New(head+f, x...) 
+} + +// unmarshalMessage unmarshals into the given protoreflect.Message. +func (d decoder) unmarshalMessage(m pref.Message, checkDelims bool) error { + messageDesc := m.Descriptor() + if !flags.ProtoLegacy && messageset.IsMessageSet(messageDesc) { + return errors.New("no support for proto1 MessageSets") + } + + if messageDesc.FullName() == genid.Any_message_fullname { + return d.unmarshalAny(m, checkDelims) + } + + if checkDelims { + tok, err := d.Read() + if err != nil { + return err + } + + if tok.Kind() != text.MessageOpen { + return d.unexpectedTokenError(tok) + } + } + + var seenNums set.Ints + var seenOneofs set.Ints + fieldDescs := messageDesc.Fields() + + for { + // Read field name. + tok, err := d.Read() + if err != nil { + return err + } + switch typ := tok.Kind(); typ { + case text.Name: + // Continue below. + case text.EOF: + if checkDelims { + return text.ErrUnexpectedEOF + } + return nil + default: + if checkDelims && typ == text.MessageClose { + return nil + } + return d.unexpectedTokenError(tok) + } + + // Resolve the field descriptor. + var name pref.Name + var fd pref.FieldDescriptor + var xt pref.ExtensionType + var xtErr error + var isFieldNumberName bool + + switch tok.NameKind() { + case text.IdentName: + name = pref.Name(tok.IdentName()) + fd = fieldDescs.ByTextName(string(name)) + + case text.TypeName: + // Handle extensions only. This code path is not for Any. + xt, xtErr = d.opts.Resolver.FindExtensionByName(pref.FullName(tok.TypeName())) + + case text.FieldNumber: + isFieldNumberName = true + num := pref.FieldNumber(tok.FieldNumber()) + if !num.IsValid() { + return d.newError(tok.Pos(), "invalid field number: %d", num) + } + fd = fieldDescs.ByNumber(num) + if fd == nil { + xt, xtErr = d.opts.Resolver.FindExtensionByNumber(messageDesc.FullName(), num) + } + } + + if xt != nil { + fd = xt.TypeDescriptor() + if !messageDesc.ExtensionRanges().Has(fd.Number()) || fd.ContainingMessage().FullName() != messageDesc.FullName() { + return d.newError(tok.Pos(), "message %v cannot be extended by %v", messageDesc.FullName(), fd.FullName()) + } + } else if xtErr != nil && xtErr != protoregistry.NotFound { + return d.newError(tok.Pos(), "unable to resolve [%s]: %v", tok.RawString(), xtErr) + } + if flags.ProtoLegacy { + if fd != nil && fd.IsWeak() && fd.Message().IsPlaceholder() { + fd = nil // reset since the weak reference is not linked in + } + } + + // Handle unknown fields. + if fd == nil { + if d.opts.DiscardUnknown || messageDesc.ReservedNames().Has(name) { + d.skipValue() + continue + } + return d.newError(tok.Pos(), "unknown field: %v", tok.RawString()) + } + + // Handle fields identified by field number. + if isFieldNumberName { + // TODO: Add an option to permit parsing field numbers. + // + // This requires careful thought as the MarshalOptions.EmitUnknown + // option allows formatting unknown fields as the field number and the + // best-effort textual representation of the field value. In that case, + // it may not be possible to unmarshal the value from a parser that does + // have information about the unknown field. 
+ return d.newError(tok.Pos(), "cannot specify field by number: %v", tok.RawString()) + } + + switch { + case fd.IsList(): + kind := fd.Kind() + if kind != pref.MessageKind && kind != pref.GroupKind && !tok.HasSeparator() { + return d.syntaxError(tok.Pos(), "missing field separator :") + } + + list := m.Mutable(fd).List() + if err := d.unmarshalList(fd, list); err != nil { + return err + } + + case fd.IsMap(): + mmap := m.Mutable(fd).Map() + if err := d.unmarshalMap(fd, mmap); err != nil { + return err + } + + default: + kind := fd.Kind() + if kind != pref.MessageKind && kind != pref.GroupKind && !tok.HasSeparator() { + return d.syntaxError(tok.Pos(), "missing field separator :") + } + + // If field is a oneof, check if it has already been set. + if od := fd.ContainingOneof(); od != nil { + idx := uint64(od.Index()) + if seenOneofs.Has(idx) { + return d.newError(tok.Pos(), "error parsing %q, oneof %v is already set", tok.RawString(), od.FullName()) + } + seenOneofs.Set(idx) + } + + num := uint64(fd.Number()) + if seenNums.Has(num) { + return d.newError(tok.Pos(), "non-repeated field %q is repeated", tok.RawString()) + } + + if err := d.unmarshalSingular(fd, m); err != nil { + return err + } + seenNums.Set(num) + } + } + + return nil +} + +// unmarshalSingular unmarshals a non-repeated field value specified by the +// given FieldDescriptor. +func (d decoder) unmarshalSingular(fd pref.FieldDescriptor, m pref.Message) error { + var val pref.Value + var err error + switch fd.Kind() { + case pref.MessageKind, pref.GroupKind: + val = m.NewField(fd) + err = d.unmarshalMessage(val.Message(), true) + default: + val, err = d.unmarshalScalar(fd) + } + if err == nil { + m.Set(fd, val) + } + return err +} + +// unmarshalScalar unmarshals a scalar/enum protoreflect.Value specified by the +// given FieldDescriptor. +func (d decoder) unmarshalScalar(fd pref.FieldDescriptor) (pref.Value, error) { + tok, err := d.Read() + if err != nil { + return pref.Value{}, err + } + + if tok.Kind() != text.Scalar { + return pref.Value{}, d.unexpectedTokenError(tok) + } + + kind := fd.Kind() + switch kind { + case pref.BoolKind: + if b, ok := tok.Bool(); ok { + return pref.ValueOfBool(b), nil + } + + case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind: + if n, ok := tok.Int32(); ok { + return pref.ValueOfInt32(n), nil + } + + case pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: + if n, ok := tok.Int64(); ok { + return pref.ValueOfInt64(n), nil + } + + case pref.Uint32Kind, pref.Fixed32Kind: + if n, ok := tok.Uint32(); ok { + return pref.ValueOfUint32(n), nil + } + + case pref.Uint64Kind, pref.Fixed64Kind: + if n, ok := tok.Uint64(); ok { + return pref.ValueOfUint64(n), nil + } + + case pref.FloatKind: + if n, ok := tok.Float32(); ok { + return pref.ValueOfFloat32(n), nil + } + + case pref.DoubleKind: + if n, ok := tok.Float64(); ok { + return pref.ValueOfFloat64(n), nil + } + + case pref.StringKind: + if s, ok := tok.String(); ok { + if strs.EnforceUTF8(fd) && !utf8.ValidString(s) { + return pref.Value{}, d.newError(tok.Pos(), "contains invalid UTF-8") + } + return pref.ValueOfString(s), nil + } + + case pref.BytesKind: + if b, ok := tok.String(); ok { + return pref.ValueOfBytes([]byte(b)), nil + } + + case pref.EnumKind: + if lit, ok := tok.Enum(); ok { + // Lookup EnumNumber based on name. 
+ if enumVal := fd.Enum().Values().ByName(pref.Name(lit)); enumVal != nil { + return pref.ValueOfEnum(enumVal.Number()), nil + } + } + if num, ok := tok.Int32(); ok { + return pref.ValueOfEnum(pref.EnumNumber(num)), nil + } + + default: + panic(fmt.Sprintf("invalid scalar kind %v", kind)) + } + + return pref.Value{}, d.newError(tok.Pos(), "invalid value for %v type: %v", kind, tok.RawString()) +} + +// unmarshalList unmarshals into given protoreflect.List. A list value can +// either be in [] syntax or simply just a single scalar/message value. +func (d decoder) unmarshalList(fd pref.FieldDescriptor, list pref.List) error { + tok, err := d.Peek() + if err != nil { + return err + } + + switch fd.Kind() { + case pref.MessageKind, pref.GroupKind: + switch tok.Kind() { + case text.ListOpen: + d.Read() + for { + tok, err := d.Peek() + if err != nil { + return err + } + + switch tok.Kind() { + case text.ListClose: + d.Read() + return nil + case text.MessageOpen: + pval := list.NewElement() + if err := d.unmarshalMessage(pval.Message(), true); err != nil { + return err + } + list.Append(pval) + default: + return d.unexpectedTokenError(tok) + } + } + + case text.MessageOpen: + pval := list.NewElement() + if err := d.unmarshalMessage(pval.Message(), true); err != nil { + return err + } + list.Append(pval) + return nil + } + + default: + switch tok.Kind() { + case text.ListOpen: + d.Read() + for { + tok, err := d.Peek() + if err != nil { + return err + } + + switch tok.Kind() { + case text.ListClose: + d.Read() + return nil + case text.Scalar: + pval, err := d.unmarshalScalar(fd) + if err != nil { + return err + } + list.Append(pval) + default: + return d.unexpectedTokenError(tok) + } + } + + case text.Scalar: + pval, err := d.unmarshalScalar(fd) + if err != nil { + return err + } + list.Append(pval) + return nil + } + } + + return d.unexpectedTokenError(tok) +} + +// unmarshalMap unmarshals into given protoreflect.Map. A map value is a +// textproto message containing {key: , value: }. +func (d decoder) unmarshalMap(fd pref.FieldDescriptor, mmap pref.Map) error { + // Determine ahead whether map entry is a scalar type or a message type in + // order to call the appropriate unmarshalMapValue func inside + // unmarshalMapEntry. + var unmarshalMapValue func() (pref.Value, error) + switch fd.MapValue().Kind() { + case pref.MessageKind, pref.GroupKind: + unmarshalMapValue = func() (pref.Value, error) { + pval := mmap.NewValue() + if err := d.unmarshalMessage(pval.Message(), true); err != nil { + return pref.Value{}, err + } + return pval, nil + } + default: + unmarshalMapValue = func() (pref.Value, error) { + return d.unmarshalScalar(fd.MapValue()) + } + } + + tok, err := d.Read() + if err != nil { + return err + } + switch tok.Kind() { + case text.MessageOpen: + return d.unmarshalMapEntry(fd, mmap, unmarshalMapValue) + + case text.ListOpen: + for { + tok, err := d.Read() + if err != nil { + return err + } + switch tok.Kind() { + case text.ListClose: + return nil + case text.MessageOpen: + if err := d.unmarshalMapEntry(fd, mmap, unmarshalMapValue); err != nil { + return err + } + default: + return d.unexpectedTokenError(tok) + } + } + + default: + return d.unexpectedTokenError(tok) + } +} + +// unmarshalMap unmarshals into given protoreflect.Map. A map value is a +// textproto message containing {key: , value: }. 
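Editorial aside, not part of the vendored file: as the map handling above and below spells out, a textproto map field is written as one {key: ... value: ...} entry message per item (or a list of such entries). A minimal sketch of how that surfaces through the public prototext API; google.protobuf.Struct is used only because it is a well-known type carrying a map<string, Value> field.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	// Each entry of the map<string, Value> field "fields" is a nested
	// {key: ... value: ...} message in the text format.
	src := []byte(`fields: {key: "replicas" value: {number_value: 3}}`)

	var s structpb.Struct
	if err := prototext.Unmarshal(src, &s); err != nil {
		panic(err)
	}
	fmt.Println(s.Fields["replicas"].GetNumberValue()) // 3
}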
+func (d decoder) unmarshalMapEntry(fd pref.FieldDescriptor, mmap pref.Map, unmarshalMapValue func() (pref.Value, error)) error { + var key pref.MapKey + var pval pref.Value +Loop: + for { + // Read field name. + tok, err := d.Read() + if err != nil { + return err + } + switch tok.Kind() { + case text.Name: + if tok.NameKind() != text.IdentName { + if !d.opts.DiscardUnknown { + return d.newError(tok.Pos(), "unknown map entry field %q", tok.RawString()) + } + d.skipValue() + continue Loop + } + // Continue below. + case text.MessageClose: + break Loop + default: + return d.unexpectedTokenError(tok) + } + + switch name := pref.Name(tok.IdentName()); name { + case genid.MapEntry_Key_field_name: + if !tok.HasSeparator() { + return d.syntaxError(tok.Pos(), "missing field separator :") + } + if key.IsValid() { + return d.newError(tok.Pos(), "map entry %q cannot be repeated", name) + } + val, err := d.unmarshalScalar(fd.MapKey()) + if err != nil { + return err + } + key = val.MapKey() + + case genid.MapEntry_Value_field_name: + if kind := fd.MapValue().Kind(); (kind != pref.MessageKind) && (kind != pref.GroupKind) { + if !tok.HasSeparator() { + return d.syntaxError(tok.Pos(), "missing field separator :") + } + } + if pval.IsValid() { + return d.newError(tok.Pos(), "map entry %q cannot be repeated", name) + } + pval, err = unmarshalMapValue() + if err != nil { + return err + } + + default: + if !d.opts.DiscardUnknown { + return d.newError(tok.Pos(), "unknown map entry field %q", name) + } + d.skipValue() + } + } + + if !key.IsValid() { + key = fd.MapKey().Default().MapKey() + } + if !pval.IsValid() { + switch fd.MapValue().Kind() { + case pref.MessageKind, pref.GroupKind: + // If value field is not set for message/group types, construct an + // empty one as default. + pval = mmap.NewValue() + default: + pval = fd.MapValue().Default() + } + } + mmap.Set(key, pval) + return nil +} + +// unmarshalAny unmarshals an Any textproto. It can either be in expanded form +// or non-expanded form. +func (d decoder) unmarshalAny(m pref.Message, checkDelims bool) error { + var typeURL string + var bValue []byte + var seenTypeUrl bool + var seenValue bool + var isExpanded bool + + if checkDelims { + tok, err := d.Read() + if err != nil { + return err + } + + if tok.Kind() != text.MessageOpen { + return d.unexpectedTokenError(tok) + } + } + +Loop: + for { + // Read field name. Can only have 3 possible field names, i.e. type_url, + // value and type URL name inside []. + tok, err := d.Read() + if err != nil { + return err + } + if typ := tok.Kind(); typ != text.Name { + if checkDelims { + if typ == text.MessageClose { + break Loop + } + } else if typ == text.EOF { + break Loop + } + return d.unexpectedTokenError(tok) + } + + switch tok.NameKind() { + case text.IdentName: + // Both type_url and value fields require field separator :. 
+ if !tok.HasSeparator() { + return d.syntaxError(tok.Pos(), "missing field separator :") + } + + switch name := pref.Name(tok.IdentName()); name { + case genid.Any_TypeUrl_field_name: + if seenTypeUrl { + return d.newError(tok.Pos(), "duplicate %v field", genid.Any_TypeUrl_field_fullname) + } + if isExpanded { + return d.newError(tok.Pos(), "conflict with [%s] field", typeURL) + } + tok, err := d.Read() + if err != nil { + return err + } + var ok bool + typeURL, ok = tok.String() + if !ok { + return d.newError(tok.Pos(), "invalid %v field value: %v", genid.Any_TypeUrl_field_fullname, tok.RawString()) + } + seenTypeUrl = true + + case genid.Any_Value_field_name: + if seenValue { + return d.newError(tok.Pos(), "duplicate %v field", genid.Any_Value_field_fullname) + } + if isExpanded { + return d.newError(tok.Pos(), "conflict with [%s] field", typeURL) + } + tok, err := d.Read() + if err != nil { + return err + } + s, ok := tok.String() + if !ok { + return d.newError(tok.Pos(), "invalid %v field value: %v", genid.Any_Value_field_fullname, tok.RawString()) + } + bValue = []byte(s) + seenValue = true + + default: + if !d.opts.DiscardUnknown { + return d.newError(tok.Pos(), "invalid field name %q in %v message", tok.RawString(), genid.Any_message_fullname) + } + } + + case text.TypeName: + if isExpanded { + return d.newError(tok.Pos(), "cannot have more than one type") + } + if seenTypeUrl { + return d.newError(tok.Pos(), "conflict with type_url field") + } + typeURL = tok.TypeName() + var err error + bValue, err = d.unmarshalExpandedAny(typeURL, tok.Pos()) + if err != nil { + return err + } + isExpanded = true + + default: + if !d.opts.DiscardUnknown { + return d.newError(tok.Pos(), "invalid field name %q in %v message", tok.RawString(), genid.Any_message_fullname) + } + } + } + + fds := m.Descriptor().Fields() + if len(typeURL) > 0 { + m.Set(fds.ByNumber(genid.Any_TypeUrl_field_number), pref.ValueOfString(typeURL)) + } + if len(bValue) > 0 { + m.Set(fds.ByNumber(genid.Any_Value_field_number), pref.ValueOfBytes(bValue)) + } + return nil +} + +func (d decoder) unmarshalExpandedAny(typeURL string, pos int) ([]byte, error) { + mt, err := d.opts.Resolver.FindMessageByURL(typeURL) + if err != nil { + return nil, d.newError(pos, "unable to resolve message [%v]: %v", typeURL, err) + } + // Create new message for the embedded message type and unmarshal the value + // field into it. + m := mt.New() + if err := d.unmarshalMessage(m, true); err != nil { + return nil, err + } + // Serialize the embedded message and return the resulting bytes. + b, err := proto.MarshalOptions{ + AllowPartial: true, // Never check required fields inside an Any. + Deterministic: true, + }.Marshal(m.Interface()) + if err != nil { + return nil, d.newError(pos, "error in marshaling message into Any.value: %v", err) + } + return b, nil +} + +// skipValue makes the decoder parse a field value in order to advance the read +// to the next field. It relies on Read returning an error if the types are not +// in valid sequence. +func (d decoder) skipValue() error { + tok, err := d.Read() + if err != nil { + return err + } + // Only need to continue reading for messages and lists. + switch tok.Kind() { + case text.MessageOpen: + return d.skipMessageValue() + + case text.ListOpen: + for { + tok, err := d.Read() + if err != nil { + return err + } + switch tok.Kind() { + case text.ListClose: + return nil + case text.MessageOpen: + return d.skipMessageValue() + default: + // Skip items. 
This will not validate whether skipped values are + // of the same type or not, same behavior as C++ + // TextFormat::Parser::AllowUnknownField(true) version 3.8.0. + if err := d.skipValue(); err != nil { + return err + } + } + } + } + return nil +} + +// skipMessageValue makes the decoder parse and skip over all fields in a +// message. It assumes that the previous read type is MessageOpen. +func (d decoder) skipMessageValue() error { + for { + tok, err := d.Read() + if err != nil { + return err + } + switch tok.Kind() { + case text.MessageClose: + return nil + case text.Name: + if err := d.skipValue(); err != nil { + return err + } + } + } +} diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/doc.go b/vendor/google.golang.org/protobuf/encoding/prototext/doc.go new file mode 100644 index 00000000..162b4f98 --- /dev/null +++ b/vendor/google.golang.org/protobuf/encoding/prototext/doc.go @@ -0,0 +1,7 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package prototext marshals and unmarshals protocol buffer messages as the +// textproto format. +package prototext diff --git a/vendor/google.golang.org/protobuf/encoding/prototext/encode.go b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go new file mode 100644 index 00000000..8d5304dc --- /dev/null +++ b/vendor/google.golang.org/protobuf/encoding/prototext/encode.go @@ -0,0 +1,371 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package prototext + +import ( + "fmt" + "strconv" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/encoding/text" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/order" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +const defaultIndent = " " + +// Format formats the message as a multiline string. +// This function is only intended for human consumption and ignores errors. +// Do not depend on the output being stable. It may change over time across +// different versions of the program. +func Format(m proto.Message) string { + return MarshalOptions{Multiline: true}.Format(m) +} + +// Marshal writes the given proto.Message in textproto format using default +// options. Do not depend on the output being stable. It may change over time +// across different versions of the program. +func Marshal(m proto.Message) ([]byte, error) { + return MarshalOptions{}.Marshal(m) +} + +// MarshalOptions is a configurable text format marshaler. +type MarshalOptions struct { + pragma.NoUnkeyedLiterals + + // Multiline specifies whether the marshaler should format the output in + // indented-form with every textual element on a new line. + // If Indent is an empty string, then an arbitrary indent is chosen. 
+ Multiline bool + + // Indent specifies the set of indentation characters to use in a multiline + // formatted output such that every entry is preceded by Indent and + // terminated by a newline. If non-empty, then Multiline is treated as true. + // Indent can only be composed of space or tab characters. + Indent string + + // EmitASCII specifies whether to format strings and bytes as ASCII only + // as opposed to using UTF-8 encoding when possible. + EmitASCII bool + + // allowInvalidUTF8 specifies whether to permit the encoding of strings + // with invalid UTF-8. This is unexported as it is intended to only + // be specified by the Format method. + allowInvalidUTF8 bool + + // AllowPartial allows messages that have missing required fields to marshal + // without returning an error. If AllowPartial is false (the default), + // Marshal will return error if there are any missing required fields. + AllowPartial bool + + // EmitUnknown specifies whether to emit unknown fields in the output. + // If specified, the unmarshaler may be unable to parse the output. + // The default is to exclude unknown fields. + EmitUnknown bool + + // Resolver is used for looking up types when expanding google.protobuf.Any + // messages. If nil, this defaults to using protoregistry.GlobalTypes. + Resolver interface { + protoregistry.ExtensionTypeResolver + protoregistry.MessageTypeResolver + } +} + +// Format formats the message as a string. +// This method is only intended for human consumption and ignores errors. +// Do not depend on the output being stable. It may change over time across +// different versions of the program. +func (o MarshalOptions) Format(m proto.Message) string { + if m == nil || !m.ProtoReflect().IsValid() { + return "" // invalid syntax, but okay since this is for debugging + } + o.allowInvalidUTF8 = true + o.AllowPartial = true + o.EmitUnknown = true + b, _ := o.Marshal(m) + return string(b) +} + +// Marshal writes the given proto.Message in textproto format using options in +// MarshalOptions object. Do not depend on the output being stable. It may +// change over time across different versions of the program. +func (o MarshalOptions) Marshal(m proto.Message) ([]byte, error) { + return o.marshal(m) +} + +// marshal is a centralized function that all marshal operations go through. +// For profiling purposes, avoid changing the name of this function or +// introducing other code paths for marshal that do not go through this. +func (o MarshalOptions) marshal(m proto.Message) ([]byte, error) { + var delims = [2]byte{'{', '}'} + + if o.Multiline && o.Indent == "" { + o.Indent = defaultIndent + } + if o.Resolver == nil { + o.Resolver = protoregistry.GlobalTypes + } + + internalEnc, err := text.NewEncoder(o.Indent, delims, o.EmitASCII) + if err != nil { + return nil, err + } + + // Treat nil message interface as an empty message, + // in which case there is nothing to output. + if m == nil { + return []byte{}, nil + } + + enc := encoder{internalEnc, o} + err = enc.marshalMessage(m.ProtoReflect(), false) + if err != nil { + return nil, err + } + out := enc.Bytes() + if len(o.Indent) > 0 && len(out) > 0 { + out = append(out, '\n') + } + if o.AllowPartial { + return out, nil + } + return out, proto.CheckInitialized(m) +} + +type encoder struct { + *text.Encoder + opts MarshalOptions +} + +// marshalMessage marshals the given protoreflect.Message. 
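A hedged usage sketch of the options and entry points above (an editorial example, not part of the vendored file): Marshal emits the compact one-line form, while Multiline/Format produce indented output; in both cases the exact text is deliberately unstable and should not be relied on.

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	d := durationpb.New(90 * time.Second)

	// Compact form, roughly `seconds:90` (spacing may vary between builds).
	b, err := prototext.Marshal(d)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%s\n", b)

	// Multiline form; Format ignores errors and is intended for debugging only.
	fmt.Print(prototext.MarshalOptions{Multiline: true}.Format(d))
}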
+func (e encoder) marshalMessage(m pref.Message, inclDelims bool) error { + messageDesc := m.Descriptor() + if !flags.ProtoLegacy && messageset.IsMessageSet(messageDesc) { + return errors.New("no support for proto1 MessageSets") + } + + if inclDelims { + e.StartMessage() + defer e.EndMessage() + } + + // Handle Any expansion. + if messageDesc.FullName() == genid.Any_message_fullname { + if e.marshalAny(m) { + return nil + } + // If unable to expand, continue on to marshal Any as a regular message. + } + + // Marshal fields. + var err error + order.RangeFields(m, order.IndexNameFieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if err = e.marshalField(fd.TextName(), v, fd); err != nil { + return false + } + return true + }) + if err != nil { + return err + } + + // Marshal unknown fields. + if e.opts.EmitUnknown { + e.marshalUnknown(m.GetUnknown()) + } + + return nil +} + +// marshalField marshals the given field with protoreflect.Value. +func (e encoder) marshalField(name string, val pref.Value, fd pref.FieldDescriptor) error { + switch { + case fd.IsList(): + return e.marshalList(name, val.List(), fd) + case fd.IsMap(): + return e.marshalMap(name, val.Map(), fd) + default: + e.WriteName(name) + return e.marshalSingular(val, fd) + } +} + +// marshalSingular marshals the given non-repeated field value. This includes +// all scalar types, enums, messages, and groups. +func (e encoder) marshalSingular(val pref.Value, fd pref.FieldDescriptor) error { + kind := fd.Kind() + switch kind { + case pref.BoolKind: + e.WriteBool(val.Bool()) + + case pref.StringKind: + s := val.String() + if !e.opts.allowInvalidUTF8 && strs.EnforceUTF8(fd) && !utf8.ValidString(s) { + return errors.InvalidUTF8(string(fd.FullName())) + } + e.WriteString(s) + + case pref.Int32Kind, pref.Int64Kind, + pref.Sint32Kind, pref.Sint64Kind, + pref.Sfixed32Kind, pref.Sfixed64Kind: + e.WriteInt(val.Int()) + + case pref.Uint32Kind, pref.Uint64Kind, + pref.Fixed32Kind, pref.Fixed64Kind: + e.WriteUint(val.Uint()) + + case pref.FloatKind: + // Encoder.WriteFloat handles the special numbers NaN and infinites. + e.WriteFloat(val.Float(), 32) + + case pref.DoubleKind: + // Encoder.WriteFloat handles the special numbers NaN and infinites. + e.WriteFloat(val.Float(), 64) + + case pref.BytesKind: + e.WriteString(string(val.Bytes())) + + case pref.EnumKind: + num := val.Enum() + if desc := fd.Enum().Values().ByNumber(num); desc != nil { + e.WriteLiteral(string(desc.Name())) + } else { + // Use numeric value if there is no enum description. + e.WriteInt(int64(num)) + } + + case pref.MessageKind, pref.GroupKind: + return e.marshalMessage(val.Message(), true) + + default: + panic(fmt.Sprintf("%v has unknown kind: %v", fd.FullName(), kind)) + } + return nil +} + +// marshalList marshals the given protoreflect.List as multiple name-value fields. +func (e encoder) marshalList(name string, list pref.List, fd pref.FieldDescriptor) error { + size := list.Len() + for i := 0; i < size; i++ { + e.WriteName(name) + if err := e.marshalSingular(list.Get(i), fd); err != nil { + return err + } + } + return nil +} + +// marshalMap marshals the given protoreflect.Map as multiple name-value fields. 
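The Any expansion that marshalMessage delegates to marshalAny (further down) is easiest to see from the outside. A self-contained editorial sketch, assuming only well-known types so the embedded message is resolvable through the global registry:

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/types/known/anypb"
	"google.golang.org/protobuf/types/known/wrapperspb"
)

func main() {
	// Wrap a StringValue in google.protobuf.Any. Because wrapperspb is linked
	// in, the resolver can find the embedded type and the encoder prints the
	// expanded form, roughly:
	//   [type.googleapis.com/google.protobuf.StringValue]: {value: "hi"}
	a, err := anypb.New(wrapperspb.String("hi"))
	if err != nil {
		panic(err)
	}
	out := prototext.Format(a)
	fmt.Print(out)

	// The expanded form round-trips back into type_url/value bytes.
	var back anypb.Any
	if err := prototext.Unmarshal([]byte(out), &back); err != nil {
		panic(err)
	}
	fmt.Println(back.TypeUrl)
}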
+func (e encoder) marshalMap(name string, mmap pref.Map, fd pref.FieldDescriptor) error { + var err error + order.RangeEntries(mmap, order.GenericKeyOrder, func(key pref.MapKey, val pref.Value) bool { + e.WriteName(name) + e.StartMessage() + defer e.EndMessage() + + e.WriteName(string(genid.MapEntry_Key_field_name)) + err = e.marshalSingular(key.Value(), fd.MapKey()) + if err != nil { + return false + } + + e.WriteName(string(genid.MapEntry_Value_field_name)) + err = e.marshalSingular(val, fd.MapValue()) + if err != nil { + return false + } + return true + }) + return err +} + +// marshalUnknown parses the given []byte and marshals fields out. +// This function assumes proper encoding in the given []byte. +func (e encoder) marshalUnknown(b []byte) { + const dec = 10 + const hex = 16 + for len(b) > 0 { + num, wtype, n := protowire.ConsumeTag(b) + b = b[n:] + e.WriteName(strconv.FormatInt(int64(num), dec)) + + switch wtype { + case protowire.VarintType: + var v uint64 + v, n = protowire.ConsumeVarint(b) + e.WriteUint(v) + case protowire.Fixed32Type: + var v uint32 + v, n = protowire.ConsumeFixed32(b) + e.WriteLiteral("0x" + strconv.FormatUint(uint64(v), hex)) + case protowire.Fixed64Type: + var v uint64 + v, n = protowire.ConsumeFixed64(b) + e.WriteLiteral("0x" + strconv.FormatUint(v, hex)) + case protowire.BytesType: + var v []byte + v, n = protowire.ConsumeBytes(b) + e.WriteString(string(v)) + case protowire.StartGroupType: + e.StartMessage() + var v []byte + v, n = protowire.ConsumeGroup(num, b) + e.marshalUnknown(v) + e.EndMessage() + default: + panic(fmt.Sprintf("prototext: error parsing unknown field wire type: %v", wtype)) + } + + b = b[n:] + } +} + +// marshalAny marshals the given google.protobuf.Any message in expanded form. +// It returns true if it was able to marshal, else false. +func (e encoder) marshalAny(any pref.Message) bool { + // Construct the embedded message. + fds := any.Descriptor().Fields() + fdType := fds.ByNumber(genid.Any_TypeUrl_field_number) + typeURL := any.Get(fdType).String() + mt, err := e.opts.Resolver.FindMessageByURL(typeURL) + if err != nil { + return false + } + m := mt.New().Interface() + + // Unmarshal bytes into embedded message. + fdValue := fds.ByNumber(genid.Any_Value_field_number) + value := any.Get(fdValue) + err = proto.UnmarshalOptions{ + AllowPartial: true, + Resolver: e.opts.Resolver, + }.Unmarshal(value.Bytes(), m) + if err != nil { + return false + } + + // Get current encoder position. If marshaling fails, reset encoder output + // back to this position. + pos := e.Snapshot() + + // Field name is the proto field name enclosed in []. + e.WriteName("[" + typeURL + "]") + err = e.marshalMessage(m.ProtoReflect(), true) + if err != nil { + e.Reset(pos) + return false + } + return true +} diff --git a/vendor/google.golang.org/protobuf/encoding/protowire/wire.go b/vendor/google.golang.org/protobuf/encoding/protowire/wire.go new file mode 100644 index 00000000..a427f8b7 --- /dev/null +++ b/vendor/google.golang.org/protobuf/encoding/protowire/wire.go @@ -0,0 +1,538 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package protowire parses and formats the raw wire encoding. +// See https://developers.google.com/protocol-buffers/docs/encoding. +// +// For marshaling and unmarshaling entire protobuf messages, +// use the "google.golang.org/protobuf/proto" package instead. 
+package protowire + +import ( + "io" + "math" + "math/bits" + + "google.golang.org/protobuf/internal/errors" +) + +// Number represents the field number. +type Number int32 + +const ( + MinValidNumber Number = 1 + FirstReservedNumber Number = 19000 + LastReservedNumber Number = 19999 + MaxValidNumber Number = 1<<29 - 1 +) + +// IsValid reports whether the field number is semantically valid. +// +// Note that while numbers within the reserved range are semantically invalid, +// they are syntactically valid in the wire format. +// Implementations may treat records with reserved field numbers as unknown. +func (n Number) IsValid() bool { + return MinValidNumber <= n && n < FirstReservedNumber || LastReservedNumber < n && n <= MaxValidNumber +} + +// Type represents the wire type. +type Type int8 + +const ( + VarintType Type = 0 + Fixed32Type Type = 5 + Fixed64Type Type = 1 + BytesType Type = 2 + StartGroupType Type = 3 + EndGroupType Type = 4 +) + +const ( + _ = -iota + errCodeTruncated + errCodeFieldNumber + errCodeOverflow + errCodeReserved + errCodeEndGroup +) + +var ( + errFieldNumber = errors.New("invalid field number") + errOverflow = errors.New("variable length integer overflow") + errReserved = errors.New("cannot parse reserved wire type") + errEndGroup = errors.New("mismatching end group marker") + errParse = errors.New("parse error") +) + +// ParseError converts an error code into an error value. +// This returns nil if n is a non-negative number. +func ParseError(n int) error { + if n >= 0 { + return nil + } + switch n { + case errCodeTruncated: + return io.ErrUnexpectedEOF + case errCodeFieldNumber: + return errFieldNumber + case errCodeOverflow: + return errOverflow + case errCodeReserved: + return errReserved + case errCodeEndGroup: + return errEndGroup + default: + return errParse + } +} + +// ConsumeField parses an entire field record (both tag and value) and returns +// the field number, the wire type, and the total length. +// This returns a negative length upon an error (see ParseError). +// +// The total length includes the tag header and the end group marker (if the +// field is a group). +func ConsumeField(b []byte) (Number, Type, int) { + num, typ, n := ConsumeTag(b) + if n < 0 { + return 0, 0, n // forward error code + } + m := ConsumeFieldValue(num, typ, b[n:]) + if m < 0 { + return 0, 0, m // forward error code + } + return num, typ, n + m +} + +// ConsumeFieldValue parses a field value and returns its length. +// This assumes that the field Number and wire Type have already been parsed. +// This returns a negative length upon an error (see ParseError). +// +// When parsing a group, the length includes the end group marker and +// the end group is verified to match the starting field number. 
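To make the tag plumbing concrete, a short editorial sketch (not from the vendored sources) that hand-encodes the classic "field 1, varint 150" record and parses it back; a negative length from any Consume function is converted into a real error with ParseError.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// Tag for field number 1 with varint wire type, followed by the value 150.
	b := protowire.AppendTag(nil, 1, protowire.VarintType)
	b = protowire.AppendVarint(b, 150)
	fmt.Printf("% x\n", b) // 08 96 01

	num, typ, n := protowire.ConsumeField(b)
	if n < 0 {
		panic(protowire.ParseError(n))
	}
	fmt.Println(num, typ, n) // 1 0 3
}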
+func ConsumeFieldValue(num Number, typ Type, b []byte) (n int) { + switch typ { + case VarintType: + _, n = ConsumeVarint(b) + return n + case Fixed32Type: + _, n = ConsumeFixed32(b) + return n + case Fixed64Type: + _, n = ConsumeFixed64(b) + return n + case BytesType: + _, n = ConsumeBytes(b) + return n + case StartGroupType: + n0 := len(b) + for { + num2, typ2, n := ConsumeTag(b) + if n < 0 { + return n // forward error code + } + b = b[n:] + if typ2 == EndGroupType { + if num != num2 { + return errCodeEndGroup + } + return n0 - len(b) + } + + n = ConsumeFieldValue(num2, typ2, b) + if n < 0 { + return n // forward error code + } + b = b[n:] + } + case EndGroupType: + return errCodeEndGroup + default: + return errCodeReserved + } +} + +// AppendTag encodes num and typ as a varint-encoded tag and appends it to b. +func AppendTag(b []byte, num Number, typ Type) []byte { + return AppendVarint(b, EncodeTag(num, typ)) +} + +// ConsumeTag parses b as a varint-encoded tag, reporting its length. +// This returns a negative length upon an error (see ParseError). +func ConsumeTag(b []byte) (Number, Type, int) { + v, n := ConsumeVarint(b) + if n < 0 { + return 0, 0, n // forward error code + } + num, typ := DecodeTag(v) + if num < MinValidNumber { + return 0, 0, errCodeFieldNumber + } + return num, typ, n +} + +func SizeTag(num Number) int { + return SizeVarint(EncodeTag(num, 0)) // wire type has no effect on size +} + +// AppendVarint appends v to b as a varint-encoded uint64. +func AppendVarint(b []byte, v uint64) []byte { + switch { + case v < 1<<7: + b = append(b, byte(v)) + case v < 1<<14: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte(v>>7)) + case v < 1<<21: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte(v>>14)) + case v < 1<<28: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte(v>>21)) + case v < 1<<35: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte(v>>28)) + case v < 1<<42: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte(v>>35)) + case v < 1<<49: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte(v>>42)) + case v < 1<<56: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte(v>>49)) + case v < 1<<63: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte(v>>56)) + default: + b = append(b, + byte((v>>0)&0x7f|0x80), + byte((v>>7)&0x7f|0x80), + byte((v>>14)&0x7f|0x80), + byte((v>>21)&0x7f|0x80), + byte((v>>28)&0x7f|0x80), + byte((v>>35)&0x7f|0x80), + byte((v>>42)&0x7f|0x80), + byte((v>>49)&0x7f|0x80), + byte((v>>56)&0x7f|0x80), + 1) + } + return b +} + +// ConsumeVarint parses b as a varint-encoded uint64, reporting its length. +// This returns a negative length upon an error (see ParseError). 
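A brief editorial sketch of the error-code convention documented above: on success ConsumeVarint reports the decoded value and its length, while a truncated buffer yields a negative length that ParseError maps to io.ErrUnexpectedEOF.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	buf := protowire.AppendVarint(nil, 300) // encodes as 0xac 0x02
	v, n := protowire.ConsumeVarint(buf)
	fmt.Println(v, n) // 300 2

	// Cutting the buffer short produces a negative length, not a panic.
	_, n = protowire.ConsumeVarint(buf[:1])
	fmt.Println(protowire.ParseError(n)) // unexpected EOF
}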
+func ConsumeVarint(b []byte) (v uint64, n int) { + var y uint64 + if len(b) <= 0 { + return 0, errCodeTruncated + } + v = uint64(b[0]) + if v < 0x80 { + return v, 1 + } + v -= 0x80 + + if len(b) <= 1 { + return 0, errCodeTruncated + } + y = uint64(b[1]) + v += y << 7 + if y < 0x80 { + return v, 2 + } + v -= 0x80 << 7 + + if len(b) <= 2 { + return 0, errCodeTruncated + } + y = uint64(b[2]) + v += y << 14 + if y < 0x80 { + return v, 3 + } + v -= 0x80 << 14 + + if len(b) <= 3 { + return 0, errCodeTruncated + } + y = uint64(b[3]) + v += y << 21 + if y < 0x80 { + return v, 4 + } + v -= 0x80 << 21 + + if len(b) <= 4 { + return 0, errCodeTruncated + } + y = uint64(b[4]) + v += y << 28 + if y < 0x80 { + return v, 5 + } + v -= 0x80 << 28 + + if len(b) <= 5 { + return 0, errCodeTruncated + } + y = uint64(b[5]) + v += y << 35 + if y < 0x80 { + return v, 6 + } + v -= 0x80 << 35 + + if len(b) <= 6 { + return 0, errCodeTruncated + } + y = uint64(b[6]) + v += y << 42 + if y < 0x80 { + return v, 7 + } + v -= 0x80 << 42 + + if len(b) <= 7 { + return 0, errCodeTruncated + } + y = uint64(b[7]) + v += y << 49 + if y < 0x80 { + return v, 8 + } + v -= 0x80 << 49 + + if len(b) <= 8 { + return 0, errCodeTruncated + } + y = uint64(b[8]) + v += y << 56 + if y < 0x80 { + return v, 9 + } + v -= 0x80 << 56 + + if len(b) <= 9 { + return 0, errCodeTruncated + } + y = uint64(b[9]) + v += y << 63 + if y < 2 { + return v, 10 + } + return 0, errCodeOverflow +} + +// SizeVarint returns the encoded size of a varint. +// The size is guaranteed to be within 1 and 10, inclusive. +func SizeVarint(v uint64) int { + // This computes 1 + (bits.Len64(v)-1)/7. + // 9/64 is a good enough approximation of 1/7 + return int(9*uint32(bits.Len64(v))+64) / 64 +} + +// AppendFixed32 appends v to b as a little-endian uint32. +func AppendFixed32(b []byte, v uint32) []byte { + return append(b, + byte(v>>0), + byte(v>>8), + byte(v>>16), + byte(v>>24)) +} + +// ConsumeFixed32 parses b as a little-endian uint32, reporting its length. +// This returns a negative length upon an error (see ParseError). +func ConsumeFixed32(b []byte) (v uint32, n int) { + if len(b) < 4 { + return 0, errCodeTruncated + } + v = uint32(b[0])<<0 | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24 + return v, 4 +} + +// SizeFixed32 returns the encoded size of a fixed32; which is always 4. +func SizeFixed32() int { + return 4 +} + +// AppendFixed64 appends v to b as a little-endian uint64. +func AppendFixed64(b []byte, v uint64) []byte { + return append(b, + byte(v>>0), + byte(v>>8), + byte(v>>16), + byte(v>>24), + byte(v>>32), + byte(v>>40), + byte(v>>48), + byte(v>>56)) +} + +// ConsumeFixed64 parses b as a little-endian uint64, reporting its length. +// This returns a negative length upon an error (see ParseError). +func ConsumeFixed64(b []byte) (v uint64, n int) { + if len(b) < 8 { + return 0, errCodeTruncated + } + v = uint64(b[0])<<0 | uint64(b[1])<<8 | uint64(b[2])<<16 | uint64(b[3])<<24 | uint64(b[4])<<32 | uint64(b[5])<<40 | uint64(b[6])<<48 | uint64(b[7])<<56 + return v, 8 +} + +// SizeFixed64 returns the encoded size of a fixed64; which is always 8. +func SizeFixed64() int { + return 8 +} + +// AppendBytes appends v to b as a length-prefixed bytes value. +func AppendBytes(b []byte, v []byte) []byte { + return append(AppendVarint(b, uint64(len(v))), v...) +} + +// ConsumeBytes parses b as a length-prefixed bytes value, reporting its length. +// This returns a negative length upon an error (see ParseError). 
+func ConsumeBytes(b []byte) (v []byte, n int) { + m, n := ConsumeVarint(b) + if n < 0 { + return nil, n // forward error code + } + if m > uint64(len(b[n:])) { + return nil, errCodeTruncated + } + return b[n:][:m], n + int(m) +} + +// SizeBytes returns the encoded size of a length-prefixed bytes value, +// given only the length. +func SizeBytes(n int) int { + return SizeVarint(uint64(n)) + n +} + +// AppendString appends v to b as a length-prefixed bytes value. +func AppendString(b []byte, v string) []byte { + return append(AppendVarint(b, uint64(len(v))), v...) +} + +// ConsumeString parses b as a length-prefixed bytes value, reporting its length. +// This returns a negative length upon an error (see ParseError). +func ConsumeString(b []byte) (v string, n int) { + bb, n := ConsumeBytes(b) + return string(bb), n +} + +// AppendGroup appends v to b as group value, with a trailing end group marker. +// The value v must not contain the end marker. +func AppendGroup(b []byte, num Number, v []byte) []byte { + return AppendVarint(append(b, v...), EncodeTag(num, EndGroupType)) +} + +// ConsumeGroup parses b as a group value until the trailing end group marker, +// and verifies that the end marker matches the provided num. The value v +// does not contain the end marker, while the length does contain the end marker. +// This returns a negative length upon an error (see ParseError). +func ConsumeGroup(num Number, b []byte) (v []byte, n int) { + n = ConsumeFieldValue(num, StartGroupType, b) + if n < 0 { + return nil, n // forward error code + } + b = b[:n] + + // Truncate off end group marker, but need to handle denormalized varints. + // Assuming end marker is never 0 (which is always the case since + // EndGroupType is non-zero), we can truncate all trailing bytes where the + // lower 7 bits are all zero (implying that the varint is denormalized). + for len(b) > 0 && b[len(b)-1]&0x7f == 0 { + b = b[:len(b)-1] + } + b = b[:len(b)-SizeTag(num)] + return b, n +} + +// SizeGroup returns the encoded size of a group, given only the length. +func SizeGroup(num Number, n int) int { + return n + SizeTag(num) +} + +// DecodeTag decodes the field Number and wire Type from its unified form. +// The Number is -1 if the decoded field number overflows int32. +// Other than overflow, this does not check for field number validity. +func DecodeTag(x uint64) (Number, Type) { + // NOTE: MessageSet allows for larger field numbers than normal. + if x>>3 > uint64(math.MaxInt32) { + return -1, 0 + } + return Number(x >> 3), Type(x & 7) +} + +// EncodeTag encodes the field Number and wire Type into its unified form. +func EncodeTag(num Number, typ Type) uint64 { + return uint64(num)<<3 | uint64(typ&7) +} + +// DecodeZigZag decodes a zig-zag-encoded uint64 as an int64. +// Input: {…, 5, 3, 1, 0, 2, 4, 6, …} +// Output: {…, -3, -2, -1, 0, +1, +2, +3, …} +func DecodeZigZag(x uint64) int64 { + return int64(x>>1) ^ int64(x)<<63>>63 +} + +// EncodeZigZag encodes an int64 as a zig-zag-encoded uint64. +// Input: {…, -3, -2, -1, 0, +1, +2, +3, …} +// Output: {…, 5, 3, 1, 0, 2, 4, 6, …} +func EncodeZigZag(x int64) uint64 { + return uint64(x<<1) ^ uint64(x>>63) +} + +// DecodeBool decodes a uint64 as a bool. +// Input: { 0, 1, 2, …} +// Output: {false, true, true, …} +func DecodeBool(x uint64) bool { + return x != 0 +} + +// EncodeBool encodes a bool as a uint64. 
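The zig-zag helpers defined above keep small magnitudes short on the wire by interleaving negative and positive values; a quick editorial sketch of the mapping:

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// 0→0, -1→1, 1→2, -2→3, ... so sint32/sint64 values near zero stay short.
	for _, v := range []int64{0, -1, 1, -2, 2, -64} {
		u := protowire.EncodeZigZag(v)
		fmt.Printf("%d -> %d -> %d\n", v, u, protowire.DecodeZigZag(u))
	}
}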
+// Input: {false, true} +// Output: { 0, 1} +func EncodeBool(x bool) uint64 { + if x { + return 1 + } + return 0 +} diff --git a/vendor/google.golang.org/protobuf/internal/descfmt/stringer.go b/vendor/google.golang.org/protobuf/internal/descfmt/stringer.go new file mode 100644 index 00000000..360c6332 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/descfmt/stringer.go @@ -0,0 +1,318 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package descfmt provides functionality to format descriptors. +package descfmt + +import ( + "fmt" + "io" + "reflect" + "strconv" + "strings" + + "google.golang.org/protobuf/internal/detrand" + "google.golang.org/protobuf/internal/pragma" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type list interface { + Len() int + pragma.DoNotImplement +} + +func FormatList(s fmt.State, r rune, vs list) { + io.WriteString(s, formatListOpt(vs, true, r == 'v' && (s.Flag('+') || s.Flag('#')))) +} +func formatListOpt(vs list, isRoot, allowMulti bool) string { + start, end := "[", "]" + if isRoot { + var name string + switch vs.(type) { + case pref.Names: + name = "Names" + case pref.FieldNumbers: + name = "FieldNumbers" + case pref.FieldRanges: + name = "FieldRanges" + case pref.EnumRanges: + name = "EnumRanges" + case pref.FileImports: + name = "FileImports" + case pref.Descriptor: + name = reflect.ValueOf(vs).MethodByName("Get").Type().Out(0).Name() + "s" + default: + name = reflect.ValueOf(vs).Elem().Type().Name() + } + start, end = name+"{", "}" + } + + var ss []string + switch vs := vs.(type) { + case pref.Names: + for i := 0; i < vs.Len(); i++ { + ss = append(ss, fmt.Sprint(vs.Get(i))) + } + return start + joinStrings(ss, false) + end + case pref.FieldNumbers: + for i := 0; i < vs.Len(); i++ { + ss = append(ss, fmt.Sprint(vs.Get(i))) + } + return start + joinStrings(ss, false) + end + case pref.FieldRanges: + for i := 0; i < vs.Len(); i++ { + r := vs.Get(i) + if r[0]+1 == r[1] { + ss = append(ss, fmt.Sprintf("%d", r[0])) + } else { + ss = append(ss, fmt.Sprintf("%d:%d", r[0], r[1])) // enum ranges are end exclusive + } + } + return start + joinStrings(ss, false) + end + case pref.EnumRanges: + for i := 0; i < vs.Len(); i++ { + r := vs.Get(i) + if r[0] == r[1] { + ss = append(ss, fmt.Sprintf("%d", r[0])) + } else { + ss = append(ss, fmt.Sprintf("%d:%d", r[0], int64(r[1])+1)) // enum ranges are end inclusive + } + } + return start + joinStrings(ss, false) + end + case pref.FileImports: + for i := 0; i < vs.Len(); i++ { + var rs records + rs.Append(reflect.ValueOf(vs.Get(i)), "Path", "Package", "IsPublic", "IsWeak") + ss = append(ss, "{"+rs.Join()+"}") + } + return start + joinStrings(ss, allowMulti) + end + default: + _, isEnumValue := vs.(pref.EnumValueDescriptors) + for i := 0; i < vs.Len(); i++ { + m := reflect.ValueOf(vs).MethodByName("Get") + v := m.Call([]reflect.Value{reflect.ValueOf(i)})[0].Interface() + ss = append(ss, formatDescOpt(v.(pref.Descriptor), false, allowMulti && !isEnumValue)) + } + return start + joinStrings(ss, allowMulti && isEnumValue) + end + } +} + +// descriptorAccessors is a list of accessors to print for each descriptor. +// +// Do not print all accessors since some contain redundant information, +// while others are pointers that we do not want to follow since the descriptor +// is actually a cyclic graph. +// +// Using a list allows us to print the accessors in a sensible order. 
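Editorial note: descfmt backs the debug formatting of descriptors; as far as I can tell, the concrete descriptor types route their fmt.Formatter implementation here, so the accessor list below controls what %v prints. A hedged sketch of that from user code; the exact output is intentionally unstable (see formatColon further down).

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	md := (&structpb.Struct{}).ProtoReflect().Descriptor()

	// Single-line summary of the message descriptor and its field list;
	// %+v switches to the aligned multiline form.
	fmt.Printf("%v\n", md)
	fmt.Printf("%+v\n", md.Fields())
}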
+var descriptorAccessors = map[reflect.Type][]string{ + reflect.TypeOf((*pref.FileDescriptor)(nil)).Elem(): {"Path", "Package", "Imports", "Messages", "Enums", "Extensions", "Services"}, + reflect.TypeOf((*pref.MessageDescriptor)(nil)).Elem(): {"IsMapEntry", "Fields", "Oneofs", "ReservedNames", "ReservedRanges", "RequiredNumbers", "ExtensionRanges", "Messages", "Enums", "Extensions"}, + reflect.TypeOf((*pref.FieldDescriptor)(nil)).Elem(): {"Number", "Cardinality", "Kind", "HasJSONName", "JSONName", "HasPresence", "IsExtension", "IsPacked", "IsWeak", "IsList", "IsMap", "MapKey", "MapValue", "HasDefault", "Default", "ContainingOneof", "ContainingMessage", "Message", "Enum"}, + reflect.TypeOf((*pref.OneofDescriptor)(nil)).Elem(): {"Fields"}, // not directly used; must keep in sync with formatDescOpt + reflect.TypeOf((*pref.EnumDescriptor)(nil)).Elem(): {"Values", "ReservedNames", "ReservedRanges"}, + reflect.TypeOf((*pref.EnumValueDescriptor)(nil)).Elem(): {"Number"}, + reflect.TypeOf((*pref.ServiceDescriptor)(nil)).Elem(): {"Methods"}, + reflect.TypeOf((*pref.MethodDescriptor)(nil)).Elem(): {"Input", "Output", "IsStreamingClient", "IsStreamingServer"}, +} + +func FormatDesc(s fmt.State, r rune, t pref.Descriptor) { + io.WriteString(s, formatDescOpt(t, true, r == 'v' && (s.Flag('+') || s.Flag('#')))) +} +func formatDescOpt(t pref.Descriptor, isRoot, allowMulti bool) string { + rv := reflect.ValueOf(t) + rt := rv.MethodByName("ProtoType").Type().In(0) + + start, end := "{", "}" + if isRoot { + start = rt.Name() + "{" + } + + _, isFile := t.(pref.FileDescriptor) + rs := records{allowMulti: allowMulti} + if t.IsPlaceholder() { + if isFile { + rs.Append(rv, "Path", "Package", "IsPlaceholder") + } else { + rs.Append(rv, "FullName", "IsPlaceholder") + } + } else { + switch { + case isFile: + rs.Append(rv, "Syntax") + case isRoot: + rs.Append(rv, "Syntax", "FullName") + default: + rs.Append(rv, "Name") + } + switch t := t.(type) { + case pref.FieldDescriptor: + for _, s := range descriptorAccessors[rt] { + switch s { + case "MapKey": + if k := t.MapKey(); k != nil { + rs.recs = append(rs.recs, [2]string{"MapKey", k.Kind().String()}) + } + case "MapValue": + if v := t.MapValue(); v != nil { + switch v.Kind() { + case pref.EnumKind: + rs.recs = append(rs.recs, [2]string{"MapValue", string(v.Enum().FullName())}) + case pref.MessageKind, pref.GroupKind: + rs.recs = append(rs.recs, [2]string{"MapValue", string(v.Message().FullName())}) + default: + rs.recs = append(rs.recs, [2]string{"MapValue", v.Kind().String()}) + } + } + case "ContainingOneof": + if od := t.ContainingOneof(); od != nil { + rs.recs = append(rs.recs, [2]string{"Oneof", string(od.Name())}) + } + case "ContainingMessage": + if t.IsExtension() { + rs.recs = append(rs.recs, [2]string{"Extendee", string(t.ContainingMessage().FullName())}) + } + case "Message": + if !t.IsMap() { + rs.Append(rv, s) + } + default: + rs.Append(rv, s) + } + } + case pref.OneofDescriptor: + var ss []string + fs := t.Fields() + for i := 0; i < fs.Len(); i++ { + ss = append(ss, string(fs.Get(i).Name())) + } + if len(ss) > 0 { + rs.recs = append(rs.recs, [2]string{"Fields", "[" + joinStrings(ss, false) + "]"}) + } + default: + rs.Append(rv, descriptorAccessors[rt]...) 
+ } + if rv.MethodByName("GoType").IsValid() { + rs.Append(rv, "GoType") + } + } + return start + rs.Join() + end +} + +type records struct { + recs [][2]string + allowMulti bool +} + +func (rs *records) Append(v reflect.Value, accessors ...string) { + for _, a := range accessors { + var rv reflect.Value + if m := v.MethodByName(a); m.IsValid() { + rv = m.Call(nil)[0] + } + if v.Kind() == reflect.Struct && !rv.IsValid() { + rv = v.FieldByName(a) + } + if !rv.IsValid() { + panic(fmt.Sprintf("unknown accessor: %v.%s", v.Type(), a)) + } + if _, ok := rv.Interface().(pref.Value); ok { + rv = rv.MethodByName("Interface").Call(nil)[0] + if !rv.IsNil() { + rv = rv.Elem() + } + } + + // Ignore zero values. + var isZero bool + switch rv.Kind() { + case reflect.Interface, reflect.Slice: + isZero = rv.IsNil() + case reflect.Bool: + isZero = rv.Bool() == false + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + isZero = rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + isZero = rv.Uint() == 0 + case reflect.String: + isZero = rv.String() == "" + } + if n, ok := rv.Interface().(list); ok { + isZero = n.Len() == 0 + } + if isZero { + continue + } + + // Format the value. + var s string + v := rv.Interface() + switch v := v.(type) { + case list: + s = formatListOpt(v, false, rs.allowMulti) + case pref.FieldDescriptor, pref.OneofDescriptor, pref.EnumValueDescriptor, pref.MethodDescriptor: + s = string(v.(pref.Descriptor).Name()) + case pref.Descriptor: + s = string(v.FullName()) + case string: + s = strconv.Quote(v) + case []byte: + s = fmt.Sprintf("%q", v) + default: + s = fmt.Sprint(v) + } + rs.recs = append(rs.recs, [2]string{a, s}) + } +} + +func (rs *records) Join() string { + var ss []string + + // In single line mode, simply join all records with commas. + if !rs.allowMulti { + for _, r := range rs.recs { + ss = append(ss, r[0]+formatColon(0)+r[1]) + } + return joinStrings(ss, false) + } + + // In allowMulti line mode, align single line records for more readable output. + var maxLen int + flush := func(i int) { + for _, r := range rs.recs[len(ss):i] { + ss = append(ss, r[0]+formatColon(maxLen-len(r[0]))+r[1]) + } + maxLen = 0 + } + for i, r := range rs.recs { + if isMulti := strings.Contains(r[1], "\n"); isMulti { + flush(i) + ss = append(ss, r[0]+formatColon(0)+strings.Join(strings.Split(r[1], "\n"), "\n\t")) + } else if maxLen < len(r[0]) { + maxLen = len(r[0]) + } + } + flush(len(rs.recs)) + return joinStrings(ss, true) +} + +func formatColon(padding int) string { + // Deliberately introduce instability into the debug output to + // discourage users from performing string comparisons. + // This provides us flexibility to change the output in the future. + if detrand.Bool() { + return ":" + strings.Repeat(" ", 1+padding) // use non-breaking spaces (U+00a0) + } else { + return ":" + strings.Repeat(" ", 1+padding) // use regular spaces (U+0020) + } +} + +func joinStrings(ss []string, isMulti bool) string { + if len(ss) == 0 { + return "" + } + if isMulti { + return "\n\t" + strings.Join(ss, "\n\t") + "\n" + } + return strings.Join(ss, ", ") +} diff --git a/vendor/google.golang.org/protobuf/internal/descopts/options.go b/vendor/google.golang.org/protobuf/internal/descopts/options.go new file mode 100644 index 00000000..8401be8c --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/descopts/options.go @@ -0,0 +1,29 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package descopts contains the nil pointers to concrete descriptor options. +// +// This package exists as a form of reverse dependency injection so that certain +// packages (e.g., internal/filedesc and internal/filetype can avoid a direct +// dependency on the descriptor proto package). +package descopts + +import pref "google.golang.org/protobuf/reflect/protoreflect" + +// These variables are set by the init function in descriptor.pb.go via logic +// in internal/filetype. In other words, so long as the descriptor proto package +// is linked in, these variables will be populated. +// +// Each variable is populated with a nil pointer to the options struct. +var ( + File pref.ProtoMessage + Enum pref.ProtoMessage + EnumValue pref.ProtoMessage + Message pref.ProtoMessage + Field pref.ProtoMessage + Oneof pref.ProtoMessage + ExtensionRange pref.ProtoMessage + Service pref.ProtoMessage + Method pref.ProtoMessage +) diff --git a/vendor/google.golang.org/protobuf/internal/detrand/rand.go b/vendor/google.golang.org/protobuf/internal/detrand/rand.go new file mode 100644 index 00000000..49c8676d --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/detrand/rand.go @@ -0,0 +1,69 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package detrand provides deterministically random functionality. +// +// The pseudo-randomness of these functions is seeded by the program binary +// itself and guarantees that the output does not change within a program, +// while ensuring that the output is unstable across different builds. +package detrand + +import ( + "encoding/binary" + "hash/fnv" + "os" +) + +// Disable disables detrand such that all functions returns the zero value. +// This function is not concurrent-safe and must be called during program init. +func Disable() { + randSeed = 0 +} + +// Bool returns a deterministically random boolean. +func Bool() bool { + return randSeed%2 == 1 +} + +// Intn returns a deterministically random integer between 0 and n-1, inclusive. +func Intn(n int) int { + if n <= 0 { + panic("must be positive") + } + return int(randSeed % uint64(n)) +} + +// randSeed is a best-effort at an approximate hash of the Go binary. +var randSeed = binaryHash() + +func binaryHash() uint64 { + // Open the Go binary. + s, err := os.Executable() + if err != nil { + return 0 + } + f, err := os.Open(s) + if err != nil { + return 0 + } + defer f.Close() + + // Hash the size and several samples of the Go binary. + const numSamples = 8 + var buf [64]byte + h := fnv.New64() + fi, err := f.Stat() + if err != nil { + return 0 + } + binary.LittleEndian.PutUint64(buf[:8], uint64(fi.Size())) + h.Write(buf[:8]) + for i := int64(0); i < numSamples; i++ { + if _, err := f.ReadAt(buf[:], i*fi.Size()/numSamples); err != nil { + return 0 + } + h.Write(buf[:]) + } + return h.Sum64() +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/defval/default.go b/vendor/google.golang.org/protobuf/internal/encoding/defval/default.go new file mode 100644 index 00000000..fdd9b13f --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/defval/default.go @@ -0,0 +1,213 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package defval marshals and unmarshals textual forms of default values. +// +// This package handles both the form historically used in Go struct field tags +// and also the form used by google.protobuf.FieldDescriptorProto.default_value +// since they differ in superficial ways. +package defval + +import ( + "fmt" + "math" + "strconv" + + ptext "google.golang.org/protobuf/internal/encoding/text" + errors "google.golang.org/protobuf/internal/errors" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// Format is the serialization format used to represent the default value. +type Format int + +const ( + _ Format = iota + + // Descriptor uses the serialization format that protoc uses with the + // google.protobuf.FieldDescriptorProto.default_value field. + Descriptor + + // GoTag uses the historical serialization format in Go struct field tags. + GoTag +) + +// Unmarshal deserializes the default string s according to the given kind k. +// When k is an enum, a list of enum value descriptors must be provided. +func Unmarshal(s string, k pref.Kind, evs pref.EnumValueDescriptors, f Format) (pref.Value, pref.EnumValueDescriptor, error) { + switch k { + case pref.BoolKind: + if f == GoTag { + switch s { + case "1": + return pref.ValueOfBool(true), nil, nil + case "0": + return pref.ValueOfBool(false), nil, nil + } + } else { + switch s { + case "true": + return pref.ValueOfBool(true), nil, nil + case "false": + return pref.ValueOfBool(false), nil, nil + } + } + case pref.EnumKind: + if f == GoTag { + // Go tags use the numeric form of the enum value. + if n, err := strconv.ParseInt(s, 10, 32); err == nil { + if ev := evs.ByNumber(pref.EnumNumber(n)); ev != nil { + return pref.ValueOfEnum(ev.Number()), ev, nil + } + } + } else { + // Descriptor default_value use the enum identifier. + ev := evs.ByName(pref.Name(s)) + if ev != nil { + return pref.ValueOfEnum(ev.Number()), ev, nil + } + } + case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind: + if v, err := strconv.ParseInt(s, 10, 32); err == nil { + return pref.ValueOfInt32(int32(v)), nil, nil + } + case pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: + if v, err := strconv.ParseInt(s, 10, 64); err == nil { + return pref.ValueOfInt64(int64(v)), nil, nil + } + case pref.Uint32Kind, pref.Fixed32Kind: + if v, err := strconv.ParseUint(s, 10, 32); err == nil { + return pref.ValueOfUint32(uint32(v)), nil, nil + } + case pref.Uint64Kind, pref.Fixed64Kind: + if v, err := strconv.ParseUint(s, 10, 64); err == nil { + return pref.ValueOfUint64(uint64(v)), nil, nil + } + case pref.FloatKind, pref.DoubleKind: + var v float64 + var err error + switch s { + case "-inf": + v = math.Inf(-1) + case "inf": + v = math.Inf(+1) + case "nan": + v = math.NaN() + default: + v, err = strconv.ParseFloat(s, 64) + } + if err == nil { + if k == pref.FloatKind { + return pref.ValueOfFloat32(float32(v)), nil, nil + } else { + return pref.ValueOfFloat64(float64(v)), nil, nil + } + } + case pref.StringKind: + // String values are already unescaped and can be used as is. + return pref.ValueOfString(s), nil, nil + case pref.BytesKind: + if b, ok := unmarshalBytes(s); ok { + return pref.ValueOfBytes(b), nil, nil + } + } + return pref.Value{}, nil, errors.New("could not parse value for %v: %q", k, s) +} + +// Marshal serializes v as the default string according to the given kind k. +// When specifying the Descriptor format for an enum kind, the associated +// enum value descriptor must be provided. 
+func Marshal(v pref.Value, ev pref.EnumValueDescriptor, k pref.Kind, f Format) (string, error) { + switch k { + case pref.BoolKind: + if f == GoTag { + if v.Bool() { + return "1", nil + } else { + return "0", nil + } + } else { + if v.Bool() { + return "true", nil + } else { + return "false", nil + } + } + case pref.EnumKind: + if f == GoTag { + return strconv.FormatInt(int64(v.Enum()), 10), nil + } else { + return string(ev.Name()), nil + } + case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind, pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: + return strconv.FormatInt(v.Int(), 10), nil + case pref.Uint32Kind, pref.Fixed32Kind, pref.Uint64Kind, pref.Fixed64Kind: + return strconv.FormatUint(v.Uint(), 10), nil + case pref.FloatKind, pref.DoubleKind: + f := v.Float() + switch { + case math.IsInf(f, -1): + return "-inf", nil + case math.IsInf(f, +1): + return "inf", nil + case math.IsNaN(f): + return "nan", nil + default: + if k == pref.FloatKind { + return strconv.FormatFloat(f, 'g', -1, 32), nil + } else { + return strconv.FormatFloat(f, 'g', -1, 64), nil + } + } + case pref.StringKind: + // String values are serialized as is without any escaping. + return v.String(), nil + case pref.BytesKind: + if s, ok := marshalBytes(v.Bytes()); ok { + return s, nil + } + } + return "", errors.New("could not format value for %v: %v", k, v) +} + +// unmarshalBytes deserializes bytes by applying C unescaping. +func unmarshalBytes(s string) ([]byte, bool) { + // Bytes values use the same escaping as the text format, + // however they lack the surrounding double quotes. + v, err := ptext.UnmarshalString(`"` + s + `"`) + if err != nil { + return nil, false + } + return []byte(v), true +} + +// marshalBytes serializes bytes by using C escaping. +// To match the exact output of protoc, this is identical to the +// CEscape function in strutil.cc of the protoc source code. +func marshalBytes(b []byte) (string, bool) { + var s []byte + for _, c := range b { + switch c { + case '\n': + s = append(s, `\n`...) + case '\r': + s = append(s, `\r`...) + case '\t': + s = append(s, `\t`...) + case '"': + s = append(s, `\"`...) + case '\'': + s = append(s, `\'`...) + case '\\': + s = append(s, `\\`...) + default: + if printableASCII := c >= 0x20 && c <= 0x7e; printableASCII { + s = append(s, c) + } else { + s = append(s, fmt.Sprintf(`\%03o`, c)...) + } + } + } + return string(s), true +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go b/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go new file mode 100644 index 00000000..c1866f3c --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/messageset/messageset.go @@ -0,0 +1,241 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package messageset encodes and decodes the obsolete MessageSet wire format. +package messageset + +import ( + "math" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// The MessageSet wire format is equivalent to a message defined as follows, +// where each Item defines an extension field with a field number of 'type_id' +// and content of 'message'. MessageSet extensions must be non-repeated message +// fields. 
+// +// message MessageSet { +// repeated group Item = 1 { +// required int32 type_id = 2; +// required string message = 3; +// } +// } +const ( + FieldItem = protowire.Number(1) + FieldTypeID = protowire.Number(2) + FieldMessage = protowire.Number(3) +) + +// ExtensionName is the field name for extensions of MessageSet. +// +// A valid MessageSet extension must be of the form: +// message MyMessage { +// extend proto2.bridge.MessageSet { +// optional MyMessage message_set_extension = 1234; +// } +// ... +// } +const ExtensionName = "message_set_extension" + +// IsMessageSet returns whether the message uses the MessageSet wire format. +func IsMessageSet(md pref.MessageDescriptor) bool { + xmd, ok := md.(interface{ IsMessageSet() bool }) + return ok && xmd.IsMessageSet() +} + +// IsMessageSetExtension reports this field properly extends a MessageSet. +func IsMessageSetExtension(fd pref.FieldDescriptor) bool { + switch { + case fd.Name() != ExtensionName: + return false + case !IsMessageSet(fd.ContainingMessage()): + return false + case fd.FullName().Parent() != fd.Message().FullName(): + return false + } + return true +} + +// SizeField returns the size of a MessageSet item field containing an extension +// with the given field number, not counting the contents of the message subfield. +func SizeField(num protowire.Number) int { + return 2*protowire.SizeTag(FieldItem) + protowire.SizeTag(FieldTypeID) + protowire.SizeVarint(uint64(num)) +} + +// Unmarshal parses a MessageSet. +// +// It calls fn with the type ID and value of each item in the MessageSet. +// Unknown fields are discarded. +// +// If wantLen is true, the item values include the varint length prefix. +// This is ugly, but simplifies the fast-path decoder in internal/impl. +func Unmarshal(b []byte, wantLen bool, fn func(typeID protowire.Number, value []byte) error) error { + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return protowire.ParseError(n) + } + b = b[n:] + if num != FieldItem || wtyp != protowire.StartGroupType { + n := protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return protowire.ParseError(n) + } + b = b[n:] + continue + } + typeID, value, n, err := ConsumeFieldValue(b, wantLen) + if err != nil { + return err + } + b = b[n:] + if typeID == 0 { + continue + } + if err := fn(typeID, value); err != nil { + return err + } + } + return nil +} + +// ConsumeFieldValue parses b as a MessageSet item field value until and including +// the trailing end group marker. It assumes the start group tag has already been parsed. +// It returns the contents of the type_id and message subfields and the total +// item length. +// +// If wantLen is true, the returned message value includes the length prefix. +func ConsumeFieldValue(b []byte, wantLen bool) (typeid protowire.Number, message []byte, n int, err error) { + ilen := len(b) + for { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return 0, nil, 0, protowire.ParseError(n) + } + b = b[n:] + switch { + case num == FieldItem && wtyp == protowire.EndGroupType: + if wantLen && len(message) == 0 { + // The message field was missing, which should never happen. + // Be prepared for this case anyway. 
+ message = protowire.AppendVarint(message, 0) + } + return typeid, message, ilen - len(b), nil + case num == FieldTypeID && wtyp == protowire.VarintType: + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, nil, 0, protowire.ParseError(n) + } + b = b[n:] + if v < 1 || v > math.MaxInt32 { + return 0, nil, 0, errors.New("invalid type_id in message set") + } + typeid = protowire.Number(v) + case num == FieldMessage && wtyp == protowire.BytesType: + m, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, nil, 0, protowire.ParseError(n) + } + if message == nil { + if wantLen { + message = b[:n:n] + } else { + message = m[:len(m):len(m)] + } + } else { + // This case should never happen in practice, but handle it for + // correctness: The MessageSet item contains multiple message + // fields, which need to be merged. + // + // In the case where we're returning the length, this becomes + // quite inefficient since we need to strip the length off + // the existing data and reconstruct it with the combined length. + if wantLen { + _, nn := protowire.ConsumeVarint(message) + m0 := message[nn:] + message = nil + message = protowire.AppendVarint(message, uint64(len(m0)+len(m))) + message = append(message, m0...) + message = append(message, m...) + } else { + message = append(message, m...) + } + } + b = b[n:] + default: + // We have no place to put it, so we just ignore unknown fields. + n := protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return 0, nil, 0, protowire.ParseError(n) + } + b = b[n:] + } + } +} + +// AppendFieldStart appends the start of a MessageSet item field containing +// an extension with the given number. The caller must add the message +// subfield (including the tag). +func AppendFieldStart(b []byte, num protowire.Number) []byte { + b = protowire.AppendTag(b, FieldItem, protowire.StartGroupType) + b = protowire.AppendTag(b, FieldTypeID, protowire.VarintType) + b = protowire.AppendVarint(b, uint64(num)) + return b +} + +// AppendFieldEnd appends the trailing end group marker for a MessageSet item field. +func AppendFieldEnd(b []byte) []byte { + return protowire.AppendTag(b, FieldItem, protowire.EndGroupType) +} + +// SizeUnknown returns the size of an unknown fields section in MessageSet format. +// +// See AppendUnknown. +func SizeUnknown(unknown []byte) (size int) { + for len(unknown) > 0 { + num, typ, n := protowire.ConsumeTag(unknown) + if n < 0 || typ != protowire.BytesType { + return 0 + } + unknown = unknown[n:] + _, n = protowire.ConsumeBytes(unknown) + if n < 0 { + return 0 + } + unknown = unknown[n:] + size += SizeField(num) + protowire.SizeTag(FieldMessage) + n + } + return size +} + +// AppendUnknown appends unknown fields to b in MessageSet format. +// +// For historic reasons, unresolved items in a MessageSet are stored in a +// message's unknown fields section in non-MessageSet format. That is, an +// unknown item with typeID T and value V appears in the unknown fields as +// a field with number T and value V. +// +// This function converts the unknown fields back into MessageSet form. 
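A sketch of framing one MessageSet item with the helpers in this file and reading it back with Unmarshal. Illustrative only: messageset is internal to the protobuf module, and the type ID and payload below are made-up placeholders.

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
	"google.golang.org/protobuf/internal/encoding/messageset"
)

func main() {
	payload := []byte{0x08, 0x01}          // placeholder: an already-encoded message body
	const typeID = protowire.Number(12345) // placeholder extension field number

	// Frame one item: group start + type_id, then the message subfield.
	b := messageset.AppendFieldStart(nil, typeID)
	b = protowire.AppendTag(b, messageset.FieldMessage, protowire.BytesType)
	b = protowire.AppendBytes(b, payload)
	b = messageset.AppendFieldEnd(b)

	// Walk the items; wantLen=false yields the raw message bytes without a length prefix.
	err := messageset.Unmarshal(b, false, func(id protowire.Number, value []byte) error {
		fmt.Println(id, len(value)) // 12345 2
		return nil
	})
	if err != nil {
		panic(err)
	}
}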
+func AppendUnknown(b, unknown []byte) ([]byte, error) { + for len(unknown) > 0 { + num, typ, n := protowire.ConsumeTag(unknown) + if n < 0 || typ != protowire.BytesType { + return nil, errors.New("invalid data in message set unknown fields") + } + unknown = unknown[n:] + _, n = protowire.ConsumeBytes(unknown) + if n < 0 { + return nil, errors.New("invalid data in message set unknown fields") + } + b = AppendFieldStart(b, num) + b = protowire.AppendTag(b, FieldMessage, protowire.BytesType) + b = append(b, unknown[:n]...) + b = AppendFieldEnd(b) + unknown = unknown[n:] + } + return b, nil +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go b/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go new file mode 100644 index 00000000..38f1931c --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/tag/tag.go @@ -0,0 +1,207 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package tag marshals and unmarshals the legacy struct tags as generated +// by historical versions of protoc-gen-go. +package tag + +import ( + "reflect" + "strconv" + "strings" + + defval "google.golang.org/protobuf/internal/encoding/defval" + fdesc "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/strs" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +var byteType = reflect.TypeOf(byte(0)) + +// Unmarshal decodes the tag into a prototype.Field. +// +// The goType is needed to determine the original protoreflect.Kind since the +// tag does not record sufficient information to determine that. +// The type is the underlying field type (e.g., a repeated field may be +// represented by []T, but the Go type passed in is just T). +// A list of enum value descriptors must be provided for enum fields. +// This does not populate the Enum or Message (except for weak message). +// +// This function is a best effort attempt; parsing errors are ignored. 
+func Unmarshal(tag string, goType reflect.Type, evs pref.EnumValueDescriptors) pref.FieldDescriptor { + f := new(fdesc.Field) + f.L0.ParentFile = fdesc.SurrogateProto2 + for len(tag) > 0 { + i := strings.IndexByte(tag, ',') + if i < 0 { + i = len(tag) + } + switch s := tag[:i]; { + case strings.HasPrefix(s, "name="): + f.L0.FullName = pref.FullName(s[len("name="):]) + case strings.Trim(s, "0123456789") == "": + n, _ := strconv.ParseUint(s, 10, 32) + f.L1.Number = pref.FieldNumber(n) + case s == "opt": + f.L1.Cardinality = pref.Optional + case s == "req": + f.L1.Cardinality = pref.Required + case s == "rep": + f.L1.Cardinality = pref.Repeated + case s == "varint": + switch goType.Kind() { + case reflect.Bool: + f.L1.Kind = pref.BoolKind + case reflect.Int32: + f.L1.Kind = pref.Int32Kind + case reflect.Int64: + f.L1.Kind = pref.Int64Kind + case reflect.Uint32: + f.L1.Kind = pref.Uint32Kind + case reflect.Uint64: + f.L1.Kind = pref.Uint64Kind + } + case s == "zigzag32": + if goType.Kind() == reflect.Int32 { + f.L1.Kind = pref.Sint32Kind + } + case s == "zigzag64": + if goType.Kind() == reflect.Int64 { + f.L1.Kind = pref.Sint64Kind + } + case s == "fixed32": + switch goType.Kind() { + case reflect.Int32: + f.L1.Kind = pref.Sfixed32Kind + case reflect.Uint32: + f.L1.Kind = pref.Fixed32Kind + case reflect.Float32: + f.L1.Kind = pref.FloatKind + } + case s == "fixed64": + switch goType.Kind() { + case reflect.Int64: + f.L1.Kind = pref.Sfixed64Kind + case reflect.Uint64: + f.L1.Kind = pref.Fixed64Kind + case reflect.Float64: + f.L1.Kind = pref.DoubleKind + } + case s == "bytes": + switch { + case goType.Kind() == reflect.String: + f.L1.Kind = pref.StringKind + case goType.Kind() == reflect.Slice && goType.Elem() == byteType: + f.L1.Kind = pref.BytesKind + default: + f.L1.Kind = pref.MessageKind + } + case s == "group": + f.L1.Kind = pref.GroupKind + case strings.HasPrefix(s, "enum="): + f.L1.Kind = pref.EnumKind + case strings.HasPrefix(s, "json="): + jsonName := s[len("json="):] + if jsonName != strs.JSONCamelCase(string(f.L0.FullName.Name())) { + f.L1.StringName.InitJSON(jsonName) + } + case s == "packed": + f.L1.HasPacked = true + f.L1.IsPacked = true + case strings.HasPrefix(s, "weak="): + f.L1.IsWeak = true + f.L1.Message = fdesc.PlaceholderMessage(pref.FullName(s[len("weak="):])) + case strings.HasPrefix(s, "def="): + // The default tag is special in that everything afterwards is the + // default regardless of the presence of commas. + s, i = tag[len("def="):], len(tag) + v, ev, _ := defval.Unmarshal(s, f.L1.Kind, evs, defval.GoTag) + f.L1.Default = fdesc.DefaultValue(v, ev) + case s == "proto3": + f.L0.ParentFile = fdesc.SurrogateProto3 + } + tag = strings.TrimPrefix(tag[i:], ",") + } + + // The generator uses the group message name instead of the field name. + // We obtain the real field name by lowercasing the group name. + if f.L1.Kind == pref.GroupKind { + f.L0.FullName = pref.FullName(strings.ToLower(string(f.L0.FullName))) + } + return f +} + +// Marshal encodes the protoreflect.FieldDescriptor as a tag. +// +// The enumName must be provided if the kind is an enum. +// Historically, the formulation of the enum "name" was the proto package +// dot-concatenated with the generated Go identifier for the enum type. +// Depending on the context on how Marshal is called, there are different ways +// through which that information is determined. As such it is the caller's +// responsibility to provide a function to obtain that information. 
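A sketch of the legacy-tag round trip described above. Illustrative only: tag is an internal package, and the tag string is hand-written rather than real protoc-gen-go output.

package main

import (
	"fmt"
	"reflect"

	"google.golang.org/protobuf/internal/encoding/tag"
)

func main() {
	// The value of a `protobuf:"..."` struct tag for a proto3 int32 field.
	const raw = "varint,2,opt,name=page_size,json=pageSize,proto3"

	fd := tag.Unmarshal(raw, reflect.TypeOf(int32(0)), nil)
	fmt.Println(fd.Number(), fd.Kind(), fd.Cardinality()) // 2 int32 optional

	// Marshal regenerates an equivalent tag from the descriptor,
	// e.g. "varint,2,opt,name=page_size,json=pageSize,proto3".
	fmt.Println(tag.Marshal(fd, ""))
}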
+func Marshal(fd pref.FieldDescriptor, enumName string) string { + var tag []string + switch fd.Kind() { + case pref.BoolKind, pref.EnumKind, pref.Int32Kind, pref.Uint32Kind, pref.Int64Kind, pref.Uint64Kind: + tag = append(tag, "varint") + case pref.Sint32Kind: + tag = append(tag, "zigzag32") + case pref.Sint64Kind: + tag = append(tag, "zigzag64") + case pref.Sfixed32Kind, pref.Fixed32Kind, pref.FloatKind: + tag = append(tag, "fixed32") + case pref.Sfixed64Kind, pref.Fixed64Kind, pref.DoubleKind: + tag = append(tag, "fixed64") + case pref.StringKind, pref.BytesKind, pref.MessageKind: + tag = append(tag, "bytes") + case pref.GroupKind: + tag = append(tag, "group") + } + tag = append(tag, strconv.Itoa(int(fd.Number()))) + switch fd.Cardinality() { + case pref.Optional: + tag = append(tag, "opt") + case pref.Required: + tag = append(tag, "req") + case pref.Repeated: + tag = append(tag, "rep") + } + if fd.IsPacked() { + tag = append(tag, "packed") + } + name := string(fd.Name()) + if fd.Kind() == pref.GroupKind { + // The name of the FieldDescriptor for a group field is + // lowercased. To find the original capitalization, we + // look in the field's MessageType. + name = string(fd.Message().Name()) + } + tag = append(tag, "name="+name) + if jsonName := fd.JSONName(); jsonName != "" && jsonName != name && !fd.IsExtension() { + // NOTE: The jsonName != name condition is suspect, but it preserve + // the exact same semantics from the previous generator. + tag = append(tag, "json="+jsonName) + } + if fd.IsWeak() { + tag = append(tag, "weak="+string(fd.Message().FullName())) + } + // The previous implementation does not tag extension fields as proto3, + // even when the field is defined in a proto3 file. Match that behavior + // for consistency. + if fd.Syntax() == pref.Proto3 && !fd.IsExtension() { + tag = append(tag, "proto3") + } + if fd.Kind() == pref.EnumKind && enumName != "" { + tag = append(tag, "enum="+enumName) + } + if fd.ContainingOneof() != nil { + tag = append(tag, "oneof") + } + // This must appear last in the tag, since commas in strings aren't escaped. + if fd.HasDefault() { + def, _ := defval.Marshal(fd.Default(), fd.DefaultEnumValue(), fd.Kind(), defval.GoTag) + tag = append(tag, "def="+def) + } + return strings.Join(tag, ",") +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/decode.go b/vendor/google.golang.org/protobuf/internal/encoding/text/decode.go new file mode 100644 index 00000000..eb10ea10 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/decode.go @@ -0,0 +1,665 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package text + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strconv" + "unicode/utf8" + + "google.golang.org/protobuf/internal/errors" +) + +// Decoder is a token-based textproto decoder. +type Decoder struct { + // lastCall is last method called, either readCall or peekCall. + // Initial value is readCall. + lastCall call + + // lastToken contains the last read token. + lastToken Token + + // lastErr contains the last read error. + lastErr error + + // openStack is a stack containing the byte characters for MessageOpen and + // ListOpen kinds. The top of stack represents the message or the list that + // the current token is nested in. An empty stack means the current token is + // at the top level message. The characters '{' and '<' both represent the + // MessageOpen kind. 
+ openStack []byte + + // orig is used in reporting line and column. + orig []byte + // in contains the unconsumed input. + in []byte +} + +// NewDecoder returns a Decoder to read the given []byte. +func NewDecoder(b []byte) *Decoder { + return &Decoder{orig: b, in: b} +} + +// ErrUnexpectedEOF means that EOF was encountered in the middle of the input. +var ErrUnexpectedEOF = errors.New("%v", io.ErrUnexpectedEOF) + +// call specifies which Decoder method was invoked. +type call uint8 + +const ( + readCall call = iota + peekCall +) + +// Peek looks ahead and returns the next token and error without advancing a read. +func (d *Decoder) Peek() (Token, error) { + defer func() { d.lastCall = peekCall }() + if d.lastCall == readCall { + d.lastToken, d.lastErr = d.Read() + } + return d.lastToken, d.lastErr +} + +// Read returns the next token. +// It will return an error if there is no valid token. +func (d *Decoder) Read() (Token, error) { + defer func() { d.lastCall = readCall }() + if d.lastCall == peekCall { + return d.lastToken, d.lastErr + } + + tok, err := d.parseNext(d.lastToken.kind) + if err != nil { + return Token{}, err + } + + switch tok.kind { + case comma, semicolon: + tok, err = d.parseNext(tok.kind) + if err != nil { + return Token{}, err + } + } + d.lastToken = tok + return tok, nil +} + +const ( + mismatchedFmt = "mismatched close character %q" + unexpectedFmt = "unexpected character %q" +) + +// parseNext parses the next Token based on given last kind. +func (d *Decoder) parseNext(lastKind Kind) (Token, error) { + // Trim leading spaces. + d.consume(0) + isEOF := false + if len(d.in) == 0 { + isEOF = true + } + + switch lastKind { + case EOF: + return d.consumeToken(EOF, 0, 0), nil + + case bof: + // Start of top level message. Next token can be EOF or Name. + if isEOF { + return d.consumeToken(EOF, 0, 0), nil + } + return d.parseFieldName() + + case Name: + // Next token can be MessageOpen, ListOpen or Scalar. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case '{', '<': + d.pushOpenStack(ch) + return d.consumeToken(MessageOpen, 1, 0), nil + case '[': + d.pushOpenStack(ch) + return d.consumeToken(ListOpen, 1, 0), nil + default: + return d.parseScalar() + } + + case Scalar: + openKind, closeCh := d.currentOpenKind() + switch openKind { + case bof: + // Top level message. + // Next token can be EOF, comma, semicolon or Name. + if isEOF { + return d.consumeToken(EOF, 0, 0), nil + } + switch d.in[0] { + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + case MessageOpen: + // Next token can be MessageClose, comma, semicolon or Name. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(MessageClose, 1, 0), nil + case otherCloseChar[closeCh]: + return Token{}, d.newSyntaxError(mismatchedFmt, ch) + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + case ListOpen: + // Next token can be ListClose or comma. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case ']': + d.popOpenStack() + return d.consumeToken(ListClose, 1, 0), nil + case ',': + return d.consumeToken(comma, 1, 0), nil + default: + return Token{}, d.newSyntaxError(unexpectedFmt, ch) + } + } + + case MessageOpen: + // Next token can be MessageClose or Name. 
+ if isEOF { + return Token{}, ErrUnexpectedEOF + } + _, closeCh := d.currentOpenKind() + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(MessageClose, 1, 0), nil + case otherCloseChar[closeCh]: + return Token{}, d.newSyntaxError(mismatchedFmt, ch) + default: + return d.parseFieldName() + } + + case MessageClose: + openKind, closeCh := d.currentOpenKind() + switch openKind { + case bof: + // Top level message. + // Next token can be EOF, comma, semicolon or Name. + if isEOF { + return d.consumeToken(EOF, 0, 0), nil + } + switch ch := d.in[0]; ch { + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + case MessageOpen: + // Next token can be MessageClose, comma, semicolon or Name. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(MessageClose, 1, 0), nil + case otherCloseChar[closeCh]: + return Token{}, d.newSyntaxError(mismatchedFmt, ch) + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + case ListOpen: + // Next token can be ListClose or comma + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(ListClose, 1, 0), nil + case ',': + return d.consumeToken(comma, 1, 0), nil + default: + return Token{}, d.newSyntaxError(unexpectedFmt, ch) + } + } + + case ListOpen: + // Next token can be ListClose, MessageStart or Scalar. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case ']': + d.popOpenStack() + return d.consumeToken(ListClose, 1, 0), nil + case '{', '<': + d.pushOpenStack(ch) + return d.consumeToken(MessageOpen, 1, 0), nil + default: + return d.parseScalar() + } + + case ListClose: + openKind, closeCh := d.currentOpenKind() + switch openKind { + case bof: + // Top level message. + // Next token can be EOF, comma, semicolon or Name. + if isEOF { + return d.consumeToken(EOF, 0, 0), nil + } + switch ch := d.in[0]; ch { + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + case MessageOpen: + // Next token can be MessageClose, comma, semicolon or Name. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(MessageClose, 1, 0), nil + case otherCloseChar[closeCh]: + return Token{}, d.newSyntaxError(mismatchedFmt, ch) + case ',': + return d.consumeToken(comma, 1, 0), nil + case ';': + return d.consumeToken(semicolon, 1, 0), nil + default: + return d.parseFieldName() + } + + default: + // It is not possible to have this case. Let it panic below. + } + + case comma, semicolon: + openKind, closeCh := d.currentOpenKind() + switch openKind { + case bof: + // Top level message. Next token can be EOF or Name. + if isEOF { + return d.consumeToken(EOF, 0, 0), nil + } + return d.parseFieldName() + + case MessageOpen: + // Next token can be MessageClose or Name. 
+ if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case closeCh: + d.popOpenStack() + return d.consumeToken(MessageClose, 1, 0), nil + case otherCloseChar[closeCh]: + return Token{}, d.newSyntaxError(mismatchedFmt, ch) + default: + return d.parseFieldName() + } + + case ListOpen: + if lastKind == semicolon { + // It is not be possible to have this case as logic here + // should not have produced a semicolon Token when inside a + // list. Let it panic below. + break + } + // Next token can be MessageOpen or Scalar. + if isEOF { + return Token{}, ErrUnexpectedEOF + } + switch ch := d.in[0]; ch { + case '{', '<': + d.pushOpenStack(ch) + return d.consumeToken(MessageOpen, 1, 0), nil + default: + return d.parseScalar() + } + } + } + + line, column := d.Position(len(d.orig) - len(d.in)) + panic(fmt.Sprintf("Decoder.parseNext: bug at handling line %d:%d with lastKind=%v", line, column, lastKind)) +} + +var otherCloseChar = map[byte]byte{ + '}': '>', + '>': '}', +} + +// currentOpenKind indicates whether current position is inside a message, list +// or top-level message by returning MessageOpen, ListOpen or bof respectively. +// If the returned kind is either a MessageOpen or ListOpen, it also returns the +// corresponding closing character. +func (d *Decoder) currentOpenKind() (Kind, byte) { + if len(d.openStack) == 0 { + return bof, 0 + } + openCh := d.openStack[len(d.openStack)-1] + switch openCh { + case '{': + return MessageOpen, '}' + case '<': + return MessageOpen, '>' + case '[': + return ListOpen, ']' + } + panic(fmt.Sprintf("Decoder: openStack contains invalid byte %s", string(openCh))) +} + +func (d *Decoder) pushOpenStack(ch byte) { + d.openStack = append(d.openStack, ch) +} + +func (d *Decoder) popOpenStack() { + d.openStack = d.openStack[:len(d.openStack)-1] +} + +// parseFieldName parses field name and separator. +func (d *Decoder) parseFieldName() (tok Token, err error) { + defer func() { + if err == nil && d.tryConsumeChar(':') { + tok.attrs |= hasSeparator + } + }() + + // Extension or Any type URL. + if d.in[0] == '[' { + return d.parseTypeName() + } + + // Identifier. + if size := parseIdent(d.in, false); size > 0 { + return d.consumeToken(Name, size, uint8(IdentName)), nil + } + + // Field number. Identify if input is a valid number that is not negative + // and is decimal integer within 32-bit range. + if num := parseNumber(d.in); num.size > 0 { + if !num.neg && num.kind == numDec { + if _, err := strconv.ParseInt(string(d.in[:num.size]), 10, 32); err == nil { + return d.consumeToken(Name, num.size, uint8(FieldNumber)), nil + } + } + return Token{}, d.newSyntaxError("invalid field number: %s", d.in[:num.size]) + } + + return Token{}, d.newSyntaxError("invalid field name: %s", errRegexp.Find(d.in)) +} + +// parseTypeName parses Any type URL or extension field name. The name is +// enclosed in [ and ] characters. The C++ parser does not handle many legal URL +// strings. This implementation is more liberal and allows for the pattern +// ^[-_a-zA-Z0-9]+([./][-_a-zA-Z0-9]+)*`). Whitespaces and comments are allowed +// in between [ ], '.', '/' and the sub names. +func (d *Decoder) parseTypeName() (Token, error) { + startPos := len(d.orig) - len(d.in) + // Use alias s to advance first in order to use d.in for error handling. + // Caller already checks for [ as first character. 
+ s := consume(d.in[1:], 0) + if len(s) == 0 { + return Token{}, ErrUnexpectedEOF + } + + var name []byte + for len(s) > 0 && isTypeNameChar(s[0]) { + name = append(name, s[0]) + s = s[1:] + } + s = consume(s, 0) + + var closed bool + for len(s) > 0 && !closed { + switch { + case s[0] == ']': + s = s[1:] + closed = true + + case s[0] == '/', s[0] == '.': + if len(name) > 0 && (name[len(name)-1] == '/' || name[len(name)-1] == '.') { + return Token{}, d.newSyntaxError("invalid type URL/extension field name: %s", + d.orig[startPos:len(d.orig)-len(s)+1]) + } + name = append(name, s[0]) + s = s[1:] + s = consume(s, 0) + for len(s) > 0 && isTypeNameChar(s[0]) { + name = append(name, s[0]) + s = s[1:] + } + s = consume(s, 0) + + default: + return Token{}, d.newSyntaxError( + "invalid type URL/extension field name: %s", d.orig[startPos:len(d.orig)-len(s)+1]) + } + } + + if !closed { + return Token{}, ErrUnexpectedEOF + } + + // First character cannot be '.'. Last character cannot be '.' or '/'. + size := len(name) + if size == 0 || name[0] == '.' || name[size-1] == '.' || name[size-1] == '/' { + return Token{}, d.newSyntaxError("invalid type URL/extension field name: %s", + d.orig[startPos:len(d.orig)-len(s)]) + } + + d.in = s + endPos := len(d.orig) - len(d.in) + d.consume(0) + + return Token{ + kind: Name, + attrs: uint8(TypeName), + pos: startPos, + raw: d.orig[startPos:endPos], + str: string(name), + }, nil +} + +func isTypeNameChar(b byte) bool { + return (b == '-' || b == '_' || + ('0' <= b && b <= '9') || + ('a' <= b && b <= 'z') || + ('A' <= b && b <= 'Z')) +} + +func isWhiteSpace(b byte) bool { + switch b { + case ' ', '\n', '\r', '\t': + return true + default: + return false + } +} + +// parseIdent parses an unquoted proto identifier and returns size. +// If allowNeg is true, it allows '-' to be the first character in the +// identifier. This is used when parsing literal values like -infinity, etc. +// Regular expression matches an identifier: `^[_a-zA-Z][_a-zA-Z0-9]*` +func parseIdent(input []byte, allowNeg bool) int { + var size int + + s := input + if len(s) == 0 { + return 0 + } + + if allowNeg && s[0] == '-' { + s = s[1:] + size++ + if len(s) == 0 { + return 0 + } + } + + switch { + case s[0] == '_', + 'a' <= s[0] && s[0] <= 'z', + 'A' <= s[0] && s[0] <= 'Z': + s = s[1:] + size++ + default: + return 0 + } + + for len(s) > 0 && (s[0] == '_' || + 'a' <= s[0] && s[0] <= 'z' || + 'A' <= s[0] && s[0] <= 'Z' || + '0' <= s[0] && s[0] <= '9') { + s = s[1:] + size++ + } + + if len(s) > 0 && !isDelim(s[0]) { + return 0 + } + + return size +} + +// parseScalar parses for a string, literal or number value. +func (d *Decoder) parseScalar() (Token, error) { + if d.in[0] == '"' || d.in[0] == '\'' { + return d.parseStringValue() + } + + if tok, ok := d.parseLiteralValue(); ok { + return tok, nil + } + + if tok, ok := d.parseNumberValue(); ok { + return tok, nil + } + + return Token{}, d.newSyntaxError("invalid scalar value: %s", errRegexp.Find(d.in)) +} + +// parseLiteralValue parses a literal value. A literal value is used for +// bools, special floats and enums. This function simply identifies that the +// field value is a literal. +func (d *Decoder) parseLiteralValue() (Token, bool) { + size := parseIdent(d.in, true) + if size == 0 { + return Token{}, false + } + return d.consumeToken(Scalar, size, literalValue), true +} + +// consumeToken constructs a Token for given Kind from d.in and consumes given +// size-length from it. 
+func (d *Decoder) consumeToken(kind Kind, size int, attrs uint8) Token { + // Important to compute raw and pos before consuming. + tok := Token{ + kind: kind, + attrs: attrs, + pos: len(d.orig) - len(d.in), + raw: d.in[:size], + } + d.consume(size) + return tok +} + +// newSyntaxError returns a syntax error with line and column information for +// current position. +func (d *Decoder) newSyntaxError(f string, x ...interface{}) error { + e := errors.New(f, x...) + line, column := d.Position(len(d.orig) - len(d.in)) + return errors.New("syntax error (line %d:%d): %v", line, column, e) +} + +// Position returns line and column number of given index of the original input. +// It will panic if index is out of range. +func (d *Decoder) Position(idx int) (line int, column int) { + b := d.orig[:idx] + line = bytes.Count(b, []byte("\n")) + 1 + if i := bytes.LastIndexByte(b, '\n'); i >= 0 { + b = b[i+1:] + } + column = utf8.RuneCount(b) + 1 // ignore multi-rune characters + return line, column +} + +func (d *Decoder) tryConsumeChar(c byte) bool { + if len(d.in) > 0 && d.in[0] == c { + d.consume(1) + return true + } + return false +} + +// consume consumes n bytes of input and any subsequent whitespace or comments. +func (d *Decoder) consume(n int) { + d.in = consume(d.in, n) + return +} + +// consume consumes n bytes of input and any subsequent whitespace or comments. +func consume(b []byte, n int) []byte { + b = b[n:] + for len(b) > 0 { + switch b[0] { + case ' ', '\n', '\r', '\t': + b = b[1:] + case '#': + if i := bytes.IndexByte(b, '\n'); i >= 0 { + b = b[i+len("\n"):] + } else { + b = nil + } + default: + return b + } + } + return b +} + +// Any sequence that looks like a non-delimiter (for error reporting). +var errRegexp = regexp.MustCompile(`^([-+._a-zA-Z0-9\/]+|.)`) + +// isDelim returns true if given byte is a delimiter character. +func isDelim(c byte) bool { + return !(c == '-' || c == '+' || c == '.' || c == '_' || + ('a' <= c && c <= 'z') || + ('A' <= c && c <= 'Z') || + ('0' <= c && c <= '9')) +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/decode_number.go b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_number.go new file mode 100644 index 00000000..f2d90b78 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_number.go @@ -0,0 +1,190 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package text + +// parseNumberValue parses a number from the input and returns a Token object. +func (d *Decoder) parseNumberValue() (Token, bool) { + in := d.in + num := parseNumber(in) + if num.size == 0 { + return Token{}, false + } + numAttrs := num.kind + if num.neg { + numAttrs |= isNegative + } + strSize := num.size + last := num.size - 1 + if num.kind == numFloat && (d.in[last] == 'f' || d.in[last] == 'F') { + strSize = last + } + tok := Token{ + kind: Scalar, + attrs: numberValue, + pos: len(d.orig) - len(d.in), + raw: d.in[:num.size], + str: string(d.in[:strSize]), + numAttrs: numAttrs, + } + d.consume(num.size) + return tok, true +} + +const ( + numDec uint8 = (1 << iota) / 2 + numHex + numOct + numFloat +) + +// number is the result of parsing out a valid number from parseNumber. It +// contains data for doing float or integer conversion via the strconv package +// in conjunction with the input bytes. 
+type number struct { + kind uint8 + neg bool + size int +} + +// parseNumber constructs a number object from given input. It allows for the +// following patterns: +// integer: ^-?([1-9][0-9]*|0[xX][0-9a-fA-F]+|0[0-7]*) +// float: ^-?((0|[1-9][0-9]*)?([.][0-9]*)?([eE][+-]?[0-9]+)?[fF]?) +// It also returns the number of parsed bytes for the given number, 0 if it is +// not a number. +func parseNumber(input []byte) number { + kind := numDec + var size int + var neg bool + + s := input + if len(s) == 0 { + return number{} + } + + // Optional - + if s[0] == '-' { + neg = true + s = s[1:] + size++ + if len(s) == 0 { + return number{} + } + } + + // C++ allows for whitespace and comments in between the negative sign and + // the rest of the number. This logic currently does not but is consistent + // with v1. + + switch { + case s[0] == '0': + if len(s) > 1 { + switch { + case s[1] == 'x' || s[1] == 'X': + // Parse as hex number. + kind = numHex + n := 2 + s = s[2:] + for len(s) > 0 && (('0' <= s[0] && s[0] <= '9') || + ('a' <= s[0] && s[0] <= 'f') || + ('A' <= s[0] && s[0] <= 'F')) { + s = s[1:] + n++ + } + if n == 2 { + return number{} + } + size += n + + case '0' <= s[1] && s[1] <= '7': + // Parse as octal number. + kind = numOct + n := 2 + s = s[2:] + for len(s) > 0 && '0' <= s[0] && s[0] <= '7' { + s = s[1:] + n++ + } + size += n + } + + if kind&(numHex|numOct) > 0 { + if len(s) > 0 && !isDelim(s[0]) { + return number{} + } + return number{kind: kind, neg: neg, size: size} + } + } + s = s[1:] + size++ + + case '1' <= s[0] && s[0] <= '9': + n := 1 + s = s[1:] + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + n++ + } + size += n + + case s[0] == '.': + // Set kind to numFloat to signify the intent to parse as float. And + // that it needs to have other digits after '.'. + kind = numFloat + + default: + return number{} + } + + // . followed by 0 or more digits. + if len(s) > 0 && s[0] == '.' { + n := 1 + s = s[1:] + // If decimal point was before any digits, it should be followed by + // other digits. + if len(s) == 0 && kind == numFloat { + return number{} + } + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + n++ + } + size += n + kind = numFloat + } + + // e or E followed by an optional - or + and 1 or more digits. + if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') { + kind = numFloat + s = s[1:] + n := 1 + if s[0] == '+' || s[0] == '-' { + s = s[1:] + n++ + if len(s) == 0 { + return number{} + } + } + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + n++ + } + size += n + } + + // Optional suffix f or F for floats. + if len(s) > 0 && (s[0] == 'f' || s[0] == 'F') { + kind = numFloat + s = s[1:] + size++ + } + + // Check that next byte is a delimiter or it is at the end. + if len(s) > 0 && !isDelim(s[0]) { + return number{} + } + + return number{kind: kind, neg: neg, size: size} +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/decode_string.go b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_string.go new file mode 100644 index 00000000..d4d34902 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_string.go @@ -0,0 +1,161 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package text + +import ( + "bytes" + "strconv" + "strings" + "unicode" + "unicode/utf16" + "unicode/utf8" + + "google.golang.org/protobuf/internal/strs" +) + +// parseStringValue parses string field token. +// This differs from parseString since the text format allows +// multiple back-to-back string literals where they are semantically treated +// as a single large string with all values concatenated. +// +// E.g., `"foo" "bar" "baz"` => "foobarbaz" +func (d *Decoder) parseStringValue() (Token, error) { + // Note that the ending quote is sufficient to unambiguously mark the end + // of a string. Thus, the text grammar does not require intervening + // whitespace or control characters in-between strings. + // Thus, the following is valid: + // `"foo"'bar'"baz"` => "foobarbaz" + in0 := d.in + var ss []string + for len(d.in) > 0 && (d.in[0] == '"' || d.in[0] == '\'') { + s, err := d.parseString() + if err != nil { + return Token{}, err + } + ss = append(ss, s) + } + // d.in already points to the end of the value at this point. + return Token{ + kind: Scalar, + attrs: stringValue, + pos: len(d.orig) - len(in0), + raw: in0[:len(in0)-len(d.in)], + str: strings.Join(ss, ""), + }, nil +} + +// parseString parses a string value enclosed in " or '. +func (d *Decoder) parseString() (string, error) { + in := d.in + if len(in) == 0 { + return "", ErrUnexpectedEOF + } + quote := in[0] + in = in[1:] + i := indexNeedEscapeInBytes(in) + in, out := in[i:], in[:i:i] // set cap to prevent mutations + for len(in) > 0 { + switch r, n := utf8.DecodeRune(in); { + case r == utf8.RuneError && n == 1: + return "", d.newSyntaxError("invalid UTF-8 detected") + case r == 0 || r == '\n': + return "", d.newSyntaxError("invalid character %q in string", r) + case r == rune(quote): + in = in[1:] + d.consume(len(d.in) - len(in)) + return string(out), nil + case r == '\\': + if len(in) < 2 { + return "", ErrUnexpectedEOF + } + switch r := in[1]; r { + case '"', '\'', '\\', '?': + in, out = in[2:], append(out, r) + case 'a': + in, out = in[2:], append(out, '\a') + case 'b': + in, out = in[2:], append(out, '\b') + case 'n': + in, out = in[2:], append(out, '\n') + case 'r': + in, out = in[2:], append(out, '\r') + case 't': + in, out = in[2:], append(out, '\t') + case 'v': + in, out = in[2:], append(out, '\v') + case 'f': + in, out = in[2:], append(out, '\f') + case '0', '1', '2', '3', '4', '5', '6', '7': + // One, two, or three octal characters. + n := len(in[1:]) - len(bytes.TrimLeft(in[1:], "01234567")) + if n > 3 { + n = 3 + } + v, err := strconv.ParseUint(string(in[1:1+n]), 8, 8) + if err != nil { + return "", d.newSyntaxError("invalid octal escape code %q in string", in[:1+n]) + } + in, out = in[1+n:], append(out, byte(v)) + case 'x': + // One or two hexadecimal characters. 
+ n := len(in[2:]) - len(bytes.TrimLeft(in[2:], "0123456789abcdefABCDEF")) + if n > 2 { + n = 2 + } + v, err := strconv.ParseUint(string(in[2:2+n]), 16, 8) + if err != nil { + return "", d.newSyntaxError("invalid hex escape code %q in string", in[:2+n]) + } + in, out = in[2+n:], append(out, byte(v)) + case 'u', 'U': + // Four or eight hexadecimal characters + n := 6 + if r == 'U' { + n = 10 + } + if len(in) < n { + return "", ErrUnexpectedEOF + } + v, err := strconv.ParseUint(string(in[2:n]), 16, 32) + if utf8.MaxRune < v || err != nil { + return "", d.newSyntaxError("invalid Unicode escape code %q in string", in[:n]) + } + in = in[n:] + + r := rune(v) + if utf16.IsSurrogate(r) { + if len(in) < 6 { + return "", ErrUnexpectedEOF + } + v, err := strconv.ParseUint(string(in[2:6]), 16, 16) + r = utf16.DecodeRune(r, rune(v)) + if in[0] != '\\' || in[1] != 'u' || r == unicode.ReplacementChar || err != nil { + return "", d.newSyntaxError("invalid Unicode escape code %q in string", in[:6]) + } + in = in[6:] + } + out = append(out, string(r)...) + default: + return "", d.newSyntaxError("invalid escape code %q in string", in[:2]) + } + default: + i := indexNeedEscapeInBytes(in[n:]) + in, out = in[n+i:], append(out, in[:n+i]...) + } + } + return "", ErrUnexpectedEOF +} + +// indexNeedEscapeInString returns the index of the character that needs +// escaping. If no characters need escaping, this returns the input length. +func indexNeedEscapeInBytes(b []byte) int { return indexNeedEscapeInString(strs.UnsafeString(b)) } + +// UnmarshalString returns an unescaped string given a textproto string value. +// String value needs to contain single or double quotes. This is only used by +// internal/encoding/defval package for unmarshaling bytes. +func UnmarshalString(s string) (string, error) { + d := NewDecoder([]byte(s)) + return d.parseString() +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/decode_token.go b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_token.go new file mode 100644 index 00000000..83d2b0d5 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/decode_token.go @@ -0,0 +1,373 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package text + +import ( + "bytes" + "fmt" + "math" + "strconv" + "strings" + + "google.golang.org/protobuf/internal/flags" +) + +// Kind represents a token kind expressible in the textproto format. +type Kind uint8 + +// Kind values. +const ( + Invalid Kind = iota + EOF + Name // Name indicates the field name. + Scalar // Scalar are scalar values, e.g. "string", 47, ENUM_LITERAL, true. + MessageOpen + MessageClose + ListOpen + ListClose + + // comma and semi-colon are only for parsing in between values and should not be exposed. + comma + semicolon + + // bof indicates beginning of file, which is the default token + // kind at the beginning of parsing. + bof = Invalid +) + +func (t Kind) String() string { + switch t { + case Invalid: + return "" + case EOF: + return "eof" + case Scalar: + return "scalar" + case Name: + return "name" + case MessageOpen: + return "{" + case MessageClose: + return "}" + case ListOpen: + return "[" + case ListClose: + return "]" + case comma: + return "," + case semicolon: + return ";" + default: + return fmt.Sprintf("", uint8(t)) + } +} + +// NameKind represents different types of field names. +type NameKind uint8 + +// NameKind values. 
+const ( + IdentName NameKind = iota + 1 + TypeName + FieldNumber +) + +func (t NameKind) String() string { + switch t { + case IdentName: + return "IdentName" + case TypeName: + return "TypeName" + case FieldNumber: + return "FieldNumber" + default: + return fmt.Sprintf("", uint8(t)) + } +} + +// Bit mask in Token.attrs to indicate if a Name token is followed by the +// separator char ':'. The field name separator char is optional for message +// field or repeated message field, but required for all other types. Decoder +// simply indicates whether a Name token is followed by separator or not. It is +// up to the prototext package to validate. +const hasSeparator = 1 << 7 + +// Scalar value types. +const ( + numberValue = iota + 1 + stringValue + literalValue +) + +// Bit mask in Token.numAttrs to indicate that the number is a negative. +const isNegative = 1 << 7 + +// Token provides a parsed token kind and value. Values are provided by the +// different accessor methods. +type Token struct { + // Kind of the Token object. + kind Kind + // attrs contains metadata for the following Kinds: + // Name: hasSeparator bit and one of NameKind. + // Scalar: one of numberValue, stringValue, literalValue. + attrs uint8 + // numAttrs contains metadata for numberValue: + // - highest bit is whether negative or positive. + // - lower bits indicate one of numDec, numHex, numOct, numFloat. + numAttrs uint8 + // pos provides the position of the token in the original input. + pos int + // raw bytes of the serialized token. + // This is a subslice into the original input. + raw []byte + // str contains parsed string for the following: + // - stringValue of Scalar kind + // - numberValue of Scalar kind + // - TypeName of Name kind + str string +} + +// Kind returns the token kind. +func (t Token) Kind() Kind { + return t.kind +} + +// RawString returns the read value in string. +func (t Token) RawString() string { + return string(t.raw) +} + +// Pos returns the token position from the input. +func (t Token) Pos() int { + return t.pos +} + +// NameKind returns IdentName, TypeName or FieldNumber. +// It panics if type is not Name. +func (t Token) NameKind() NameKind { + if t.kind == Name { + return NameKind(t.attrs &^ hasSeparator) + } + panic(fmt.Sprintf("Token is not a Name type: %s", t.kind)) +} + +// HasSeparator returns true if the field name is followed by the separator char +// ':', else false. It panics if type is not Name. +func (t Token) HasSeparator() bool { + if t.kind == Name { + return t.attrs&hasSeparator != 0 + } + panic(fmt.Sprintf("Token is not a Name type: %s", t.kind)) +} + +// IdentName returns the value for IdentName type. +func (t Token) IdentName() string { + if t.kind == Name && t.attrs&uint8(IdentName) != 0 { + return string(t.raw) + } + panic(fmt.Sprintf("Token is not an IdentName: %s:%s", t.kind, NameKind(t.attrs&^hasSeparator))) +} + +// TypeName returns the value for TypeName type. +func (t Token) TypeName() string { + if t.kind == Name && t.attrs&uint8(TypeName) != 0 { + return t.str + } + panic(fmt.Sprintf("Token is not a TypeName: %s:%s", t.kind, NameKind(t.attrs&^hasSeparator))) +} + +// FieldNumber returns the value for FieldNumber type. It returns a +// non-negative int32 value. Caller will still need to validate for the correct +// field number range. 
+func (t Token) FieldNumber() int32 { + if t.kind != Name || t.attrs&uint8(FieldNumber) == 0 { + panic(fmt.Sprintf("Token is not a FieldNumber: %s:%s", t.kind, NameKind(t.attrs&^hasSeparator))) + } + // Following should not return an error as it had already been called right + // before this Token was constructed. + num, _ := strconv.ParseInt(string(t.raw), 10, 32) + return int32(num) +} + +// String returns the string value for a Scalar type. +func (t Token) String() (string, bool) { + if t.kind != Scalar || t.attrs != stringValue { + return "", false + } + return t.str, true +} + +// Enum returns the literal value for a Scalar type for use as enum literals. +func (t Token) Enum() (string, bool) { + if t.kind != Scalar || t.attrs != literalValue || (len(t.raw) > 0 && t.raw[0] == '-') { + return "", false + } + return string(t.raw), true +} + +// Bool returns the bool value for a Scalar type. +func (t Token) Bool() (bool, bool) { + if t.kind != Scalar { + return false, false + } + switch t.attrs { + case literalValue: + if b, ok := boolLits[string(t.raw)]; ok { + return b, true + } + case numberValue: + // Unsigned integer representation of 0 or 1 is permitted: 00, 0x0, 01, + // 0x1, etc. + n, err := strconv.ParseUint(t.str, 0, 64) + if err == nil { + switch n { + case 0: + return false, true + case 1: + return true, true + } + } + } + return false, false +} + +// These exact boolean literals are the ones supported in C++. +var boolLits = map[string]bool{ + "t": true, + "true": true, + "True": true, + "f": false, + "false": false, + "False": false, +} + +// Uint64 returns the uint64 value for a Scalar type. +func (t Token) Uint64() (uint64, bool) { + if t.kind != Scalar || t.attrs != numberValue || + t.numAttrs&isNegative > 0 || t.numAttrs&numFloat > 0 { + return 0, false + } + n, err := strconv.ParseUint(t.str, 0, 64) + if err != nil { + return 0, false + } + return n, true +} + +// Uint32 returns the uint32 value for a Scalar type. +func (t Token) Uint32() (uint32, bool) { + if t.kind != Scalar || t.attrs != numberValue || + t.numAttrs&isNegative > 0 || t.numAttrs&numFloat > 0 { + return 0, false + } + n, err := strconv.ParseUint(t.str, 0, 32) + if err != nil { + return 0, false + } + return uint32(n), true +} + +// Int64 returns the int64 value for a Scalar type. +func (t Token) Int64() (int64, bool) { + if t.kind != Scalar || t.attrs != numberValue || t.numAttrs&numFloat > 0 { + return 0, false + } + if n, err := strconv.ParseInt(t.str, 0, 64); err == nil { + return n, true + } + // C++ accepts large positive hex numbers as negative values. + // This feature is here for proto1 backwards compatibility purposes. + if flags.ProtoLegacy && (t.numAttrs == numHex) { + if n, err := strconv.ParseUint(t.str, 0, 64); err == nil { + return int64(n), true + } + } + return 0, false +} + +// Int32 returns the int32 value for a Scalar type. +func (t Token) Int32() (int32, bool) { + if t.kind != Scalar || t.attrs != numberValue || t.numAttrs&numFloat > 0 { + return 0, false + } + if n, err := strconv.ParseInt(t.str, 0, 32); err == nil { + return int32(n), true + } + // C++ accepts large positive hex numbers as negative values. + // This feature is here for proto1 backwards compatibility purposes. + if flags.ProtoLegacy && (t.numAttrs == numHex) { + if n, err := strconv.ParseUint(t.str, 0, 32); err == nil { + return int32(n), true + } + } + return 0, false +} + +// Float64 returns the float64 value for a Scalar type. 
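A sketch of pulling typed scalars out of the Decoder with the Token accessors above. Illustrative only: the text package is internal to the protobuf module, and the input is made up.

package main

import (
	"fmt"

	"google.golang.org/protobuf/internal/encoding/text"
)

func main() {
	d := text.NewDecoder([]byte(`enabled: True limit: 0x1F`))

	// Each field arrives as a Name token followed by a Scalar token.
	readScalar := func() text.Token {
		if _, err := d.Read(); err != nil { // field name
			panic(err)
		}
		tok, err := d.Read() // field value
		if err != nil {
			panic(err)
		}
		return tok
	}

	b, _ := readScalar().Bool()  // "True" is one of the accepted bool literals
	n, _ := readScalar().Int32() // base-0 parsing accepts hex: 0x1F == 31
	fmt.Println(b, n)            // true 31
}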
+func (t Token) Float64() (float64, bool) { + if t.kind != Scalar { + return 0, false + } + switch t.attrs { + case literalValue: + if f, ok := floatLits[strings.ToLower(string(t.raw))]; ok { + return f, true + } + case numberValue: + n, err := strconv.ParseFloat(t.str, 64) + if err == nil { + return n, true + } + nerr := err.(*strconv.NumError) + if nerr.Err == strconv.ErrRange { + return n, true + } + } + return 0, false +} + +// Float32 returns the float32 value for a Scalar type. +func (t Token) Float32() (float32, bool) { + if t.kind != Scalar { + return 0, false + } + switch t.attrs { + case literalValue: + if f, ok := floatLits[strings.ToLower(string(t.raw))]; ok { + return float32(f), true + } + case numberValue: + n, err := strconv.ParseFloat(t.str, 64) + if err == nil { + // Overflows are treated as (-)infinity. + return float32(n), true + } + nerr := err.(*strconv.NumError) + if nerr.Err == strconv.ErrRange { + return float32(n), true + } + } + return 0, false +} + +// These are the supported float literals which C++ permits case-insensitive +// variants of these. +var floatLits = map[string]float64{ + "nan": math.NaN(), + "inf": math.Inf(1), + "infinity": math.Inf(1), + "-inf": math.Inf(-1), + "-infinity": math.Inf(-1), +} + +// TokenEquals returns true if given Tokens are equal, else false. +func TokenEquals(x, y Token) bool { + return x.kind == y.kind && + x.attrs == y.attrs && + x.numAttrs == y.numAttrs && + x.pos == y.pos && + bytes.Equal(x.raw, y.raw) && + x.str == y.str +} diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/doc.go b/vendor/google.golang.org/protobuf/internal/encoding/text/doc.go new file mode 100644 index 00000000..0ce8d6fb --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/doc.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package text implements the text format for protocol buffers. +// This package has no semantic understanding for protocol buffers and is only +// a parser and composer for the format. +// +// There is no formal specification for the protobuf text format, as such the +// C++ implementation (see google::protobuf::TextFormat) is the reference +// implementation of the text format. +// +// This package is neither a superset nor a subset of the C++ implementation. +// This implementation permits a more liberal grammar in some cases to be +// backwards compatible with the historical Go implementation. +// Future parsings unique to Go should not be added. +// Some grammars allowed by the C++ implementation are deliberately +// not implemented here because they are considered a bug by the protobuf team +// and should not be replicated. +// +// The Go implementation should implement a sufficient amount of the C++ +// grammar such that the default text serialization by C++ can be parsed by Go. +// However, just because the C++ parser accepts some input does not mean that +// the Go implementation should as well. 
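For orientation, a sketch of producing the format described here with the Encoder defined in encode.go later in this diff. Illustrative only: the package is internal to the protobuf module and the field names are invented.

package main

import (
	"fmt"

	"google.golang.org/protobuf/internal/encoding/text"
)

func main() {
	// Two-space indent, curly-brace delimiters, UTF-8 output.
	e, err := text.NewEncoder("  ", [2]byte{'{', '}'}, false)
	if err != nil {
		panic(err)
	}
	e.WriteName("name")
	e.WriteString("foo")
	e.WriteName("options")
	e.StartMessage()
	e.WriteName("retries")
	e.WriteInt(3)
	e.EndMessage()

	// Roughly:
	//   name: "foo"
	//   options: {
	//     retries: 3
	//   }
	// Exact spacing varies: the encoder deliberately injects nondeterministic
	// whitespace to discourage relying on a stable serialization.
	fmt.Printf("%s\n", e.Bytes())
}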
+// +// The text format is almost a superset of JSON except: +// * message keys are not quoted strings, but identifiers +// * the top-level value must be a message without the delimiters +package text diff --git a/vendor/google.golang.org/protobuf/internal/encoding/text/encode.go b/vendor/google.golang.org/protobuf/internal/encoding/text/encode.go new file mode 100644 index 00000000..aa66bdd0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/encoding/text/encode.go @@ -0,0 +1,265 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package text + +import ( + "math" + "math/bits" + "strconv" + "strings" + "unicode/utf8" + + "google.golang.org/protobuf/internal/detrand" + "google.golang.org/protobuf/internal/errors" +) + +// encType represents an encoding type. +type encType uint8 + +const ( + _ encType = (1 << iota) / 2 + name + scalar + messageOpen + messageClose +) + +// Encoder provides methods to write out textproto constructs and values. The user is +// responsible for producing valid sequences of constructs and values. +type Encoder struct { + encoderState + + indent string + delims [2]byte + outputASCII bool +} + +type encoderState struct { + lastType encType + indents []byte + out []byte +} + +// NewEncoder returns an Encoder. +// +// If indent is a non-empty string, it causes every entry in a List or Message +// to be preceded by the indent and trailed by a newline. +// +// If delims is not the zero value, it controls the delimiter characters used +// for messages (e.g., "{}" vs "<>"). +// +// If outputASCII is true, strings will be serialized in such a way that +// multi-byte UTF-8 sequences are escaped. This property ensures that the +// overall output is ASCII (as opposed to UTF-8). +func NewEncoder(indent string, delims [2]byte, outputASCII bool) (*Encoder, error) { + e := &Encoder{} + if len(indent) > 0 { + if strings.Trim(indent, " \t") != "" { + return nil, errors.New("indent may only be composed of space and tab characters") + } + e.indent = indent + } + switch delims { + case [2]byte{0, 0}: + e.delims = [2]byte{'{', '}'} + case [2]byte{'{', '}'}, [2]byte{'<', '>'}: + e.delims = delims + default: + return nil, errors.New("delimiters may only be \"{}\" or \"<>\"") + } + e.outputASCII = outputASCII + + return e, nil +} + +// Bytes returns the content of the written bytes. +func (e *Encoder) Bytes() []byte { + return e.out +} + +// StartMessage writes out the '{' or '<' symbol. +func (e *Encoder) StartMessage() { + e.prepareNext(messageOpen) + e.out = append(e.out, e.delims[0]) +} + +// EndMessage writes out the '}' or '>' symbol. +func (e *Encoder) EndMessage() { + e.prepareNext(messageClose) + e.out = append(e.out, e.delims[1]) +} + +// WriteName writes out the field name and the separator ':'. +func (e *Encoder) WriteName(s string) { + e.prepareNext(name) + e.out = append(e.out, s...) + e.out = append(e.out, ':') +} + +// WriteBool writes out the given boolean value. +func (e *Encoder) WriteBool(b bool) { + if b { + e.WriteLiteral("true") + } else { + e.WriteLiteral("false") + } +} + +// WriteString writes out the given string value. +func (e *Encoder) WriteString(s string) { + e.prepareNext(scalar) + e.out = appendString(e.out, s, e.outputASCII) +} + +func appendString(out []byte, in string, outputASCII bool) []byte { + out = append(out, '"') + i := indexNeedEscapeInString(in) + in, out = in[i:], append(out, in[:i]...) 
+ for len(in) > 0 { + switch r, n := utf8.DecodeRuneInString(in); { + case r == utf8.RuneError && n == 1: + // We do not report invalid UTF-8 because strings in the text format + // are used to represent both the proto string and bytes type. + r = rune(in[0]) + fallthrough + case r < ' ' || r == '"' || r == '\\' || r == 0x7f: + out = append(out, '\\') + switch r { + case '"', '\\': + out = append(out, byte(r)) + case '\n': + out = append(out, 'n') + case '\r': + out = append(out, 'r') + case '\t': + out = append(out, 't') + default: + out = append(out, 'x') + out = append(out, "00"[1+(bits.Len32(uint32(r))-1)/4:]...) + out = strconv.AppendUint(out, uint64(r), 16) + } + in = in[n:] + case r >= utf8.RuneSelf && (outputASCII || r <= 0x009f): + out = append(out, '\\') + if r <= math.MaxUint16 { + out = append(out, 'u') + out = append(out, "0000"[1+(bits.Len32(uint32(r))-1)/4:]...) + out = strconv.AppendUint(out, uint64(r), 16) + } else { + out = append(out, 'U') + out = append(out, "00000000"[1+(bits.Len32(uint32(r))-1)/4:]...) + out = strconv.AppendUint(out, uint64(r), 16) + } + in = in[n:] + default: + i := indexNeedEscapeInString(in[n:]) + in, out = in[n+i:], append(out, in[:n+i]...) + } + } + out = append(out, '"') + return out +} + +// indexNeedEscapeInString returns the index of the character that needs +// escaping. If no characters need escaping, this returns the input length. +func indexNeedEscapeInString(s string) int { + for i := 0; i < len(s); i++ { + if c := s[i]; c < ' ' || c == '"' || c == '\'' || c == '\\' || c >= 0x7f { + return i + } + } + return len(s) +} + +// WriteFloat writes out the given float value for given bitSize. +func (e *Encoder) WriteFloat(n float64, bitSize int) { + e.prepareNext(scalar) + e.out = appendFloat(e.out, n, bitSize) +} + +func appendFloat(out []byte, n float64, bitSize int) []byte { + switch { + case math.IsNaN(n): + return append(out, "nan"...) + case math.IsInf(n, +1): + return append(out, "inf"...) + case math.IsInf(n, -1): + return append(out, "-inf"...) + default: + return strconv.AppendFloat(out, n, 'g', -1, bitSize) + } +} + +// WriteInt writes out the given signed integer value. +func (e *Encoder) WriteInt(n int64) { + e.prepareNext(scalar) + e.out = append(e.out, strconv.FormatInt(n, 10)...) +} + +// WriteUint writes out the given unsigned integer value. +func (e *Encoder) WriteUint(n uint64) { + e.prepareNext(scalar) + e.out = append(e.out, strconv.FormatUint(n, 10)...) +} + +// WriteLiteral writes out the given string as a literal value without quotes. +// This is used for writing enum literal strings. +func (e *Encoder) WriteLiteral(s string) { + e.prepareNext(scalar) + e.out = append(e.out, s...) +} + +// prepareNext adds possible space and indentation for the next value based +// on last encType and indent option. It also updates e.lastType to next. +func (e *Encoder) prepareNext(next encType) { + defer func() { + e.lastType = next + }() + + // Single line. + if len(e.indent) == 0 { + // Add space after each field before the next one. + if e.lastType&(scalar|messageClose) != 0 && next == name { + e.out = append(e.out, ' ') + // Add a random extra space to make output unstable. + if detrand.Bool() { + e.out = append(e.out, ' ') + } + } + return + } + + // Multi-line. + switch { + case e.lastType == name: + e.out = append(e.out, ' ') + // Add a random extra space after name: to make output unstable. 
+ if detrand.Bool() { + e.out = append(e.out, ' ') + } + + case e.lastType == messageOpen && next != messageClose: + e.indents = append(e.indents, e.indent...) + e.out = append(e.out, '\n') + e.out = append(e.out, e.indents...) + + case e.lastType&(scalar|messageClose) != 0: + if next == messageClose { + e.indents = e.indents[:len(e.indents)-len(e.indent)] + } + e.out = append(e.out, '\n') + e.out = append(e.out, e.indents...) + } +} + +// Snapshot returns the current snapshot for use in Reset. +func (e *Encoder) Snapshot() encoderState { + return e.encoderState +} + +// Reset resets the Encoder to the given encoderState from a Snapshot. +func (e *Encoder) Reset(es encoderState) { + e.encoderState = es +} diff --git a/vendor/google.golang.org/protobuf/internal/errors/errors.go b/vendor/google.golang.org/protobuf/internal/errors/errors.go new file mode 100644 index 00000000..20c17b35 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/errors/errors.go @@ -0,0 +1,89 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package errors implements functions to manipulate errors. +package errors + +import ( + "errors" + "fmt" + + "google.golang.org/protobuf/internal/detrand" +) + +// Error is a sentinel matching all errors produced by this package. +var Error = errors.New("protobuf error") + +// New formats a string according to the format specifier and arguments and +// returns an error that has a "proto" prefix. +func New(f string, x ...interface{}) error { + return &prefixError{s: format(f, x...)} +} + +type prefixError struct{ s string } + +var prefix = func() string { + // Deliberately introduce instability into the error message string to + // discourage users from performing error string comparisons. + if detrand.Bool() { + return "proto: " // use non-breaking spaces (U+00a0) + } else { + return "proto: " // use regular spaces (U+0020) + } +}() + +func (e *prefixError) Error() string { + return prefix + e.s +} + +func (e *prefixError) Unwrap() error { + return Error +} + +// Wrap returns an error that has a "proto" prefix, the formatted string described +// by the format specifier and arguments, and a suffix of err. The error wraps err. +func Wrap(err error, f string, x ...interface{}) error { + return &wrapError{ + s: format(f, x...), + err: err, + } +} + +type wrapError struct { + s string + err error +} + +func (e *wrapError) Error() string { + return format("%v%v: %v", prefix, e.s, e.err) +} + +func (e *wrapError) Unwrap() error { + return e.err +} + +func (e *wrapError) Is(target error) bool { + return target == Error +} + +func format(f string, x ...interface{}) string { + // avoid "proto: " prefix when chaining + for i := 0; i < len(x); i++ { + switch e := x[i].(type) { + case *prefixError: + x[i] = e.s + case *wrapError: + x[i] = format("%v: %v", e.s, e.err) + } + } + return fmt.Sprintf(f, x...) +} + +func InvalidUTF8(name string) error { + return New("field %v contains invalid UTF-8", name) +} + +func RequiredNotSet(name string) error { + return New("required field %v not set", name) +} diff --git a/vendor/google.golang.org/protobuf/internal/errors/is_go112.go b/vendor/google.golang.org/protobuf/internal/errors/is_go112.go new file mode 100644 index 00000000..f90e909b --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/errors/is_go112.go @@ -0,0 +1,39 @@ +// Copyright 2020 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.13 + +package errors + +import "reflect" + +// Is is a copy of Go 1.13's errors.Is for use with older Go versions. +func Is(err, target error) bool { + if target == nil { + return err == target + } + + isComparable := reflect.TypeOf(target).Comparable() + for { + if isComparable && err == target { + return true + } + if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) { + return true + } + if err = unwrap(err); err == nil { + return false + } + } +} + +func unwrap(err error) error { + u, ok := err.(interface { + Unwrap() error + }) + if !ok { + return nil + } + return u.Unwrap() +} diff --git a/vendor/google.golang.org/protobuf/internal/errors/is_go113.go b/vendor/google.golang.org/protobuf/internal/errors/is_go113.go new file mode 100644 index 00000000..dc05f419 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/errors/is_go113.go @@ -0,0 +1,12 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.13 + +package errors + +import "errors" + +// Is is errors.Is. +func Is(err, target error) bool { return errors.Is(err, target) } diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/build.go b/vendor/google.golang.org/protobuf/internal/filedesc/build.go new file mode 100644 index 00000000..b293b694 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/build.go @@ -0,0 +1,158 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package filedesc provides functionality for constructing descriptors. +// +// The types in this package implement interfaces in the protoreflect package +// related to protobuf descripriptors. +package filedesc + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" +) + +// Builder construct a protoreflect.FileDescriptor from the raw descriptor. +type Builder struct { + // GoPackagePath is the Go package path that is invoking this builder. + GoPackagePath string + + // RawDescriptor is the wire-encoded bytes of FileDescriptorProto + // and must be populated. + RawDescriptor []byte + + // NumEnums is the total number of enums declared in the file. + NumEnums int32 + // NumMessages is the total number of messages declared in the file. + // It includes the implicit message declarations for map entries. + NumMessages int32 + // NumExtensions is the total number of extensions declared in the file. + NumExtensions int32 + // NumServices is the total number of services declared in the file. + NumServices int32 + + // TypeResolver resolves extension field types for descriptor options. + // If nil, it uses protoregistry.GlobalTypes. + TypeResolver interface { + preg.ExtensionTypeResolver + } + + // FileRegistry is use to lookup file, enum, and message dependencies. + // Once constructed, the file descriptor is registered here. + // If nil, it uses protoregistry.GlobalFiles. 
+ FileRegistry interface { + FindFileByPath(string) (protoreflect.FileDescriptor, error) + FindDescriptorByName(pref.FullName) (pref.Descriptor, error) + RegisterFile(pref.FileDescriptor) error + } +} + +// resolverByIndex is an interface Builder.FileRegistry may implement. +// If so, it permits looking up an enum or message dependency based on the +// sub-list and element index into filetype.Builder.DependencyIndexes. +type resolverByIndex interface { + FindEnumByIndex(int32, int32, []Enum, []Message) pref.EnumDescriptor + FindMessageByIndex(int32, int32, []Enum, []Message) pref.MessageDescriptor +} + +// Indexes of each sub-list in filetype.Builder.DependencyIndexes. +const ( + listFieldDeps int32 = iota + listExtTargets + listExtDeps + listMethInDeps + listMethOutDeps +) + +// Out is the output of the Builder. +type Out struct { + File pref.FileDescriptor + + // Enums is all enum descriptors in "flattened ordering". + Enums []Enum + // Messages is all message descriptors in "flattened ordering". + // It includes the implicit message declarations for map entries. + Messages []Message + // Extensions is all extension descriptors in "flattened ordering". + Extensions []Extension + // Service is all service descriptors in "flattened ordering". + Services []Service +} + +// Build constructs a FileDescriptor given the parameters set in Builder. +// It assumes that the inputs are well-formed and panics if any inconsistencies +// are encountered. +// +// If NumEnums+NumMessages+NumExtensions+NumServices is zero, +// then Build automatically derives them from the raw descriptor. +func (db Builder) Build() (out Out) { + // Populate the counts if uninitialized. + if db.NumEnums+db.NumMessages+db.NumExtensions+db.NumServices == 0 { + db.unmarshalCounts(db.RawDescriptor, true) + } + + // Initialize resolvers and registries if unpopulated. + if db.TypeResolver == nil { + db.TypeResolver = preg.GlobalTypes + } + if db.FileRegistry == nil { + db.FileRegistry = preg.GlobalFiles + } + + fd := newRawFile(db) + out.File = fd + out.Enums = fd.allEnums + out.Messages = fd.allMessages + out.Extensions = fd.allExtensions + out.Services = fd.allServices + + if err := db.FileRegistry.RegisterFile(fd); err != nil { + panic(err) + } + return out +} + +// unmarshalCounts counts the number of enum, message, extension, and service +// declarations in the raw message, which is either a FileDescriptorProto +// or a MessageDescriptorProto depending on whether isFile is set. 
+func (db *Builder) unmarshalCounts(b []byte, isFile bool) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + if isFile { + switch num { + case genid.FileDescriptorProto_EnumType_field_number: + db.NumEnums++ + case genid.FileDescriptorProto_MessageType_field_number: + db.unmarshalCounts(v, false) + db.NumMessages++ + case genid.FileDescriptorProto_Extension_field_number: + db.NumExtensions++ + case genid.FileDescriptorProto_Service_field_number: + db.NumServices++ + } + } else { + switch num { + case genid.DescriptorProto_EnumType_field_number: + db.NumEnums++ + case genid.DescriptorProto_NestedType_field_number: + db.unmarshalCounts(v, false) + db.NumMessages++ + case genid.DescriptorProto_Extension_field_number: + db.NumExtensions++ + } + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go new file mode 100644 index 00000000..98ab142a --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go @@ -0,0 +1,631 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import ( + "bytes" + "fmt" + "sync" + "sync/atomic" + + "google.golang.org/protobuf/internal/descfmt" + "google.golang.org/protobuf/internal/descopts" + "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/internal/strs" + pref "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +// The types in this file may have a suffix: +// • L0: Contains fields common to all descriptors (except File) and +// must be initialized up front. +// • L1: Contains fields specific to a descriptor and +// must be initialized up front. +// • L2: Contains fields that are lazily initialized when constructing +// from the raw file descriptor. When constructing as a literal, the L2 +// fields must be initialized up front. +// +// The types are exported so that packages like reflect/protodesc can +// directly construct descriptors. 
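
// Editorial sketch, not part of the vendored diff: the L0/L1/L2 split described
// in the comment above defers the expensive L2 state until first use, and
// File.lazyInit further below implements that with an atomic flag for the fast
// path plus a mutex for the slow path (double-checked locking). The names in
// this sketch (lazyDesc, heavy) are invented for illustration only.
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type heavy struct{ data string } // stands in for the lazily built L2 state

type lazyDesc struct {
	once uint32     // atomically set once l2 is valid
	mu   sync.Mutex // serializes the one-time initialization
	l2   *heavy
}

func (d *lazyDesc) lazyInit() *heavy {
	if atomic.LoadUint32(&d.once) == 0 {
		d.mu.Lock()
		if d.l2 == nil {
			d.l2 = &heavy{data: "built on first access"}
		}
		atomic.StoreUint32(&d.once, 1)
		d.mu.Unlock()
	}
	return d.l2
}

func main() {
	d := new(lazyDesc)
	fmt.Println(d.lazyInit().data) // first call builds the state; later calls reuse it
}
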
+ +type ( + File struct { + fileRaw + L1 FileL1 + + once uint32 // atomically set if L2 is valid + mu sync.Mutex // protects L2 + L2 *FileL2 + } + FileL1 struct { + Syntax pref.Syntax + Path string + Package pref.FullName + + Enums Enums + Messages Messages + Extensions Extensions + Services Services + } + FileL2 struct { + Options func() pref.ProtoMessage + Imports FileImports + Locations SourceLocations + } +) + +func (fd *File) ParentFile() pref.FileDescriptor { return fd } +func (fd *File) Parent() pref.Descriptor { return nil } +func (fd *File) Index() int { return 0 } +func (fd *File) Syntax() pref.Syntax { return fd.L1.Syntax } +func (fd *File) Name() pref.Name { return fd.L1.Package.Name() } +func (fd *File) FullName() pref.FullName { return fd.L1.Package } +func (fd *File) IsPlaceholder() bool { return false } +func (fd *File) Options() pref.ProtoMessage { + if f := fd.lazyInit().Options; f != nil { + return f() + } + return descopts.File +} +func (fd *File) Path() string { return fd.L1.Path } +func (fd *File) Package() pref.FullName { return fd.L1.Package } +func (fd *File) Imports() pref.FileImports { return &fd.lazyInit().Imports } +func (fd *File) Enums() pref.EnumDescriptors { return &fd.L1.Enums } +func (fd *File) Messages() pref.MessageDescriptors { return &fd.L1.Messages } +func (fd *File) Extensions() pref.ExtensionDescriptors { return &fd.L1.Extensions } +func (fd *File) Services() pref.ServiceDescriptors { return &fd.L1.Services } +func (fd *File) SourceLocations() pref.SourceLocations { return &fd.lazyInit().Locations } +func (fd *File) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, fd) } +func (fd *File) ProtoType(pref.FileDescriptor) {} +func (fd *File) ProtoInternal(pragma.DoNotImplement) {} + +func (fd *File) lazyInit() *FileL2 { + if atomic.LoadUint32(&fd.once) == 0 { + fd.lazyInitOnce() + } + return fd.L2 +} + +func (fd *File) lazyInitOnce() { + fd.mu.Lock() + if fd.L2 == nil { + fd.lazyRawInit() // recursively initializes all L2 structures + } + atomic.StoreUint32(&fd.once, 1) + fd.mu.Unlock() +} + +// GoPackagePath is a pseudo-internal API for determining the Go package path +// that this file descriptor is declared in. +// +// WARNING: This method is exempt from the compatibility promise and may be +// removed in the future without warning. 
+func (fd *File) GoPackagePath() string { + return fd.builder.GoPackagePath +} + +type ( + Enum struct { + Base + L1 EnumL1 + L2 *EnumL2 // protected by fileDesc.once + } + EnumL1 struct { + eagerValues bool // controls whether EnumL2.Values is already populated + } + EnumL2 struct { + Options func() pref.ProtoMessage + Values EnumValues + ReservedNames Names + ReservedRanges EnumRanges + } + + EnumValue struct { + Base + L1 EnumValueL1 + } + EnumValueL1 struct { + Options func() pref.ProtoMessage + Number pref.EnumNumber + } +) + +func (ed *Enum) Options() pref.ProtoMessage { + if f := ed.lazyInit().Options; f != nil { + return f() + } + return descopts.Enum +} +func (ed *Enum) Values() pref.EnumValueDescriptors { + if ed.L1.eagerValues { + return &ed.L2.Values + } + return &ed.lazyInit().Values +} +func (ed *Enum) ReservedNames() pref.Names { return &ed.lazyInit().ReservedNames } +func (ed *Enum) ReservedRanges() pref.EnumRanges { return &ed.lazyInit().ReservedRanges } +func (ed *Enum) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, ed) } +func (ed *Enum) ProtoType(pref.EnumDescriptor) {} +func (ed *Enum) lazyInit() *EnumL2 { + ed.L0.ParentFile.lazyInit() // implicitly initializes L2 + return ed.L2 +} + +func (ed *EnumValue) Options() pref.ProtoMessage { + if f := ed.L1.Options; f != nil { + return f() + } + return descopts.EnumValue +} +func (ed *EnumValue) Number() pref.EnumNumber { return ed.L1.Number } +func (ed *EnumValue) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, ed) } +func (ed *EnumValue) ProtoType(pref.EnumValueDescriptor) {} + +type ( + Message struct { + Base + L1 MessageL1 + L2 *MessageL2 // protected by fileDesc.once + } + MessageL1 struct { + Enums Enums + Messages Messages + Extensions Extensions + IsMapEntry bool // promoted from google.protobuf.MessageOptions + IsMessageSet bool // promoted from google.protobuf.MessageOptions + } + MessageL2 struct { + Options func() pref.ProtoMessage + Fields Fields + Oneofs Oneofs + ReservedNames Names + ReservedRanges FieldRanges + RequiredNumbers FieldNumbers // must be consistent with Fields.Cardinality + ExtensionRanges FieldRanges + ExtensionRangeOptions []func() pref.ProtoMessage // must be same length as ExtensionRanges + } + + Field struct { + Base + L1 FieldL1 + } + FieldL1 struct { + Options func() pref.ProtoMessage + Number pref.FieldNumber + Cardinality pref.Cardinality // must be consistent with Message.RequiredNumbers + Kind pref.Kind + StringName stringName + IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto + IsWeak bool // promoted from google.protobuf.FieldOptions + HasPacked bool // promoted from google.protobuf.FieldOptions + IsPacked bool // promoted from google.protobuf.FieldOptions + HasEnforceUTF8 bool // promoted from google.protobuf.FieldOptions + EnforceUTF8 bool // promoted from google.protobuf.FieldOptions + Default defaultValue + ContainingOneof pref.OneofDescriptor // must be consistent with Message.Oneofs.Fields + Enum pref.EnumDescriptor + Message pref.MessageDescriptor + } + + Oneof struct { + Base + L1 OneofL1 + } + OneofL1 struct { + Options func() pref.ProtoMessage + Fields OneofFields // must be consistent with Message.Fields.ContainingOneof + } +) + +func (md *Message) Options() pref.ProtoMessage { + if f := md.lazyInit().Options; f != nil { + return f() + } + return descopts.Message +} +func (md *Message) IsMapEntry() bool { return md.L1.IsMapEntry } +func (md *Message) Fields() pref.FieldDescriptors { return &md.lazyInit().Fields } +func (md 
*Message) Oneofs() pref.OneofDescriptors { return &md.lazyInit().Oneofs } +func (md *Message) ReservedNames() pref.Names { return &md.lazyInit().ReservedNames } +func (md *Message) ReservedRanges() pref.FieldRanges { return &md.lazyInit().ReservedRanges } +func (md *Message) RequiredNumbers() pref.FieldNumbers { return &md.lazyInit().RequiredNumbers } +func (md *Message) ExtensionRanges() pref.FieldRanges { return &md.lazyInit().ExtensionRanges } +func (md *Message) ExtensionRangeOptions(i int) pref.ProtoMessage { + if f := md.lazyInit().ExtensionRangeOptions[i]; f != nil { + return f() + } + return descopts.ExtensionRange +} +func (md *Message) Enums() pref.EnumDescriptors { return &md.L1.Enums } +func (md *Message) Messages() pref.MessageDescriptors { return &md.L1.Messages } +func (md *Message) Extensions() pref.ExtensionDescriptors { return &md.L1.Extensions } +func (md *Message) ProtoType(pref.MessageDescriptor) {} +func (md *Message) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, md) } +func (md *Message) lazyInit() *MessageL2 { + md.L0.ParentFile.lazyInit() // implicitly initializes L2 + return md.L2 +} + +// IsMessageSet is a pseudo-internal API for checking whether a message +// should serialize in the proto1 message format. +// +// WARNING: This method is exempt from the compatibility promise and may be +// removed in the future without warning. +func (md *Message) IsMessageSet() bool { + return md.L1.IsMessageSet +} + +func (fd *Field) Options() pref.ProtoMessage { + if f := fd.L1.Options; f != nil { + return f() + } + return descopts.Field +} +func (fd *Field) Number() pref.FieldNumber { return fd.L1.Number } +func (fd *Field) Cardinality() pref.Cardinality { return fd.L1.Cardinality } +func (fd *Field) Kind() pref.Kind { return fd.L1.Kind } +func (fd *Field) HasJSONName() bool { return fd.L1.StringName.hasJSON } +func (fd *Field) JSONName() string { return fd.L1.StringName.getJSON(fd) } +func (fd *Field) TextName() string { return fd.L1.StringName.getText(fd) } +func (fd *Field) HasPresence() bool { + return fd.L1.Cardinality != pref.Repeated && (fd.L0.ParentFile.L1.Syntax == pref.Proto2 || fd.L1.Message != nil || fd.L1.ContainingOneof != nil) +} +func (fd *Field) HasOptionalKeyword() bool { + return (fd.L0.ParentFile.L1.Syntax == pref.Proto2 && fd.L1.Cardinality == pref.Optional && fd.L1.ContainingOneof == nil) || fd.L1.IsProto3Optional +} +func (fd *Field) IsPacked() bool { + if !fd.L1.HasPacked && fd.L0.ParentFile.L1.Syntax != pref.Proto2 && fd.L1.Cardinality == pref.Repeated { + switch fd.L1.Kind { + case pref.StringKind, pref.BytesKind, pref.MessageKind, pref.GroupKind: + default: + return true + } + } + return fd.L1.IsPacked +} +func (fd *Field) IsExtension() bool { return false } +func (fd *Field) IsWeak() bool { return fd.L1.IsWeak } +func (fd *Field) IsList() bool { return fd.Cardinality() == pref.Repeated && !fd.IsMap() } +func (fd *Field) IsMap() bool { return fd.Message() != nil && fd.Message().IsMapEntry() } +func (fd *Field) MapKey() pref.FieldDescriptor { + if !fd.IsMap() { + return nil + } + return fd.Message().Fields().ByNumber(genid.MapEntry_Key_field_number) +} +func (fd *Field) MapValue() pref.FieldDescriptor { + if !fd.IsMap() { + return nil + } + return fd.Message().Fields().ByNumber(genid.MapEntry_Value_field_number) +} +func (fd *Field) HasDefault() bool { return fd.L1.Default.has } +func (fd *Field) Default() pref.Value { return fd.L1.Default.get(fd) } +func (fd *Field) DefaultEnumValue() pref.EnumValueDescriptor { return fd.L1.Default.enum } 
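
// Editorial sketch, not part of the vendored diff: the Field accessors above
// (IsMap, IsList, MapKey, MapValue, Kind) surface through the public
// protoreflect API. Assuming the well-known structpb package is available
// (its Struct message has a map<string, Value> field), a descriptor walk
// looks roughly like this.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/types/known/structpb"
)

// describe renders one field descriptor using the same accessors the
// vendored filedesc types implement.
func describe(fd protoreflect.FieldDescriptor) string {
	switch {
	case fd.IsMap():
		return fmt.Sprintf("%s: map<%s, %s>", fd.Name(), fd.MapKey().Kind(), fd.MapValue().Kind())
	case fd.IsList():
		return fmt.Sprintf("%s: repeated %s", fd.Name(), fd.Kind())
	default:
		return fmt.Sprintf("%s: %s", fd.Name(), fd.Kind())
	}
}

func main() {
	md := (&structpb.Struct{}).ProtoReflect().Descriptor()
	for i := 0; i < md.Fields().Len(); i++ {
		fmt.Println(describe(md.Fields().Get(i))) // e.g. "fields: map<string, message>"
	}
}
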
+func (fd *Field) ContainingOneof() pref.OneofDescriptor { return fd.L1.ContainingOneof } +func (fd *Field) ContainingMessage() pref.MessageDescriptor { + return fd.L0.Parent.(pref.MessageDescriptor) +} +func (fd *Field) Enum() pref.EnumDescriptor { + return fd.L1.Enum +} +func (fd *Field) Message() pref.MessageDescriptor { + if fd.L1.IsWeak { + if d, _ := protoregistry.GlobalFiles.FindDescriptorByName(fd.L1.Message.FullName()); d != nil { + return d.(pref.MessageDescriptor) + } + } + return fd.L1.Message +} +func (fd *Field) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, fd) } +func (fd *Field) ProtoType(pref.FieldDescriptor) {} + +// EnforceUTF8 is a pseudo-internal API to determine whether to enforce UTF-8 +// validation for the string field. This exists for Google-internal use only +// since proto3 did not enforce UTF-8 validity prior to the open-source release. +// If this method does not exist, the default is to enforce valid UTF-8. +// +// WARNING: This method is exempt from the compatibility promise and may be +// removed in the future without warning. +func (fd *Field) EnforceUTF8() bool { + if fd.L1.HasEnforceUTF8 { + return fd.L1.EnforceUTF8 + } + return fd.L0.ParentFile.L1.Syntax == pref.Proto3 +} + +func (od *Oneof) IsSynthetic() bool { + return od.L0.ParentFile.L1.Syntax == pref.Proto3 && len(od.L1.Fields.List) == 1 && od.L1.Fields.List[0].HasOptionalKeyword() +} +func (od *Oneof) Options() pref.ProtoMessage { + if f := od.L1.Options; f != nil { + return f() + } + return descopts.Oneof +} +func (od *Oneof) Fields() pref.FieldDescriptors { return &od.L1.Fields } +func (od *Oneof) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, od) } +func (od *Oneof) ProtoType(pref.OneofDescriptor) {} + +type ( + Extension struct { + Base + L1 ExtensionL1 + L2 *ExtensionL2 // protected by fileDesc.once + } + ExtensionL1 struct { + Number pref.FieldNumber + Extendee pref.MessageDescriptor + Cardinality pref.Cardinality + Kind pref.Kind + } + ExtensionL2 struct { + Options func() pref.ProtoMessage + StringName stringName + IsProto3Optional bool // promoted from google.protobuf.FieldDescriptorProto + IsPacked bool // promoted from google.protobuf.FieldOptions + Default defaultValue + Enum pref.EnumDescriptor + Message pref.MessageDescriptor + } +) + +func (xd *Extension) Options() pref.ProtoMessage { + if f := xd.lazyInit().Options; f != nil { + return f() + } + return descopts.Field +} +func (xd *Extension) Number() pref.FieldNumber { return xd.L1.Number } +func (xd *Extension) Cardinality() pref.Cardinality { return xd.L1.Cardinality } +func (xd *Extension) Kind() pref.Kind { return xd.L1.Kind } +func (xd *Extension) HasJSONName() bool { return xd.lazyInit().StringName.hasJSON } +func (xd *Extension) JSONName() string { return xd.lazyInit().StringName.getJSON(xd) } +func (xd *Extension) TextName() string { return xd.lazyInit().StringName.getText(xd) } +func (xd *Extension) HasPresence() bool { return xd.L1.Cardinality != pref.Repeated } +func (xd *Extension) HasOptionalKeyword() bool { + return (xd.L0.ParentFile.L1.Syntax == pref.Proto2 && xd.L1.Cardinality == pref.Optional) || xd.lazyInit().IsProto3Optional +} +func (xd *Extension) IsPacked() bool { return xd.lazyInit().IsPacked } +func (xd *Extension) IsExtension() bool { return true } +func (xd *Extension) IsWeak() bool { return false } +func (xd *Extension) IsList() bool { return xd.Cardinality() == pref.Repeated } +func (xd *Extension) IsMap() bool { return false } +func (xd *Extension) MapKey() pref.FieldDescriptor { return 
nil } +func (xd *Extension) MapValue() pref.FieldDescriptor { return nil } +func (xd *Extension) HasDefault() bool { return xd.lazyInit().Default.has } +func (xd *Extension) Default() pref.Value { return xd.lazyInit().Default.get(xd) } +func (xd *Extension) DefaultEnumValue() pref.EnumValueDescriptor { return xd.lazyInit().Default.enum } +func (xd *Extension) ContainingOneof() pref.OneofDescriptor { return nil } +func (xd *Extension) ContainingMessage() pref.MessageDescriptor { return xd.L1.Extendee } +func (xd *Extension) Enum() pref.EnumDescriptor { return xd.lazyInit().Enum } +func (xd *Extension) Message() pref.MessageDescriptor { return xd.lazyInit().Message } +func (xd *Extension) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, xd) } +func (xd *Extension) ProtoType(pref.FieldDescriptor) {} +func (xd *Extension) ProtoInternal(pragma.DoNotImplement) {} +func (xd *Extension) lazyInit() *ExtensionL2 { + xd.L0.ParentFile.lazyInit() // implicitly initializes L2 + return xd.L2 +} + +type ( + Service struct { + Base + L1 ServiceL1 + L2 *ServiceL2 // protected by fileDesc.once + } + ServiceL1 struct{} + ServiceL2 struct { + Options func() pref.ProtoMessage + Methods Methods + } + + Method struct { + Base + L1 MethodL1 + } + MethodL1 struct { + Options func() pref.ProtoMessage + Input pref.MessageDescriptor + Output pref.MessageDescriptor + IsStreamingClient bool + IsStreamingServer bool + } +) + +func (sd *Service) Options() pref.ProtoMessage { + if f := sd.lazyInit().Options; f != nil { + return f() + } + return descopts.Service +} +func (sd *Service) Methods() pref.MethodDescriptors { return &sd.lazyInit().Methods } +func (sd *Service) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, sd) } +func (sd *Service) ProtoType(pref.ServiceDescriptor) {} +func (sd *Service) ProtoInternal(pragma.DoNotImplement) {} +func (sd *Service) lazyInit() *ServiceL2 { + sd.L0.ParentFile.lazyInit() // implicitly initializes L2 + return sd.L2 +} + +func (md *Method) Options() pref.ProtoMessage { + if f := md.L1.Options; f != nil { + return f() + } + return descopts.Method +} +func (md *Method) Input() pref.MessageDescriptor { return md.L1.Input } +func (md *Method) Output() pref.MessageDescriptor { return md.L1.Output } +func (md *Method) IsStreamingClient() bool { return md.L1.IsStreamingClient } +func (md *Method) IsStreamingServer() bool { return md.L1.IsStreamingServer } +func (md *Method) Format(s fmt.State, r rune) { descfmt.FormatDesc(s, r, md) } +func (md *Method) ProtoType(pref.MethodDescriptor) {} +func (md *Method) ProtoInternal(pragma.DoNotImplement) {} + +// Surrogate files are can be used to create standalone descriptors +// where the syntax is only information derived from the parent file. 
+var ( + SurrogateProto2 = &File{L1: FileL1{Syntax: pref.Proto2}, L2: &FileL2{}} + SurrogateProto3 = &File{L1: FileL1{Syntax: pref.Proto3}, L2: &FileL2{}} +) + +type ( + Base struct { + L0 BaseL0 + } + BaseL0 struct { + FullName pref.FullName // must be populated + ParentFile *File // must be populated + Parent pref.Descriptor + Index int + } +) + +func (d *Base) Name() pref.Name { return d.L0.FullName.Name() } +func (d *Base) FullName() pref.FullName { return d.L0.FullName } +func (d *Base) ParentFile() pref.FileDescriptor { + if d.L0.ParentFile == SurrogateProto2 || d.L0.ParentFile == SurrogateProto3 { + return nil // surrogate files are not real parents + } + return d.L0.ParentFile +} +func (d *Base) Parent() pref.Descriptor { return d.L0.Parent } +func (d *Base) Index() int { return d.L0.Index } +func (d *Base) Syntax() pref.Syntax { return d.L0.ParentFile.Syntax() } +func (d *Base) IsPlaceholder() bool { return false } +func (d *Base) ProtoInternal(pragma.DoNotImplement) {} + +type stringName struct { + hasJSON bool + once sync.Once + nameJSON string + nameText string +} + +// InitJSON initializes the name. It is exported for use by other internal packages. +func (s *stringName) InitJSON(name string) { + s.hasJSON = true + s.nameJSON = name +} + +func (s *stringName) lazyInit(fd pref.FieldDescriptor) *stringName { + s.once.Do(func() { + if fd.IsExtension() { + // For extensions, JSON and text are formatted the same way. + var name string + if messageset.IsMessageSetExtension(fd) { + name = string("[" + fd.FullName().Parent() + "]") + } else { + name = string("[" + fd.FullName() + "]") + } + s.nameJSON = name + s.nameText = name + } else { + // Format the JSON name. + if !s.hasJSON { + s.nameJSON = strs.JSONCamelCase(string(fd.Name())) + } + + // Format the text name. + s.nameText = string(fd.Name()) + if fd.Kind() == pref.GroupKind { + s.nameText = string(fd.Message().Name()) + } + } + }) + return s +} + +func (s *stringName) getJSON(fd pref.FieldDescriptor) string { return s.lazyInit(fd).nameJSON } +func (s *stringName) getText(fd pref.FieldDescriptor) string { return s.lazyInit(fd).nameText } + +func DefaultValue(v pref.Value, ev pref.EnumValueDescriptor) defaultValue { + dv := defaultValue{has: v.IsValid(), val: v, enum: ev} + if b, ok := v.Interface().([]byte); ok { + // Store a copy of the default bytes, so that we can detect + // accidental mutations of the original value. + dv.bytes = append([]byte(nil), b...) + } + return dv +} + +func unmarshalDefault(b []byte, k pref.Kind, pf *File, ed pref.EnumDescriptor) defaultValue { + var evs pref.EnumValueDescriptors + if k == pref.EnumKind { + // If the enum is declared within the same file, be careful not to + // blindly call the Values method, lest we bind ourselves in a deadlock. + if e, ok := ed.(*Enum); ok && e.L0.ParentFile == pf { + evs = &e.L2.Values + } else { + evs = ed.Values() + } + + // If we are unable to resolve the enum dependency, use a placeholder + // enum value since we will not be able to parse the default value. 
+ if ed.IsPlaceholder() && pref.Name(b).IsValid() { + v := pref.ValueOfEnum(0) + ev := PlaceholderEnumValue(ed.FullName().Parent().Append(pref.Name(b))) + return DefaultValue(v, ev) + } + } + + v, ev, err := defval.Unmarshal(string(b), k, evs, defval.Descriptor) + if err != nil { + panic(err) + } + return DefaultValue(v, ev) +} + +type defaultValue struct { + has bool + val pref.Value + enum pref.EnumValueDescriptor + bytes []byte +} + +func (dv *defaultValue) get(fd pref.FieldDescriptor) pref.Value { + // Return the zero value as the default if unpopulated. + if !dv.has { + if fd.Cardinality() == pref.Repeated { + return pref.Value{} + } + switch fd.Kind() { + case pref.BoolKind: + return pref.ValueOfBool(false) + case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind: + return pref.ValueOfInt32(0) + case pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: + return pref.ValueOfInt64(0) + case pref.Uint32Kind, pref.Fixed32Kind: + return pref.ValueOfUint32(0) + case pref.Uint64Kind, pref.Fixed64Kind: + return pref.ValueOfUint64(0) + case pref.FloatKind: + return pref.ValueOfFloat32(0) + case pref.DoubleKind: + return pref.ValueOfFloat64(0) + case pref.StringKind: + return pref.ValueOfString("") + case pref.BytesKind: + return pref.ValueOfBytes(nil) + case pref.EnumKind: + if evs := fd.Enum().Values(); evs.Len() > 0 { + return pref.ValueOfEnum(evs.Get(0).Number()) + } + return pref.ValueOfEnum(0) + } + } + + if len(dv.bytes) > 0 && !bytes.Equal(dv.bytes, dv.val.Bytes()) { + // TODO: Avoid panic if we're running with the race detector + // and instead spawn a goroutine that periodically resets + // this value back to the original to induce a race. + panic(fmt.Sprintf("detected mutation on the default bytes for %v", fd.FullName())) + } + return dv.val +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go new file mode 100644 index 00000000..66e1fee5 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_init.go @@ -0,0 +1,471 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import ( + "sync" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/strs" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// fileRaw is a data struct used when initializing a file descriptor from +// a raw FileDescriptorProto. +type fileRaw struct { + builder Builder + allEnums []Enum + allMessages []Message + allExtensions []Extension + allServices []Service +} + +func newRawFile(db Builder) *File { + fd := &File{fileRaw: fileRaw{builder: db}} + fd.initDecls(db.NumEnums, db.NumMessages, db.NumExtensions, db.NumServices) + fd.unmarshalSeed(db.RawDescriptor) + + // Extended message targets are eagerly resolved since registration + // needs this information at program init time. + for i := range fd.allExtensions { + xd := &fd.allExtensions[i] + xd.L1.Extendee = fd.resolveMessageDependency(xd.L1.Extendee, listExtTargets, int32(i)) + } + + fd.checkDecls() + return fd +} + +// initDecls pre-allocates slices for the exact number of enums, messages +// (including map entries), extensions, and services declared in the proto file. +// This is done to avoid regrowing the slice, which would change the address +// for any previously seen declaration. 
+// +// The alloc methods "allocates" slices by pulling from the capacity. +func (fd *File) initDecls(numEnums, numMessages, numExtensions, numServices int32) { + fd.allEnums = make([]Enum, 0, numEnums) + fd.allMessages = make([]Message, 0, numMessages) + fd.allExtensions = make([]Extension, 0, numExtensions) + fd.allServices = make([]Service, 0, numServices) +} + +func (fd *File) allocEnums(n int) []Enum { + total := len(fd.allEnums) + es := fd.allEnums[total : total+n] + fd.allEnums = fd.allEnums[:total+n] + return es +} +func (fd *File) allocMessages(n int) []Message { + total := len(fd.allMessages) + ms := fd.allMessages[total : total+n] + fd.allMessages = fd.allMessages[:total+n] + return ms +} +func (fd *File) allocExtensions(n int) []Extension { + total := len(fd.allExtensions) + xs := fd.allExtensions[total : total+n] + fd.allExtensions = fd.allExtensions[:total+n] + return xs +} +func (fd *File) allocServices(n int) []Service { + total := len(fd.allServices) + xs := fd.allServices[total : total+n] + fd.allServices = fd.allServices[:total+n] + return xs +} + +// checkDecls performs a sanity check that the expected number of expected +// declarations matches the number that were found in the descriptor proto. +func (fd *File) checkDecls() { + switch { + case len(fd.allEnums) != cap(fd.allEnums): + case len(fd.allMessages) != cap(fd.allMessages): + case len(fd.allExtensions) != cap(fd.allExtensions): + case len(fd.allServices) != cap(fd.allServices): + default: + return + } + panic("mismatching cardinality") +} + +func (fd *File) unmarshalSeed(b []byte) { + sb := getBuilder() + defer putBuilder(sb) + + var prevField pref.FieldNumber + var numEnums, numMessages, numExtensions, numServices int + var posEnums, posMessages, posExtensions, posServices int + b0 := b + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.FileDescriptorProto_Syntax_field_number: + switch string(v) { + case "proto2": + fd.L1.Syntax = pref.Proto2 + case "proto3": + fd.L1.Syntax = pref.Proto3 + default: + panic("invalid syntax") + } + case genid.FileDescriptorProto_Name_field_number: + fd.L1.Path = sb.MakeString(v) + case genid.FileDescriptorProto_Package_field_number: + fd.L1.Package = pref.FullName(sb.MakeString(v)) + case genid.FileDescriptorProto_EnumType_field_number: + if prevField != genid.FileDescriptorProto_EnumType_field_number { + if numEnums > 0 { + panic("non-contiguous repeated field") + } + posEnums = len(b0) - len(b) - n - m + } + numEnums++ + case genid.FileDescriptorProto_MessageType_field_number: + if prevField != genid.FileDescriptorProto_MessageType_field_number { + if numMessages > 0 { + panic("non-contiguous repeated field") + } + posMessages = len(b0) - len(b) - n - m + } + numMessages++ + case genid.FileDescriptorProto_Extension_field_number: + if prevField != genid.FileDescriptorProto_Extension_field_number { + if numExtensions > 0 { + panic("non-contiguous repeated field") + } + posExtensions = len(b0) - len(b) - n - m + } + numExtensions++ + case genid.FileDescriptorProto_Service_field_number: + if prevField != genid.FileDescriptorProto_Service_field_number { + if numServices > 0 { + panic("non-contiguous repeated field") + } + posServices = len(b0) - len(b) - n - m + } + numServices++ + } + prevField = num + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + prevField = -1 // ignore known field numbers of unknown wire type + } + 
} + + // If syntax is missing, it is assumed to be proto2. + if fd.L1.Syntax == 0 { + fd.L1.Syntax = pref.Proto2 + } + + // Must allocate all declarations before parsing each descriptor type + // to ensure we handled all descriptors in "flattened ordering". + if numEnums > 0 { + fd.L1.Enums.List = fd.allocEnums(numEnums) + } + if numMessages > 0 { + fd.L1.Messages.List = fd.allocMessages(numMessages) + } + if numExtensions > 0 { + fd.L1.Extensions.List = fd.allocExtensions(numExtensions) + } + if numServices > 0 { + fd.L1.Services.List = fd.allocServices(numServices) + } + + if numEnums > 0 { + b := b0[posEnums:] + for i := range fd.L1.Enums.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + fd.L1.Enums.List[i].unmarshalSeed(v, sb, fd, fd, i) + b = b[n+m:] + } + } + if numMessages > 0 { + b := b0[posMessages:] + for i := range fd.L1.Messages.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + fd.L1.Messages.List[i].unmarshalSeed(v, sb, fd, fd, i) + b = b[n+m:] + } + } + if numExtensions > 0 { + b := b0[posExtensions:] + for i := range fd.L1.Extensions.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + fd.L1.Extensions.List[i].unmarshalSeed(v, sb, fd, fd, i) + b = b[n+m:] + } + } + if numServices > 0 { + b := b0[posServices:] + for i := range fd.L1.Services.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + fd.L1.Services.List[i].unmarshalSeed(v, sb, fd, fd, i) + b = b[n+m:] + } + } +} + +func (ed *Enum) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + ed.L0.ParentFile = pf + ed.L0.Parent = pd + ed.L0.Index = i + + var numValues int + for b := b; len(b) > 0; { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.EnumDescriptorProto_Name_field_number: + ed.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.EnumDescriptorProto_Value_field_number: + numValues++ + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + + // Only construct enum value descriptors for top-level enums since + // they are needed for registration. 
+ if pd != pf { + return + } + ed.L1.eagerValues = true + ed.L2 = new(EnumL2) + ed.L2.Values.List = make([]EnumValue, numValues) + for i := 0; len(b) > 0; { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.EnumDescriptorProto_Value_field_number: + ed.L2.Values.List[i].unmarshalFull(v, sb, pf, ed, i) + i++ + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func (md *Message) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + md.L0.ParentFile = pf + md.L0.Parent = pd + md.L0.Index = i + + var prevField pref.FieldNumber + var numEnums, numMessages, numExtensions int + var posEnums, posMessages, posExtensions int + b0 := b + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.DescriptorProto_Name_field_number: + md.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.DescriptorProto_EnumType_field_number: + if prevField != genid.DescriptorProto_EnumType_field_number { + if numEnums > 0 { + panic("non-contiguous repeated field") + } + posEnums = len(b0) - len(b) - n - m + } + numEnums++ + case genid.DescriptorProto_NestedType_field_number: + if prevField != genid.DescriptorProto_NestedType_field_number { + if numMessages > 0 { + panic("non-contiguous repeated field") + } + posMessages = len(b0) - len(b) - n - m + } + numMessages++ + case genid.DescriptorProto_Extension_field_number: + if prevField != genid.DescriptorProto_Extension_field_number { + if numExtensions > 0 { + panic("non-contiguous repeated field") + } + posExtensions = len(b0) - len(b) - n - m + } + numExtensions++ + case genid.DescriptorProto_Options_field_number: + md.unmarshalSeedOptions(v) + } + prevField = num + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + prevField = -1 // ignore known field numbers of unknown wire type + } + } + + // Must allocate all declarations before parsing each descriptor type + // to ensure we handled all descriptors in "flattened ordering". 
+ if numEnums > 0 { + md.L1.Enums.List = pf.allocEnums(numEnums) + } + if numMessages > 0 { + md.L1.Messages.List = pf.allocMessages(numMessages) + } + if numExtensions > 0 { + md.L1.Extensions.List = pf.allocExtensions(numExtensions) + } + + if numEnums > 0 { + b := b0[posEnums:] + for i := range md.L1.Enums.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + md.L1.Enums.List[i].unmarshalSeed(v, sb, pf, md, i) + b = b[n+m:] + } + } + if numMessages > 0 { + b := b0[posMessages:] + for i := range md.L1.Messages.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + md.L1.Messages.List[i].unmarshalSeed(v, sb, pf, md, i) + b = b[n+m:] + } + } + if numExtensions > 0 { + b := b0[posExtensions:] + for i := range md.L1.Extensions.List { + _, n := protowire.ConsumeVarint(b) + v, m := protowire.ConsumeBytes(b[n:]) + md.L1.Extensions.List[i].unmarshalSeed(v, sb, pf, md, i) + b = b[n+m:] + } + } +} + +func (md *Message) unmarshalSeedOptions(b []byte) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.MessageOptions_MapEntry_field_number: + md.L1.IsMapEntry = protowire.DecodeBool(v) + case genid.MessageOptions_MessageSetWireFormat_field_number: + md.L1.IsMessageSet = protowire.DecodeBool(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func (xd *Extension) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + xd.L0.ParentFile = pf + xd.L0.Parent = pd + xd.L0.Index = i + + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_Number_field_number: + xd.L1.Number = pref.FieldNumber(v) + case genid.FieldDescriptorProto_Label_field_number: + xd.L1.Cardinality = pref.Cardinality(v) + case genid.FieldDescriptorProto_Type_field_number: + xd.L1.Kind = pref.Kind(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_Name_field_number: + xd.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.FieldDescriptorProto_Extendee_field_number: + xd.L1.Extendee = PlaceholderMessage(makeFullName(sb, v)) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func (sd *Service) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + sd.L0.ParentFile = pf + sd.L0.Parent = pd + sd.L0.Index = i + + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.ServiceDescriptorProto_Name_field_number: + sd.L0.FullName = appendFullName(sb, pd.FullName(), v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +var nameBuilderPool = sync.Pool{ + New: func() interface{} { return new(strs.Builder) }, +} + +func getBuilder() *strs.Builder { + return nameBuilderPool.Get().(*strs.Builder) +} +func putBuilder(b *strs.Builder) { + nameBuilderPool.Put(b) +} + +// makeFullName converts b to a protoreflect.FullName, +// where b must start with a leading dot. +func makeFullName(sb *strs.Builder, b []byte) pref.FullName { + if len(b) == 0 || b[0] != '.' 
{ + panic("name reference must be fully qualified") + } + return pref.FullName(sb.MakeString(b[1:])) +} + +func appendFullName(sb *strs.Builder, prefix pref.FullName, suffix []byte) pref.FullName { + return sb.AppendFullName(prefix, pref.Name(strs.UnsafeString(suffix))) +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go new file mode 100644 index 00000000..198451e3 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_lazy.go @@ -0,0 +1,704 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import ( + "reflect" + "sync" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/descopts" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +func (fd *File) lazyRawInit() { + fd.unmarshalFull(fd.builder.RawDescriptor) + fd.resolveMessages() + fd.resolveExtensions() + fd.resolveServices() +} + +func (file *File) resolveMessages() { + var depIdx int32 + for i := range file.allMessages { + md := &file.allMessages[i] + + // Resolve message field dependencies. + for j := range md.L2.Fields.List { + fd := &md.L2.Fields.List[j] + + // Weak fields are resolved upon actual use. + if fd.L1.IsWeak { + continue + } + + // Resolve message field dependency. + switch fd.L1.Kind { + case pref.EnumKind: + fd.L1.Enum = file.resolveEnumDependency(fd.L1.Enum, listFieldDeps, depIdx) + depIdx++ + case pref.MessageKind, pref.GroupKind: + fd.L1.Message = file.resolveMessageDependency(fd.L1.Message, listFieldDeps, depIdx) + depIdx++ + } + + // Default is resolved here since it depends on Enum being resolved. + if v := fd.L1.Default.val; v.IsValid() { + fd.L1.Default = unmarshalDefault(v.Bytes(), fd.L1.Kind, file, fd.L1.Enum) + } + } + } +} + +func (file *File) resolveExtensions() { + var depIdx int32 + for i := range file.allExtensions { + xd := &file.allExtensions[i] + + // Resolve extension field dependency. + switch xd.L1.Kind { + case pref.EnumKind: + xd.L2.Enum = file.resolveEnumDependency(xd.L2.Enum, listExtDeps, depIdx) + depIdx++ + case pref.MessageKind, pref.GroupKind: + xd.L2.Message = file.resolveMessageDependency(xd.L2.Message, listExtDeps, depIdx) + depIdx++ + } + + // Default is resolved here since it depends on Enum being resolved. + if v := xd.L2.Default.val; v.IsValid() { + xd.L2.Default = unmarshalDefault(v.Bytes(), xd.L1.Kind, file, xd.L2.Enum) + } + } +} + +func (file *File) resolveServices() { + var depIdx int32 + for i := range file.allServices { + sd := &file.allServices[i] + + // Resolve method dependencies. 
+ for j := range sd.L2.Methods.List { + md := &sd.L2.Methods.List[j] + md.L1.Input = file.resolveMessageDependency(md.L1.Input, listMethInDeps, depIdx) + md.L1.Output = file.resolveMessageDependency(md.L1.Output, listMethOutDeps, depIdx) + depIdx++ + } + } +} + +func (file *File) resolveEnumDependency(ed pref.EnumDescriptor, i, j int32) pref.EnumDescriptor { + r := file.builder.FileRegistry + if r, ok := r.(resolverByIndex); ok { + if ed2 := r.FindEnumByIndex(i, j, file.allEnums, file.allMessages); ed2 != nil { + return ed2 + } + } + for i := range file.allEnums { + if ed2 := &file.allEnums[i]; ed2.L0.FullName == ed.FullName() { + return ed2 + } + } + if d, _ := r.FindDescriptorByName(ed.FullName()); d != nil { + return d.(pref.EnumDescriptor) + } + return ed +} + +func (file *File) resolveMessageDependency(md pref.MessageDescriptor, i, j int32) pref.MessageDescriptor { + r := file.builder.FileRegistry + if r, ok := r.(resolverByIndex); ok { + if md2 := r.FindMessageByIndex(i, j, file.allEnums, file.allMessages); md2 != nil { + return md2 + } + } + for i := range file.allMessages { + if md2 := &file.allMessages[i]; md2.L0.FullName == md.FullName() { + return md2 + } + } + if d, _ := r.FindDescriptorByName(md.FullName()); d != nil { + return d.(pref.MessageDescriptor) + } + return md +} + +func (fd *File) unmarshalFull(b []byte) { + sb := getBuilder() + defer putBuilder(sb) + + var enumIdx, messageIdx, extensionIdx, serviceIdx int + var rawOptions []byte + fd.L2 = new(FileL2) + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FileDescriptorProto_PublicDependency_field_number: + fd.L2.Imports[v].IsPublic = true + case genid.FileDescriptorProto_WeakDependency_field_number: + fd.L2.Imports[v].IsWeak = true + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.FileDescriptorProto_Dependency_field_number: + path := sb.MakeString(v) + imp, _ := fd.builder.FileRegistry.FindFileByPath(path) + if imp == nil { + imp = PlaceholderFile(path) + } + fd.L2.Imports = append(fd.L2.Imports, pref.FileImport{FileDescriptor: imp}) + case genid.FileDescriptorProto_EnumType_field_number: + fd.L1.Enums.List[enumIdx].unmarshalFull(v, sb) + enumIdx++ + case genid.FileDescriptorProto_MessageType_field_number: + fd.L1.Messages.List[messageIdx].unmarshalFull(v, sb) + messageIdx++ + case genid.FileDescriptorProto_Extension_field_number: + fd.L1.Extensions.List[extensionIdx].unmarshalFull(v, sb) + extensionIdx++ + case genid.FileDescriptorProto_Service_field_number: + fd.L1.Services.List[serviceIdx].unmarshalFull(v, sb) + serviceIdx++ + case genid.FileDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + fd.L2.Options = fd.builder.optionsUnmarshaler(&descopts.File, rawOptions) +} + +func (ed *Enum) unmarshalFull(b []byte, sb *strs.Builder) { + var rawValues [][]byte + var rawOptions []byte + if !ed.L1.eagerValues { + ed.L2 = new(EnumL2) + } + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.EnumDescriptorProto_Value_field_number: + rawValues = append(rawValues, v) + case genid.EnumDescriptorProto_ReservedName_field_number: + ed.L2.ReservedNames.List = 
append(ed.L2.ReservedNames.List, pref.Name(sb.MakeString(v))) + case genid.EnumDescriptorProto_ReservedRange_field_number: + ed.L2.ReservedRanges.List = append(ed.L2.ReservedRanges.List, unmarshalEnumReservedRange(v)) + case genid.EnumDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + if !ed.L1.eagerValues && len(rawValues) > 0 { + ed.L2.Values.List = make([]EnumValue, len(rawValues)) + for i, b := range rawValues { + ed.L2.Values.List[i].unmarshalFull(b, sb, ed.L0.ParentFile, ed, i) + } + } + ed.L2.Options = ed.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Enum, rawOptions) +} + +func unmarshalEnumReservedRange(b []byte) (r [2]pref.EnumNumber) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.EnumDescriptorProto_EnumReservedRange_Start_field_number: + r[0] = pref.EnumNumber(v) + case genid.EnumDescriptorProto_EnumReservedRange_End_field_number: + r[1] = pref.EnumNumber(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + return r +} + +func (vd *EnumValue) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + vd.L0.ParentFile = pf + vd.L0.Parent = pd + vd.L0.Index = i + + var rawOptions []byte + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.EnumValueDescriptorProto_Number_field_number: + vd.L1.Number = pref.EnumNumber(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.EnumValueDescriptorProto_Name_field_number: + // NOTE: Enum values are in the same scope as the enum parent. 
+ vd.L0.FullName = appendFullName(sb, pd.Parent().FullName(), v) + case genid.EnumValueDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + vd.L1.Options = pf.builder.optionsUnmarshaler(&descopts.EnumValue, rawOptions) +} + +func (md *Message) unmarshalFull(b []byte, sb *strs.Builder) { + var rawFields, rawOneofs [][]byte + var enumIdx, messageIdx, extensionIdx int + var rawOptions []byte + md.L2 = new(MessageL2) + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.DescriptorProto_Field_field_number: + rawFields = append(rawFields, v) + case genid.DescriptorProto_OneofDecl_field_number: + rawOneofs = append(rawOneofs, v) + case genid.DescriptorProto_ReservedName_field_number: + md.L2.ReservedNames.List = append(md.L2.ReservedNames.List, pref.Name(sb.MakeString(v))) + case genid.DescriptorProto_ReservedRange_field_number: + md.L2.ReservedRanges.List = append(md.L2.ReservedRanges.List, unmarshalMessageReservedRange(v)) + case genid.DescriptorProto_ExtensionRange_field_number: + r, rawOptions := unmarshalMessageExtensionRange(v) + opts := md.L0.ParentFile.builder.optionsUnmarshaler(&descopts.ExtensionRange, rawOptions) + md.L2.ExtensionRanges.List = append(md.L2.ExtensionRanges.List, r) + md.L2.ExtensionRangeOptions = append(md.L2.ExtensionRangeOptions, opts) + case genid.DescriptorProto_EnumType_field_number: + md.L1.Enums.List[enumIdx].unmarshalFull(v, sb) + enumIdx++ + case genid.DescriptorProto_NestedType_field_number: + md.L1.Messages.List[messageIdx].unmarshalFull(v, sb) + messageIdx++ + case genid.DescriptorProto_Extension_field_number: + md.L1.Extensions.List[extensionIdx].unmarshalFull(v, sb) + extensionIdx++ + case genid.DescriptorProto_Options_field_number: + md.unmarshalOptions(v) + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + if len(rawFields) > 0 || len(rawOneofs) > 0 { + md.L2.Fields.List = make([]Field, len(rawFields)) + md.L2.Oneofs.List = make([]Oneof, len(rawOneofs)) + for i, b := range rawFields { + fd := &md.L2.Fields.List[i] + fd.unmarshalFull(b, sb, md.L0.ParentFile, md, i) + if fd.L1.Cardinality == pref.Required { + md.L2.RequiredNumbers.List = append(md.L2.RequiredNumbers.List, fd.L1.Number) + } + } + for i, b := range rawOneofs { + od := &md.L2.Oneofs.List[i] + od.unmarshalFull(b, sb, md.L0.ParentFile, md, i) + } + } + md.L2.Options = md.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Message, rawOptions) +} + +func (md *Message) unmarshalOptions(b []byte) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.MessageOptions_MapEntry_field_number: + md.L1.IsMapEntry = protowire.DecodeBool(v) + case genid.MessageOptions_MessageSetWireFormat_field_number: + md.L1.IsMessageSet = protowire.DecodeBool(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func unmarshalMessageReservedRange(b []byte) (r [2]pref.FieldNumber) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.DescriptorProto_ReservedRange_Start_field_number: + 
r[0] = pref.FieldNumber(v) + case genid.DescriptorProto_ReservedRange_End_field_number: + r[1] = pref.FieldNumber(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + return r +} + +func unmarshalMessageExtensionRange(b []byte) (r [2]pref.FieldNumber, rawOptions []byte) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.DescriptorProto_ExtensionRange_Start_field_number: + r[0] = pref.FieldNumber(v) + case genid.DescriptorProto_ExtensionRange_End_field_number: + r[1] = pref.FieldNumber(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.DescriptorProto_ExtensionRange_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + return r, rawOptions +} + +func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + fd.L0.ParentFile = pf + fd.L0.Parent = pd + fd.L0.Index = i + + var rawTypeName []byte + var rawOptions []byte + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_Number_field_number: + fd.L1.Number = pref.FieldNumber(v) + case genid.FieldDescriptorProto_Label_field_number: + fd.L1.Cardinality = pref.Cardinality(v) + case genid.FieldDescriptorProto_Type_field_number: + fd.L1.Kind = pref.Kind(v) + case genid.FieldDescriptorProto_OneofIndex_field_number: + // In Message.unmarshalFull, we allocate slices for both + // the field and oneof descriptors before unmarshaling either + // of them. This ensures pointers to slice elements are stable. 
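The pointer-stability comment above reflects a general Go rule rather than anything descriptor-specific: append may reallocate a slice's backing array, so pointers taken into the old array go stale, whereas sizing the slice up front and indexing into it keeps element pointers valid. A minimal standalone sketch of the hazard being avoided (names here are illustrative only):

package main

import "fmt"

type item struct{ n int }

func main() {
	// Risky: take a pointer into a slice, then keep appending to it.
	s := make([]item, 1, 1)
	p := &s[0]
	s = append(s, item{}) // capacity exceeded: backing array is reallocated
	s[0].n = 42
	fmt.Println(p.n) // 0 -- p still points into the old, abandoned array

	// Safe (the pattern used above): allocate the full slice first,
	// then hand out pointers to its elements.
	t := make([]item, 2)
	q := &t[0]
	t[0].n = 42
	fmt.Println(q.n) // 42
}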
+ od := &pd.(*Message).L2.Oneofs.List[v] + od.L1.Fields.List = append(od.L1.Fields.List, fd) + if fd.L1.ContainingOneof != nil { + panic("oneof type already set") + } + fd.L1.ContainingOneof = od + case genid.FieldDescriptorProto_Proto3Optional_field_number: + fd.L1.IsProto3Optional = protowire.DecodeBool(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_Name_field_number: + fd.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.FieldDescriptorProto_JsonName_field_number: + fd.L1.StringName.InitJSON(sb.MakeString(v)) + case genid.FieldDescriptorProto_DefaultValue_field_number: + fd.L1.Default.val = pref.ValueOfBytes(v) // temporarily store as bytes; later resolved in resolveMessages + case genid.FieldDescriptorProto_TypeName_field_number: + rawTypeName = v + case genid.FieldDescriptorProto_Options_field_number: + fd.unmarshalOptions(v) + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + if rawTypeName != nil { + name := makeFullName(sb, rawTypeName) + switch fd.L1.Kind { + case pref.EnumKind: + fd.L1.Enum = PlaceholderEnum(name) + case pref.MessageKind, pref.GroupKind: + fd.L1.Message = PlaceholderMessage(name) + } + } + fd.L1.Options = pf.builder.optionsUnmarshaler(&descopts.Field, rawOptions) +} + +func (fd *Field) unmarshalOptions(b []byte) { + const FieldOptions_EnforceUTF8 = 13 + + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FieldOptions_Packed_field_number: + fd.L1.HasPacked = true + fd.L1.IsPacked = protowire.DecodeBool(v) + case genid.FieldOptions_Weak_field_number: + fd.L1.IsWeak = protowire.DecodeBool(v) + case FieldOptions_EnforceUTF8: + fd.L1.HasEnforceUTF8 = true + fd.L1.EnforceUTF8 = protowire.DecodeBool(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func (od *Oneof) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + od.L0.ParentFile = pf + od.L0.Parent = pd + od.L0.Index = i + + var rawOptions []byte + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.OneofDescriptorProto_Name_field_number: + od.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.OneofDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + od.L1.Options = pf.builder.optionsUnmarshaler(&descopts.Oneof, rawOptions) +} + +func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) { + var rawTypeName []byte + var rawOptions []byte + xd.L2 = new(ExtensionL2) + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_Proto3Optional_field_number: + xd.L2.IsProto3Optional = protowire.DecodeBool(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.FieldDescriptorProto_JsonName_field_number: + xd.L2.StringName.InitJSON(sb.MakeString(v)) + case genid.FieldDescriptorProto_DefaultValue_field_number: + xd.L2.Default.val = pref.ValueOfBytes(v) // temporarily store 
as bytes; later resolved in resolveExtensions + case genid.FieldDescriptorProto_TypeName_field_number: + rawTypeName = v + case genid.FieldDescriptorProto_Options_field_number: + xd.unmarshalOptions(v) + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + if rawTypeName != nil { + name := makeFullName(sb, rawTypeName) + switch xd.L1.Kind { + case pref.EnumKind: + xd.L2.Enum = PlaceholderEnum(name) + case pref.MessageKind, pref.GroupKind: + xd.L2.Message = PlaceholderMessage(name) + } + } + xd.L2.Options = xd.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Field, rawOptions) +} + +func (xd *Extension) unmarshalOptions(b []byte) { + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.FieldOptions_Packed_field_number: + xd.L2.IsPacked = protowire.DecodeBool(v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } +} + +func (sd *Service) unmarshalFull(b []byte, sb *strs.Builder) { + var rawMethods [][]byte + var rawOptions []byte + sd.L2 = new(ServiceL2) + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.ServiceDescriptorProto_Method_field_number: + rawMethods = append(rawMethods, v) + case genid.ServiceDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + if len(rawMethods) > 0 { + sd.L2.Methods.List = make([]Method, len(rawMethods)) + for i, b := range rawMethods { + sd.L2.Methods.List[i].unmarshalFull(b, sb, sd.L0.ParentFile, sd, i) + } + } + sd.L2.Options = sd.L0.ParentFile.builder.optionsUnmarshaler(&descopts.Service, rawOptions) +} + +func (md *Method) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd pref.Descriptor, i int) { + md.L0.ParentFile = pf + md.L0.Parent = pd + md.L0.Index = i + + var rawOptions []byte + for len(b) > 0 { + num, typ, n := protowire.ConsumeTag(b) + b = b[n:] + switch typ { + case protowire.VarintType: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + switch num { + case genid.MethodDescriptorProto_ClientStreaming_field_number: + md.L1.IsStreamingClient = protowire.DecodeBool(v) + case genid.MethodDescriptorProto_ServerStreaming_field_number: + md.L1.IsStreamingServer = protowire.DecodeBool(v) + } + case protowire.BytesType: + v, m := protowire.ConsumeBytes(b) + b = b[m:] + switch num { + case genid.MethodDescriptorProto_Name_field_number: + md.L0.FullName = appendFullName(sb, pd.FullName(), v) + case genid.MethodDescriptorProto_InputType_field_number: + md.L1.Input = PlaceholderMessage(makeFullName(sb, v)) + case genid.MethodDescriptorProto_OutputType_field_number: + md.L1.Output = PlaceholderMessage(makeFullName(sb, v)) + case genid.MethodDescriptorProto_Options_field_number: + rawOptions = appendOptions(rawOptions, v) + } + default: + m := protowire.ConsumeFieldValue(num, typ, b) + b = b[m:] + } + } + md.L1.Options = pf.builder.optionsUnmarshaler(&descopts.Method, rawOptions) +} + +// appendOptions appends src to dst, where the returned slice is never nil. +// This is necessary to distinguish between empty and unpopulated options. +func appendOptions(dst, src []byte) []byte { + if dst == nil { + dst = []byte{} + } + return append(dst, src...) 
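appendOptions above leans on Go's distinction between a nil slice and a non-nil empty slice: both have length zero, but only the non-nil form records that an options field was actually seen on the wire. A tiny illustration of that distinction:

package main

import "fmt"

func main() {
	var unset []byte    // no options observed at all
	present := []byte{} // options field present, but empty

	fmt.Println(unset == nil, len(unset))     // true 0
	fmt.Println(present == nil, len(present)) // false 0
}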
+} + +// optionsUnmarshaler constructs a lazy unmarshal function for an options message. +// +// The type of message to unmarshal to is passed as a pointer since the +// vars in descopts may not yet be populated at the time this function is called. +func (db *Builder) optionsUnmarshaler(p *pref.ProtoMessage, b []byte) func() pref.ProtoMessage { + if b == nil { + return nil + } + var opts pref.ProtoMessage + var once sync.Once + return func() pref.ProtoMessage { + once.Do(func() { + if *p == nil { + panic("Descriptor.Options called without importing the descriptor package") + } + opts = reflect.New(reflect.TypeOf(*p).Elem()).Interface().(pref.ProtoMessage) + if err := (proto.UnmarshalOptions{ + AllowPartial: true, + Resolver: db.TypeResolver, + }).Unmarshal(b, opts); err != nil { + panic(err) + } + }) + return opts + } +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go new file mode 100644 index 00000000..aa294fff --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list.go @@ -0,0 +1,450 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import ( + "fmt" + "math" + "sort" + "sync" + + "google.golang.org/protobuf/internal/genid" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/descfmt" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type FileImports []pref.FileImport + +func (p *FileImports) Len() int { return len(*p) } +func (p *FileImports) Get(i int) pref.FileImport { return (*p)[i] } +func (p *FileImports) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *FileImports) ProtoInternal(pragma.DoNotImplement) {} + +type Names struct { + List []pref.Name + once sync.Once + has map[pref.Name]int // protected by once +} + +func (p *Names) Len() int { return len(p.List) } +func (p *Names) Get(i int) pref.Name { return p.List[i] } +func (p *Names) Has(s pref.Name) bool { return p.lazyInit().has[s] > 0 } +func (p *Names) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *Names) ProtoInternal(pragma.DoNotImplement) {} +func (p *Names) lazyInit() *Names { + p.once.Do(func() { + if len(p.List) > 0 { + p.has = make(map[pref.Name]int, len(p.List)) + for _, s := range p.List { + p.has[s] = p.has[s] + 1 + } + } + }) + return p +} + +// CheckValid reports any errors with the set of names with an error message +// that completes the sentence: "ranges is invalid because it has ..." +func (p *Names) CheckValid() error { + for s, n := range p.lazyInit().has { + switch { + case n > 1: + return errors.New("duplicate name: %q", s) + case false && !s.IsValid(): + // NOTE: The C++ implementation does not validate the identifier. + // See https://github.com/protocolbuffers/protobuf/issues/6335. 
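optionsUnmarshaler above, like the lazyInit methods used throughout these list types, hides one-time work behind sync.Once and a closure, so the comparatively expensive proto.Unmarshal only runs if and when the options are first requested. A stripped-down sketch of the same lazy pattern, using a hypothetical generic helper for brevity (the real code spells the types out):

package main

import (
	"fmt"
	"sync"
)

// lazy returns a function that runs compute exactly once, on first call,
// and returns the cached result on every call after that.
func lazy[T any](compute func() T) func() T {
	var (
		once sync.Once
		v    T
	)
	return func() T {
		once.Do(func() { v = compute() })
		return v
	}
}

func main() {
	opts := lazy(func() string {
		fmt.Println("decoding options...") // printed only once
		return "decoded"
	})
	fmt.Println(opts())
	fmt.Println(opts())
}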
+ return errors.New("invalid name: %q", s) + } + } + return nil +} + +type EnumRanges struct { + List [][2]pref.EnumNumber // start inclusive; end inclusive + once sync.Once + sorted [][2]pref.EnumNumber // protected by once +} + +func (p *EnumRanges) Len() int { return len(p.List) } +func (p *EnumRanges) Get(i int) [2]pref.EnumNumber { return p.List[i] } +func (p *EnumRanges) Has(n pref.EnumNumber) bool { + for ls := p.lazyInit().sorted; len(ls) > 0; { + i := len(ls) / 2 + switch r := enumRange(ls[i]); { + case n < r.Start(): + ls = ls[:i] // search lower + case n > r.End(): + ls = ls[i+1:] // search upper + default: + return true + } + } + return false +} +func (p *EnumRanges) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *EnumRanges) ProtoInternal(pragma.DoNotImplement) {} +func (p *EnumRanges) lazyInit() *EnumRanges { + p.once.Do(func() { + p.sorted = append(p.sorted, p.List...) + sort.Slice(p.sorted, func(i, j int) bool { + return p.sorted[i][0] < p.sorted[j][0] + }) + }) + return p +} + +// CheckValid reports any errors with the set of names with an error message +// that completes the sentence: "ranges is invalid because it has ..." +func (p *EnumRanges) CheckValid() error { + var rp enumRange + for i, r := range p.lazyInit().sorted { + r := enumRange(r) + switch { + case !(r.Start() <= r.End()): + return errors.New("invalid range: %v", r) + case !(rp.End() < r.Start()) && i > 0: + return errors.New("overlapping ranges: %v with %v", rp, r) + } + rp = r + } + return nil +} + +type enumRange [2]protoreflect.EnumNumber + +func (r enumRange) Start() protoreflect.EnumNumber { return r[0] } // inclusive +func (r enumRange) End() protoreflect.EnumNumber { return r[1] } // inclusive +func (r enumRange) String() string { + if r.Start() == r.End() { + return fmt.Sprintf("%d", r.Start()) + } + return fmt.Sprintf("%d to %d", r.Start(), r.End()) +} + +type FieldRanges struct { + List [][2]pref.FieldNumber // start inclusive; end exclusive + once sync.Once + sorted [][2]pref.FieldNumber // protected by once +} + +func (p *FieldRanges) Len() int { return len(p.List) } +func (p *FieldRanges) Get(i int) [2]pref.FieldNumber { return p.List[i] } +func (p *FieldRanges) Has(n pref.FieldNumber) bool { + for ls := p.lazyInit().sorted; len(ls) > 0; { + i := len(ls) / 2 + switch r := fieldRange(ls[i]); { + case n < r.Start(): + ls = ls[:i] // search lower + case n > r.End(): + ls = ls[i+1:] // search upper + default: + return true + } + } + return false +} +func (p *FieldRanges) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *FieldRanges) ProtoInternal(pragma.DoNotImplement) {} +func (p *FieldRanges) lazyInit() *FieldRanges { + p.once.Do(func() { + p.sorted = append(p.sorted, p.List...) + sort.Slice(p.sorted, func(i, j int) bool { + return p.sorted[i][0] < p.sorted[j][0] + }) + }) + return p +} + +// CheckValid reports any errors with the set of ranges with an error message +// that completes the sentence: "ranges is invalid because it has ..." 
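The Has methods above perform a hand-rolled binary search over the lazily sorted [start, end] pairs instead of building a map, since the range lists are small and immutable once built (EnumRanges stores inclusive ends directly; FieldRanges stores exclusive ends and converts via fieldRange.End()). A standalone sketch of the same search, assuming the spans are already sorted by start and non-overlapping (type and function names are illustrative):

package main

import "fmt"

type span [2]int // start and end, both inclusive

// contains reports whether n falls inside any span, halving the search
// window the same way EnumRanges.Has and FieldRanges.Has do.
func contains(sorted []span, n int) bool {
	for ls := sorted; len(ls) > 0; {
		i := len(ls) / 2
		switch r := ls[i]; {
		case n < r[0]:
			ls = ls[:i] // search the lower half
		case n > r[1]:
			ls = ls[i+1:] // search the upper half
		default:
			return true
		}
	}
	return false
}

func main() {
	spans := []span{{1, 5}, {10, 20}, {100, 100}}
	fmt.Println(contains(spans, 15)) // true
	fmt.Println(contains(spans, 7))  // false
}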
+func (p *FieldRanges) CheckValid(isMessageSet bool) error { + var rp fieldRange + for i, r := range p.lazyInit().sorted { + r := fieldRange(r) + switch { + case !isValidFieldNumber(r.Start(), isMessageSet): + return errors.New("invalid field number: %d", r.Start()) + case !isValidFieldNumber(r.End(), isMessageSet): + return errors.New("invalid field number: %d", r.End()) + case !(r.Start() <= r.End()): + return errors.New("invalid range: %v", r) + case !(rp.End() < r.Start()) && i > 0: + return errors.New("overlapping ranges: %v with %v", rp, r) + } + rp = r + } + return nil +} + +// isValidFieldNumber reports whether the field number is valid. +// Unlike the FieldNumber.IsValid method, it allows ranges that cover the +// reserved number range. +func isValidFieldNumber(n protoreflect.FieldNumber, isMessageSet bool) bool { + return protowire.MinValidNumber <= n && (n <= protowire.MaxValidNumber || isMessageSet) +} + +// CheckOverlap reports an error if p and q overlap. +func (p *FieldRanges) CheckOverlap(q *FieldRanges) error { + rps := p.lazyInit().sorted + rqs := q.lazyInit().sorted + for pi, qi := 0, 0; pi < len(rps) && qi < len(rqs); { + rp := fieldRange(rps[pi]) + rq := fieldRange(rqs[qi]) + if !(rp.End() < rq.Start() || rq.End() < rp.Start()) { + return errors.New("overlapping ranges: %v with %v", rp, rq) + } + if rp.Start() < rq.Start() { + pi++ + } else { + qi++ + } + } + return nil +} + +type fieldRange [2]protoreflect.FieldNumber + +func (r fieldRange) Start() protoreflect.FieldNumber { return r[0] } // inclusive +func (r fieldRange) End() protoreflect.FieldNumber { return r[1] - 1 } // inclusive +func (r fieldRange) String() string { + if r.Start() == r.End() { + return fmt.Sprintf("%d", r.Start()) + } + return fmt.Sprintf("%d to %d", r.Start(), r.End()) +} + +type FieldNumbers struct { + List []pref.FieldNumber + once sync.Once + has map[pref.FieldNumber]struct{} // protected by once +} + +func (p *FieldNumbers) Len() int { return len(p.List) } +func (p *FieldNumbers) Get(i int) pref.FieldNumber { return p.List[i] } +func (p *FieldNumbers) Has(n pref.FieldNumber) bool { + p.once.Do(func() { + if len(p.List) > 0 { + p.has = make(map[pref.FieldNumber]struct{}, len(p.List)) + for _, n := range p.List { + p.has[n] = struct{}{} + } + } + }) + _, ok := p.has[n] + return ok +} +func (p *FieldNumbers) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *FieldNumbers) ProtoInternal(pragma.DoNotImplement) {} + +type OneofFields struct { + List []pref.FieldDescriptor + once sync.Once + byName map[pref.Name]pref.FieldDescriptor // protected by once + byJSON map[string]pref.FieldDescriptor // protected by once + byText map[string]pref.FieldDescriptor // protected by once + byNum map[pref.FieldNumber]pref.FieldDescriptor // protected by once +} + +func (p *OneofFields) Len() int { return len(p.List) } +func (p *OneofFields) Get(i int) pref.FieldDescriptor { return p.List[i] } +func (p *OneofFields) ByName(s pref.Name) pref.FieldDescriptor { return p.lazyInit().byName[s] } +func (p *OneofFields) ByJSONName(s string) pref.FieldDescriptor { return p.lazyInit().byJSON[s] } +func (p *OneofFields) ByTextName(s string) pref.FieldDescriptor { return p.lazyInit().byText[s] } +func (p *OneofFields) ByNumber(n pref.FieldNumber) pref.FieldDescriptor { return p.lazyInit().byNum[n] } +func (p *OneofFields) Format(s fmt.State, r rune) { descfmt.FormatList(s, r, p) } +func (p *OneofFields) ProtoInternal(pragma.DoNotImplement) {} + +func (p *OneofFields) lazyInit() *OneofFields { + 
p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[pref.Name]pref.FieldDescriptor, len(p.List)) + p.byJSON = make(map[string]pref.FieldDescriptor, len(p.List)) + p.byText = make(map[string]pref.FieldDescriptor, len(p.List)) + p.byNum = make(map[pref.FieldNumber]pref.FieldDescriptor, len(p.List)) + for _, f := range p.List { + // Field names and numbers are guaranteed to be unique. + p.byName[f.Name()] = f + p.byJSON[f.JSONName()] = f + p.byText[f.TextName()] = f + p.byNum[f.Number()] = f + } + } + }) + return p +} + +type SourceLocations struct { + // List is a list of SourceLocations. + // The SourceLocation.Next field does not need to be populated + // as it will be lazily populated upon first need. + List []pref.SourceLocation + + // File is the parent file descriptor that these locations are relative to. + // If non-nil, ByDescriptor verifies that the provided descriptor + // is a child of this file descriptor. + File pref.FileDescriptor + + once sync.Once + byPath map[pathKey]int +} + +func (p *SourceLocations) Len() int { return len(p.List) } +func (p *SourceLocations) Get(i int) pref.SourceLocation { return p.lazyInit().List[i] } +func (p *SourceLocations) byKey(k pathKey) pref.SourceLocation { + if i, ok := p.lazyInit().byPath[k]; ok { + return p.List[i] + } + return pref.SourceLocation{} +} +func (p *SourceLocations) ByPath(path pref.SourcePath) pref.SourceLocation { + return p.byKey(newPathKey(path)) +} +func (p *SourceLocations) ByDescriptor(desc pref.Descriptor) pref.SourceLocation { + if p.File != nil && desc != nil && p.File != desc.ParentFile() { + return pref.SourceLocation{} // mismatching parent files + } + var pathArr [16]int32 + path := pathArr[:0] + for { + switch desc.(type) { + case pref.FileDescriptor: + // Reverse the path since it was constructed in reverse. 
+ for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 { + path[i], path[j] = path[j], path[i] + } + return p.byKey(newPathKey(path)) + case pref.MessageDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_MessageType_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_NestedType_field_number)) + default: + return pref.SourceLocation{} + } + case pref.FieldDescriptor: + isExtension := desc.(pref.FieldDescriptor).IsExtension() + path = append(path, int32(desc.Index())) + desc = desc.Parent() + if isExtension { + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_Extension_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_Extension_field_number)) + default: + return pref.SourceLocation{} + } + } else { + switch desc.(type) { + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_Field_field_number)) + default: + return pref.SourceLocation{} + } + } + case pref.OneofDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_OneofDecl_field_number)) + default: + return pref.SourceLocation{} + } + case pref.EnumDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_EnumType_field_number)) + case pref.MessageDescriptor: + path = append(path, int32(genid.DescriptorProto_EnumType_field_number)) + default: + return pref.SourceLocation{} + } + case pref.EnumValueDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.EnumDescriptor: + path = append(path, int32(genid.EnumDescriptorProto_Value_field_number)) + default: + return pref.SourceLocation{} + } + case pref.ServiceDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.FileDescriptor: + path = append(path, int32(genid.FileDescriptorProto_Service_field_number)) + default: + return pref.SourceLocation{} + } + case pref.MethodDescriptor: + path = append(path, int32(desc.Index())) + desc = desc.Parent() + switch desc.(type) { + case pref.ServiceDescriptor: + path = append(path, int32(genid.ServiceDescriptorProto_Method_field_number)) + default: + return pref.SourceLocation{} + } + default: + return pref.SourceLocation{} + } + } +} +func (p *SourceLocations) lazyInit() *SourceLocations { + p.once.Do(func() { + if len(p.List) > 0 { + // Collect all the indexes for a given path. + pathIdxs := make(map[pathKey][]int, len(p.List)) + for i, l := range p.List { + k := newPathKey(l.Path) + pathIdxs[k] = append(pathIdxs[k], i) + } + + // Update the next index for all locations. + p.byPath = make(map[pathKey]int, len(p.List)) + for k, idxs := range pathIdxs { + for i := 0; i < len(idxs)-1; i++ { + p.List[idxs[i]].Next = idxs[i+1] + } + p.List[idxs[len(idxs)-1]].Next = 0 + p.byPath[k] = idxs[0] // record the first location for this path + } + } + }) + return p +} +func (p *SourceLocations) ProtoInternal(pragma.DoNotImplement) {} + +// pathKey is a comparable representation of protoreflect.SourcePath. 
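For orientation on the pathKey type that follows: a protoreflect.SourcePath is just the sequence of FileDescriptorProto field numbers and indexes leading from the file down to a declaration, which is exactly what ByDescriptor above rebuilds by walking Parent() links and then reversing the collected segments. A small sketch of what such a path looks like:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	// The first field of the first message in a file is reached via
	// message_type (field 4), index 0, then field (field 2), index 0.
	path := protoreflect.SourcePath{4, 0, 2, 0}
	fmt.Println([]int32(path)) // the raw segments: [4 0 2 0]
	fmt.Println(path)          // the same path via its human-readable Stringer
}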
+type pathKey struct { + arr [16]uint8 // first n-1 path segments; last element is the length + str string // used if the path does not fit in arr +} + +func newPathKey(p pref.SourcePath) (k pathKey) { + if len(p) < len(k.arr) { + for i, ps := range p { + if ps < 0 || math.MaxUint8 <= ps { + return pathKey{str: p.String()} + } + k.arr[i] = uint8(ps) + } + k.arr[len(k.arr)-1] = uint8(len(p)) + return k + } + return pathKey{str: p.String()} +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go new file mode 100644 index 00000000..30db19fd --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc_list_gen.go @@ -0,0 +1,356 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. + +package filedesc + +import ( + "fmt" + "sync" + + "google.golang.org/protobuf/internal/descfmt" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/reflect/protoreflect" +) + +type Enums struct { + List []Enum + once sync.Once + byName map[protoreflect.Name]*Enum // protected by once +} + +func (p *Enums) Len() int { + return len(p.List) +} +func (p *Enums) Get(i int) protoreflect.EnumDescriptor { + return &p.List[i] +} +func (p *Enums) ByName(s protoreflect.Name) protoreflect.EnumDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Enums) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Enums) ProtoInternal(pragma.DoNotImplement) {} +func (p *Enums) lazyInit() *Enums { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Enum, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} + +type EnumValues struct { + List []EnumValue + once sync.Once + byName map[protoreflect.Name]*EnumValue // protected by once + byNum map[protoreflect.EnumNumber]*EnumValue // protected by once +} + +func (p *EnumValues) Len() int { + return len(p.List) +} +func (p *EnumValues) Get(i int) protoreflect.EnumValueDescriptor { + return &p.List[i] +} +func (p *EnumValues) ByName(s protoreflect.Name) protoreflect.EnumValueDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *EnumValues) ByNumber(n protoreflect.EnumNumber) protoreflect.EnumValueDescriptor { + if d := p.lazyInit().byNum[n]; d != nil { + return d + } + return nil +} +func (p *EnumValues) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *EnumValues) ProtoInternal(pragma.DoNotImplement) {} +func (p *EnumValues) lazyInit() *EnumValues { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*EnumValue, len(p.List)) + p.byNum = make(map[protoreflect.EnumNumber]*EnumValue, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + if _, ok := p.byNum[d.Number()]; !ok { + p.byNum[d.Number()] = d + } + } + } + }) + return p +} + +type Messages struct { + List []Message + once sync.Once + byName map[protoreflect.Name]*Message // protected by once +} + +func (p *Messages) Len() int { + return len(p.List) +} +func (p *Messages) Get(i int) protoreflect.MessageDescriptor { + return &p.List[i] +} +func (p *Messages) ByName(s 
protoreflect.Name) protoreflect.MessageDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Messages) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Messages) ProtoInternal(pragma.DoNotImplement) {} +func (p *Messages) lazyInit() *Messages { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Message, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} + +type Fields struct { + List []Field + once sync.Once + byName map[protoreflect.Name]*Field // protected by once + byJSON map[string]*Field // protected by once + byText map[string]*Field // protected by once + byNum map[protoreflect.FieldNumber]*Field // protected by once +} + +func (p *Fields) Len() int { + return len(p.List) +} +func (p *Fields) Get(i int) protoreflect.FieldDescriptor { + return &p.List[i] +} +func (p *Fields) ByName(s protoreflect.Name) protoreflect.FieldDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Fields) ByJSONName(s string) protoreflect.FieldDescriptor { + if d := p.lazyInit().byJSON[s]; d != nil { + return d + } + return nil +} +func (p *Fields) ByTextName(s string) protoreflect.FieldDescriptor { + if d := p.lazyInit().byText[s]; d != nil { + return d + } + return nil +} +func (p *Fields) ByNumber(n protoreflect.FieldNumber) protoreflect.FieldDescriptor { + if d := p.lazyInit().byNum[n]; d != nil { + return d + } + return nil +} +func (p *Fields) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Fields) ProtoInternal(pragma.DoNotImplement) {} +func (p *Fields) lazyInit() *Fields { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Field, len(p.List)) + p.byJSON = make(map[string]*Field, len(p.List)) + p.byText = make(map[string]*Field, len(p.List)) + p.byNum = make(map[protoreflect.FieldNumber]*Field, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + if _, ok := p.byJSON[d.JSONName()]; !ok { + p.byJSON[d.JSONName()] = d + } + if _, ok := p.byText[d.TextName()]; !ok { + p.byText[d.TextName()] = d + } + if _, ok := p.byNum[d.Number()]; !ok { + p.byNum[d.Number()] = d + } + } + } + }) + return p +} + +type Oneofs struct { + List []Oneof + once sync.Once + byName map[protoreflect.Name]*Oneof // protected by once +} + +func (p *Oneofs) Len() int { + return len(p.List) +} +func (p *Oneofs) Get(i int) protoreflect.OneofDescriptor { + return &p.List[i] +} +func (p *Oneofs) ByName(s protoreflect.Name) protoreflect.OneofDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Oneofs) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Oneofs) ProtoInternal(pragma.DoNotImplement) {} +func (p *Oneofs) lazyInit() *Oneofs { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Oneof, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} + +type Extensions struct { + List []Extension + once sync.Once + byName map[protoreflect.Name]*Extension // protected by once +} + +func (p *Extensions) Len() int { + return len(p.List) +} +func (p *Extensions) Get(i int) protoreflect.ExtensionDescriptor { + return &p.List[i] +} +func (p *Extensions) ByName(s 
protoreflect.Name) protoreflect.ExtensionDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Extensions) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Extensions) ProtoInternal(pragma.DoNotImplement) {} +func (p *Extensions) lazyInit() *Extensions { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Extension, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} + +type Services struct { + List []Service + once sync.Once + byName map[protoreflect.Name]*Service // protected by once +} + +func (p *Services) Len() int { + return len(p.List) +} +func (p *Services) Get(i int) protoreflect.ServiceDescriptor { + return &p.List[i] +} +func (p *Services) ByName(s protoreflect.Name) protoreflect.ServiceDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Services) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Services) ProtoInternal(pragma.DoNotImplement) {} +func (p *Services) lazyInit() *Services { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Service, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} + +type Methods struct { + List []Method + once sync.Once + byName map[protoreflect.Name]*Method // protected by once +} + +func (p *Methods) Len() int { + return len(p.List) +} +func (p *Methods) Get(i int) protoreflect.MethodDescriptor { + return &p.List[i] +} +func (p *Methods) ByName(s protoreflect.Name) protoreflect.MethodDescriptor { + if d := p.lazyInit().byName[s]; d != nil { + return d + } + return nil +} +func (p *Methods) Format(s fmt.State, r rune) { + descfmt.FormatList(s, r, p) +} +func (p *Methods) ProtoInternal(pragma.DoNotImplement) {} +func (p *Methods) lazyInit() *Methods { + p.once.Do(func() { + if len(p.List) > 0 { + p.byName = make(map[protoreflect.Name]*Method, len(p.List)) + for i := range p.List { + d := &p.List[i] + if _, ok := p.byName[d.Name()]; !ok { + p.byName[d.Name()] = d + } + } + } + }) + return p +} diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/placeholder.go b/vendor/google.golang.org/protobuf/internal/filedesc/placeholder.go new file mode 100644 index 00000000..dbf2c605 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filedesc/placeholder.go @@ -0,0 +1,107 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package filedesc + +import ( + "google.golang.org/protobuf/internal/descopts" + "google.golang.org/protobuf/internal/pragma" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +var ( + emptyNames = new(Names) + emptyEnumRanges = new(EnumRanges) + emptyFieldRanges = new(FieldRanges) + emptyFieldNumbers = new(FieldNumbers) + emptySourceLocations = new(SourceLocations) + + emptyFiles = new(FileImports) + emptyMessages = new(Messages) + emptyFields = new(Fields) + emptyOneofs = new(Oneofs) + emptyEnums = new(Enums) + emptyEnumValues = new(EnumValues) + emptyExtensions = new(Extensions) + emptyServices = new(Services) +) + +// PlaceholderFile is a placeholder, representing only the file path. 
+type PlaceholderFile string + +func (f PlaceholderFile) ParentFile() pref.FileDescriptor { return f } +func (f PlaceholderFile) Parent() pref.Descriptor { return nil } +func (f PlaceholderFile) Index() int { return 0 } +func (f PlaceholderFile) Syntax() pref.Syntax { return 0 } +func (f PlaceholderFile) Name() pref.Name { return "" } +func (f PlaceholderFile) FullName() pref.FullName { return "" } +func (f PlaceholderFile) IsPlaceholder() bool { return true } +func (f PlaceholderFile) Options() pref.ProtoMessage { return descopts.File } +func (f PlaceholderFile) Path() string { return string(f) } +func (f PlaceholderFile) Package() pref.FullName { return "" } +func (f PlaceholderFile) Imports() pref.FileImports { return emptyFiles } +func (f PlaceholderFile) Messages() pref.MessageDescriptors { return emptyMessages } +func (f PlaceholderFile) Enums() pref.EnumDescriptors { return emptyEnums } +func (f PlaceholderFile) Extensions() pref.ExtensionDescriptors { return emptyExtensions } +func (f PlaceholderFile) Services() pref.ServiceDescriptors { return emptyServices } +func (f PlaceholderFile) SourceLocations() pref.SourceLocations { return emptySourceLocations } +func (f PlaceholderFile) ProtoType(pref.FileDescriptor) { return } +func (f PlaceholderFile) ProtoInternal(pragma.DoNotImplement) { return } + +// PlaceholderEnum is a placeholder, representing only the full name. +type PlaceholderEnum pref.FullName + +func (e PlaceholderEnum) ParentFile() pref.FileDescriptor { return nil } +func (e PlaceholderEnum) Parent() pref.Descriptor { return nil } +func (e PlaceholderEnum) Index() int { return 0 } +func (e PlaceholderEnum) Syntax() pref.Syntax { return 0 } +func (e PlaceholderEnum) Name() pref.Name { return pref.FullName(e).Name() } +func (e PlaceholderEnum) FullName() pref.FullName { return pref.FullName(e) } +func (e PlaceholderEnum) IsPlaceholder() bool { return true } +func (e PlaceholderEnum) Options() pref.ProtoMessage { return descopts.Enum } +func (e PlaceholderEnum) Values() pref.EnumValueDescriptors { return emptyEnumValues } +func (e PlaceholderEnum) ReservedNames() pref.Names { return emptyNames } +func (e PlaceholderEnum) ReservedRanges() pref.EnumRanges { return emptyEnumRanges } +func (e PlaceholderEnum) ProtoType(pref.EnumDescriptor) { return } +func (e PlaceholderEnum) ProtoInternal(pragma.DoNotImplement) { return } + +// PlaceholderEnumValue is a placeholder, representing only the full name. +type PlaceholderEnumValue pref.FullName + +func (e PlaceholderEnumValue) ParentFile() pref.FileDescriptor { return nil } +func (e PlaceholderEnumValue) Parent() pref.Descriptor { return nil } +func (e PlaceholderEnumValue) Index() int { return 0 } +func (e PlaceholderEnumValue) Syntax() pref.Syntax { return 0 } +func (e PlaceholderEnumValue) Name() pref.Name { return pref.FullName(e).Name() } +func (e PlaceholderEnumValue) FullName() pref.FullName { return pref.FullName(e) } +func (e PlaceholderEnumValue) IsPlaceholder() bool { return true } +func (e PlaceholderEnumValue) Options() pref.ProtoMessage { return descopts.EnumValue } +func (e PlaceholderEnumValue) Number() pref.EnumNumber { return 0 } +func (e PlaceholderEnumValue) ProtoType(pref.EnumValueDescriptor) { return } +func (e PlaceholderEnumValue) ProtoInternal(pragma.DoNotImplement) { return } + +// PlaceholderMessage is a placeholder, representing only the full name. 
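The placeholder types above (and PlaceholderMessage just below) all follow the same trick: the value is nothing more than its own full name, every other accessor returns an empty or zero result, and IsPlaceholder reports true so callers can tell an unresolved dependency from a real descriptor. A self-contained sketch of that pattern against a deliberately tiny, hypothetical interface:

package main

import "fmt"

// namer is a stand-in for the much larger protoreflect interfaces that the
// real placeholders satisfy; it exists only for this illustration.
type namer interface {
	FullName() string
	IsPlaceholder() bool
}

// placeholderEnum mirrors the pattern above: the string value is the full
// name, and everything else is a fixed, trivial answer.
type placeholderEnum string

func (e placeholderEnum) FullName() string    { return string(e) }
func (e placeholderEnum) IsPlaceholder() bool { return true }

func main() {
	var d namer = placeholderEnum("foo.v1.Status")
	fmt.Println(d.FullName(), d.IsPlaceholder()) // foo.v1.Status true
}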
+type PlaceholderMessage pref.FullName + +func (m PlaceholderMessage) ParentFile() pref.FileDescriptor { return nil } +func (m PlaceholderMessage) Parent() pref.Descriptor { return nil } +func (m PlaceholderMessage) Index() int { return 0 } +func (m PlaceholderMessage) Syntax() pref.Syntax { return 0 } +func (m PlaceholderMessage) Name() pref.Name { return pref.FullName(m).Name() } +func (m PlaceholderMessage) FullName() pref.FullName { return pref.FullName(m) } +func (m PlaceholderMessage) IsPlaceholder() bool { return true } +func (m PlaceholderMessage) Options() pref.ProtoMessage { return descopts.Message } +func (m PlaceholderMessage) IsMapEntry() bool { return false } +func (m PlaceholderMessage) Fields() pref.FieldDescriptors { return emptyFields } +func (m PlaceholderMessage) Oneofs() pref.OneofDescriptors { return emptyOneofs } +func (m PlaceholderMessage) ReservedNames() pref.Names { return emptyNames } +func (m PlaceholderMessage) ReservedRanges() pref.FieldRanges { return emptyFieldRanges } +func (m PlaceholderMessage) RequiredNumbers() pref.FieldNumbers { return emptyFieldNumbers } +func (m PlaceholderMessage) ExtensionRanges() pref.FieldRanges { return emptyFieldRanges } +func (m PlaceholderMessage) ExtensionRangeOptions(int) pref.ProtoMessage { panic("index out of range") } +func (m PlaceholderMessage) Messages() pref.MessageDescriptors { return emptyMessages } +func (m PlaceholderMessage) Enums() pref.EnumDescriptors { return emptyEnums } +func (m PlaceholderMessage) Extensions() pref.ExtensionDescriptors { return emptyExtensions } +func (m PlaceholderMessage) ProtoType(pref.MessageDescriptor) { return } +func (m PlaceholderMessage) ProtoInternal(pragma.DoNotImplement) { return } diff --git a/vendor/google.golang.org/protobuf/internal/filetype/build.go b/vendor/google.golang.org/protobuf/internal/filetype/build.go new file mode 100644 index 00000000..0a0dd35d --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/filetype/build.go @@ -0,0 +1,297 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package filetype provides functionality for wrapping descriptors +// with Go type information. +package filetype + +import ( + "reflect" + + "google.golang.org/protobuf/internal/descopts" + fdesc "google.golang.org/protobuf/internal/filedesc" + pimpl "google.golang.org/protobuf/internal/impl" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" +) + +// Builder constructs type descriptors from a raw file descriptor +// and associated Go types for each enum and message declaration. +// +// +// Flattened Ordering +// +// The protobuf type system represents declarations as a tree. Certain nodes in +// the tree require us to either associate it with a concrete Go type or to +// resolve a dependency, which is information that must be provided separately +// since it cannot be derived from the file descriptor alone. +// +// However, representing a tree as Go literals is difficult to simply do in a +// space and time efficient way. Thus, we store them as a flattened list of +// objects where the serialization order from the tree-based form is important. 
+// +// The "flattened ordering" is defined as a tree traversal of all enum, message, +// extension, and service declarations using the following algorithm: +// +// def VisitFileDecls(fd): +// for e in fd.Enums: yield e +// for m in fd.Messages: yield m +// for x in fd.Extensions: yield x +// for s in fd.Services: yield s +// for m in fd.Messages: yield from VisitMessageDecls(m) +// +// def VisitMessageDecls(md): +// for e in md.Enums: yield e +// for m in md.Messages: yield m +// for x in md.Extensions: yield x +// for m in md.Messages: yield from VisitMessageDecls(m) +// +// The traversal starts at the root file descriptor and yields each direct +// declaration within each node before traversing into sub-declarations +// that children themselves may have. +type Builder struct { + // File is the underlying file descriptor builder. + File fdesc.Builder + + // GoTypes is a unique set of the Go types for all declarations and + // dependencies. Each type is represented as a zero value of the Go type. + // + // Declarations are Go types generated for enums and messages directly + // declared (not publicly imported) in the proto source file. + // Messages for map entries are accounted for, but represented by nil. + // Enum declarations in "flattened ordering" come first, followed by + // message declarations in "flattened ordering". + // + // Dependencies are Go types for enums or messages referenced by + // message fields (excluding weak fields), for parent extended messages of + // extension fields, for enums or messages referenced by extension fields, + // and for input and output messages referenced by service methods. + // Dependencies must come after declarations, but the ordering of + // dependencies themselves is unspecified. + GoTypes []interface{} + + // DependencyIndexes is an ordered list of indexes into GoTypes for the + // dependencies of messages, extensions, or services. + // + // There are 5 sub-lists in "flattened ordering" concatenated back-to-back: + // 0. Message field dependencies: list of the enum or message type + // referred to by every message field. + // 1. Extension field targets: list of the extended parent message of + // every extension. + // 2. Extension field dependencies: list of the enum or message type + // referred to by every extension field. + // 3. Service method inputs: list of the input message type + // referred to by every service method. + // 4. Service method outputs: list of the output message type + // referred to by every service method. + // + // The offset into DependencyIndexes for the start of each sub-list + // is appended to the end in reverse order. + DependencyIndexes []int32 + + // EnumInfos is a list of enum infos in "flattened ordering". + EnumInfos []pimpl.EnumInfo + + // MessageInfos is a list of message infos in "flattened ordering". + // If provided, the GoType and PBType for each element is populated. + // + // Requirement: len(MessageInfos) == len(Build.Messages) + MessageInfos []pimpl.MessageInfo + + // ExtensionInfos is a list of extension infos in "flattened ordering". + // Each element is initialized and registered with the protoregistry package. + // + // Requirement: len(LegacyExtensions) == len(Build.Extensions) + ExtensionInfos []pimpl.ExtensionInfo + + // TypeRegistry is the registry to register each type descriptor. + // If nil, it uses protoregistry.GlobalTypes. 
+ TypeRegistry interface { + RegisterMessage(pref.MessageType) error + RegisterEnum(pref.EnumType) error + RegisterExtension(pref.ExtensionType) error + } +} + +// Out is the output of the builder. +type Out struct { + File pref.FileDescriptor +} + +func (tb Builder) Build() (out Out) { + // Replace the resolver with one that resolves dependencies by index, + // which is faster and more reliable than relying on the global registry. + if tb.File.FileRegistry == nil { + tb.File.FileRegistry = preg.GlobalFiles + } + tb.File.FileRegistry = &resolverByIndex{ + goTypes: tb.GoTypes, + depIdxs: tb.DependencyIndexes, + fileRegistry: tb.File.FileRegistry, + } + + // Initialize registry if unpopulated. + if tb.TypeRegistry == nil { + tb.TypeRegistry = preg.GlobalTypes + } + + fbOut := tb.File.Build() + out.File = fbOut.File + + // Process enums. + enumGoTypes := tb.GoTypes[:len(fbOut.Enums)] + if len(tb.EnumInfos) != len(fbOut.Enums) { + panic("mismatching enum lengths") + } + if len(fbOut.Enums) > 0 { + for i := range fbOut.Enums { + tb.EnumInfos[i] = pimpl.EnumInfo{ + GoReflectType: reflect.TypeOf(enumGoTypes[i]), + Desc: &fbOut.Enums[i], + } + // Register enum types. + if err := tb.TypeRegistry.RegisterEnum(&tb.EnumInfos[i]); err != nil { + panic(err) + } + } + } + + // Process messages. + messageGoTypes := tb.GoTypes[len(fbOut.Enums):][:len(fbOut.Messages)] + if len(tb.MessageInfos) != len(fbOut.Messages) { + panic("mismatching message lengths") + } + if len(fbOut.Messages) > 0 { + for i := range fbOut.Messages { + if messageGoTypes[i] == nil { + continue // skip map entry + } + + tb.MessageInfos[i].GoReflectType = reflect.TypeOf(messageGoTypes[i]) + tb.MessageInfos[i].Desc = &fbOut.Messages[i] + + // Register message types. + if err := tb.TypeRegistry.RegisterMessage(&tb.MessageInfos[i]); err != nil { + panic(err) + } + } + + // As a special-case for descriptor.proto, + // locally register concrete message type for the options. + if out.File.Path() == "google/protobuf/descriptor.proto" && out.File.Package() == "google.protobuf" { + for i := range fbOut.Messages { + switch fbOut.Messages[i].Name() { + case "FileOptions": + descopts.File = messageGoTypes[i].(pref.ProtoMessage) + case "EnumOptions": + descopts.Enum = messageGoTypes[i].(pref.ProtoMessage) + case "EnumValueOptions": + descopts.EnumValue = messageGoTypes[i].(pref.ProtoMessage) + case "MessageOptions": + descopts.Message = messageGoTypes[i].(pref.ProtoMessage) + case "FieldOptions": + descopts.Field = messageGoTypes[i].(pref.ProtoMessage) + case "OneofOptions": + descopts.Oneof = messageGoTypes[i].(pref.ProtoMessage) + case "ExtensionRangeOptions": + descopts.ExtensionRange = messageGoTypes[i].(pref.ProtoMessage) + case "ServiceOptions": + descopts.Service = messageGoTypes[i].(pref.ProtoMessage) + case "MethodOptions": + descopts.Method = messageGoTypes[i].(pref.ProtoMessage) + } + } + } + } + + // Process extensions. + if len(tb.ExtensionInfos) != len(fbOut.Extensions) { + panic("mismatching extension lengths") + } + var depIdx int32 + for i := range fbOut.Extensions { + // For enum and message kinds, determine the referent Go type so + // that we can construct their constructors. 
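The "Flattened Ordering" described in the Builder documentation above can be reproduced from any protoreflect.FileDescriptor: list the direct declarations at each level first, then recurse into nested messages. A sketch limited to message declarations, using struct.proto purely as a convenient public example (flattenedMessages is an illustrative helper, not part of this package):

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/types/known/structpb"
)

// flattenedMessages lists message descriptors in flattened ordering:
// direct declarations first, then a recursive pass into each message's
// nested declarations. Enums, extensions, and services follow the same
// pattern; only messages are shown here for brevity.
func flattenedMessages(fd protoreflect.FileDescriptor) []protoreflect.MessageDescriptor {
	var out []protoreflect.MessageDescriptor
	var visit func(protoreflect.MessageDescriptors)
	visit = func(mds protoreflect.MessageDescriptors) {
		for i := 0; i < mds.Len(); i++ {
			out = append(out, mds.Get(i))
		}
		for i := 0; i < mds.Len(); i++ {
			visit(mds.Get(i).Messages())
		}
	}
	visit(fd.Messages())
	return out
}

func main() {
	// struct.proto declares Struct, Value, and ListValue at the top level plus
	// a nested map-entry message, so the flattened order surfaces the three
	// top-level messages before any nested ones.
	for _, md := range flattenedMessages(structpb.File_google_protobuf_struct_proto) {
		fmt.Println(md.FullName())
	}
}

This ordering is what gives meaning to the indexes stored in GoTypes and DependencyIndexes above.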
+ const listExtDeps = 2 + var goType reflect.Type + switch fbOut.Extensions[i].L1.Kind { + case pref.EnumKind: + j := depIdxs.Get(tb.DependencyIndexes, listExtDeps, depIdx) + goType = reflect.TypeOf(tb.GoTypes[j]) + depIdx++ + case pref.MessageKind, pref.GroupKind: + j := depIdxs.Get(tb.DependencyIndexes, listExtDeps, depIdx) + goType = reflect.TypeOf(tb.GoTypes[j]) + depIdx++ + default: + goType = goTypeForPBKind[fbOut.Extensions[i].L1.Kind] + } + if fbOut.Extensions[i].IsList() { + goType = reflect.SliceOf(goType) + } + + pimpl.InitExtensionInfo(&tb.ExtensionInfos[i], &fbOut.Extensions[i], goType) + + // Register extension types. + if err := tb.TypeRegistry.RegisterExtension(&tb.ExtensionInfos[i]); err != nil { + panic(err) + } + } + + return out +} + +var goTypeForPBKind = map[pref.Kind]reflect.Type{ + pref.BoolKind: reflect.TypeOf(bool(false)), + pref.Int32Kind: reflect.TypeOf(int32(0)), + pref.Sint32Kind: reflect.TypeOf(int32(0)), + pref.Sfixed32Kind: reflect.TypeOf(int32(0)), + pref.Int64Kind: reflect.TypeOf(int64(0)), + pref.Sint64Kind: reflect.TypeOf(int64(0)), + pref.Sfixed64Kind: reflect.TypeOf(int64(0)), + pref.Uint32Kind: reflect.TypeOf(uint32(0)), + pref.Fixed32Kind: reflect.TypeOf(uint32(0)), + pref.Uint64Kind: reflect.TypeOf(uint64(0)), + pref.Fixed64Kind: reflect.TypeOf(uint64(0)), + pref.FloatKind: reflect.TypeOf(float32(0)), + pref.DoubleKind: reflect.TypeOf(float64(0)), + pref.StringKind: reflect.TypeOf(string("")), + pref.BytesKind: reflect.TypeOf([]byte(nil)), +} + +type depIdxs []int32 + +// Get retrieves the jth element of the ith sub-list. +func (x depIdxs) Get(i, j int32) int32 { + return x[x[int32(len(x))-i-1]+j] +} + +type ( + resolverByIndex struct { + goTypes []interface{} + depIdxs depIdxs + fileRegistry + } + fileRegistry interface { + FindFileByPath(string) (pref.FileDescriptor, error) + FindDescriptorByName(pref.FullName) (pref.Descriptor, error) + RegisterFile(pref.FileDescriptor) error + } +) + +func (r *resolverByIndex) FindEnumByIndex(i, j int32, es []fdesc.Enum, ms []fdesc.Message) pref.EnumDescriptor { + if depIdx := int(r.depIdxs.Get(i, j)); int(depIdx) < len(es)+len(ms) { + return &es[depIdx] + } else { + return pimpl.Export{}.EnumDescriptorOf(r.goTypes[depIdx]) + } +} + +func (r *resolverByIndex) FindMessageByIndex(i, j int32, es []fdesc.Enum, ms []fdesc.Message) pref.MessageDescriptor { + if depIdx := int(r.depIdxs.Get(i, j)); depIdx < len(es)+len(ms) { + return &ms[depIdx-len(es)] + } else { + return pimpl.Export{}.MessageDescriptorOf(r.goTypes[depIdx]) + } +} diff --git a/vendor/google.golang.org/protobuf/internal/flags/flags.go b/vendor/google.golang.org/protobuf/internal/flags/flags.go new file mode 100644 index 00000000..58372dd3 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/flags/flags.go @@ -0,0 +1,24 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package flags provides a set of flags controlled by build tags. +package flags + +// ProtoLegacy specifies whether to enable support for legacy functionality +// such as MessageSets, weak fields, and various other obscure behavior +// that is necessary to maintain backwards compatibility with proto1 or +// the pre-release variants of proto2 and proto3. +// +// This is disabled by default unless built with the "protolegacy" tag. +// +// WARNING: The compatibility agreement covers nothing provided by this flag. 
+// As such, functionality may suddenly be removed or changed at our discretion. +const ProtoLegacy = protoLegacy + +// LazyUnmarshalExtensions specifies whether to lazily unmarshal extensions. +// +// Lazy extension unmarshaling validates the contents of message-valued +// extension fields at unmarshal time, but defers creating the message +// structure until the extension is first accessed. +const LazyUnmarshalExtensions = ProtoLegacy diff --git a/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_disable.go b/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_disable.go new file mode 100644 index 00000000..a72995f0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_disable.go @@ -0,0 +1,9 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !protolegacy + +package flags + +const protoLegacy = false diff --git a/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_enable.go b/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_enable.go new file mode 100644 index 00000000..772e2f0e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/flags/proto_legacy_enable.go @@ -0,0 +1,9 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build protolegacy + +package flags + +const protoLegacy = true diff --git a/vendor/google.golang.org/protobuf/internal/genid/any_gen.go b/vendor/google.golang.org/protobuf/internal/genid/any_gen.go new file mode 100644 index 00000000..e6f7d47a --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/any_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_any_proto = "google/protobuf/any.proto" + +// Names for google.protobuf.Any. +const ( + Any_message_name protoreflect.Name = "Any" + Any_message_fullname protoreflect.FullName = "google.protobuf.Any" +) + +// Field names for google.protobuf.Any. +const ( + Any_TypeUrl_field_name protoreflect.Name = "type_url" + Any_Value_field_name protoreflect.Name = "value" + + Any_TypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Any.type_url" + Any_Value_field_fullname protoreflect.FullName = "google.protobuf.Any.value" +) + +// Field numbers for google.protobuf.Any. +const ( + Any_TypeUrl_field_number protoreflect.FieldNumber = 1 + Any_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/api_gen.go b/vendor/google.golang.org/protobuf/internal/genid/api_gen.go new file mode 100644 index 00000000..df8f9185 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/api_gen.go @@ -0,0 +1,106 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_api_proto = "google/protobuf/api.proto" + +// Names for google.protobuf.Api. 
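The generated identifiers in this genid package exist so the reflection machinery can refer to descriptor.proto and well-known-type fields without magic literals, and they line up with what the public API reports at runtime. A quick cross-check of the google.protobuf.Any numbers above using the public anypb package:

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/anypb"
)

func main() {
	md := (&anypb.Any{}).ProtoReflect().Descriptor()
	fmt.Println(md.FullName())                           // google.protobuf.Any
	fmt.Println(md.Fields().ByName("type_url").Number()) // 1
	fmt.Println(md.Fields().ByName("value").Number())    // 2
}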
+const ( + Api_message_name protoreflect.Name = "Api" + Api_message_fullname protoreflect.FullName = "google.protobuf.Api" +) + +// Field names for google.protobuf.Api. +const ( + Api_Name_field_name protoreflect.Name = "name" + Api_Methods_field_name protoreflect.Name = "methods" + Api_Options_field_name protoreflect.Name = "options" + Api_Version_field_name protoreflect.Name = "version" + Api_SourceContext_field_name protoreflect.Name = "source_context" + Api_Mixins_field_name protoreflect.Name = "mixins" + Api_Syntax_field_name protoreflect.Name = "syntax" + + Api_Name_field_fullname protoreflect.FullName = "google.protobuf.Api.name" + Api_Methods_field_fullname protoreflect.FullName = "google.protobuf.Api.methods" + Api_Options_field_fullname protoreflect.FullName = "google.protobuf.Api.options" + Api_Version_field_fullname protoreflect.FullName = "google.protobuf.Api.version" + Api_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Api.source_context" + Api_Mixins_field_fullname protoreflect.FullName = "google.protobuf.Api.mixins" + Api_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Api.syntax" +) + +// Field numbers for google.protobuf.Api. +const ( + Api_Name_field_number protoreflect.FieldNumber = 1 + Api_Methods_field_number protoreflect.FieldNumber = 2 + Api_Options_field_number protoreflect.FieldNumber = 3 + Api_Version_field_number protoreflect.FieldNumber = 4 + Api_SourceContext_field_number protoreflect.FieldNumber = 5 + Api_Mixins_field_number protoreflect.FieldNumber = 6 + Api_Syntax_field_number protoreflect.FieldNumber = 7 +) + +// Names for google.protobuf.Method. +const ( + Method_message_name protoreflect.Name = "Method" + Method_message_fullname protoreflect.FullName = "google.protobuf.Method" +) + +// Field names for google.protobuf.Method. +const ( + Method_Name_field_name protoreflect.Name = "name" + Method_RequestTypeUrl_field_name protoreflect.Name = "request_type_url" + Method_RequestStreaming_field_name protoreflect.Name = "request_streaming" + Method_ResponseTypeUrl_field_name protoreflect.Name = "response_type_url" + Method_ResponseStreaming_field_name protoreflect.Name = "response_streaming" + Method_Options_field_name protoreflect.Name = "options" + Method_Syntax_field_name protoreflect.Name = "syntax" + + Method_Name_field_fullname protoreflect.FullName = "google.protobuf.Method.name" + Method_RequestTypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Method.request_type_url" + Method_RequestStreaming_field_fullname protoreflect.FullName = "google.protobuf.Method.request_streaming" + Method_ResponseTypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Method.response_type_url" + Method_ResponseStreaming_field_fullname protoreflect.FullName = "google.protobuf.Method.response_streaming" + Method_Options_field_fullname protoreflect.FullName = "google.protobuf.Method.options" + Method_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Method.syntax" +) + +// Field numbers for google.protobuf.Method. +const ( + Method_Name_field_number protoreflect.FieldNumber = 1 + Method_RequestTypeUrl_field_number protoreflect.FieldNumber = 2 + Method_RequestStreaming_field_number protoreflect.FieldNumber = 3 + Method_ResponseTypeUrl_field_number protoreflect.FieldNumber = 4 + Method_ResponseStreaming_field_number protoreflect.FieldNumber = 5 + Method_Options_field_number protoreflect.FieldNumber = 6 + Method_Syntax_field_number protoreflect.FieldNumber = 7 +) + +// Names for google.protobuf.Mixin. 
+const ( + Mixin_message_name protoreflect.Name = "Mixin" + Mixin_message_fullname protoreflect.FullName = "google.protobuf.Mixin" +) + +// Field names for google.protobuf.Mixin. +const ( + Mixin_Name_field_name protoreflect.Name = "name" + Mixin_Root_field_name protoreflect.Name = "root" + + Mixin_Name_field_fullname protoreflect.FullName = "google.protobuf.Mixin.name" + Mixin_Root_field_fullname protoreflect.FullName = "google.protobuf.Mixin.root" +) + +// Field numbers for google.protobuf.Mixin. +const ( + Mixin_Name_field_number protoreflect.FieldNumber = 1 + Mixin_Root_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go b/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go new file mode 100644 index 00000000..e3cdf1c2 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/descriptor_gen.go @@ -0,0 +1,829 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_descriptor_proto = "google/protobuf/descriptor.proto" + +// Names for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_message_name protoreflect.Name = "FileDescriptorSet" + FileDescriptorSet_message_fullname protoreflect.FullName = "google.protobuf.FileDescriptorSet" +) + +// Field names for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_File_field_name protoreflect.Name = "file" + + FileDescriptorSet_File_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorSet.file" +) + +// Field numbers for google.protobuf.FileDescriptorSet. +const ( + FileDescriptorSet_File_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.FileDescriptorProto. +const ( + FileDescriptorProto_message_name protoreflect.Name = "FileDescriptorProto" + FileDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto" +) + +// Field names for google.protobuf.FileDescriptorProto. 
+const ( + FileDescriptorProto_Name_field_name protoreflect.Name = "name" + FileDescriptorProto_Package_field_name protoreflect.Name = "package" + FileDescriptorProto_Dependency_field_name protoreflect.Name = "dependency" + FileDescriptorProto_PublicDependency_field_name protoreflect.Name = "public_dependency" + FileDescriptorProto_WeakDependency_field_name protoreflect.Name = "weak_dependency" + FileDescriptorProto_MessageType_field_name protoreflect.Name = "message_type" + FileDescriptorProto_EnumType_field_name protoreflect.Name = "enum_type" + FileDescriptorProto_Service_field_name protoreflect.Name = "service" + FileDescriptorProto_Extension_field_name protoreflect.Name = "extension" + FileDescriptorProto_Options_field_name protoreflect.Name = "options" + FileDescriptorProto_SourceCodeInfo_field_name protoreflect.Name = "source_code_info" + FileDescriptorProto_Syntax_field_name protoreflect.Name = "syntax" + + FileDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.name" + FileDescriptorProto_Package_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.package" + FileDescriptorProto_Dependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.dependency" + FileDescriptorProto_PublicDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.public_dependency" + FileDescriptorProto_WeakDependency_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.weak_dependency" + FileDescriptorProto_MessageType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.message_type" + FileDescriptorProto_EnumType_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.enum_type" + FileDescriptorProto_Service_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.service" + FileDescriptorProto_Extension_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.extension" + FileDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.options" + FileDescriptorProto_SourceCodeInfo_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.source_code_info" + FileDescriptorProto_Syntax_field_fullname protoreflect.FullName = "google.protobuf.FileDescriptorProto.syntax" +) + +// Field numbers for google.protobuf.FileDescriptorProto. +const ( + FileDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + FileDescriptorProto_Package_field_number protoreflect.FieldNumber = 2 + FileDescriptorProto_Dependency_field_number protoreflect.FieldNumber = 3 + FileDescriptorProto_PublicDependency_field_number protoreflect.FieldNumber = 10 + FileDescriptorProto_WeakDependency_field_number protoreflect.FieldNumber = 11 + FileDescriptorProto_MessageType_field_number protoreflect.FieldNumber = 4 + FileDescriptorProto_EnumType_field_number protoreflect.FieldNumber = 5 + FileDescriptorProto_Service_field_number protoreflect.FieldNumber = 6 + FileDescriptorProto_Extension_field_number protoreflect.FieldNumber = 7 + FileDescriptorProto_Options_field_number protoreflect.FieldNumber = 8 + FileDescriptorProto_SourceCodeInfo_field_number protoreflect.FieldNumber = 9 + FileDescriptorProto_Syntax_field_number protoreflect.FieldNumber = 12 +) + +// Names for google.protobuf.DescriptorProto. 
+const ( + DescriptorProto_message_name protoreflect.Name = "DescriptorProto" + DescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto" +) + +// Field names for google.protobuf.DescriptorProto. +const ( + DescriptorProto_Name_field_name protoreflect.Name = "name" + DescriptorProto_Field_field_name protoreflect.Name = "field" + DescriptorProto_Extension_field_name protoreflect.Name = "extension" + DescriptorProto_NestedType_field_name protoreflect.Name = "nested_type" + DescriptorProto_EnumType_field_name protoreflect.Name = "enum_type" + DescriptorProto_ExtensionRange_field_name protoreflect.Name = "extension_range" + DescriptorProto_OneofDecl_field_name protoreflect.Name = "oneof_decl" + DescriptorProto_Options_field_name protoreflect.Name = "options" + DescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" + DescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + + DescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.name" + DescriptorProto_Field_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.field" + DescriptorProto_Extension_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.extension" + DescriptorProto_NestedType_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.nested_type" + DescriptorProto_EnumType_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.enum_type" + DescriptorProto_ExtensionRange_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.extension_range" + DescriptorProto_OneofDecl_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.oneof_decl" + DescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.options" + DescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_range" + DescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.reserved_name" +) + +// Field numbers for google.protobuf.DescriptorProto. +const ( + DescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + DescriptorProto_Field_field_number protoreflect.FieldNumber = 2 + DescriptorProto_Extension_field_number protoreflect.FieldNumber = 6 + DescriptorProto_NestedType_field_number protoreflect.FieldNumber = 3 + DescriptorProto_EnumType_field_number protoreflect.FieldNumber = 4 + DescriptorProto_ExtensionRange_field_number protoreflect.FieldNumber = 5 + DescriptorProto_OneofDecl_field_number protoreflect.FieldNumber = 8 + DescriptorProto_Options_field_number protoreflect.FieldNumber = 7 + DescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 9 + DescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 10 +) + +// Names for google.protobuf.DescriptorProto.ExtensionRange. +const ( + DescriptorProto_ExtensionRange_message_name protoreflect.Name = "ExtensionRange" + DescriptorProto_ExtensionRange_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange" +) + +// Field names for google.protobuf.DescriptorProto.ExtensionRange. 
+const ( + DescriptorProto_ExtensionRange_Start_field_name protoreflect.Name = "start" + DescriptorProto_ExtensionRange_End_field_name protoreflect.Name = "end" + DescriptorProto_ExtensionRange_Options_field_name protoreflect.Name = "options" + + DescriptorProto_ExtensionRange_Start_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.start" + DescriptorProto_ExtensionRange_End_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.end" + DescriptorProto_ExtensionRange_Options_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ExtensionRange.options" +) + +// Field numbers for google.protobuf.DescriptorProto.ExtensionRange. +const ( + DescriptorProto_ExtensionRange_Start_field_number protoreflect.FieldNumber = 1 + DescriptorProto_ExtensionRange_End_field_number protoreflect.FieldNumber = 2 + DescriptorProto_ExtensionRange_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_message_name protoreflect.Name = "ReservedRange" + DescriptorProto_ReservedRange_message_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange" +) + +// Field names for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_Start_field_name protoreflect.Name = "start" + DescriptorProto_ReservedRange_End_field_name protoreflect.Name = "end" + + DescriptorProto_ReservedRange_Start_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange.start" + DescriptorProto_ReservedRange_End_field_fullname protoreflect.FullName = "google.protobuf.DescriptorProto.ReservedRange.end" +) + +// Field numbers for google.protobuf.DescriptorProto.ReservedRange. +const ( + DescriptorProto_ReservedRange_Start_field_number protoreflect.FieldNumber = 1 + DescriptorProto_ReservedRange_End_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.ExtensionRangeOptions. +const ( + ExtensionRangeOptions_message_name protoreflect.Name = "ExtensionRangeOptions" + ExtensionRangeOptions_message_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions" +) + +// Field names for google.protobuf.ExtensionRangeOptions. +const ( + ExtensionRangeOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + ExtensionRangeOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ExtensionRangeOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.ExtensionRangeOptions. +const ( + ExtensionRangeOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.FieldDescriptorProto. +const ( + FieldDescriptorProto_message_name protoreflect.Name = "FieldDescriptorProto" + FieldDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto" +) + +// Field names for google.protobuf.FieldDescriptorProto. 
+const ( + FieldDescriptorProto_Name_field_name protoreflect.Name = "name" + FieldDescriptorProto_Number_field_name protoreflect.Name = "number" + FieldDescriptorProto_Label_field_name protoreflect.Name = "label" + FieldDescriptorProto_Type_field_name protoreflect.Name = "type" + FieldDescriptorProto_TypeName_field_name protoreflect.Name = "type_name" + FieldDescriptorProto_Extendee_field_name protoreflect.Name = "extendee" + FieldDescriptorProto_DefaultValue_field_name protoreflect.Name = "default_value" + FieldDescriptorProto_OneofIndex_field_name protoreflect.Name = "oneof_index" + FieldDescriptorProto_JsonName_field_name protoreflect.Name = "json_name" + FieldDescriptorProto_Options_field_name protoreflect.Name = "options" + FieldDescriptorProto_Proto3Optional_field_name protoreflect.Name = "proto3_optional" + + FieldDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.name" + FieldDescriptorProto_Number_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.number" + FieldDescriptorProto_Label_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.label" + FieldDescriptorProto_Type_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.type" + FieldDescriptorProto_TypeName_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.type_name" + FieldDescriptorProto_Extendee_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.extendee" + FieldDescriptorProto_DefaultValue_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.default_value" + FieldDescriptorProto_OneofIndex_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.oneof_index" + FieldDescriptorProto_JsonName_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.json_name" + FieldDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.options" + FieldDescriptorProto_Proto3Optional_field_fullname protoreflect.FullName = "google.protobuf.FieldDescriptorProto.proto3_optional" +) + +// Field numbers for google.protobuf.FieldDescriptorProto. +const ( + FieldDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + FieldDescriptorProto_Number_field_number protoreflect.FieldNumber = 3 + FieldDescriptorProto_Label_field_number protoreflect.FieldNumber = 4 + FieldDescriptorProto_Type_field_number protoreflect.FieldNumber = 5 + FieldDescriptorProto_TypeName_field_number protoreflect.FieldNumber = 6 + FieldDescriptorProto_Extendee_field_number protoreflect.FieldNumber = 2 + FieldDescriptorProto_DefaultValue_field_number protoreflect.FieldNumber = 7 + FieldDescriptorProto_OneofIndex_field_number protoreflect.FieldNumber = 9 + FieldDescriptorProto_JsonName_field_number protoreflect.FieldNumber = 10 + FieldDescriptorProto_Options_field_number protoreflect.FieldNumber = 8 + FieldDescriptorProto_Proto3Optional_field_number protoreflect.FieldNumber = 17 +) + +// Full and short names for google.protobuf.FieldDescriptorProto.Type. +const ( + FieldDescriptorProto_Type_enum_fullname = "google.protobuf.FieldDescriptorProto.Type" + FieldDescriptorProto_Type_enum_name = "Type" +) + +// Full and short names for google.protobuf.FieldDescriptorProto.Label. +const ( + FieldDescriptorProto_Label_enum_fullname = "google.protobuf.FieldDescriptorProto.Label" + FieldDescriptorProto_Label_enum_name = "Label" +) + +// Names for google.protobuf.OneofDescriptorProto. 
+const ( + OneofDescriptorProto_message_name protoreflect.Name = "OneofDescriptorProto" + OneofDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto" +) + +// Field names for google.protobuf.OneofDescriptorProto. +const ( + OneofDescriptorProto_Name_field_name protoreflect.Name = "name" + OneofDescriptorProto_Options_field_name protoreflect.Name = "options" + + OneofDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto.name" + OneofDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.OneofDescriptorProto.options" +) + +// Field numbers for google.protobuf.OneofDescriptorProto. +const ( + OneofDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + OneofDescriptorProto_Options_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.EnumDescriptorProto. +const ( + EnumDescriptorProto_message_name protoreflect.Name = "EnumDescriptorProto" + EnumDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto" +) + +// Field names for google.protobuf.EnumDescriptorProto. +const ( + EnumDescriptorProto_Name_field_name protoreflect.Name = "name" + EnumDescriptorProto_Value_field_name protoreflect.Name = "value" + EnumDescriptorProto_Options_field_name protoreflect.Name = "options" + EnumDescriptorProto_ReservedRange_field_name protoreflect.Name = "reserved_range" + EnumDescriptorProto_ReservedName_field_name protoreflect.Name = "reserved_name" + + EnumDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.name" + EnumDescriptorProto_Value_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.value" + EnumDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.options" + EnumDescriptorProto_ReservedRange_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_range" + EnumDescriptorProto_ReservedName_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.reserved_name" +) + +// Field numbers for google.protobuf.EnumDescriptorProto. +const ( + EnumDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + EnumDescriptorProto_Value_field_number protoreflect.FieldNumber = 2 + EnumDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 + EnumDescriptorProto_ReservedRange_field_number protoreflect.FieldNumber = 4 + EnumDescriptorProto_ReservedName_field_number protoreflect.FieldNumber = 5 +) + +// Names for google.protobuf.EnumDescriptorProto.EnumReservedRange. +const ( + EnumDescriptorProto_EnumReservedRange_message_name protoreflect.Name = "EnumReservedRange" + EnumDescriptorProto_EnumReservedRange_message_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange" +) + +// Field names for google.protobuf.EnumDescriptorProto.EnumReservedRange. +const ( + EnumDescriptorProto_EnumReservedRange_Start_field_name protoreflect.Name = "start" + EnumDescriptorProto_EnumReservedRange_End_field_name protoreflect.Name = "end" + + EnumDescriptorProto_EnumReservedRange_Start_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange.start" + EnumDescriptorProto_EnumReservedRange_End_field_fullname protoreflect.FullName = "google.protobuf.EnumDescriptorProto.EnumReservedRange.end" +) + +// Field numbers for google.protobuf.EnumDescriptorProto.EnumReservedRange. 
+const ( + EnumDescriptorProto_EnumReservedRange_Start_field_number protoreflect.FieldNumber = 1 + EnumDescriptorProto_EnumReservedRange_End_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_message_name protoreflect.Name = "EnumValueDescriptorProto" + EnumValueDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto" +) + +// Field names for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_Name_field_name protoreflect.Name = "name" + EnumValueDescriptorProto_Number_field_name protoreflect.Name = "number" + EnumValueDescriptorProto_Options_field_name protoreflect.Name = "options" + + EnumValueDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.name" + EnumValueDescriptorProto_Number_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.number" + EnumValueDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumValueDescriptorProto.options" +) + +// Field numbers for google.protobuf.EnumValueDescriptorProto. +const ( + EnumValueDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + EnumValueDescriptorProto_Number_field_number protoreflect.FieldNumber = 2 + EnumValueDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.ServiceDescriptorProto. +const ( + ServiceDescriptorProto_message_name protoreflect.Name = "ServiceDescriptorProto" + ServiceDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto" +) + +// Field names for google.protobuf.ServiceDescriptorProto. +const ( + ServiceDescriptorProto_Name_field_name protoreflect.Name = "name" + ServiceDescriptorProto_Method_field_name protoreflect.Name = "method" + ServiceDescriptorProto_Options_field_name protoreflect.Name = "options" + + ServiceDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.name" + ServiceDescriptorProto_Method_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.method" + ServiceDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.ServiceDescriptorProto.options" +) + +// Field numbers for google.protobuf.ServiceDescriptorProto. +const ( + ServiceDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + ServiceDescriptorProto_Method_field_number protoreflect.FieldNumber = 2 + ServiceDescriptorProto_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.MethodDescriptorProto. +const ( + MethodDescriptorProto_message_name protoreflect.Name = "MethodDescriptorProto" + MethodDescriptorProto_message_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto" +) + +// Field names for google.protobuf.MethodDescriptorProto. 
+const ( + MethodDescriptorProto_Name_field_name protoreflect.Name = "name" + MethodDescriptorProto_InputType_field_name protoreflect.Name = "input_type" + MethodDescriptorProto_OutputType_field_name protoreflect.Name = "output_type" + MethodDescriptorProto_Options_field_name protoreflect.Name = "options" + MethodDescriptorProto_ClientStreaming_field_name protoreflect.Name = "client_streaming" + MethodDescriptorProto_ServerStreaming_field_name protoreflect.Name = "server_streaming" + + MethodDescriptorProto_Name_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.name" + MethodDescriptorProto_InputType_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.input_type" + MethodDescriptorProto_OutputType_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.output_type" + MethodDescriptorProto_Options_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.options" + MethodDescriptorProto_ClientStreaming_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.client_streaming" + MethodDescriptorProto_ServerStreaming_field_fullname protoreflect.FullName = "google.protobuf.MethodDescriptorProto.server_streaming" +) + +// Field numbers for google.protobuf.MethodDescriptorProto. +const ( + MethodDescriptorProto_Name_field_number protoreflect.FieldNumber = 1 + MethodDescriptorProto_InputType_field_number protoreflect.FieldNumber = 2 + MethodDescriptorProto_OutputType_field_number protoreflect.FieldNumber = 3 + MethodDescriptorProto_Options_field_number protoreflect.FieldNumber = 4 + MethodDescriptorProto_ClientStreaming_field_number protoreflect.FieldNumber = 5 + MethodDescriptorProto_ServerStreaming_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.FileOptions. +const ( + FileOptions_message_name protoreflect.Name = "FileOptions" + FileOptions_message_fullname protoreflect.FullName = "google.protobuf.FileOptions" +) + +// Field names for google.protobuf.FileOptions. 
+const ( + FileOptions_JavaPackage_field_name protoreflect.Name = "java_package" + FileOptions_JavaOuterClassname_field_name protoreflect.Name = "java_outer_classname" + FileOptions_JavaMultipleFiles_field_name protoreflect.Name = "java_multiple_files" + FileOptions_JavaGenerateEqualsAndHash_field_name protoreflect.Name = "java_generate_equals_and_hash" + FileOptions_JavaStringCheckUtf8_field_name protoreflect.Name = "java_string_check_utf8" + FileOptions_OptimizeFor_field_name protoreflect.Name = "optimize_for" + FileOptions_GoPackage_field_name protoreflect.Name = "go_package" + FileOptions_CcGenericServices_field_name protoreflect.Name = "cc_generic_services" + FileOptions_JavaGenericServices_field_name protoreflect.Name = "java_generic_services" + FileOptions_PyGenericServices_field_name protoreflect.Name = "py_generic_services" + FileOptions_PhpGenericServices_field_name protoreflect.Name = "php_generic_services" + FileOptions_Deprecated_field_name protoreflect.Name = "deprecated" + FileOptions_CcEnableArenas_field_name protoreflect.Name = "cc_enable_arenas" + FileOptions_ObjcClassPrefix_field_name protoreflect.Name = "objc_class_prefix" + FileOptions_CsharpNamespace_field_name protoreflect.Name = "csharp_namespace" + FileOptions_SwiftPrefix_field_name protoreflect.Name = "swift_prefix" + FileOptions_PhpClassPrefix_field_name protoreflect.Name = "php_class_prefix" + FileOptions_PhpNamespace_field_name protoreflect.Name = "php_namespace" + FileOptions_PhpMetadataNamespace_field_name protoreflect.Name = "php_metadata_namespace" + FileOptions_RubyPackage_field_name protoreflect.Name = "ruby_package" + FileOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + FileOptions_JavaPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_package" + FileOptions_JavaOuterClassname_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_outer_classname" + FileOptions_JavaMultipleFiles_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_multiple_files" + FileOptions_JavaGenerateEqualsAndHash_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_generate_equals_and_hash" + FileOptions_JavaStringCheckUtf8_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_string_check_utf8" + FileOptions_OptimizeFor_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.optimize_for" + FileOptions_GoPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.go_package" + FileOptions_CcGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_generic_services" + FileOptions_JavaGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_generic_services" + FileOptions_PyGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.py_generic_services" + FileOptions_PhpGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_generic_services" + FileOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.deprecated" + FileOptions_CcEnableArenas_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_enable_arenas" + FileOptions_ObjcClassPrefix_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.objc_class_prefix" + FileOptions_CsharpNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.csharp_namespace" + FileOptions_SwiftPrefix_field_fullname protoreflect.FullName 
= "google.protobuf.FileOptions.swift_prefix" + FileOptions_PhpClassPrefix_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_class_prefix" + FileOptions_PhpNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_namespace" + FileOptions_PhpMetadataNamespace_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_metadata_namespace" + FileOptions_RubyPackage_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.ruby_package" + FileOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.FileOptions. +const ( + FileOptions_JavaPackage_field_number protoreflect.FieldNumber = 1 + FileOptions_JavaOuterClassname_field_number protoreflect.FieldNumber = 8 + FileOptions_JavaMultipleFiles_field_number protoreflect.FieldNumber = 10 + FileOptions_JavaGenerateEqualsAndHash_field_number protoreflect.FieldNumber = 20 + FileOptions_JavaStringCheckUtf8_field_number protoreflect.FieldNumber = 27 + FileOptions_OptimizeFor_field_number protoreflect.FieldNumber = 9 + FileOptions_GoPackage_field_number protoreflect.FieldNumber = 11 + FileOptions_CcGenericServices_field_number protoreflect.FieldNumber = 16 + FileOptions_JavaGenericServices_field_number protoreflect.FieldNumber = 17 + FileOptions_PyGenericServices_field_number protoreflect.FieldNumber = 18 + FileOptions_PhpGenericServices_field_number protoreflect.FieldNumber = 42 + FileOptions_Deprecated_field_number protoreflect.FieldNumber = 23 + FileOptions_CcEnableArenas_field_number protoreflect.FieldNumber = 31 + FileOptions_ObjcClassPrefix_field_number protoreflect.FieldNumber = 36 + FileOptions_CsharpNamespace_field_number protoreflect.FieldNumber = 37 + FileOptions_SwiftPrefix_field_number protoreflect.FieldNumber = 39 + FileOptions_PhpClassPrefix_field_number protoreflect.FieldNumber = 40 + FileOptions_PhpNamespace_field_number protoreflect.FieldNumber = 41 + FileOptions_PhpMetadataNamespace_field_number protoreflect.FieldNumber = 44 + FileOptions_RubyPackage_field_number protoreflect.FieldNumber = 45 + FileOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.FileOptions.OptimizeMode. +const ( + FileOptions_OptimizeMode_enum_fullname = "google.protobuf.FileOptions.OptimizeMode" + FileOptions_OptimizeMode_enum_name = "OptimizeMode" +) + +// Names for google.protobuf.MessageOptions. +const ( + MessageOptions_message_name protoreflect.Name = "MessageOptions" + MessageOptions_message_fullname protoreflect.FullName = "google.protobuf.MessageOptions" +) + +// Field names for google.protobuf.MessageOptions. 
+const ( + MessageOptions_MessageSetWireFormat_field_name protoreflect.Name = "message_set_wire_format" + MessageOptions_NoStandardDescriptorAccessor_field_name protoreflect.Name = "no_standard_descriptor_accessor" + MessageOptions_Deprecated_field_name protoreflect.Name = "deprecated" + MessageOptions_MapEntry_field_name protoreflect.Name = "map_entry" + MessageOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + MessageOptions_MessageSetWireFormat_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.message_set_wire_format" + MessageOptions_NoStandardDescriptorAccessor_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.no_standard_descriptor_accessor" + MessageOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.deprecated" + MessageOptions_MapEntry_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.map_entry" + MessageOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MessageOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.MessageOptions. +const ( + MessageOptions_MessageSetWireFormat_field_number protoreflect.FieldNumber = 1 + MessageOptions_NoStandardDescriptorAccessor_field_number protoreflect.FieldNumber = 2 + MessageOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + MessageOptions_MapEntry_field_number protoreflect.FieldNumber = 7 + MessageOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.FieldOptions. +const ( + FieldOptions_message_name protoreflect.Name = "FieldOptions" + FieldOptions_message_fullname protoreflect.FullName = "google.protobuf.FieldOptions" +) + +// Field names for google.protobuf.FieldOptions. +const ( + FieldOptions_Ctype_field_name protoreflect.Name = "ctype" + FieldOptions_Packed_field_name protoreflect.Name = "packed" + FieldOptions_Jstype_field_name protoreflect.Name = "jstype" + FieldOptions_Lazy_field_name protoreflect.Name = "lazy" + FieldOptions_Deprecated_field_name protoreflect.Name = "deprecated" + FieldOptions_Weak_field_name protoreflect.Name = "weak" + FieldOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + FieldOptions_Ctype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.ctype" + FieldOptions_Packed_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.packed" + FieldOptions_Jstype_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.jstype" + FieldOptions_Lazy_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.lazy" + FieldOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.deprecated" + FieldOptions_Weak_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.weak" + FieldOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.FieldOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.FieldOptions. 
+const ( + FieldOptions_Ctype_field_number protoreflect.FieldNumber = 1 + FieldOptions_Packed_field_number protoreflect.FieldNumber = 2 + FieldOptions_Jstype_field_number protoreflect.FieldNumber = 6 + FieldOptions_Lazy_field_number protoreflect.FieldNumber = 5 + FieldOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + FieldOptions_Weak_field_number protoreflect.FieldNumber = 10 + FieldOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.FieldOptions.CType. +const ( + FieldOptions_CType_enum_fullname = "google.protobuf.FieldOptions.CType" + FieldOptions_CType_enum_name = "CType" +) + +// Full and short names for google.protobuf.FieldOptions.JSType. +const ( + FieldOptions_JSType_enum_fullname = "google.protobuf.FieldOptions.JSType" + FieldOptions_JSType_enum_name = "JSType" +) + +// Names for google.protobuf.OneofOptions. +const ( + OneofOptions_message_name protoreflect.Name = "OneofOptions" + OneofOptions_message_fullname protoreflect.FullName = "google.protobuf.OneofOptions" +) + +// Field names for google.protobuf.OneofOptions. +const ( + OneofOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + OneofOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.OneofOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.OneofOptions. +const ( + OneofOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.EnumOptions. +const ( + EnumOptions_message_name protoreflect.Name = "EnumOptions" + EnumOptions_message_fullname protoreflect.FullName = "google.protobuf.EnumOptions" +) + +// Field names for google.protobuf.EnumOptions. +const ( + EnumOptions_AllowAlias_field_name protoreflect.Name = "allow_alias" + EnumOptions_Deprecated_field_name protoreflect.Name = "deprecated" + EnumOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + EnumOptions_AllowAlias_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.allow_alias" + EnumOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.deprecated" + EnumOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.EnumOptions. +const ( + EnumOptions_AllowAlias_field_number protoreflect.FieldNumber = 2 + EnumOptions_Deprecated_field_number protoreflect.FieldNumber = 3 + EnumOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.EnumValueOptions. +const ( + EnumValueOptions_message_name protoreflect.Name = "EnumValueOptions" + EnumValueOptions_message_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions" +) + +// Field names for google.protobuf.EnumValueOptions. +const ( + EnumValueOptions_Deprecated_field_name protoreflect.Name = "deprecated" + EnumValueOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + EnumValueOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.deprecated" + EnumValueOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.EnumValueOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.EnumValueOptions. 
+const ( + EnumValueOptions_Deprecated_field_number protoreflect.FieldNumber = 1 + EnumValueOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.ServiceOptions. +const ( + ServiceOptions_message_name protoreflect.Name = "ServiceOptions" + ServiceOptions_message_fullname protoreflect.FullName = "google.protobuf.ServiceOptions" +) + +// Field names for google.protobuf.ServiceOptions. +const ( + ServiceOptions_Deprecated_field_name protoreflect.Name = "deprecated" + ServiceOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + ServiceOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.deprecated" + ServiceOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.ServiceOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.ServiceOptions. +const ( + ServiceOptions_Deprecated_field_number protoreflect.FieldNumber = 33 + ServiceOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Names for google.protobuf.MethodOptions. +const ( + MethodOptions_message_name protoreflect.Name = "MethodOptions" + MethodOptions_message_fullname protoreflect.FullName = "google.protobuf.MethodOptions" +) + +// Field names for google.protobuf.MethodOptions. +const ( + MethodOptions_Deprecated_field_name protoreflect.Name = "deprecated" + MethodOptions_IdempotencyLevel_field_name protoreflect.Name = "idempotency_level" + MethodOptions_UninterpretedOption_field_name protoreflect.Name = "uninterpreted_option" + + MethodOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.deprecated" + MethodOptions_IdempotencyLevel_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.idempotency_level" + MethodOptions_UninterpretedOption_field_fullname protoreflect.FullName = "google.protobuf.MethodOptions.uninterpreted_option" +) + +// Field numbers for google.protobuf.MethodOptions. +const ( + MethodOptions_Deprecated_field_number protoreflect.FieldNumber = 33 + MethodOptions_IdempotencyLevel_field_number protoreflect.FieldNumber = 34 + MethodOptions_UninterpretedOption_field_number protoreflect.FieldNumber = 999 +) + +// Full and short names for google.protobuf.MethodOptions.IdempotencyLevel. +const ( + MethodOptions_IdempotencyLevel_enum_fullname = "google.protobuf.MethodOptions.IdempotencyLevel" + MethodOptions_IdempotencyLevel_enum_name = "IdempotencyLevel" +) + +// Names for google.protobuf.UninterpretedOption. +const ( + UninterpretedOption_message_name protoreflect.Name = "UninterpretedOption" + UninterpretedOption_message_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption" +) + +// Field names for google.protobuf.UninterpretedOption. 
+const ( + UninterpretedOption_Name_field_name protoreflect.Name = "name" + UninterpretedOption_IdentifierValue_field_name protoreflect.Name = "identifier_value" + UninterpretedOption_PositiveIntValue_field_name protoreflect.Name = "positive_int_value" + UninterpretedOption_NegativeIntValue_field_name protoreflect.Name = "negative_int_value" + UninterpretedOption_DoubleValue_field_name protoreflect.Name = "double_value" + UninterpretedOption_StringValue_field_name protoreflect.Name = "string_value" + UninterpretedOption_AggregateValue_field_name protoreflect.Name = "aggregate_value" + + UninterpretedOption_Name_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.name" + UninterpretedOption_IdentifierValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.identifier_value" + UninterpretedOption_PositiveIntValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.positive_int_value" + UninterpretedOption_NegativeIntValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.negative_int_value" + UninterpretedOption_DoubleValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.double_value" + UninterpretedOption_StringValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.string_value" + UninterpretedOption_AggregateValue_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.aggregate_value" +) + +// Field numbers for google.protobuf.UninterpretedOption. +const ( + UninterpretedOption_Name_field_number protoreflect.FieldNumber = 2 + UninterpretedOption_IdentifierValue_field_number protoreflect.FieldNumber = 3 + UninterpretedOption_PositiveIntValue_field_number protoreflect.FieldNumber = 4 + UninterpretedOption_NegativeIntValue_field_number protoreflect.FieldNumber = 5 + UninterpretedOption_DoubleValue_field_number protoreflect.FieldNumber = 6 + UninterpretedOption_StringValue_field_number protoreflect.FieldNumber = 7 + UninterpretedOption_AggregateValue_field_number protoreflect.FieldNumber = 8 +) + +// Names for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_message_name protoreflect.Name = "NamePart" + UninterpretedOption_NamePart_message_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart" +) + +// Field names for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_NamePart_field_name protoreflect.Name = "name_part" + UninterpretedOption_NamePart_IsExtension_field_name protoreflect.Name = "is_extension" + + UninterpretedOption_NamePart_NamePart_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart.name_part" + UninterpretedOption_NamePart_IsExtension_field_fullname protoreflect.FullName = "google.protobuf.UninterpretedOption.NamePart.is_extension" +) + +// Field numbers for google.protobuf.UninterpretedOption.NamePart. +const ( + UninterpretedOption_NamePart_NamePart_field_number protoreflect.FieldNumber = 1 + UninterpretedOption_NamePart_IsExtension_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.SourceCodeInfo. +const ( + SourceCodeInfo_message_name protoreflect.Name = "SourceCodeInfo" + SourceCodeInfo_message_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo" +) + +// Field names for google.protobuf.SourceCodeInfo. 
+const ( + SourceCodeInfo_Location_field_name protoreflect.Name = "location" + + SourceCodeInfo_Location_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.location" +) + +// Field numbers for google.protobuf.SourceCodeInfo. +const ( + SourceCodeInfo_Location_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_message_name protoreflect.Name = "Location" + SourceCodeInfo_Location_message_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location" +) + +// Field names for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_Path_field_name protoreflect.Name = "path" + SourceCodeInfo_Location_Span_field_name protoreflect.Name = "span" + SourceCodeInfo_Location_LeadingComments_field_name protoreflect.Name = "leading_comments" + SourceCodeInfo_Location_TrailingComments_field_name protoreflect.Name = "trailing_comments" + SourceCodeInfo_Location_LeadingDetachedComments_field_name protoreflect.Name = "leading_detached_comments" + + SourceCodeInfo_Location_Path_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.path" + SourceCodeInfo_Location_Span_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.span" + SourceCodeInfo_Location_LeadingComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.leading_comments" + SourceCodeInfo_Location_TrailingComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.trailing_comments" + SourceCodeInfo_Location_LeadingDetachedComments_field_fullname protoreflect.FullName = "google.protobuf.SourceCodeInfo.Location.leading_detached_comments" +) + +// Field numbers for google.protobuf.SourceCodeInfo.Location. +const ( + SourceCodeInfo_Location_Path_field_number protoreflect.FieldNumber = 1 + SourceCodeInfo_Location_Span_field_number protoreflect.FieldNumber = 2 + SourceCodeInfo_Location_LeadingComments_field_number protoreflect.FieldNumber = 3 + SourceCodeInfo_Location_TrailingComments_field_number protoreflect.FieldNumber = 4 + SourceCodeInfo_Location_LeadingDetachedComments_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_message_name protoreflect.Name = "GeneratedCodeInfo" + GeneratedCodeInfo_message_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo" +) + +// Field names for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_Annotation_field_name protoreflect.Name = "annotation" + + GeneratedCodeInfo_Annotation_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.annotation" +) + +// Field numbers for google.protobuf.GeneratedCodeInfo. +const ( + GeneratedCodeInfo_Annotation_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.GeneratedCodeInfo.Annotation. +const ( + GeneratedCodeInfo_Annotation_message_name protoreflect.Name = "Annotation" + GeneratedCodeInfo_Annotation_message_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation" +) + +// Field names for google.protobuf.GeneratedCodeInfo.Annotation. 
+const ( + GeneratedCodeInfo_Annotation_Path_field_name protoreflect.Name = "path" + GeneratedCodeInfo_Annotation_SourceFile_field_name protoreflect.Name = "source_file" + GeneratedCodeInfo_Annotation_Begin_field_name protoreflect.Name = "begin" + GeneratedCodeInfo_Annotation_End_field_name protoreflect.Name = "end" + + GeneratedCodeInfo_Annotation_Path_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.path" + GeneratedCodeInfo_Annotation_SourceFile_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.source_file" + GeneratedCodeInfo_Annotation_Begin_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.begin" + GeneratedCodeInfo_Annotation_End_field_fullname protoreflect.FullName = "google.protobuf.GeneratedCodeInfo.Annotation.end" +) + +// Field numbers for google.protobuf.GeneratedCodeInfo.Annotation. +const ( + GeneratedCodeInfo_Annotation_Path_field_number protoreflect.FieldNumber = 1 + GeneratedCodeInfo_Annotation_SourceFile_field_number protoreflect.FieldNumber = 2 + GeneratedCodeInfo_Annotation_Begin_field_number protoreflect.FieldNumber = 3 + GeneratedCodeInfo_Annotation_End_field_number protoreflect.FieldNumber = 4 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/doc.go b/vendor/google.golang.org/protobuf/internal/genid/doc.go new file mode 100644 index 00000000..45ccd012 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/doc.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package genid contains constants for declarations in descriptor.proto +// and the well-known types. +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +const GoogleProtobuf_package protoreflect.FullName = "google.protobuf" diff --git a/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go b/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go new file mode 100644 index 00000000..b070ef4f --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/duration_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_duration_proto = "google/protobuf/duration.proto" + +// Names for google.protobuf.Duration. +const ( + Duration_message_name protoreflect.Name = "Duration" + Duration_message_fullname protoreflect.FullName = "google.protobuf.Duration" +) + +// Field names for google.protobuf.Duration. +const ( + Duration_Seconds_field_name protoreflect.Name = "seconds" + Duration_Nanos_field_name protoreflect.Name = "nanos" + + Duration_Seconds_field_fullname protoreflect.FullName = "google.protobuf.Duration.seconds" + Duration_Nanos_field_fullname protoreflect.FullName = "google.protobuf.Duration.nanos" +) + +// Field numbers for google.protobuf.Duration. 
+const ( + Duration_Seconds_field_number protoreflect.FieldNumber = 1 + Duration_Nanos_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go b/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go new file mode 100644 index 00000000..762abb34 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/empty_gen.go @@ -0,0 +1,19 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_empty_proto = "google/protobuf/empty.proto" + +// Names for google.protobuf.Empty. +const ( + Empty_message_name protoreflect.Name = "Empty" + Empty_message_fullname protoreflect.FullName = "google.protobuf.Empty" +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go b/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go new file mode 100644 index 00000000..70bed453 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/field_mask_gen.go @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_field_mask_proto = "google/protobuf/field_mask.proto" + +// Names for google.protobuf.FieldMask. +const ( + FieldMask_message_name protoreflect.Name = "FieldMask" + FieldMask_message_fullname protoreflect.FullName = "google.protobuf.FieldMask" +) + +// Field names for google.protobuf.FieldMask. +const ( + FieldMask_Paths_field_name protoreflect.Name = "paths" + + FieldMask_Paths_field_fullname protoreflect.FullName = "google.protobuf.FieldMask.paths" +) + +// Field numbers for google.protobuf.FieldMask. +const ( + FieldMask_Paths_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/goname.go b/vendor/google.golang.org/protobuf/internal/genid/goname.go new file mode 100644 index 00000000..693d2e9e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/goname.go @@ -0,0 +1,25 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +// Go names of implementation-specific struct fields in generated messages. +const ( + State_goname = "state" + + SizeCache_goname = "sizeCache" + SizeCacheA_goname = "XXX_sizecache" + + WeakFields_goname = "weakFields" + WeakFieldsA_goname = "XXX_weak" + + UnknownFields_goname = "unknownFields" + UnknownFieldsA_goname = "XXX_unrecognized" + + ExtensionFields_goname = "extensionFields" + ExtensionFieldsA_goname = "XXX_InternalExtensions" + ExtensionFieldsB_goname = "XXX_extensions" + + WeakFieldPrefix_goname = "XXX_weak_" +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/map_entry.go b/vendor/google.golang.org/protobuf/internal/genid/map_entry.go new file mode 100644 index 00000000..8f9ea02f --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/map_entry.go @@ -0,0 +1,16 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +// Generic field names and numbers for synthetic map entry messages. +const ( + MapEntry_Key_field_name protoreflect.Name = "key" + MapEntry_Value_field_name protoreflect.Name = "value" + + MapEntry_Key_field_number protoreflect.FieldNumber = 1 + MapEntry_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go b/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go new file mode 100644 index 00000000..3e99ae16 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/source_context_gen.go @@ -0,0 +1,31 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_source_context_proto = "google/protobuf/source_context.proto" + +// Names for google.protobuf.SourceContext. +const ( + SourceContext_message_name protoreflect.Name = "SourceContext" + SourceContext_message_fullname protoreflect.FullName = "google.protobuf.SourceContext" +) + +// Field names for google.protobuf.SourceContext. +const ( + SourceContext_FileName_field_name protoreflect.Name = "file_name" + + SourceContext_FileName_field_fullname protoreflect.FullName = "google.protobuf.SourceContext.file_name" +) + +// Field numbers for google.protobuf.SourceContext. +const ( + SourceContext_FileName_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go b/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go new file mode 100644 index 00000000..1a38944b --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/struct_gen.go @@ -0,0 +1,116 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_struct_proto = "google/protobuf/struct.proto" + +// Full and short names for google.protobuf.NullValue. +const ( + NullValue_enum_fullname = "google.protobuf.NullValue" + NullValue_enum_name = "NullValue" +) + +// Names for google.protobuf.Struct. +const ( + Struct_message_name protoreflect.Name = "Struct" + Struct_message_fullname protoreflect.FullName = "google.protobuf.Struct" +) + +// Field names for google.protobuf.Struct. +const ( + Struct_Fields_field_name protoreflect.Name = "fields" + + Struct_Fields_field_fullname protoreflect.FullName = "google.protobuf.Struct.fields" +) + +// Field numbers for google.protobuf.Struct. +const ( + Struct_Fields_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Struct.FieldsEntry. +const ( + Struct_FieldsEntry_message_name protoreflect.Name = "FieldsEntry" + Struct_FieldsEntry_message_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry" +) + +// Field names for google.protobuf.Struct.FieldsEntry. 
+const ( + Struct_FieldsEntry_Key_field_name protoreflect.Name = "key" + Struct_FieldsEntry_Value_field_name protoreflect.Name = "value" + + Struct_FieldsEntry_Key_field_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry.key" + Struct_FieldsEntry_Value_field_fullname protoreflect.FullName = "google.protobuf.Struct.FieldsEntry.value" +) + +// Field numbers for google.protobuf.Struct.FieldsEntry. +const ( + Struct_FieldsEntry_Key_field_number protoreflect.FieldNumber = 1 + Struct_FieldsEntry_Value_field_number protoreflect.FieldNumber = 2 +) + +// Names for google.protobuf.Value. +const ( + Value_message_name protoreflect.Name = "Value" + Value_message_fullname protoreflect.FullName = "google.protobuf.Value" +) + +// Field names for google.protobuf.Value. +const ( + Value_NullValue_field_name protoreflect.Name = "null_value" + Value_NumberValue_field_name protoreflect.Name = "number_value" + Value_StringValue_field_name protoreflect.Name = "string_value" + Value_BoolValue_field_name protoreflect.Name = "bool_value" + Value_StructValue_field_name protoreflect.Name = "struct_value" + Value_ListValue_field_name protoreflect.Name = "list_value" + + Value_NullValue_field_fullname protoreflect.FullName = "google.protobuf.Value.null_value" + Value_NumberValue_field_fullname protoreflect.FullName = "google.protobuf.Value.number_value" + Value_StringValue_field_fullname protoreflect.FullName = "google.protobuf.Value.string_value" + Value_BoolValue_field_fullname protoreflect.FullName = "google.protobuf.Value.bool_value" + Value_StructValue_field_fullname protoreflect.FullName = "google.protobuf.Value.struct_value" + Value_ListValue_field_fullname protoreflect.FullName = "google.protobuf.Value.list_value" +) + +// Field numbers for google.protobuf.Value. +const ( + Value_NullValue_field_number protoreflect.FieldNumber = 1 + Value_NumberValue_field_number protoreflect.FieldNumber = 2 + Value_StringValue_field_number protoreflect.FieldNumber = 3 + Value_BoolValue_field_number protoreflect.FieldNumber = 4 + Value_StructValue_field_number protoreflect.FieldNumber = 5 + Value_ListValue_field_number protoreflect.FieldNumber = 6 +) + +// Oneof names for google.protobuf.Value. +const ( + Value_Kind_oneof_name protoreflect.Name = "kind" + + Value_Kind_oneof_fullname protoreflect.FullName = "google.protobuf.Value.kind" +) + +// Names for google.protobuf.ListValue. +const ( + ListValue_message_name protoreflect.Name = "ListValue" + ListValue_message_fullname protoreflect.FullName = "google.protobuf.ListValue" +) + +// Field names for google.protobuf.ListValue. +const ( + ListValue_Values_field_name protoreflect.Name = "values" + + ListValue_Values_field_fullname protoreflect.FullName = "google.protobuf.ListValue.values" +) + +// Field numbers for google.protobuf.ListValue. +const ( + ListValue_Values_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go b/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go new file mode 100644 index 00000000..f5cd5634 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/timestamp_gen.go @@ -0,0 +1,34 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. 
+ +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_timestamp_proto = "google/protobuf/timestamp.proto" + +// Names for google.protobuf.Timestamp. +const ( + Timestamp_message_name protoreflect.Name = "Timestamp" + Timestamp_message_fullname protoreflect.FullName = "google.protobuf.Timestamp" +) + +// Field names for google.protobuf.Timestamp. +const ( + Timestamp_Seconds_field_name protoreflect.Name = "seconds" + Timestamp_Nanos_field_name protoreflect.Name = "nanos" + + Timestamp_Seconds_field_fullname protoreflect.FullName = "google.protobuf.Timestamp.seconds" + Timestamp_Nanos_field_fullname protoreflect.FullName = "google.protobuf.Timestamp.nanos" +) + +// Field numbers for google.protobuf.Timestamp. +const ( + Timestamp_Seconds_field_number protoreflect.FieldNumber = 1 + Timestamp_Nanos_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/type_gen.go b/vendor/google.golang.org/protobuf/internal/genid/type_gen.go new file mode 100644 index 00000000..3bc71013 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/type_gen.go @@ -0,0 +1,184 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_type_proto = "google/protobuf/type.proto" + +// Full and short names for google.protobuf.Syntax. +const ( + Syntax_enum_fullname = "google.protobuf.Syntax" + Syntax_enum_name = "Syntax" +) + +// Names for google.protobuf.Type. +const ( + Type_message_name protoreflect.Name = "Type" + Type_message_fullname protoreflect.FullName = "google.protobuf.Type" +) + +// Field names for google.protobuf.Type. +const ( + Type_Name_field_name protoreflect.Name = "name" + Type_Fields_field_name protoreflect.Name = "fields" + Type_Oneofs_field_name protoreflect.Name = "oneofs" + Type_Options_field_name protoreflect.Name = "options" + Type_SourceContext_field_name protoreflect.Name = "source_context" + Type_Syntax_field_name protoreflect.Name = "syntax" + + Type_Name_field_fullname protoreflect.FullName = "google.protobuf.Type.name" + Type_Fields_field_fullname protoreflect.FullName = "google.protobuf.Type.fields" + Type_Oneofs_field_fullname protoreflect.FullName = "google.protobuf.Type.oneofs" + Type_Options_field_fullname protoreflect.FullName = "google.protobuf.Type.options" + Type_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Type.source_context" + Type_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Type.syntax" +) + +// Field numbers for google.protobuf.Type. +const ( + Type_Name_field_number protoreflect.FieldNumber = 1 + Type_Fields_field_number protoreflect.FieldNumber = 2 + Type_Oneofs_field_number protoreflect.FieldNumber = 3 + Type_Options_field_number protoreflect.FieldNumber = 4 + Type_SourceContext_field_number protoreflect.FieldNumber = 5 + Type_Syntax_field_number protoreflect.FieldNumber = 6 +) + +// Names for google.protobuf.Field. +const ( + Field_message_name protoreflect.Name = "Field" + Field_message_fullname protoreflect.FullName = "google.protobuf.Field" +) + +// Field names for google.protobuf.Field. 
+const ( + Field_Kind_field_name protoreflect.Name = "kind" + Field_Cardinality_field_name protoreflect.Name = "cardinality" + Field_Number_field_name protoreflect.Name = "number" + Field_Name_field_name protoreflect.Name = "name" + Field_TypeUrl_field_name protoreflect.Name = "type_url" + Field_OneofIndex_field_name protoreflect.Name = "oneof_index" + Field_Packed_field_name protoreflect.Name = "packed" + Field_Options_field_name protoreflect.Name = "options" + Field_JsonName_field_name protoreflect.Name = "json_name" + Field_DefaultValue_field_name protoreflect.Name = "default_value" + + Field_Kind_field_fullname protoreflect.FullName = "google.protobuf.Field.kind" + Field_Cardinality_field_fullname protoreflect.FullName = "google.protobuf.Field.cardinality" + Field_Number_field_fullname protoreflect.FullName = "google.protobuf.Field.number" + Field_Name_field_fullname protoreflect.FullName = "google.protobuf.Field.name" + Field_TypeUrl_field_fullname protoreflect.FullName = "google.protobuf.Field.type_url" + Field_OneofIndex_field_fullname protoreflect.FullName = "google.protobuf.Field.oneof_index" + Field_Packed_field_fullname protoreflect.FullName = "google.protobuf.Field.packed" + Field_Options_field_fullname protoreflect.FullName = "google.protobuf.Field.options" + Field_JsonName_field_fullname protoreflect.FullName = "google.protobuf.Field.json_name" + Field_DefaultValue_field_fullname protoreflect.FullName = "google.protobuf.Field.default_value" +) + +// Field numbers for google.protobuf.Field. +const ( + Field_Kind_field_number protoreflect.FieldNumber = 1 + Field_Cardinality_field_number protoreflect.FieldNumber = 2 + Field_Number_field_number protoreflect.FieldNumber = 3 + Field_Name_field_number protoreflect.FieldNumber = 4 + Field_TypeUrl_field_number protoreflect.FieldNumber = 6 + Field_OneofIndex_field_number protoreflect.FieldNumber = 7 + Field_Packed_field_number protoreflect.FieldNumber = 8 + Field_Options_field_number protoreflect.FieldNumber = 9 + Field_JsonName_field_number protoreflect.FieldNumber = 10 + Field_DefaultValue_field_number protoreflect.FieldNumber = 11 +) + +// Full and short names for google.protobuf.Field.Kind. +const ( + Field_Kind_enum_fullname = "google.protobuf.Field.Kind" + Field_Kind_enum_name = "Kind" +) + +// Full and short names for google.protobuf.Field.Cardinality. +const ( + Field_Cardinality_enum_fullname = "google.protobuf.Field.Cardinality" + Field_Cardinality_enum_name = "Cardinality" +) + +// Names for google.protobuf.Enum. +const ( + Enum_message_name protoreflect.Name = "Enum" + Enum_message_fullname protoreflect.FullName = "google.protobuf.Enum" +) + +// Field names for google.protobuf.Enum. +const ( + Enum_Name_field_name protoreflect.Name = "name" + Enum_Enumvalue_field_name protoreflect.Name = "enumvalue" + Enum_Options_field_name protoreflect.Name = "options" + Enum_SourceContext_field_name protoreflect.Name = "source_context" + Enum_Syntax_field_name protoreflect.Name = "syntax" + + Enum_Name_field_fullname protoreflect.FullName = "google.protobuf.Enum.name" + Enum_Enumvalue_field_fullname protoreflect.FullName = "google.protobuf.Enum.enumvalue" + Enum_Options_field_fullname protoreflect.FullName = "google.protobuf.Enum.options" + Enum_SourceContext_field_fullname protoreflect.FullName = "google.protobuf.Enum.source_context" + Enum_Syntax_field_fullname protoreflect.FullName = "google.protobuf.Enum.syntax" +) + +// Field numbers for google.protobuf.Enum. 
+const ( + Enum_Name_field_number protoreflect.FieldNumber = 1 + Enum_Enumvalue_field_number protoreflect.FieldNumber = 2 + Enum_Options_field_number protoreflect.FieldNumber = 3 + Enum_SourceContext_field_number protoreflect.FieldNumber = 4 + Enum_Syntax_field_number protoreflect.FieldNumber = 5 +) + +// Names for google.protobuf.EnumValue. +const ( + EnumValue_message_name protoreflect.Name = "EnumValue" + EnumValue_message_fullname protoreflect.FullName = "google.protobuf.EnumValue" +) + +// Field names for google.protobuf.EnumValue. +const ( + EnumValue_Name_field_name protoreflect.Name = "name" + EnumValue_Number_field_name protoreflect.Name = "number" + EnumValue_Options_field_name protoreflect.Name = "options" + + EnumValue_Name_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.name" + EnumValue_Number_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.number" + EnumValue_Options_field_fullname protoreflect.FullName = "google.protobuf.EnumValue.options" +) + +// Field numbers for google.protobuf.EnumValue. +const ( + EnumValue_Name_field_number protoreflect.FieldNumber = 1 + EnumValue_Number_field_number protoreflect.FieldNumber = 2 + EnumValue_Options_field_number protoreflect.FieldNumber = 3 +) + +// Names for google.protobuf.Option. +const ( + Option_message_name protoreflect.Name = "Option" + Option_message_fullname protoreflect.FullName = "google.protobuf.Option" +) + +// Field names for google.protobuf.Option. +const ( + Option_Name_field_name protoreflect.Name = "name" + Option_Value_field_name protoreflect.Name = "value" + + Option_Name_field_fullname protoreflect.FullName = "google.protobuf.Option.name" + Option_Value_field_fullname protoreflect.FullName = "google.protobuf.Option.value" +) + +// Field numbers for google.protobuf.Option. +const ( + Option_Name_field_number protoreflect.FieldNumber = 1 + Option_Value_field_number protoreflect.FieldNumber = 2 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/wrappers.go b/vendor/google.golang.org/protobuf/internal/genid/wrappers.go new file mode 100644 index 00000000..429384b8 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/wrappers.go @@ -0,0 +1,13 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package genid + +import protoreflect "google.golang.org/protobuf/reflect/protoreflect" + +// Generic field name and number for messages in wrappers.proto. +const ( + WrapperValue_Value_field_name protoreflect.Name = "value" + WrapperValue_Value_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go b/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go new file mode 100644 index 00000000..72527d2a --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/genid/wrappers_gen.go @@ -0,0 +1,175 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. + +package genid + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" +) + +const File_google_protobuf_wrappers_proto = "google/protobuf/wrappers.proto" + +// Names for google.protobuf.DoubleValue. 
+const ( + DoubleValue_message_name protoreflect.Name = "DoubleValue" + DoubleValue_message_fullname protoreflect.FullName = "google.protobuf.DoubleValue" +) + +// Field names for google.protobuf.DoubleValue. +const ( + DoubleValue_Value_field_name protoreflect.Name = "value" + + DoubleValue_Value_field_fullname protoreflect.FullName = "google.protobuf.DoubleValue.value" +) + +// Field numbers for google.protobuf.DoubleValue. +const ( + DoubleValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.FloatValue. +const ( + FloatValue_message_name protoreflect.Name = "FloatValue" + FloatValue_message_fullname protoreflect.FullName = "google.protobuf.FloatValue" +) + +// Field names for google.protobuf.FloatValue. +const ( + FloatValue_Value_field_name protoreflect.Name = "value" + + FloatValue_Value_field_fullname protoreflect.FullName = "google.protobuf.FloatValue.value" +) + +// Field numbers for google.protobuf.FloatValue. +const ( + FloatValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Int64Value. +const ( + Int64Value_message_name protoreflect.Name = "Int64Value" + Int64Value_message_fullname protoreflect.FullName = "google.protobuf.Int64Value" +) + +// Field names for google.protobuf.Int64Value. +const ( + Int64Value_Value_field_name protoreflect.Name = "value" + + Int64Value_Value_field_fullname protoreflect.FullName = "google.protobuf.Int64Value.value" +) + +// Field numbers for google.protobuf.Int64Value. +const ( + Int64Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.UInt64Value. +const ( + UInt64Value_message_name protoreflect.Name = "UInt64Value" + UInt64Value_message_fullname protoreflect.FullName = "google.protobuf.UInt64Value" +) + +// Field names for google.protobuf.UInt64Value. +const ( + UInt64Value_Value_field_name protoreflect.Name = "value" + + UInt64Value_Value_field_fullname protoreflect.FullName = "google.protobuf.UInt64Value.value" +) + +// Field numbers for google.protobuf.UInt64Value. +const ( + UInt64Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.Int32Value. +const ( + Int32Value_message_name protoreflect.Name = "Int32Value" + Int32Value_message_fullname protoreflect.FullName = "google.protobuf.Int32Value" +) + +// Field names for google.protobuf.Int32Value. +const ( + Int32Value_Value_field_name protoreflect.Name = "value" + + Int32Value_Value_field_fullname protoreflect.FullName = "google.protobuf.Int32Value.value" +) + +// Field numbers for google.protobuf.Int32Value. +const ( + Int32Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.UInt32Value. +const ( + UInt32Value_message_name protoreflect.Name = "UInt32Value" + UInt32Value_message_fullname protoreflect.FullName = "google.protobuf.UInt32Value" +) + +// Field names for google.protobuf.UInt32Value. +const ( + UInt32Value_Value_field_name protoreflect.Name = "value" + + UInt32Value_Value_field_fullname protoreflect.FullName = "google.protobuf.UInt32Value.value" +) + +// Field numbers for google.protobuf.UInt32Value. +const ( + UInt32Value_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.BoolValue. +const ( + BoolValue_message_name protoreflect.Name = "BoolValue" + BoolValue_message_fullname protoreflect.FullName = "google.protobuf.BoolValue" +) + +// Field names for google.protobuf.BoolValue. 
+const ( + BoolValue_Value_field_name protoreflect.Name = "value" + + BoolValue_Value_field_fullname protoreflect.FullName = "google.protobuf.BoolValue.value" +) + +// Field numbers for google.protobuf.BoolValue. +const ( + BoolValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.StringValue. +const ( + StringValue_message_name protoreflect.Name = "StringValue" + StringValue_message_fullname protoreflect.FullName = "google.protobuf.StringValue" +) + +// Field names for google.protobuf.StringValue. +const ( + StringValue_Value_field_name protoreflect.Name = "value" + + StringValue_Value_field_fullname protoreflect.FullName = "google.protobuf.StringValue.value" +) + +// Field numbers for google.protobuf.StringValue. +const ( + StringValue_Value_field_number protoreflect.FieldNumber = 1 +) + +// Names for google.protobuf.BytesValue. +const ( + BytesValue_message_name protoreflect.Name = "BytesValue" + BytesValue_message_fullname protoreflect.FullName = "google.protobuf.BytesValue" +) + +// Field names for google.protobuf.BytesValue. +const ( + BytesValue_Value_field_name protoreflect.Name = "value" + + BytesValue_Value_field_fullname protoreflect.FullName = "google.protobuf.BytesValue.value" +) + +// Field numbers for google.protobuf.BytesValue. +const ( + BytesValue_Value_field_number protoreflect.FieldNumber = 1 +) diff --git a/vendor/google.golang.org/protobuf/internal/impl/api_export.go b/vendor/google.golang.org/protobuf/internal/impl/api_export.go new file mode 100644 index 00000000..abee5f30 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/api_export.go @@ -0,0 +1,177 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + "strconv" + + "google.golang.org/protobuf/encoding/prototext" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/proto" + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// Export is a zero-length named type that exists only to export a set of +// functions that we do not want to appear in godoc. +type Export struct{} + +// NewError formats a string according to the format specifier and arguments and +// returns an error that has a "proto" prefix. +func (Export) NewError(f string, x ...interface{}) error { + return errors.New(f, x...) +} + +// enum is any enum type generated by protoc-gen-go +// and must be a named int32 type. +type enum = interface{} + +// EnumOf returns the protoreflect.Enum interface over e. +// It returns nil if e is nil. +func (Export) EnumOf(e enum) pref.Enum { + switch e := e.(type) { + case nil: + return nil + case pref.Enum: + return e + default: + return legacyWrapEnum(reflect.ValueOf(e)) + } +} + +// EnumDescriptorOf returns the protoreflect.EnumDescriptor for e. +// It returns nil if e is nil. +func (Export) EnumDescriptorOf(e enum) pref.EnumDescriptor { + switch e := e.(type) { + case nil: + return nil + case pref.Enum: + return e.Descriptor() + default: + return LegacyLoadEnumDesc(reflect.TypeOf(e)) + } +} + +// EnumTypeOf returns the protoreflect.EnumType for e. +// It returns nil if e is nil. 
+func (Export) EnumTypeOf(e enum) pref.EnumType { + switch e := e.(type) { + case nil: + return nil + case pref.Enum: + return e.Type() + default: + return legacyLoadEnumType(reflect.TypeOf(e)) + } +} + +// EnumStringOf returns the enum value as a string, either as the name if +// the number is resolvable, or the number formatted as a string. +func (Export) EnumStringOf(ed pref.EnumDescriptor, n pref.EnumNumber) string { + ev := ed.Values().ByNumber(n) + if ev != nil { + return string(ev.Name()) + } + return strconv.Itoa(int(n)) +} + +// message is any message type generated by protoc-gen-go +// and must be a pointer to a named struct type. +type message = interface{} + +// legacyMessageWrapper wraps a v2 message as a v1 message. +type legacyMessageWrapper struct{ m pref.ProtoMessage } + +func (m legacyMessageWrapper) Reset() { proto.Reset(m.m) } +func (m legacyMessageWrapper) String() string { return Export{}.MessageStringOf(m.m) } +func (m legacyMessageWrapper) ProtoMessage() {} + +// ProtoMessageV1Of converts either a v1 or v2 message to a v1 message. +// It returns nil if m is nil. +func (Export) ProtoMessageV1Of(m message) piface.MessageV1 { + switch mv := m.(type) { + case nil: + return nil + case piface.MessageV1: + return mv + case unwrapper: + return Export{}.ProtoMessageV1Of(mv.protoUnwrap()) + case pref.ProtoMessage: + return legacyMessageWrapper{mv} + default: + panic(fmt.Sprintf("message %T is neither a v1 or v2 Message", m)) + } +} + +func (Export) protoMessageV2Of(m message) pref.ProtoMessage { + switch mv := m.(type) { + case nil: + return nil + case pref.ProtoMessage: + return mv + case legacyMessageWrapper: + return mv.m + case piface.MessageV1: + return nil + default: + panic(fmt.Sprintf("message %T is neither a v1 or v2 Message", m)) + } +} + +// ProtoMessageV2Of converts either a v1 or v2 message to a v2 message. +// It returns nil if m is nil. +func (Export) ProtoMessageV2Of(m message) pref.ProtoMessage { + if m == nil { + return nil + } + if mv := (Export{}).protoMessageV2Of(m); mv != nil { + return mv + } + return legacyWrapMessage(reflect.ValueOf(m)).Interface() +} + +// MessageOf returns the protoreflect.Message interface over m. +// It returns nil if m is nil. +func (Export) MessageOf(m message) pref.Message { + if m == nil { + return nil + } + if mv := (Export{}).protoMessageV2Of(m); mv != nil { + return mv.ProtoReflect() + } + return legacyWrapMessage(reflect.ValueOf(m)) +} + +// MessageDescriptorOf returns the protoreflect.MessageDescriptor for m. +// It returns nil if m is nil. +func (Export) MessageDescriptorOf(m message) pref.MessageDescriptor { + if m == nil { + return nil + } + if mv := (Export{}).protoMessageV2Of(m); mv != nil { + return mv.ProtoReflect().Descriptor() + } + return LegacyLoadMessageDesc(reflect.TypeOf(m)) +} + +// MessageTypeOf returns the protoreflect.MessageType for m. +// It returns nil if m is nil. +func (Export) MessageTypeOf(m message) pref.MessageType { + if m == nil { + return nil + } + if mv := (Export{}).protoMessageV2Of(m); mv != nil { + return mv.ProtoReflect().Type() + } + return legacyLoadMessageType(reflect.TypeOf(m), "") +} + +// MessageStringOf returns the message value as a string, +// which is the message serialized in the protobuf text format. 
+func (Export) MessageStringOf(m pref.ProtoMessage) string { + return prototext.MarshalOptions{Multiline: false}.Format(m) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/checkinit.go b/vendor/google.golang.org/protobuf/internal/impl/checkinit.go new file mode 100644 index 00000000..b82341e5 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/checkinit.go @@ -0,0 +1,141 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "sync" + + "google.golang.org/protobuf/internal/errors" + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +func (mi *MessageInfo) checkInitialized(in piface.CheckInitializedInput) (piface.CheckInitializedOutput, error) { + var p pointer + if ms, ok := in.Message.(*messageState); ok { + p = ms.pointer() + } else { + p = in.Message.(*messageReflectWrapper).pointer() + } + return piface.CheckInitializedOutput{}, mi.checkInitializedPointer(p) +} + +func (mi *MessageInfo) checkInitializedPointer(p pointer) error { + mi.init() + if !mi.needsInitCheck { + return nil + } + if p.IsNil() { + for _, f := range mi.orderedCoderFields { + if f.isRequired { + return errors.RequiredNotSet(string(mi.Desc.Fields().ByNumber(f.num).FullName())) + } + } + return nil + } + if mi.extensionOffset.IsValid() { + e := p.Apply(mi.extensionOffset).Extensions() + if err := mi.isInitExtensions(e); err != nil { + return err + } + } + for _, f := range mi.orderedCoderFields { + if !f.isRequired && f.funcs.isInit == nil { + continue + } + fptr := p.Apply(f.offset) + if f.isPointer && fptr.Elem().IsNil() { + if f.isRequired { + return errors.RequiredNotSet(string(mi.Desc.Fields().ByNumber(f.num).FullName())) + } + continue + } + if f.funcs.isInit == nil { + continue + } + if err := f.funcs.isInit(fptr, f); err != nil { + return err + } + } + return nil +} + +func (mi *MessageInfo) isInitExtensions(ext *map[int32]ExtensionField) error { + if ext == nil { + return nil + } + for _, x := range *ext { + ei := getExtensionFieldInfo(x.Type()) + if ei.funcs.isInit == nil { + continue + } + v := x.Value() + if !v.IsValid() { + continue + } + if err := ei.funcs.isInit(v); err != nil { + return err + } + } + return nil +} + +var ( + needsInitCheckMu sync.Mutex + needsInitCheckMap sync.Map +) + +// needsInitCheck reports whether a message needs to be checked for partial initialization. +// +// It returns true if the message transitively includes any required or extension fields. +func needsInitCheck(md pref.MessageDescriptor) bool { + if v, ok := needsInitCheckMap.Load(md); ok { + if has, ok := v.(bool); ok { + return has + } + } + needsInitCheckMu.Lock() + defer needsInitCheckMu.Unlock() + return needsInitCheckLocked(md) +} + +func needsInitCheckLocked(md pref.MessageDescriptor) (has bool) { + if v, ok := needsInitCheckMap.Load(md); ok { + // If has is true, we've previously determined that this message + // needs init checks. + // + // If has is false, we've previously determined that it can never + // be uninitialized. + // + // If has is not a bool, we've just encountered a cycle in the + // message graph. In this case, it is safe to return false: If + // the message does have required fields, we'll detect them later + // in the graph traversal. 
+ has, ok := v.(bool) + return ok && has + } + needsInitCheckMap.Store(md, struct{}{}) // avoid cycles while descending into this message + defer func() { + needsInitCheckMap.Store(md, has) + }() + if md.RequiredNumbers().Len() > 0 { + return true + } + if md.ExtensionRanges().Len() > 0 { + return true + } + for i := 0; i < md.Fields().Len(); i++ { + fd := md.Fields().Get(i) + // Map keys are never messages, so just consider the map value. + if fd.IsMap() { + fd = fd.MapValue() + } + fmd := fd.Message() + if fmd != nil && needsInitCheckLocked(fmd) { + return true + } + } + return false +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_extension.go b/vendor/google.golang.org/protobuf/internal/impl/codec_extension.go new file mode 100644 index 00000000..08d35170 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_extension.go @@ -0,0 +1,223 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "sync" + "sync/atomic" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type extensionFieldInfo struct { + wiretag uint64 + tagsize int + unmarshalNeedsValue bool + funcs valueCoderFuncs + validation validationInfo +} + +var legacyExtensionFieldInfoCache sync.Map // map[protoreflect.ExtensionType]*extensionFieldInfo + +func getExtensionFieldInfo(xt pref.ExtensionType) *extensionFieldInfo { + if xi, ok := xt.(*ExtensionInfo); ok { + xi.lazyInit() + return xi.info + } + return legacyLoadExtensionFieldInfo(xt) +} + +// legacyLoadExtensionFieldInfo dynamically loads a *ExtensionInfo for xt. +func legacyLoadExtensionFieldInfo(xt pref.ExtensionType) *extensionFieldInfo { + if xi, ok := legacyExtensionFieldInfoCache.Load(xt); ok { + return xi.(*extensionFieldInfo) + } + e := makeExtensionFieldInfo(xt.TypeDescriptor()) + if e, ok := legacyMessageTypeCache.LoadOrStore(xt, e); ok { + return e.(*extensionFieldInfo) + } + return e +} + +func makeExtensionFieldInfo(xd pref.ExtensionDescriptor) *extensionFieldInfo { + var wiretag uint64 + if !xd.IsPacked() { + wiretag = protowire.EncodeTag(xd.Number(), wireTypes[xd.Kind()]) + } else { + wiretag = protowire.EncodeTag(xd.Number(), protowire.BytesType) + } + e := &extensionFieldInfo{ + wiretag: wiretag, + tagsize: protowire.SizeVarint(wiretag), + funcs: encoderFuncsForValue(xd), + } + // Does the unmarshal function need a value passed to it? + // This is true for composite types, where we pass in a message, list, or map to fill in, + // and for enums, where we pass in a prototype value to specify the concrete enum type. + switch xd.Kind() { + case pref.MessageKind, pref.GroupKind, pref.EnumKind: + e.unmarshalNeedsValue = true + default: + if xd.Cardinality() == pref.Repeated { + e.unmarshalNeedsValue = true + } + } + return e +} + +type lazyExtensionValue struct { + atomicOnce uint32 // atomically set if value is valid + mu sync.Mutex + xi *extensionFieldInfo + value pref.Value + b []byte + fn func() pref.Value +} + +type ExtensionField struct { + typ pref.ExtensionType + + // value is either the value of GetValue, + // or a *lazyExtensionValue that then returns the value of GetValue. 
+ value pref.Value + lazy *lazyExtensionValue +} + +func (f *ExtensionField) appendLazyBytes(xt pref.ExtensionType, xi *extensionFieldInfo, num protowire.Number, wtyp protowire.Type, b []byte) { + if f.lazy == nil { + f.lazy = &lazyExtensionValue{xi: xi} + } + f.typ = xt + f.lazy.xi = xi + f.lazy.b = protowire.AppendTag(f.lazy.b, num, wtyp) + f.lazy.b = append(f.lazy.b, b...) +} + +func (f *ExtensionField) canLazy(xt pref.ExtensionType) bool { + if f.typ == nil { + return true + } + if f.typ == xt && f.lazy != nil && atomic.LoadUint32(&f.lazy.atomicOnce) == 0 { + return true + } + return false +} + +func (f *ExtensionField) lazyInit() { + f.lazy.mu.Lock() + defer f.lazy.mu.Unlock() + if atomic.LoadUint32(&f.lazy.atomicOnce) == 1 { + return + } + if f.lazy.xi != nil { + b := f.lazy.b + val := f.typ.New() + for len(b) > 0 { + var tag uint64 + if b[0] < 0x80 { + tag = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + tag = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + tag, n = protowire.ConsumeVarint(b) + if n < 0 { + panic(errors.New("bad tag in lazy extension decoding")) + } + b = b[n:] + } + num := protowire.Number(tag >> 3) + wtyp := protowire.Type(tag & 7) + var out unmarshalOutput + var err error + val, out, err = f.lazy.xi.funcs.unmarshal(b, val, num, wtyp, lazyUnmarshalOptions) + if err != nil { + panic(errors.New("decode failure in lazy extension decoding: %v", err)) + } + b = b[out.n:] + } + f.lazy.value = val + } else { + f.lazy.value = f.lazy.fn() + } + f.lazy.xi = nil + f.lazy.fn = nil + f.lazy.b = nil + atomic.StoreUint32(&f.lazy.atomicOnce, 1) +} + +// Set sets the type and value of the extension field. +// This must not be called concurrently. +func (f *ExtensionField) Set(t pref.ExtensionType, v pref.Value) { + f.typ = t + f.value = v + f.lazy = nil +} + +// SetLazy sets the type and a value that is to be lazily evaluated upon first use. +// This must not be called concurrently. +func (f *ExtensionField) SetLazy(t pref.ExtensionType, fn func() pref.Value) { + f.typ = t + f.lazy = &lazyExtensionValue{fn: fn} +} + +// Value returns the value of the extension field. +// This may be called concurrently. +func (f *ExtensionField) Value() pref.Value { + if f.lazy != nil { + if atomic.LoadUint32(&f.lazy.atomicOnce) == 0 { + f.lazyInit() + } + return f.lazy.value + } + return f.value +} + +// Type returns the type of the extension field. +// This may be called concurrently. +func (f ExtensionField) Type() pref.ExtensionType { + return f.typ +} + +// IsSet returns whether the extension field is set. +// This may be called concurrently. +func (f ExtensionField) IsSet() bool { + return f.typ != nil +} + +// IsLazy reports whether a field is lazily encoded. +// It is exported for testing. 
+func IsLazy(m pref.Message, fd pref.FieldDescriptor) bool { + var mi *MessageInfo + var p pointer + switch m := m.(type) { + case *messageState: + mi = m.messageInfo() + p = m.pointer() + case *messageReflectWrapper: + mi = m.messageInfo() + p = m.pointer() + default: + return false + } + xd, ok := fd.(pref.ExtensionTypeDescriptor) + if !ok { + return false + } + xt := xd.Type() + ext := mi.extensionMap(p) + if ext == nil { + return false + } + f, ok := (*ext)[int32(fd.Number())] + if !ok { + return false + } + return f.typ == xt && f.lazy != nil && atomic.LoadUint32(&f.lazy.atomicOnce) == 0 +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_field.go b/vendor/google.golang.org/protobuf/internal/impl/codec_field.go new file mode 100644 index 00000000..cb4b482d --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_field.go @@ -0,0 +1,830 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + "sync" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/proto" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +type errInvalidUTF8 struct{} + +func (errInvalidUTF8) Error() string { return "string field contains invalid UTF-8" } +func (errInvalidUTF8) InvalidUTF8() bool { return true } +func (errInvalidUTF8) Unwrap() error { return errors.Error } + +// initOneofFieldCoders initializes the fast-path functions for the fields in a oneof. +// +// For size, marshal, and isInit operations, functions are set only on the first field +// in the oneof. The functions are called when the oneof is non-nil, and will dispatch +// to the appropriate field-specific function as necessary. +// +// The unmarshal function is set on each field individually as usual. +func (mi *MessageInfo) initOneofFieldCoders(od pref.OneofDescriptor, si structInfo) { + fs := si.oneofsByName[od.Name()] + ft := fs.Type + oneofFields := make(map[reflect.Type]*coderFieldInfo) + needIsInit := false + fields := od.Fields() + for i, lim := 0, fields.Len(); i < lim; i++ { + fd := od.Fields().Get(i) + num := fd.Number() + // Make a copy of the original coderFieldInfo for use in unmarshaling. + // + // oneofFields[oneofType].funcs.marshal is the field-specific marshal function. + // + // mi.coderFields[num].marshal is set on only the first field in the oneof, + // and dispatches to the field-specific marshaler in oneofFields. 
+ cf := *mi.coderFields[num] + ot := si.oneofWrappersByNumber[num] + cf.ft = ot.Field(0).Type + cf.mi, cf.funcs = fieldCoder(fd, cf.ft) + oneofFields[ot] = &cf + if cf.funcs.isInit != nil { + needIsInit = true + } + mi.coderFields[num].funcs.unmarshal = func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + var vw reflect.Value // pointer to wrapper type + vi := p.AsValueOf(ft).Elem() // oneof field value of interface kind + if !vi.IsNil() && !vi.Elem().IsNil() && vi.Elem().Elem().Type() == ot { + vw = vi.Elem() + } else { + vw = reflect.New(ot) + } + out, err := cf.funcs.unmarshal(b, pointerOfValue(vw).Apply(zeroOffset), wtyp, &cf, opts) + if err != nil { + return out, err + } + vi.Set(vw) + return out, nil + } + } + getInfo := func(p pointer) (pointer, *coderFieldInfo) { + v := p.AsValueOf(ft).Elem() + if v.IsNil() { + return pointer{}, nil + } + v = v.Elem() // interface -> *struct + if v.IsNil() { + return pointer{}, nil + } + return pointerOfValue(v).Apply(zeroOffset), oneofFields[v.Elem().Type()] + } + first := mi.coderFields[od.Fields().Get(0).Number()] + first.funcs.size = func(p pointer, _ *coderFieldInfo, opts marshalOptions) int { + p, info := getInfo(p) + if info == nil || info.funcs.size == nil { + return 0 + } + return info.funcs.size(p, info, opts) + } + first.funcs.marshal = func(b []byte, p pointer, _ *coderFieldInfo, opts marshalOptions) ([]byte, error) { + p, info := getInfo(p) + if info == nil || info.funcs.marshal == nil { + return b, nil + } + return info.funcs.marshal(b, p, info, opts) + } + first.funcs.merge = func(dst, src pointer, _ *coderFieldInfo, opts mergeOptions) { + srcp, srcinfo := getInfo(src) + if srcinfo == nil || srcinfo.funcs.merge == nil { + return + } + dstp, dstinfo := getInfo(dst) + if dstinfo != srcinfo { + dst.AsValueOf(ft).Elem().Set(reflect.New(src.AsValueOf(ft).Elem().Elem().Elem().Type())) + dstp = pointerOfValue(dst.AsValueOf(ft).Elem().Elem()).Apply(zeroOffset) + } + srcinfo.funcs.merge(dstp, srcp, srcinfo, opts) + } + if needIsInit { + first.funcs.isInit = func(p pointer, _ *coderFieldInfo) error { + p, info := getInfo(p) + if info == nil || info.funcs.isInit == nil { + return nil + } + return info.funcs.isInit(p, info) + } + } +} + +func makeWeakMessageFieldCoder(fd pref.FieldDescriptor) pointerCoderFuncs { + var once sync.Once + var messageType pref.MessageType + lazyInit := func() { + once.Do(func() { + messageName := fd.Message().FullName() + messageType, _ = preg.GlobalTypes.FindMessageByName(messageName) + }) + } + + return pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + m, ok := p.WeakFields().get(f.num) + if !ok { + return 0 + } + lazyInit() + if messageType == nil { + panic(fmt.Sprintf("weak message %v is not linked in", fd.Message().FullName())) + } + return sizeMessage(m, f.tagsize, opts) + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + m, ok := p.WeakFields().get(f.num) + if !ok { + return b, nil + } + lazyInit() + if messageType == nil { + panic(fmt.Sprintf("weak message %v is not linked in", fd.Message().FullName())) + } + return appendMessage(b, m, f.wiretag, opts) + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + fs := p.WeakFields() + m, ok := fs.get(f.num) + if !ok { + lazyInit() + if messageType == nil { + return unmarshalOutput{}, errUnknown + } + m = 
messageType.New().Interface() + fs.set(f.num, m) + } + return consumeMessage(b, m, wtyp, opts) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + m, ok := p.WeakFields().get(f.num) + if !ok { + return nil + } + return proto.CheckInitialized(m) + }, + merge: func(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + sm, ok := src.WeakFields().get(f.num) + if !ok { + return + } + dm, ok := dst.WeakFields().get(f.num) + if !ok { + lazyInit() + if messageType == nil { + panic(fmt.Sprintf("weak message %v is not linked in", fd.Message().FullName())) + } + dm = messageType.New().Interface() + dst.WeakFields().set(f.num, dm) + } + opts.Merge(dm, sm) + }, + } +} + +func makeMessageFieldCoder(fd pref.FieldDescriptor, ft reflect.Type) pointerCoderFuncs { + if mi := getMessageInfo(ft); mi != nil { + funcs := pointerCoderFuncs{ + size: sizeMessageInfo, + marshal: appendMessageInfo, + unmarshal: consumeMessageInfo, + merge: mergeMessage, + } + if needsInitCheck(mi.Desc) { + funcs.isInit = isInitMessageInfo + } + return funcs + } else { + return pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + m := asMessage(p.AsValueOf(ft).Elem()) + return sizeMessage(m, f.tagsize, opts) + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + m := asMessage(p.AsValueOf(ft).Elem()) + return appendMessage(b, m, f.wiretag, opts) + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + mp := p.AsValueOf(ft).Elem() + if mp.IsNil() { + mp.Set(reflect.New(ft.Elem())) + } + return consumeMessage(b, asMessage(mp), wtyp, opts) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + m := asMessage(p.AsValueOf(ft).Elem()) + return proto.CheckInitialized(m) + }, + merge: mergeMessage, + } + } +} + +func sizeMessageInfo(p pointer, f *coderFieldInfo, opts marshalOptions) int { + return protowire.SizeBytes(f.mi.sizePointer(p.Elem(), opts)) + f.tagsize +} + +func appendMessageInfo(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(f.mi.sizePointer(p.Elem(), opts))) + return f.mi.marshalAppendPointer(b, p.Elem(), opts) +} + +func consumeMessageInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if p.Elem().IsNil() { + p.SetPointer(pointerOfValue(reflect.New(f.mi.GoReflectType.Elem()))) + } + o, err := f.mi.unmarshalPointer(v, p.Elem(), 0, opts) + if err != nil { + return out, err + } + out.n = n + out.initialized = o.initialized + return out, nil +} + +func isInitMessageInfo(p pointer, f *coderFieldInfo) error { + return f.mi.checkInitializedPointer(p.Elem()) +} + +func sizeMessage(m proto.Message, tagsize int, _ marshalOptions) int { + return protowire.SizeBytes(proto.Size(m)) + tagsize +} + +func appendMessage(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(proto.Size(m))) + return opts.Options().MarshalAppend(b, m) +} + +func consumeMessage(b []byte, m proto.Message, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := 
protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: v, + Message: m.ProtoReflect(), + }) + if err != nil { + return out, err + } + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return out, nil +} + +func sizeMessageValue(v pref.Value, tagsize int, opts marshalOptions) int { + m := v.Message().Interface() + return sizeMessage(m, tagsize, opts) +} + +func appendMessageValue(b []byte, v pref.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + m := v.Message().Interface() + return appendMessage(b, m, wiretag, opts) +} + +func consumeMessageValue(b []byte, v pref.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (pref.Value, unmarshalOutput, error) { + m := v.Message().Interface() + out, err := consumeMessage(b, m, wtyp, opts) + return v, out, err +} + +func isInitMessageValue(v pref.Value) error { + m := v.Message().Interface() + return proto.CheckInitialized(m) +} + +var coderMessageValue = valueCoderFuncs{ + size: sizeMessageValue, + marshal: appendMessageValue, + unmarshal: consumeMessageValue, + isInit: isInitMessageValue, + merge: mergeMessageValue, +} + +func sizeGroupValue(v pref.Value, tagsize int, opts marshalOptions) int { + m := v.Message().Interface() + return sizeGroup(m, tagsize, opts) +} + +func appendGroupValue(b []byte, v pref.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + m := v.Message().Interface() + return appendGroup(b, m, wiretag, opts) +} + +func consumeGroupValue(b []byte, v pref.Value, num protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (pref.Value, unmarshalOutput, error) { + m := v.Message().Interface() + out, err := consumeGroup(b, m, num, wtyp, opts) + return v, out, err +} + +var coderGroupValue = valueCoderFuncs{ + size: sizeGroupValue, + marshal: appendGroupValue, + unmarshal: consumeGroupValue, + isInit: isInitMessageValue, + merge: mergeMessageValue, +} + +func makeGroupFieldCoder(fd pref.FieldDescriptor, ft reflect.Type) pointerCoderFuncs { + num := fd.Number() + if mi := getMessageInfo(ft); mi != nil { + funcs := pointerCoderFuncs{ + size: sizeGroupType, + marshal: appendGroupType, + unmarshal: consumeGroupType, + merge: mergeMessage, + } + if needsInitCheck(mi.Desc) { + funcs.isInit = isInitMessageInfo + } + return funcs + } else { + return pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + m := asMessage(p.AsValueOf(ft).Elem()) + return sizeGroup(m, f.tagsize, opts) + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + m := asMessage(p.AsValueOf(ft).Elem()) + return appendGroup(b, m, f.wiretag, opts) + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + mp := p.AsValueOf(ft).Elem() + if mp.IsNil() { + mp.Set(reflect.New(ft.Elem())) + } + return consumeGroup(b, asMessage(mp), num, wtyp, opts) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + m := asMessage(p.AsValueOf(ft).Elem()) + return proto.CheckInitialized(m) + }, + merge: mergeMessage, + } + } +} + +func sizeGroupType(p pointer, f *coderFieldInfo, opts marshalOptions) int { + return 2*f.tagsize + f.mi.sizePointer(p.Elem(), opts) +} + +func appendGroupType(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, f.wiretag) // start group + b, err := f.mi.marshalAppendPointer(b, p.Elem(), opts) + b 
= protowire.AppendVarint(b, f.wiretag+1) // end group + return b, err +} + +func consumeGroupType(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.StartGroupType { + return out, errUnknown + } + if p.Elem().IsNil() { + p.SetPointer(pointerOfValue(reflect.New(f.mi.GoReflectType.Elem()))) + } + return f.mi.unmarshalPointer(b, p.Elem(), f.num, opts) +} + +func sizeGroup(m proto.Message, tagsize int, _ marshalOptions) int { + return 2*tagsize + proto.Size(m) +} + +func appendGroup(b []byte, m proto.Message, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) // start group + b, err := opts.Options().MarshalAppend(b, m) + b = protowire.AppendVarint(b, wiretag+1) // end group + return b, err +} + +func consumeGroup(b []byte, m proto.Message, num protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.StartGroupType { + return out, errUnknown + } + b, n := protowire.ConsumeGroup(num, b) + if n < 0 { + return out, errDecode + } + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: b, + Message: m.ProtoReflect(), + }) + if err != nil { + return out, err + } + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return out, nil +} + +func makeMessageSliceFieldCoder(fd pref.FieldDescriptor, ft reflect.Type) pointerCoderFuncs { + if mi := getMessageInfo(ft); mi != nil { + funcs := pointerCoderFuncs{ + size: sizeMessageSliceInfo, + marshal: appendMessageSliceInfo, + unmarshal: consumeMessageSliceInfo, + merge: mergeMessageSlice, + } + if needsInitCheck(mi.Desc) { + funcs.isInit = isInitMessageSliceInfo + } + return funcs + } + return pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + return sizeMessageSlice(p, ft, f.tagsize, opts) + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + return appendMessageSlice(b, p, f.wiretag, ft, opts) + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + return consumeMessageSlice(b, p, ft, wtyp, opts) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + return isInitMessageSlice(p, ft) + }, + merge: mergeMessageSlice, + } +} + +func sizeMessageSliceInfo(p pointer, f *coderFieldInfo, opts marshalOptions) int { + s := p.PointerSlice() + n := 0 + for _, v := range s { + n += protowire.SizeBytes(f.mi.sizePointer(v, opts)) + f.tagsize + } + return n +} + +func appendMessageSliceInfo(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := p.PointerSlice() + var err error + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + siz := f.mi.sizePointer(v, opts) + b = protowire.AppendVarint(b, uint64(siz)) + b, err = f.mi.marshalAppendPointer(b, v, opts) + if err != nil { + return b, err + } + } + return b, nil +} + +func consumeMessageSliceInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + m := reflect.New(f.mi.GoReflectType.Elem()).Interface() + mp := pointerOfIface(m) + o, err := f.mi.unmarshalPointer(v, mp, 0, opts) + if err != nil { + return out, err + } + p.AppendPointerSlice(mp) + out.n = n + out.initialized 
= o.initialized + return out, nil +} + +func isInitMessageSliceInfo(p pointer, f *coderFieldInfo) error { + s := p.PointerSlice() + for _, v := range s { + if err := f.mi.checkInitializedPointer(v); err != nil { + return err + } + } + return nil +} + +func sizeMessageSlice(p pointer, goType reflect.Type, tagsize int, _ marshalOptions) int { + s := p.PointerSlice() + n := 0 + for _, v := range s { + m := asMessage(v.AsValueOf(goType.Elem())) + n += protowire.SizeBytes(proto.Size(m)) + tagsize + } + return n +} + +func appendMessageSlice(b []byte, p pointer, wiretag uint64, goType reflect.Type, opts marshalOptions) ([]byte, error) { + s := p.PointerSlice() + var err error + for _, v := range s { + m := asMessage(v.AsValueOf(goType.Elem())) + b = protowire.AppendVarint(b, wiretag) + siz := proto.Size(m) + b = protowire.AppendVarint(b, uint64(siz)) + b, err = opts.Options().MarshalAppend(b, m) + if err != nil { + return b, err + } + } + return b, nil +} + +func consumeMessageSlice(b []byte, p pointer, goType reflect.Type, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + mp := reflect.New(goType.Elem()) + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: v, + Message: asMessage(mp).ProtoReflect(), + }) + if err != nil { + return out, err + } + p.AppendPointerSlice(pointerOfValue(mp)) + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return out, nil +} + +func isInitMessageSlice(p pointer, goType reflect.Type) error { + s := p.PointerSlice() + for _, v := range s { + m := asMessage(v.AsValueOf(goType.Elem())) + if err := proto.CheckInitialized(m); err != nil { + return err + } + } + return nil +} + +// Slices of messages + +func sizeMessageSliceValue(listv pref.Value, tagsize int, opts marshalOptions) int { + list := listv.List() + n := 0 + for i, llen := 0, list.Len(); i < llen; i++ { + m := list.Get(i).Message().Interface() + n += protowire.SizeBytes(proto.Size(m)) + tagsize + } + return n +} + +func appendMessageSliceValue(b []byte, listv pref.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + mopts := opts.Options() + for i, llen := 0, list.Len(); i < llen; i++ { + m := list.Get(i).Message().Interface() + b = protowire.AppendVarint(b, wiretag) + siz := proto.Size(m) + b = protowire.AppendVarint(b, uint64(siz)) + var err error + b, err = mopts.MarshalAppend(b, m) + if err != nil { + return b, err + } + } + return b, nil +} + +func consumeMessageSliceValue(b []byte, listv pref.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ pref.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp != protowire.BytesType { + return pref.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return pref.Value{}, out, errDecode + } + m := list.NewElement() + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: v, + Message: m.Message(), + }) + if err != nil { + return pref.Value{}, out, err + } + list.Append(m) + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return listv, out, nil +} + +func isInitMessageSliceValue(listv pref.Value) error { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + m := list.Get(i).Message().Interface() + if err := proto.CheckInitialized(m); err != nil { + return err + } + } + return nil +} + +var 
coderMessageSliceValue = valueCoderFuncs{ + size: sizeMessageSliceValue, + marshal: appendMessageSliceValue, + unmarshal: consumeMessageSliceValue, + isInit: isInitMessageSliceValue, + merge: mergeMessageListValue, +} + +func sizeGroupSliceValue(listv pref.Value, tagsize int, opts marshalOptions) int { + list := listv.List() + n := 0 + for i, llen := 0, list.Len(); i < llen; i++ { + m := list.Get(i).Message().Interface() + n += 2*tagsize + proto.Size(m) + } + return n +} + +func appendGroupSliceValue(b []byte, listv pref.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + mopts := opts.Options() + for i, llen := 0, list.Len(); i < llen; i++ { + m := list.Get(i).Message().Interface() + b = protowire.AppendVarint(b, wiretag) // start group + var err error + b, err = mopts.MarshalAppend(b, m) + if err != nil { + return b, err + } + b = protowire.AppendVarint(b, wiretag+1) // end group + } + return b, nil +} + +func consumeGroupSliceValue(b []byte, listv pref.Value, num protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ pref.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp != protowire.StartGroupType { + return pref.Value{}, out, errUnknown + } + b, n := protowire.ConsumeGroup(num, b) + if n < 0 { + return pref.Value{}, out, errDecode + } + m := list.NewElement() + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: b, + Message: m.Message(), + }) + if err != nil { + return pref.Value{}, out, err + } + list.Append(m) + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return listv, out, nil +} + +var coderGroupSliceValue = valueCoderFuncs{ + size: sizeGroupSliceValue, + marshal: appendGroupSliceValue, + unmarshal: consumeGroupSliceValue, + isInit: isInitMessageSliceValue, + merge: mergeMessageListValue, +} + +func makeGroupSliceFieldCoder(fd pref.FieldDescriptor, ft reflect.Type) pointerCoderFuncs { + num := fd.Number() + if mi := getMessageInfo(ft); mi != nil { + funcs := pointerCoderFuncs{ + size: sizeGroupSliceInfo, + marshal: appendGroupSliceInfo, + unmarshal: consumeGroupSliceInfo, + merge: mergeMessageSlice, + } + if needsInitCheck(mi.Desc) { + funcs.isInit = isInitMessageSliceInfo + } + return funcs + } + return pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + return sizeGroupSlice(p, ft, f.tagsize, opts) + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + return appendGroupSlice(b, p, f.wiretag, ft, opts) + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + return consumeGroupSlice(b, p, num, wtyp, ft, opts) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + return isInitMessageSlice(p, ft) + }, + merge: mergeMessageSlice, + } +} + +func sizeGroupSlice(p pointer, messageType reflect.Type, tagsize int, _ marshalOptions) int { + s := p.PointerSlice() + n := 0 + for _, v := range s { + m := asMessage(v.AsValueOf(messageType.Elem())) + n += 2*tagsize + proto.Size(m) + } + return n +} + +func appendGroupSlice(b []byte, p pointer, wiretag uint64, messageType reflect.Type, opts marshalOptions) ([]byte, error) { + s := p.PointerSlice() + var err error + for _, v := range s { + m := asMessage(v.AsValueOf(messageType.Elem())) + b = protowire.AppendVarint(b, wiretag) // start group + b, err = opts.Options().MarshalAppend(b, m) + if err != nil { + return b, err + } + b = protowire.AppendVarint(b, 
wiretag+1) // end group + } + return b, nil +} + +func consumeGroupSlice(b []byte, p pointer, num protowire.Number, wtyp protowire.Type, goType reflect.Type, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.StartGroupType { + return out, errUnknown + } + b, n := protowire.ConsumeGroup(num, b) + if n < 0 { + return out, errDecode + } + mp := reflect.New(goType.Elem()) + o, err := opts.Options().UnmarshalState(piface.UnmarshalInput{ + Buf: b, + Message: asMessage(mp).ProtoReflect(), + }) + if err != nil { + return out, err + } + p.AppendPointerSlice(pointerOfValue(mp)) + out.n = n + out.initialized = o.Flags&piface.UnmarshalInitialized != 0 + return out, nil +} + +func sizeGroupSliceInfo(p pointer, f *coderFieldInfo, opts marshalOptions) int { + s := p.PointerSlice() + n := 0 + for _, v := range s { + n += 2*f.tagsize + f.mi.sizePointer(v, opts) + } + return n +} + +func appendGroupSliceInfo(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := p.PointerSlice() + var err error + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) // start group + b, err = f.mi.marshalAppendPointer(b, v, opts) + if err != nil { + return b, err + } + b = protowire.AppendVarint(b, f.wiretag+1) // end group + } + return b, nil +} + +func consumeGroupSliceInfo(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + if wtyp != protowire.StartGroupType { + return unmarshalOutput{}, errUnknown + } + m := reflect.New(f.mi.GoReflectType.Elem()).Interface() + mp := pointerOfIface(m) + out, err := f.mi.unmarshalPointer(b, mp, f.num, opts) + if err != nil { + return out, err + } + p.AppendPointerSlice(mp) + return out, nil +} + +func asMessage(v reflect.Value) pref.ProtoMessage { + if m, ok := v.Interface().(pref.ProtoMessage); ok { + return m + } + return legacyWrapMessage(v).Interface() +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go b/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go new file mode 100644 index 00000000..1a509b63 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_gen.go @@ -0,0 +1,5637 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. + +package impl + +import ( + "math" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// sizeBool returns the size of wire encoding a bool pointer as a Bool. +func sizeBool(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Bool() + return f.tagsize + protowire.SizeVarint(protowire.EncodeBool(v)) +} + +// appendBool wire encodes a bool pointer as a Bool. +func appendBool(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bool() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v)) + return b, nil +} + +// consumeBool wire decodes a bool pointer as a Bool. 
+func consumeBool(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Bool() = protowire.DecodeBool(v) + out.n = n + return out, nil +} + +var coderBool = pointerCoderFuncs{ + size: sizeBool, + marshal: appendBool, + unmarshal: consumeBool, + merge: mergeBool, +} + +// sizeBoolNoZero returns the size of wire encoding a bool pointer as a Bool. +// The zero value is not encoded. +func sizeBoolNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Bool() + if v == false { + return 0 + } + return f.tagsize + protowire.SizeVarint(protowire.EncodeBool(v)) +} + +// appendBoolNoZero wire encodes a bool pointer as a Bool. +// The zero value is not encoded. +func appendBoolNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bool() + if v == false { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v)) + return b, nil +} + +var coderBoolNoZero = pointerCoderFuncs{ + size: sizeBoolNoZero, + marshal: appendBoolNoZero, + unmarshal: consumeBool, + merge: mergeBoolNoZero, +} + +// sizeBoolPtr returns the size of wire encoding a *bool pointer as a Bool. +// It panics if the pointer is nil. +func sizeBoolPtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.BoolPtr() + return f.tagsize + protowire.SizeVarint(protowire.EncodeBool(v)) +} + +// appendBoolPtr wire encodes a *bool pointer as a Bool. +// It panics if the pointer is nil. +func appendBoolPtr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.BoolPtr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v)) + return b, nil +} + +// consumeBoolPtr wire decodes a *bool pointer as a Bool. +func consumeBoolPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.BoolPtr() + if *vp == nil { + *vp = new(bool) + } + **vp = protowire.DecodeBool(v) + out.n = n + return out, nil +} + +var coderBoolPtr = pointerCoderFuncs{ + size: sizeBoolPtr, + marshal: appendBoolPtr, + unmarshal: consumeBoolPtr, + merge: mergeBoolPtr, +} + +// sizeBoolSlice returns the size of wire encoding a []bool pointer as a repeated Bool. +func sizeBoolSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.BoolSlice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(protowire.EncodeBool(v)) + } + return size +} + +// appendBoolSlice encodes a []bool pointer as a repeated Bool. 
+func appendBoolSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.BoolSlice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v)) + } + return b, nil +} + +// consumeBoolSlice wire decodes a []bool pointer as a repeated Bool. +func consumeBoolSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.BoolSlice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, protowire.DecodeBool(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, protowire.DecodeBool(v)) + out.n = n + return out, nil +} + +var coderBoolSlice = pointerCoderFuncs{ + size: sizeBoolSlice, + marshal: appendBoolSlice, + unmarshal: consumeBoolSlice, + merge: mergeBoolSlice, +} + +// sizeBoolPackedSlice returns the size of wire encoding a []bool pointer as a packed repeated Bool. +func sizeBoolPackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.BoolSlice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeBool(v)) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendBoolPackedSlice encodes a []bool pointer as a packed repeated Bool. +func appendBoolPackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.BoolSlice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeBool(v)) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, protowire.EncodeBool(v)) + } + return b, nil +} + +var coderBoolPackedSlice = pointerCoderFuncs{ + size: sizeBoolPackedSlice, + marshal: appendBoolPackedSlice, + unmarshal: consumeBoolSlice, + merge: mergeBoolSlice, +} + +// sizeBoolValue returns the size of wire encoding a bool value as a Bool. +func sizeBoolValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(protowire.EncodeBool(v.Bool())) +} + +// appendBoolValue encodes a bool value as a Bool. +func appendBoolValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v.Bool())) + return b, nil +} + +// consumeBoolValue decodes a bool value as a Bool. 
+func consumeBoolValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfBool(protowire.DecodeBool(v)), out, nil +} + +var coderBoolValue = valueCoderFuncs{ + size: sizeBoolValue, + marshal: appendBoolValue, + unmarshal: consumeBoolValue, + merge: mergeScalarValue, +} + +// sizeBoolSliceValue returns the size of wire encoding a []bool value as a repeated Bool. +func sizeBoolSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(protowire.EncodeBool(v.Bool())) + } + return size +} + +// appendBoolSliceValue encodes a []bool value as a repeated Bool. +func appendBoolSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeBool(v.Bool())) + } + return b, nil +} + +// consumeBoolSliceValue wire decodes a []bool value as a repeated Bool. +func consumeBoolSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) + out.n = n + return listv, out, nil +} + +var coderBoolSliceValue = valueCoderFuncs{ + size: sizeBoolSliceValue, + marshal: appendBoolSliceValue, + unmarshal: consumeBoolSliceValue, + merge: mergeListValue, +} + +// sizeBoolPackedSliceValue returns the size of wire encoding a []bool value as a packed repeated Bool. 
+func sizeBoolPackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeBool(v.Bool())) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendBoolPackedSliceValue encodes a []bool value as a packed repeated Bool. +func appendBoolPackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeBool(v.Bool())) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, protowire.EncodeBool(v.Bool())) + } + return b, nil +} + +var coderBoolPackedSliceValue = valueCoderFuncs{ + size: sizeBoolPackedSliceValue, + marshal: appendBoolPackedSliceValue, + unmarshal: consumeBoolSliceValue, + merge: mergeListValue, +} + +// sizeEnumValue returns the size of wire encoding a value as a Enum. +func sizeEnumValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(uint64(v.Enum())) +} + +// appendEnumValue encodes a value as a Enum. +func appendEnumValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(v.Enum())) + return b, nil +} + +// consumeEnumValue decodes a value as a Enum. +func consumeEnumValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfEnum(protoreflect.EnumNumber(v)), out, nil +} + +var coderEnumValue = valueCoderFuncs{ + size: sizeEnumValue, + marshal: appendEnumValue, + unmarshal: consumeEnumValue, + merge: mergeScalarValue, +} + +// sizeEnumSliceValue returns the size of wire encoding a [] value as a repeated Enum. +func sizeEnumSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(uint64(v.Enum())) + } + return size +} + +// appendEnumSliceValue encodes a [] value as a repeated Enum. +func appendEnumSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(v.Enum())) + } + return b, nil +} + +// consumeEnumSliceValue wire decodes a [] value as a repeated Enum. 
+func consumeEnumSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) + out.n = n + return listv, out, nil +} + +var coderEnumSliceValue = valueCoderFuncs{ + size: sizeEnumSliceValue, + marshal: appendEnumSliceValue, + unmarshal: consumeEnumSliceValue, + merge: mergeListValue, +} + +// sizeEnumPackedSliceValue returns the size of wire encoding a [] value as a packed repeated Enum. +func sizeEnumPackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(v.Enum())) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendEnumPackedSliceValue encodes a [] value as a packed repeated Enum. +func appendEnumPackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(v.Enum())) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, uint64(v.Enum())) + } + return b, nil +} + +var coderEnumPackedSliceValue = valueCoderFuncs{ + size: sizeEnumPackedSliceValue, + marshal: appendEnumPackedSliceValue, + unmarshal: consumeEnumSliceValue, + merge: mergeListValue, +} + +// sizeInt32 returns the size of wire encoding a int32 pointer as a Int32. +func sizeInt32(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int32() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt32 wire encodes a int32 pointer as a Int32. +func appendInt32(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeInt32 wire decodes a int32 pointer as a Int32. 
+func consumeInt32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Int32() = int32(v) + out.n = n + return out, nil +} + +var coderInt32 = pointerCoderFuncs{ + size: sizeInt32, + marshal: appendInt32, + unmarshal: consumeInt32, + merge: mergeInt32, +} + +// sizeInt32NoZero returns the size of wire encoding a int32 pointer as a Int32. +// The zero value is not encoded. +func sizeInt32NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int32() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt32NoZero wire encodes a int32 pointer as a Int32. +// The zero value is not encoded. +func appendInt32NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +var coderInt32NoZero = pointerCoderFuncs{ + size: sizeInt32NoZero, + marshal: appendInt32NoZero, + unmarshal: consumeInt32, + merge: mergeInt32NoZero, +} + +// sizeInt32Ptr returns the size of wire encoding a *int32 pointer as a Int32. +// It panics if the pointer is nil. +func sizeInt32Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Int32Ptr() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt32Ptr wire encodes a *int32 pointer as a Int32. +// It panics if the pointer is nil. +func appendInt32Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeInt32Ptr wire decodes a *int32 pointer as a Int32. +func consumeInt32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Int32Ptr() + if *vp == nil { + *vp = new(int32) + } + **vp = int32(v) + out.n = n + return out, nil +} + +var coderInt32Ptr = pointerCoderFuncs{ + size: sizeInt32Ptr, + marshal: appendInt32Ptr, + unmarshal: consumeInt32Ptr, + merge: mergeInt32Ptr, +} + +// sizeInt32Slice returns the size of wire encoding a []int32 pointer as a repeated Int32. +func sizeInt32Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(uint64(v)) + } + return size +} + +// appendInt32Slice encodes a []int32 pointer as a repeated Int32. 
+func appendInt32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +// consumeInt32Slice wire decodes a []int32 pointer as a repeated Int32. +func consumeInt32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, int32(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, int32(v)) + out.n = n + return out, nil +} + +var coderInt32Slice = pointerCoderFuncs{ + size: sizeInt32Slice, + marshal: appendInt32Slice, + unmarshal: consumeInt32Slice, + merge: mergeInt32Slice, +} + +// sizeInt32PackedSlice returns the size of wire encoding a []int32 pointer as a packed repeated Int32. +func sizeInt32PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendInt32PackedSlice encodes a []int32 pointer as a packed repeated Int32. +func appendInt32PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +var coderInt32PackedSlice = pointerCoderFuncs{ + size: sizeInt32PackedSlice, + marshal: appendInt32PackedSlice, + unmarshal: consumeInt32Slice, + merge: mergeInt32Slice, +} + +// sizeInt32Value returns the size of wire encoding a int32 value as a Int32. +func sizeInt32Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(uint64(int32(v.Int()))) +} + +// appendInt32Value encodes a int32 value as a Int32. +func appendInt32Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(int32(v.Int()))) + return b, nil +} + +// consumeInt32Value decodes a int32 value as a Int32. 
+func consumeInt32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt32(int32(v)), out, nil +} + +var coderInt32Value = valueCoderFuncs{ + size: sizeInt32Value, + marshal: appendInt32Value, + unmarshal: consumeInt32Value, + merge: mergeScalarValue, +} + +// sizeInt32SliceValue returns the size of wire encoding a []int32 value as a repeated Int32. +func sizeInt32SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(uint64(int32(v.Int()))) + } + return size +} + +// appendInt32SliceValue encodes a []int32 value as a repeated Int32. +func appendInt32SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(int32(v.Int()))) + } + return b, nil +} + +// consumeInt32SliceValue wire decodes a []int32 value as a repeated Int32. +func consumeInt32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + out.n = n + return listv, out, nil +} + +var coderInt32SliceValue = valueCoderFuncs{ + size: sizeInt32SliceValue, + marshal: appendInt32SliceValue, + unmarshal: consumeInt32SliceValue, + merge: mergeListValue, +} + +// sizeInt32PackedSliceValue returns the size of wire encoding a []int32 value as a packed repeated Int32. 
+func sizeInt32PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(int32(v.Int()))) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendInt32PackedSliceValue encodes a []int32 value as a packed repeated Int32. +func appendInt32PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(int32(v.Int()))) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, uint64(int32(v.Int()))) + } + return b, nil +} + +var coderInt32PackedSliceValue = valueCoderFuncs{ + size: sizeInt32PackedSliceValue, + marshal: appendInt32PackedSliceValue, + unmarshal: consumeInt32SliceValue, + merge: mergeListValue, +} + +// sizeSint32 returns the size of wire encoding a int32 pointer as a Sint32. +func sizeSint32(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int32() + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) +} + +// appendSint32 wire encodes a int32 pointer as a Sint32. +func appendSint32(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(v))) + return b, nil +} + +// consumeSint32 wire decodes a int32 pointer as a Sint32. +func consumeSint32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Int32() = int32(protowire.DecodeZigZag(v & math.MaxUint32)) + out.n = n + return out, nil +} + +var coderSint32 = pointerCoderFuncs{ + size: sizeSint32, + marshal: appendSint32, + unmarshal: consumeSint32, + merge: mergeInt32, +} + +// sizeSint32NoZero returns the size of wire encoding a int32 pointer as a Sint32. +// The zero value is not encoded. +func sizeSint32NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int32() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) +} + +// appendSint32NoZero wire encodes a int32 pointer as a Sint32. +// The zero value is not encoded. +func appendSint32NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(v))) + return b, nil +} + +var coderSint32NoZero = pointerCoderFuncs{ + size: sizeSint32NoZero, + marshal: appendSint32NoZero, + unmarshal: consumeSint32, + merge: mergeInt32NoZero, +} + +// sizeSint32Ptr returns the size of wire encoding a *int32 pointer as a Sint32. +// It panics if the pointer is nil. 
+func sizeSint32Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Int32Ptr() + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) +} + +// appendSint32Ptr wire encodes a *int32 pointer as a Sint32. +// It panics if the pointer is nil. +func appendSint32Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(v))) + return b, nil +} + +// consumeSint32Ptr wire decodes a *int32 pointer as a Sint32. +func consumeSint32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Int32Ptr() + if *vp == nil { + *vp = new(int32) + } + **vp = int32(protowire.DecodeZigZag(v & math.MaxUint32)) + out.n = n + return out, nil +} + +var coderSint32Ptr = pointerCoderFuncs{ + size: sizeSint32Ptr, + marshal: appendSint32Ptr, + unmarshal: consumeSint32Ptr, + merge: mergeInt32Ptr, +} + +// sizeSint32Slice returns the size of wire encoding a []int32 pointer as a repeated Sint32. +func sizeSint32Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) + } + return size +} + +// appendSint32Slice encodes a []int32 pointer as a repeated Sint32. +func appendSint32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(v))) + } + return b, nil +} + +// consumeSint32Slice wire decodes a []int32 pointer as a repeated Sint32. 
+func consumeSint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, int32(protowire.DecodeZigZag(v&math.MaxUint32))) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, int32(protowire.DecodeZigZag(v&math.MaxUint32))) + out.n = n + return out, nil +} + +var coderSint32Slice = pointerCoderFuncs{ + size: sizeSint32Slice, + marshal: appendSint32Slice, + unmarshal: consumeSint32Slice, + merge: mergeInt32Slice, +} + +// sizeSint32PackedSlice returns the size of wire encoding a []int32 pointer as a packed repeated Sint32. +func sizeSint32PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendSint32PackedSlice encodes a []int32 pointer as a packed repeated Sint32. +func appendSint32PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeZigZag(int64(v))) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(v))) + } + return b, nil +} + +var coderSint32PackedSlice = pointerCoderFuncs{ + size: sizeSint32PackedSlice, + marshal: appendSint32PackedSlice, + unmarshal: consumeSint32Slice, + merge: mergeInt32Slice, +} + +// sizeSint32Value returns the size of wire encoding a int32 value as a Sint32. +func sizeSint32Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(int32(v.Int())))) +} + +// appendSint32Value encodes a int32 value as a Sint32. +func appendSint32Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(int32(v.Int())))) + return b, nil +} + +// consumeSint32Value decodes a int32 value as a Sint32. 
+func consumeSint32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32))), out, nil +} + +var coderSint32Value = valueCoderFuncs{ + size: sizeSint32Value, + marshal: appendSint32Value, + unmarshal: consumeSint32Value, + merge: mergeScalarValue, +} + +// sizeSint32SliceValue returns the size of wire encoding a []int32 value as a repeated Sint32. +func sizeSint32SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(protowire.EncodeZigZag(int64(int32(v.Int())))) + } + return size +} + +// appendSint32SliceValue encodes a []int32 value as a repeated Sint32. +func appendSint32SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(int32(v.Int())))) + } + return b, nil +} + +// consumeSint32SliceValue wire decodes a []int32 value as a repeated Sint32. +func consumeSint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) + out.n = n + return listv, out, nil +} + +var coderSint32SliceValue = valueCoderFuncs{ + size: sizeSint32SliceValue, + marshal: appendSint32SliceValue, + unmarshal: consumeSint32SliceValue, + merge: mergeListValue, +} + +// sizeSint32PackedSliceValue returns the size of wire encoding a []int32 value as a packed repeated Sint32. 
+func sizeSint32PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeZigZag(int64(int32(v.Int())))) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendSint32PackedSliceValue encodes a []int32 value as a packed repeated Sint32. +func appendSint32PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeZigZag(int64(int32(v.Int())))) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(int32(v.Int())))) + } + return b, nil +} + +var coderSint32PackedSliceValue = valueCoderFuncs{ + size: sizeSint32PackedSliceValue, + marshal: appendSint32PackedSliceValue, + unmarshal: consumeSint32SliceValue, + merge: mergeListValue, +} + +// sizeUint32 returns the size of wire encoding a uint32 pointer as a Uint32. +func sizeUint32(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint32() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendUint32 wire encodes a uint32 pointer as a Uint32. +func appendUint32(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeUint32 wire decodes a uint32 pointer as a Uint32. +func consumeUint32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Uint32() = uint32(v) + out.n = n + return out, nil +} + +var coderUint32 = pointerCoderFuncs{ + size: sizeUint32, + marshal: appendUint32, + unmarshal: consumeUint32, + merge: mergeUint32, +} + +// sizeUint32NoZero returns the size of wire encoding a uint32 pointer as a Uint32. +// The zero value is not encoded. +func sizeUint32NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint32() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendUint32NoZero wire encodes a uint32 pointer as a Uint32. +// The zero value is not encoded. +func appendUint32NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint32() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +var coderUint32NoZero = pointerCoderFuncs{ + size: sizeUint32NoZero, + marshal: appendUint32NoZero, + unmarshal: consumeUint32, + merge: mergeUint32NoZero, +} + +// sizeUint32Ptr returns the size of wire encoding a *uint32 pointer as a Uint32. +// It panics if the pointer is nil. 
+func sizeUint32Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Uint32Ptr() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendUint32Ptr wire encodes a *uint32 pointer as a Uint32. +// It panics if the pointer is nil. +func appendUint32Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Uint32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeUint32Ptr wire decodes a *uint32 pointer as a Uint32. +func consumeUint32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Uint32Ptr() + if *vp == nil { + *vp = new(uint32) + } + **vp = uint32(v) + out.n = n + return out, nil +} + +var coderUint32Ptr = pointerCoderFuncs{ + size: sizeUint32Ptr, + marshal: appendUint32Ptr, + unmarshal: consumeUint32Ptr, + merge: mergeUint32Ptr, +} + +// sizeUint32Slice returns the size of wire encoding a []uint32 pointer as a repeated Uint32. +func sizeUint32Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint32Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(uint64(v)) + } + return size +} + +// appendUint32Slice encodes a []uint32 pointer as a repeated Uint32. +func appendUint32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +// consumeUint32Slice wire decodes a []uint32 pointer as a repeated Uint32. +func consumeUint32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Uint32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, uint32(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, uint32(v)) + out.n = n + return out, nil +} + +var coderUint32Slice = pointerCoderFuncs{ + size: sizeUint32Slice, + marshal: appendUint32Slice, + unmarshal: consumeUint32Slice, + merge: mergeUint32Slice, +} + +// sizeUint32PackedSlice returns the size of wire encoding a []uint32 pointer as a packed repeated Uint32. 
+func sizeUint32PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint32Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendUint32PackedSlice encodes a []uint32 pointer as a packed repeated Uint32. +func appendUint32PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +var coderUint32PackedSlice = pointerCoderFuncs{ + size: sizeUint32PackedSlice, + marshal: appendUint32PackedSlice, + unmarshal: consumeUint32Slice, + merge: mergeUint32Slice, +} + +// sizeUint32Value returns the size of wire encoding a uint32 value as a Uint32. +func sizeUint32Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(uint64(uint32(v.Uint()))) +} + +// appendUint32Value encodes a uint32 value as a Uint32. +func appendUint32Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(uint32(v.Uint()))) + return b, nil +} + +// consumeUint32Value decodes a uint32 value as a Uint32. +func consumeUint32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfUint32(uint32(v)), out, nil +} + +var coderUint32Value = valueCoderFuncs{ + size: sizeUint32Value, + marshal: appendUint32Value, + unmarshal: consumeUint32Value, + merge: mergeScalarValue, +} + +// sizeUint32SliceValue returns the size of wire encoding a []uint32 value as a repeated Uint32. +func sizeUint32SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(uint64(uint32(v.Uint()))) + } + return size +} + +// appendUint32SliceValue encodes a []uint32 value as a repeated Uint32. +func appendUint32SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(uint32(v.Uint()))) + } + return b, nil +} + +// consumeUint32SliceValue wire decodes a []uint32 value as a repeated Uint32. 
+func consumeUint32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + out.n = n + return listv, out, nil +} + +var coderUint32SliceValue = valueCoderFuncs{ + size: sizeUint32SliceValue, + marshal: appendUint32SliceValue, + unmarshal: consumeUint32SliceValue, + merge: mergeListValue, +} + +// sizeUint32PackedSliceValue returns the size of wire encoding a []uint32 value as a packed repeated Uint32. +func sizeUint32PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(uint32(v.Uint()))) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendUint32PackedSliceValue encodes a []uint32 value as a packed repeated Uint32. +func appendUint32PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(uint32(v.Uint()))) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, uint64(uint32(v.Uint()))) + } + return b, nil +} + +var coderUint32PackedSliceValue = valueCoderFuncs{ + size: sizeUint32PackedSliceValue, + marshal: appendUint32PackedSliceValue, + unmarshal: consumeUint32SliceValue, + merge: mergeListValue, +} + +// sizeInt64 returns the size of wire encoding a int64 pointer as a Int64. +func sizeInt64(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int64() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt64 wire encodes a int64 pointer as a Int64. +func appendInt64(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeInt64 wire decodes a int64 pointer as a Int64. 
+func consumeInt64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Int64() = int64(v) + out.n = n + return out, nil +} + +var coderInt64 = pointerCoderFuncs{ + size: sizeInt64, + marshal: appendInt64, + unmarshal: consumeInt64, + merge: mergeInt64, +} + +// sizeInt64NoZero returns the size of wire encoding a int64 pointer as a Int64. +// The zero value is not encoded. +func sizeInt64NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int64() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt64NoZero wire encodes a int64 pointer as a Int64. +// The zero value is not encoded. +func appendInt64NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +var coderInt64NoZero = pointerCoderFuncs{ + size: sizeInt64NoZero, + marshal: appendInt64NoZero, + unmarshal: consumeInt64, + merge: mergeInt64NoZero, +} + +// sizeInt64Ptr returns the size of wire encoding a *int64 pointer as a Int64. +// It panics if the pointer is nil. +func sizeInt64Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Int64Ptr() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +// appendInt64Ptr wire encodes a *int64 pointer as a Int64. +// It panics if the pointer is nil. +func appendInt64Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +// consumeInt64Ptr wire decodes a *int64 pointer as a Int64. +func consumeInt64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Int64Ptr() + if *vp == nil { + *vp = new(int64) + } + **vp = int64(v) + out.n = n + return out, nil +} + +var coderInt64Ptr = pointerCoderFuncs{ + size: sizeInt64Ptr, + marshal: appendInt64Ptr, + unmarshal: consumeInt64Ptr, + merge: mergeInt64Ptr, +} + +// sizeInt64Slice returns the size of wire encoding a []int64 pointer as a repeated Int64. +func sizeInt64Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(uint64(v)) + } + return size +} + +// appendInt64Slice encodes a []int64 pointer as a repeated Int64. 
+func appendInt64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +// consumeInt64Slice wire decodes a []int64 pointer as a repeated Int64. +func consumeInt64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, int64(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, int64(v)) + out.n = n + return out, nil +} + +var coderInt64Slice = pointerCoderFuncs{ + size: sizeInt64Slice, + marshal: appendInt64Slice, + unmarshal: consumeInt64Slice, + merge: mergeInt64Slice, +} + +// sizeInt64PackedSlice returns the size of wire encoding a []int64 pointer as a packed repeated Int64. +func sizeInt64PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendInt64PackedSlice encodes a []int64 pointer as a packed repeated Int64. +func appendInt64PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(uint64(v)) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, uint64(v)) + } + return b, nil +} + +var coderInt64PackedSlice = pointerCoderFuncs{ + size: sizeInt64PackedSlice, + marshal: appendInt64PackedSlice, + unmarshal: consumeInt64Slice, + merge: mergeInt64Slice, +} + +// sizeInt64Value returns the size of wire encoding a int64 value as a Int64. +func sizeInt64Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(uint64(v.Int())) +} + +// appendInt64Value encodes a int64 value as a Int64. +func appendInt64Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(v.Int())) + return b, nil +} + +// consumeInt64Value decodes a int64 value as a Int64. 
+func consumeInt64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt64(int64(v)), out, nil +} + +var coderInt64Value = valueCoderFuncs{ + size: sizeInt64Value, + marshal: appendInt64Value, + unmarshal: consumeInt64Value, + merge: mergeScalarValue, +} + +// sizeInt64SliceValue returns the size of wire encoding a []int64 value as a repeated Int64. +func sizeInt64SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(uint64(v.Int())) + } + return size +} + +// appendInt64SliceValue encodes a []int64 value as a repeated Int64. +func appendInt64SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, uint64(v.Int())) + } + return b, nil +} + +// consumeInt64SliceValue wire decodes a []int64 value as a repeated Int64. +func consumeInt64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + out.n = n + return listv, out, nil +} + +var coderInt64SliceValue = valueCoderFuncs{ + size: sizeInt64SliceValue, + marshal: appendInt64SliceValue, + unmarshal: consumeInt64SliceValue, + merge: mergeListValue, +} + +// sizeInt64PackedSliceValue returns the size of wire encoding a []int64 value as a packed repeated Int64. 
+func sizeInt64PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(v.Int())) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendInt64PackedSliceValue encodes a []int64 value as a packed repeated Int64. +func appendInt64PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(uint64(v.Int())) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, uint64(v.Int())) + } + return b, nil +} + +var coderInt64PackedSliceValue = valueCoderFuncs{ + size: sizeInt64PackedSliceValue, + marshal: appendInt64PackedSliceValue, + unmarshal: consumeInt64SliceValue, + merge: mergeListValue, +} + +// sizeSint64 returns the size of wire encoding a int64 pointer as a Sint64. +func sizeSint64(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int64() + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v)) +} + +// appendSint64 wire encodes a int64 pointer as a Sint64. +func appendSint64(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v)) + return b, nil +} + +// consumeSint64 wire decodes a int64 pointer as a Sint64. +func consumeSint64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Int64() = protowire.DecodeZigZag(v) + out.n = n + return out, nil +} + +var coderSint64 = pointerCoderFuncs{ + size: sizeSint64, + marshal: appendSint64, + unmarshal: consumeSint64, + merge: mergeInt64, +} + +// sizeSint64NoZero returns the size of wire encoding a int64 pointer as a Sint64. +// The zero value is not encoded. +func sizeSint64NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int64() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v)) +} + +// appendSint64NoZero wire encodes a int64 pointer as a Sint64. +// The zero value is not encoded. +func appendSint64NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v)) + return b, nil +} + +var coderSint64NoZero = pointerCoderFuncs{ + size: sizeSint64NoZero, + marshal: appendSint64NoZero, + unmarshal: consumeSint64, + merge: mergeInt64NoZero, +} + +// sizeSint64Ptr returns the size of wire encoding a *int64 pointer as a Sint64. +// It panics if the pointer is nil. 
+func sizeSint64Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Int64Ptr() + return f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v)) +} + +// appendSint64Ptr wire encodes a *int64 pointer as a Sint64. +// It panics if the pointer is nil. +func appendSint64Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v)) + return b, nil +} + +// consumeSint64Ptr wire decodes a *int64 pointer as a Sint64. +func consumeSint64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Int64Ptr() + if *vp == nil { + *vp = new(int64) + } + **vp = protowire.DecodeZigZag(v) + out.n = n + return out, nil +} + +var coderSint64Ptr = pointerCoderFuncs{ + size: sizeSint64Ptr, + marshal: appendSint64Ptr, + unmarshal: consumeSint64Ptr, + merge: mergeInt64Ptr, +} + +// sizeSint64Slice returns the size of wire encoding a []int64 pointer as a repeated Sint64. +func sizeSint64Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v)) + } + return size +} + +// appendSint64Slice encodes a []int64 pointer as a repeated Sint64. +func appendSint64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v)) + } + return b, nil +} + +// consumeSint64Slice wire decodes a []int64 pointer as a repeated Sint64. +func consumeSint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, protowire.DecodeZigZag(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, protowire.DecodeZigZag(v)) + out.n = n + return out, nil +} + +var coderSint64Slice = pointerCoderFuncs{ + size: sizeSint64Slice, + marshal: appendSint64Slice, + unmarshal: consumeSint64Slice, + merge: mergeInt64Slice, +} + +// sizeSint64PackedSlice returns the size of wire encoding a []int64 pointer as a packed repeated Sint64. 
+func sizeSint64PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeZigZag(v)) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendSint64PackedSlice encodes a []int64 pointer as a packed repeated Sint64. +func appendSint64PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(protowire.EncodeZigZag(v)) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v)) + } + return b, nil +} + +var coderSint64PackedSlice = pointerCoderFuncs{ + size: sizeSint64PackedSlice, + marshal: appendSint64PackedSlice, + unmarshal: consumeSint64Slice, + merge: mergeInt64Slice, +} + +// sizeSint64Value returns the size of wire encoding a int64 value as a Sint64. +func sizeSint64Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v.Int())) +} + +// appendSint64Value encodes a int64 value as a Sint64. +func appendSint64Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v.Int())) + return b, nil +} + +// consumeSint64Value decodes a int64 value as a Sint64. +func consumeSint64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt64(protowire.DecodeZigZag(v)), out, nil +} + +var coderSint64Value = valueCoderFuncs{ + size: sizeSint64Value, + marshal: appendSint64Value, + unmarshal: consumeSint64Value, + merge: mergeScalarValue, +} + +// sizeSint64SliceValue returns the size of wire encoding a []int64 value as a repeated Sint64. +func sizeSint64SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(protowire.EncodeZigZag(v.Int())) + } + return size +} + +// appendSint64SliceValue encodes a []int64 value as a repeated Sint64. +func appendSint64SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v.Int())) + } + return b, nil +} + +// consumeSint64SliceValue wire decodes a []int64 value as a repeated Sint64. 
+func consumeSint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) + out.n = n + return listv, out, nil +} + +var coderSint64SliceValue = valueCoderFuncs{ + size: sizeSint64SliceValue, + marshal: appendSint64SliceValue, + unmarshal: consumeSint64SliceValue, + merge: mergeListValue, +} + +// sizeSint64PackedSliceValue returns the size of wire encoding a []int64 value as a packed repeated Sint64. +func sizeSint64PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeZigZag(v.Int())) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendSint64PackedSliceValue encodes a []int64 value as a packed repeated Sint64. +func appendSint64PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(protowire.EncodeZigZag(v.Int())) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v.Int())) + } + return b, nil +} + +var coderSint64PackedSliceValue = valueCoderFuncs{ + size: sizeSint64PackedSliceValue, + marshal: appendSint64PackedSliceValue, + unmarshal: consumeSint64SliceValue, + merge: mergeListValue, +} + +// sizeUint64 returns the size of wire encoding a uint64 pointer as a Uint64. +func sizeUint64(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint64() + return f.tagsize + protowire.SizeVarint(v) +} + +// appendUint64 wire encodes a uint64 pointer as a Uint64. +func appendUint64(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, v) + return b, nil +} + +// consumeUint64 wire decodes a uint64 pointer as a Uint64. 
+func consumeUint64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *p.Uint64() = v + out.n = n + return out, nil +} + +var coderUint64 = pointerCoderFuncs{ + size: sizeUint64, + marshal: appendUint64, + unmarshal: consumeUint64, + merge: mergeUint64, +} + +// sizeUint64NoZero returns the size of wire encoding a uint64 pointer as a Uint64. +// The zero value is not encoded. +func sizeUint64NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint64() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeVarint(v) +} + +// appendUint64NoZero wire encodes a uint64 pointer as a Uint64. +// The zero value is not encoded. +func appendUint64NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint64() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, v) + return b, nil +} + +var coderUint64NoZero = pointerCoderFuncs{ + size: sizeUint64NoZero, + marshal: appendUint64NoZero, + unmarshal: consumeUint64, + merge: mergeUint64NoZero, +} + +// sizeUint64Ptr returns the size of wire encoding a *uint64 pointer as a Uint64. +// It panics if the pointer is nil. +func sizeUint64Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.Uint64Ptr() + return f.tagsize + protowire.SizeVarint(v) +} + +// appendUint64Ptr wire encodes a *uint64 pointer as a Uint64. +// It panics if the pointer is nil. +func appendUint64Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Uint64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, v) + return b, nil +} + +// consumeUint64Ptr wire decodes a *uint64 pointer as a Uint64. +func consumeUint64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + vp := p.Uint64Ptr() + if *vp == nil { + *vp = new(uint64) + } + **vp = v + out.n = n + return out, nil +} + +var coderUint64Ptr = pointerCoderFuncs{ + size: sizeUint64Ptr, + marshal: appendUint64Ptr, + unmarshal: consumeUint64Ptr, + merge: mergeUint64Ptr, +} + +// sizeUint64Slice returns the size of wire encoding a []uint64 pointer as a repeated Uint64. +func sizeUint64Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint64Slice() + for _, v := range s { + size += f.tagsize + protowire.SizeVarint(v) + } + return size +} + +// appendUint64Slice encodes a []uint64 pointer as a repeated Uint64. 
+func appendUint64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, v) + } + return b, nil +} + +// consumeUint64Slice wire decodes a []uint64 pointer as a repeated Uint64. +func consumeUint64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Uint64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + s = append(s, v) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return out, errDecode + } + *sp = append(*sp, v) + out.n = n + return out, nil +} + +var coderUint64Slice = pointerCoderFuncs{ + size: sizeUint64Slice, + marshal: appendUint64Slice, + unmarshal: consumeUint64Slice, + merge: mergeUint64Slice, +} + +// sizeUint64PackedSlice returns the size of wire encoding a []uint64 pointer as a packed repeated Uint64. +func sizeUint64PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint64Slice() + if len(s) == 0 { + return 0 + } + n := 0 + for _, v := range s { + n += protowire.SizeVarint(v) + } + return f.tagsize + protowire.SizeBytes(n) +} + +// appendUint64PackedSlice encodes a []uint64 pointer as a packed repeated Uint64. +func appendUint64PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for _, v := range s { + n += protowire.SizeVarint(v) + } + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendVarint(b, v) + } + return b, nil +} + +var coderUint64PackedSlice = pointerCoderFuncs{ + size: sizeUint64PackedSlice, + marshal: appendUint64PackedSlice, + unmarshal: consumeUint64Slice, + merge: mergeUint64Slice, +} + +// sizeUint64Value returns the size of wire encoding a uint64 value as a Uint64. +func sizeUint64Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeVarint(v.Uint()) +} + +// appendUint64Value encodes a uint64 value as a Uint64. +func appendUint64Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, v.Uint()) + return b, nil +} + +// consumeUint64Value decodes a uint64 value as a Uint64. 
+func consumeUint64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfUint64(v), out, nil +} + +var coderUint64Value = valueCoderFuncs{ + size: sizeUint64Value, + marshal: appendUint64Value, + unmarshal: consumeUint64Value, + merge: mergeScalarValue, +} + +// sizeUint64SliceValue returns the size of wire encoding a []uint64 value as a repeated Uint64. +func sizeUint64SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeVarint(v.Uint()) + } + return size +} + +// appendUint64SliceValue encodes a []uint64 value as a repeated Uint64. +func appendUint64SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendVarint(b, v.Uint()) + } + return b, nil +} + +// consumeUint64SliceValue wire decodes a []uint64 value as a repeated Uint64. +func consumeUint64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.VarintType { + return protoreflect.Value{}, out, errUnknown + } + var v uint64 + var n int + if len(b) >= 1 && b[0] < 0x80 { + v = uint64(b[0]) + n = 1 + } else if len(b) >= 2 && b[1] < 128 { + v = uint64(b[0]&0x7f) + uint64(b[1])<<7 + n = 2 + } else { + v, n = protowire.ConsumeVarint(b) + } + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + out.n = n + return listv, out, nil +} + +var coderUint64SliceValue = valueCoderFuncs{ + size: sizeUint64SliceValue, + marshal: appendUint64SliceValue, + unmarshal: consumeUint64SliceValue, + merge: mergeListValue, +} + +// sizeUint64PackedSliceValue returns the size of wire encoding a []uint64 value as a packed repeated Uint64. 
+func sizeUint64PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := 0 + for i, llen := 0, llen; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(v.Uint()) + } + return tagsize + protowire.SizeBytes(n) +} + +// appendUint64PackedSliceValue encodes a []uint64 value as a packed repeated Uint64. +func appendUint64PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := 0 + for i := 0; i < llen; i++ { + v := list.Get(i) + n += protowire.SizeVarint(v.Uint()) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, v.Uint()) + } + return b, nil +} + +var coderUint64PackedSliceValue = valueCoderFuncs{ + size: sizeUint64PackedSliceValue, + marshal: appendUint64PackedSliceValue, + unmarshal: consumeUint64SliceValue, + merge: mergeListValue, +} + +// sizeSfixed32 returns the size of wire encoding a int32 pointer as a Sfixed32. +func sizeSfixed32(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed32() +} + +// appendSfixed32 wire encodes a int32 pointer as a Sfixed32. +func appendSfixed32(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, uint32(v)) + return b, nil +} + +// consumeSfixed32 wire decodes a int32 pointer as a Sfixed32. +func consumeSfixed32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *p.Int32() = int32(v) + out.n = n + return out, nil +} + +var coderSfixed32 = pointerCoderFuncs{ + size: sizeSfixed32, + marshal: appendSfixed32, + unmarshal: consumeSfixed32, + merge: mergeInt32, +} + +// sizeSfixed32NoZero returns the size of wire encoding a int32 pointer as a Sfixed32. +// The zero value is not encoded. +func sizeSfixed32NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int32() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeFixed32() +} + +// appendSfixed32NoZero wire encodes a int32 pointer as a Sfixed32. +// The zero value is not encoded. +func appendSfixed32NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int32() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, uint32(v)) + return b, nil +} + +var coderSfixed32NoZero = pointerCoderFuncs{ + size: sizeSfixed32NoZero, + marshal: appendSfixed32NoZero, + unmarshal: consumeSfixed32, + merge: mergeInt32NoZero, +} + +// sizeSfixed32Ptr returns the size of wire encoding a *int32 pointer as a Sfixed32. +// It panics if the pointer is nil. +func sizeSfixed32Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed32() +} + +// appendSfixed32Ptr wire encodes a *int32 pointer as a Sfixed32. +// It panics if the pointer is nil. 
+func appendSfixed32Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, uint32(v)) + return b, nil +} + +// consumeSfixed32Ptr wire decodes a *int32 pointer as a Sfixed32. +func consumeSfixed32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + vp := p.Int32Ptr() + if *vp == nil { + *vp = new(int32) + } + **vp = int32(v) + out.n = n + return out, nil +} + +var coderSfixed32Ptr = pointerCoderFuncs{ + size: sizeSfixed32Ptr, + marshal: appendSfixed32Ptr, + unmarshal: consumeSfixed32Ptr, + merge: mergeInt32Ptr, +} + +// sizeSfixed32Slice returns the size of wire encoding a []int32 pointer as a repeated Sfixed32. +func sizeSfixed32Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed32()) + return size +} + +// appendSfixed32Slice encodes a []int32 pointer as a repeated Sfixed32. +func appendSfixed32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, uint32(v)) + } + return b, nil +} + +// consumeSfixed32Slice wire decodes a []int32 pointer as a repeated Sfixed32. +func consumeSfixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + s = append(s, int32(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, int32(v)) + out.n = n + return out, nil +} + +var coderSfixed32Slice = pointerCoderFuncs{ + size: sizeSfixed32Slice, + marshal: appendSfixed32Slice, + unmarshal: consumeSfixed32Slice, + merge: mergeInt32Slice, +} + +// sizeSfixed32PackedSlice returns the size of wire encoding a []int32 pointer as a packed repeated Sfixed32. +func sizeSfixed32PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int32Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed32() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendSfixed32PackedSlice encodes a []int32 pointer as a packed repeated Sfixed32. +func appendSfixed32PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed32(b, uint32(v)) + } + return b, nil +} + +var coderSfixed32PackedSlice = pointerCoderFuncs{ + size: sizeSfixed32PackedSlice, + marshal: appendSfixed32PackedSlice, + unmarshal: consumeSfixed32Slice, + merge: mergeInt32Slice, +} + +// sizeSfixed32Value returns the size of wire encoding a int32 value as a Sfixed32. 
+func sizeSfixed32Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed32() +} + +// appendSfixed32Value encodes a int32 value as a Sfixed32. +func appendSfixed32Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, uint32(v.Int())) + return b, nil +} + +// consumeSfixed32Value decodes a int32 value as a Sfixed32. +func consumeSfixed32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt32(int32(v)), out, nil +} + +var coderSfixed32Value = valueCoderFuncs{ + size: sizeSfixed32Value, + marshal: appendSfixed32Value, + unmarshal: consumeSfixed32Value, + merge: mergeScalarValue, +} + +// sizeSfixed32SliceValue returns the size of wire encoding a []int32 value as a repeated Sfixed32. +func sizeSfixed32SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed32()) + return size +} + +// appendSfixed32SliceValue encodes a []int32 value as a repeated Sfixed32. +func appendSfixed32SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, uint32(v.Int())) + } + return b, nil +} + +// consumeSfixed32SliceValue wire decodes a []int32 value as a repeated Sfixed32. +func consumeSfixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + out.n = n + return listv, out, nil +} + +var coderSfixed32SliceValue = valueCoderFuncs{ + size: sizeSfixed32SliceValue, + marshal: appendSfixed32SliceValue, + unmarshal: consumeSfixed32SliceValue, + merge: mergeListValue, +} + +// sizeSfixed32PackedSliceValue returns the size of wire encoding a []int32 value as a packed repeated Sfixed32. +func sizeSfixed32PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed32() + return tagsize + protowire.SizeBytes(n) +} + +// appendSfixed32PackedSliceValue encodes a []int32 value as a packed repeated Sfixed32. 
+func appendSfixed32PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed32(b, uint32(v.Int())) + } + return b, nil +} + +var coderSfixed32PackedSliceValue = valueCoderFuncs{ + size: sizeSfixed32PackedSliceValue, + marshal: appendSfixed32PackedSliceValue, + unmarshal: consumeSfixed32SliceValue, + merge: mergeListValue, +} + +// sizeFixed32 returns the size of wire encoding a uint32 pointer as a Fixed32. +func sizeFixed32(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed32() +} + +// appendFixed32 wire encodes a uint32 pointer as a Fixed32. +func appendFixed32(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, v) + return b, nil +} + +// consumeFixed32 wire decodes a uint32 pointer as a Fixed32. +func consumeFixed32(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *p.Uint32() = v + out.n = n + return out, nil +} + +var coderFixed32 = pointerCoderFuncs{ + size: sizeFixed32, + marshal: appendFixed32, + unmarshal: consumeFixed32, + merge: mergeUint32, +} + +// sizeFixed32NoZero returns the size of wire encoding a uint32 pointer as a Fixed32. +// The zero value is not encoded. +func sizeFixed32NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint32() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeFixed32() +} + +// appendFixed32NoZero wire encodes a uint32 pointer as a Fixed32. +// The zero value is not encoded. +func appendFixed32NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint32() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, v) + return b, nil +} + +var coderFixed32NoZero = pointerCoderFuncs{ + size: sizeFixed32NoZero, + marshal: appendFixed32NoZero, + unmarshal: consumeFixed32, + merge: mergeUint32NoZero, +} + +// sizeFixed32Ptr returns the size of wire encoding a *uint32 pointer as a Fixed32. +// It panics if the pointer is nil. +func sizeFixed32Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed32() +} + +// appendFixed32Ptr wire encodes a *uint32 pointer as a Fixed32. +// It panics if the pointer is nil. +func appendFixed32Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Uint32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, v) + return b, nil +} + +// consumeFixed32Ptr wire decodes a *uint32 pointer as a Fixed32. 
+func consumeFixed32Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + vp := p.Uint32Ptr() + if *vp == nil { + *vp = new(uint32) + } + **vp = v + out.n = n + return out, nil +} + +var coderFixed32Ptr = pointerCoderFuncs{ + size: sizeFixed32Ptr, + marshal: appendFixed32Ptr, + unmarshal: consumeFixed32Ptr, + merge: mergeUint32Ptr, +} + +// sizeFixed32Slice returns the size of wire encoding a []uint32 pointer as a repeated Fixed32. +func sizeFixed32Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint32Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed32()) + return size +} + +// appendFixed32Slice encodes a []uint32 pointer as a repeated Fixed32. +func appendFixed32Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, v) + } + return b, nil +} + +// consumeFixed32Slice wire decodes a []uint32 pointer as a repeated Fixed32. +func consumeFixed32Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Uint32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + s = append(s, v) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, v) + out.n = n + return out, nil +} + +var coderFixed32Slice = pointerCoderFuncs{ + size: sizeFixed32Slice, + marshal: appendFixed32Slice, + unmarshal: consumeFixed32Slice, + merge: mergeUint32Slice, +} + +// sizeFixed32PackedSlice returns the size of wire encoding a []uint32 pointer as a packed repeated Fixed32. +func sizeFixed32PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint32Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed32() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendFixed32PackedSlice encodes a []uint32 pointer as a packed repeated Fixed32. +func appendFixed32PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed32(b, v) + } + return b, nil +} + +var coderFixed32PackedSlice = pointerCoderFuncs{ + size: sizeFixed32PackedSlice, + marshal: appendFixed32PackedSlice, + unmarshal: consumeFixed32Slice, + merge: mergeUint32Slice, +} + +// sizeFixed32Value returns the size of wire encoding a uint32 value as a Fixed32. +func sizeFixed32Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed32() +} + +// appendFixed32Value encodes a uint32 value as a Fixed32. 
+func appendFixed32Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, uint32(v.Uint())) + return b, nil +} + +// consumeFixed32Value decodes a uint32 value as a Fixed32. +func consumeFixed32Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfUint32(uint32(v)), out, nil +} + +var coderFixed32Value = valueCoderFuncs{ + size: sizeFixed32Value, + marshal: appendFixed32Value, + unmarshal: consumeFixed32Value, + merge: mergeScalarValue, +} + +// sizeFixed32SliceValue returns the size of wire encoding a []uint32 value as a repeated Fixed32. +func sizeFixed32SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed32()) + return size +} + +// appendFixed32SliceValue encodes a []uint32 value as a repeated Fixed32. +func appendFixed32SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, uint32(v.Uint())) + } + return b, nil +} + +// consumeFixed32SliceValue wire decodes a []uint32 value as a repeated Fixed32. +func consumeFixed32SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + out.n = n + return listv, out, nil +} + +var coderFixed32SliceValue = valueCoderFuncs{ + size: sizeFixed32SliceValue, + marshal: appendFixed32SliceValue, + unmarshal: consumeFixed32SliceValue, + merge: mergeListValue, +} + +// sizeFixed32PackedSliceValue returns the size of wire encoding a []uint32 value as a packed repeated Fixed32. +func sizeFixed32PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed32() + return tagsize + protowire.SizeBytes(n) +} + +// appendFixed32PackedSliceValue encodes a []uint32 value as a packed repeated Fixed32. 
+func appendFixed32PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed32(b, uint32(v.Uint())) + } + return b, nil +} + +var coderFixed32PackedSliceValue = valueCoderFuncs{ + size: sizeFixed32PackedSliceValue, + marshal: appendFixed32PackedSliceValue, + unmarshal: consumeFixed32SliceValue, + merge: mergeListValue, +} + +// sizeFloat returns the size of wire encoding a float32 pointer as a Float. +func sizeFloat(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed32() +} + +// appendFloat wire encodes a float32 pointer as a Float. +func appendFloat(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Float32() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(v)) + return b, nil +} + +// consumeFloat wire decodes a float32 pointer as a Float. +func consumeFloat(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *p.Float32() = math.Float32frombits(v) + out.n = n + return out, nil +} + +var coderFloat = pointerCoderFuncs{ + size: sizeFloat, + marshal: appendFloat, + unmarshal: consumeFloat, + merge: mergeFloat32, +} + +// sizeFloatNoZero returns the size of wire encoding a float32 pointer as a Float. +// The zero value is not encoded. +func sizeFloatNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Float32() + if v == 0 && !math.Signbit(float64(v)) { + return 0 + } + return f.tagsize + protowire.SizeFixed32() +} + +// appendFloatNoZero wire encodes a float32 pointer as a Float. +// The zero value is not encoded. +func appendFloatNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Float32() + if v == 0 && !math.Signbit(float64(v)) { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(v)) + return b, nil +} + +var coderFloatNoZero = pointerCoderFuncs{ + size: sizeFloatNoZero, + marshal: appendFloatNoZero, + unmarshal: consumeFloat, + merge: mergeFloat32NoZero, +} + +// sizeFloatPtr returns the size of wire encoding a *float32 pointer as a Float. +// It panics if the pointer is nil. +func sizeFloatPtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed32() +} + +// appendFloatPtr wire encodes a *float32 pointer as a Float. +// It panics if the pointer is nil. +func appendFloatPtr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Float32Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(v)) + return b, nil +} + +// consumeFloatPtr wire decodes a *float32 pointer as a Float. 
+func consumeFloatPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + vp := p.Float32Ptr() + if *vp == nil { + *vp = new(float32) + } + **vp = math.Float32frombits(v) + out.n = n + return out, nil +} + +var coderFloatPtr = pointerCoderFuncs{ + size: sizeFloatPtr, + marshal: appendFloatPtr, + unmarshal: consumeFloatPtr, + merge: mergeFloat32Ptr, +} + +// sizeFloatSlice returns the size of wire encoding a []float32 pointer as a repeated Float. +func sizeFloatSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Float32Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed32()) + return size +} + +// appendFloatSlice encodes a []float32 pointer as a repeated Float. +func appendFloatSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Float32Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(v)) + } + return b, nil +} + +// consumeFloatSlice wire decodes a []float32 pointer as a repeated Float. +func consumeFloatSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Float32Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + s = append(s, math.Float32frombits(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed32Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, math.Float32frombits(v)) + out.n = n + return out, nil +} + +var coderFloatSlice = pointerCoderFuncs{ + size: sizeFloatSlice, + marshal: appendFloatSlice, + unmarshal: consumeFloatSlice, + merge: mergeFloat32Slice, +} + +// sizeFloatPackedSlice returns the size of wire encoding a []float32 pointer as a packed repeated Float. +func sizeFloatPackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Float32Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed32() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendFloatPackedSlice encodes a []float32 pointer as a packed repeated Float. +func appendFloatPackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Float32Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed32(b, math.Float32bits(v)) + } + return b, nil +} + +var coderFloatPackedSlice = pointerCoderFuncs{ + size: sizeFloatPackedSlice, + marshal: appendFloatPackedSlice, + unmarshal: consumeFloatSlice, + merge: mergeFloat32Slice, +} + +// sizeFloatValue returns the size of wire encoding a float32 value as a Float. +func sizeFloatValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed32() +} + +// appendFloatValue encodes a float32 value as a Float. 
+func appendFloatValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(float32(v.Float()))) + return b, nil +} + +// consumeFloatValue decodes a float32 value as a Float. +func consumeFloatValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v))), out, nil +} + +var coderFloatValue = valueCoderFuncs{ + size: sizeFloatValue, + marshal: appendFloatValue, + unmarshal: consumeFloatValue, + merge: mergeScalarValue, +} + +// sizeFloatSliceValue returns the size of wire encoding a []float32 value as a repeated Float. +func sizeFloatSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed32()) + return size +} + +// appendFloatSliceValue encodes a []float32 value as a repeated Float. +func appendFloatSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed32(b, math.Float32bits(float32(v.Float()))) + } + return b, nil +} + +// consumeFloatSliceValue wire decodes a []float32 value as a repeated Float. +func consumeFloatSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed32Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) + out.n = n + return listv, out, nil +} + +var coderFloatSliceValue = valueCoderFuncs{ + size: sizeFloatSliceValue, + marshal: appendFloatSliceValue, + unmarshal: consumeFloatSliceValue, + merge: mergeListValue, +} + +// sizeFloatPackedSliceValue returns the size of wire encoding a []float32 value as a packed repeated Float. +func sizeFloatPackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed32() + return tagsize + protowire.SizeBytes(n) +} + +// appendFloatPackedSliceValue encodes a []float32 value as a packed repeated Float. 
+func appendFloatPackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed32() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed32(b, math.Float32bits(float32(v.Float()))) + } + return b, nil +} + +var coderFloatPackedSliceValue = valueCoderFuncs{ + size: sizeFloatPackedSliceValue, + marshal: appendFloatPackedSliceValue, + unmarshal: consumeFloatSliceValue, + merge: mergeListValue, +} + +// sizeSfixed64 returns the size of wire encoding a int64 pointer as a Sfixed64. +func sizeSfixed64(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed64() +} + +// appendSfixed64 wire encodes a int64 pointer as a Sfixed64. +func appendSfixed64(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, uint64(v)) + return b, nil +} + +// consumeSfixed64 wire decodes a int64 pointer as a Sfixed64. +func consumeSfixed64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *p.Int64() = int64(v) + out.n = n + return out, nil +} + +var coderSfixed64 = pointerCoderFuncs{ + size: sizeSfixed64, + marshal: appendSfixed64, + unmarshal: consumeSfixed64, + merge: mergeInt64, +} + +// sizeSfixed64NoZero returns the size of wire encoding a int64 pointer as a Sfixed64. +// The zero value is not encoded. +func sizeSfixed64NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Int64() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeFixed64() +} + +// appendSfixed64NoZero wire encodes a int64 pointer as a Sfixed64. +// The zero value is not encoded. +func appendSfixed64NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Int64() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, uint64(v)) + return b, nil +} + +var coderSfixed64NoZero = pointerCoderFuncs{ + size: sizeSfixed64NoZero, + marshal: appendSfixed64NoZero, + unmarshal: consumeSfixed64, + merge: mergeInt64NoZero, +} + +// sizeSfixed64Ptr returns the size of wire encoding a *int64 pointer as a Sfixed64. +// It panics if the pointer is nil. +func sizeSfixed64Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed64() +} + +// appendSfixed64Ptr wire encodes a *int64 pointer as a Sfixed64. +// It panics if the pointer is nil. +func appendSfixed64Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Int64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, uint64(v)) + return b, nil +} + +// consumeSfixed64Ptr wire decodes a *int64 pointer as a Sfixed64. 
+func consumeSfixed64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + vp := p.Int64Ptr() + if *vp == nil { + *vp = new(int64) + } + **vp = int64(v) + out.n = n + return out, nil +} + +var coderSfixed64Ptr = pointerCoderFuncs{ + size: sizeSfixed64Ptr, + marshal: appendSfixed64Ptr, + unmarshal: consumeSfixed64Ptr, + merge: mergeInt64Ptr, +} + +// sizeSfixed64Slice returns the size of wire encoding a []int64 pointer as a repeated Sfixed64. +func sizeSfixed64Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed64()) + return size +} + +// appendSfixed64Slice encodes a []int64 pointer as a repeated Sfixed64. +func appendSfixed64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, uint64(v)) + } + return b, nil +} + +// consumeSfixed64Slice wire decodes a []int64 pointer as a repeated Sfixed64. +func consumeSfixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Int64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + s = append(s, int64(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, int64(v)) + out.n = n + return out, nil +} + +var coderSfixed64Slice = pointerCoderFuncs{ + size: sizeSfixed64Slice, + marshal: appendSfixed64Slice, + unmarshal: consumeSfixed64Slice, + merge: mergeInt64Slice, +} + +// sizeSfixed64PackedSlice returns the size of wire encoding a []int64 pointer as a packed repeated Sfixed64. +func sizeSfixed64PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Int64Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed64() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendSfixed64PackedSlice encodes a []int64 pointer as a packed repeated Sfixed64. +func appendSfixed64PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Int64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed64(b, uint64(v)) + } + return b, nil +} + +var coderSfixed64PackedSlice = pointerCoderFuncs{ + size: sizeSfixed64PackedSlice, + marshal: appendSfixed64PackedSlice, + unmarshal: consumeSfixed64Slice, + merge: mergeInt64Slice, +} + +// sizeSfixed64Value returns the size of wire encoding a int64 value as a Sfixed64. +func sizeSfixed64Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed64() +} + +// appendSfixed64Value encodes a int64 value as a Sfixed64. 
+func appendSfixed64Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, uint64(v.Int())) + return b, nil +} + +// consumeSfixed64Value decodes a int64 value as a Sfixed64. +func consumeSfixed64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfInt64(int64(v)), out, nil +} + +var coderSfixed64Value = valueCoderFuncs{ + size: sizeSfixed64Value, + marshal: appendSfixed64Value, + unmarshal: consumeSfixed64Value, + merge: mergeScalarValue, +} + +// sizeSfixed64SliceValue returns the size of wire encoding a []int64 value as a repeated Sfixed64. +func sizeSfixed64SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed64()) + return size +} + +// appendSfixed64SliceValue encodes a []int64 value as a repeated Sfixed64. +func appendSfixed64SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, uint64(v.Int())) + } + return b, nil +} + +// consumeSfixed64SliceValue wire decodes a []int64 value as a repeated Sfixed64. +func consumeSfixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + out.n = n + return listv, out, nil +} + +var coderSfixed64SliceValue = valueCoderFuncs{ + size: sizeSfixed64SliceValue, + marshal: appendSfixed64SliceValue, + unmarshal: consumeSfixed64SliceValue, + merge: mergeListValue, +} + +// sizeSfixed64PackedSliceValue returns the size of wire encoding a []int64 value as a packed repeated Sfixed64. +func sizeSfixed64PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed64() + return tagsize + protowire.SizeBytes(n) +} + +// appendSfixed64PackedSliceValue encodes a []int64 value as a packed repeated Sfixed64. 
+func appendSfixed64PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed64(b, uint64(v.Int())) + } + return b, nil +} + +var coderSfixed64PackedSliceValue = valueCoderFuncs{ + size: sizeSfixed64PackedSliceValue, + marshal: appendSfixed64PackedSliceValue, + unmarshal: consumeSfixed64SliceValue, + merge: mergeListValue, +} + +// sizeFixed64 returns the size of wire encoding a uint64 pointer as a Fixed64. +func sizeFixed64(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed64() +} + +// appendFixed64 wire encodes a uint64 pointer as a Fixed64. +func appendFixed64(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, v) + return b, nil +} + +// consumeFixed64 wire decodes a uint64 pointer as a Fixed64. +func consumeFixed64(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *p.Uint64() = v + out.n = n + return out, nil +} + +var coderFixed64 = pointerCoderFuncs{ + size: sizeFixed64, + marshal: appendFixed64, + unmarshal: consumeFixed64, + merge: mergeUint64, +} + +// sizeFixed64NoZero returns the size of wire encoding a uint64 pointer as a Fixed64. +// The zero value is not encoded. +func sizeFixed64NoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Uint64() + if v == 0 { + return 0 + } + return f.tagsize + protowire.SizeFixed64() +} + +// appendFixed64NoZero wire encodes a uint64 pointer as a Fixed64. +// The zero value is not encoded. +func appendFixed64NoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Uint64() + if v == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, v) + return b, nil +} + +var coderFixed64NoZero = pointerCoderFuncs{ + size: sizeFixed64NoZero, + marshal: appendFixed64NoZero, + unmarshal: consumeFixed64, + merge: mergeUint64NoZero, +} + +// sizeFixed64Ptr returns the size of wire encoding a *uint64 pointer as a Fixed64. +// It panics if the pointer is nil. +func sizeFixed64Ptr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed64() +} + +// appendFixed64Ptr wire encodes a *uint64 pointer as a Fixed64. +// It panics if the pointer is nil. +func appendFixed64Ptr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Uint64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, v) + return b, nil +} + +// consumeFixed64Ptr wire decodes a *uint64 pointer as a Fixed64. 
+func consumeFixed64Ptr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + vp := p.Uint64Ptr() + if *vp == nil { + *vp = new(uint64) + } + **vp = v + out.n = n + return out, nil +} + +var coderFixed64Ptr = pointerCoderFuncs{ + size: sizeFixed64Ptr, + marshal: appendFixed64Ptr, + unmarshal: consumeFixed64Ptr, + merge: mergeUint64Ptr, +} + +// sizeFixed64Slice returns the size of wire encoding a []uint64 pointer as a repeated Fixed64. +func sizeFixed64Slice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint64Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed64()) + return size +} + +// appendFixed64Slice encodes a []uint64 pointer as a repeated Fixed64. +func appendFixed64Slice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, v) + } + return b, nil +} + +// consumeFixed64Slice wire decodes a []uint64 pointer as a repeated Fixed64. +func consumeFixed64Slice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Uint64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + s = append(s, v) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, v) + out.n = n + return out, nil +} + +var coderFixed64Slice = pointerCoderFuncs{ + size: sizeFixed64Slice, + marshal: appendFixed64Slice, + unmarshal: consumeFixed64Slice, + merge: mergeUint64Slice, +} + +// sizeFixed64PackedSlice returns the size of wire encoding a []uint64 pointer as a packed repeated Fixed64. +func sizeFixed64PackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Uint64Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed64() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendFixed64PackedSlice encodes a []uint64 pointer as a packed repeated Fixed64. +func appendFixed64PackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Uint64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed64(b, v) + } + return b, nil +} + +var coderFixed64PackedSlice = pointerCoderFuncs{ + size: sizeFixed64PackedSlice, + marshal: appendFixed64PackedSlice, + unmarshal: consumeFixed64Slice, + merge: mergeUint64Slice, +} + +// sizeFixed64Value returns the size of wire encoding a uint64 value as a Fixed64. +func sizeFixed64Value(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed64() +} + +// appendFixed64Value encodes a uint64 value as a Fixed64. 
+func appendFixed64Value(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, v.Uint()) + return b, nil +} + +// consumeFixed64Value decodes a uint64 value as a Fixed64. +func consumeFixed64Value(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfUint64(v), out, nil +} + +var coderFixed64Value = valueCoderFuncs{ + size: sizeFixed64Value, + marshal: appendFixed64Value, + unmarshal: consumeFixed64Value, + merge: mergeScalarValue, +} + +// sizeFixed64SliceValue returns the size of wire encoding a []uint64 value as a repeated Fixed64. +func sizeFixed64SliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed64()) + return size +} + +// appendFixed64SliceValue encodes a []uint64 value as a repeated Fixed64. +func appendFixed64SliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, v.Uint()) + } + return b, nil +} + +// consumeFixed64SliceValue wire decodes a []uint64 value as a repeated Fixed64. +func consumeFixed64SliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + out.n = n + return listv, out, nil +} + +var coderFixed64SliceValue = valueCoderFuncs{ + size: sizeFixed64SliceValue, + marshal: appendFixed64SliceValue, + unmarshal: consumeFixed64SliceValue, + merge: mergeListValue, +} + +// sizeFixed64PackedSliceValue returns the size of wire encoding a []uint64 value as a packed repeated Fixed64. +func sizeFixed64PackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed64() + return tagsize + protowire.SizeBytes(n) +} + +// appendFixed64PackedSliceValue encodes a []uint64 value as a packed repeated Fixed64. 
+func appendFixed64PackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed64(b, v.Uint()) + } + return b, nil +} + +var coderFixed64PackedSliceValue = valueCoderFuncs{ + size: sizeFixed64PackedSliceValue, + marshal: appendFixed64PackedSliceValue, + unmarshal: consumeFixed64SliceValue, + merge: mergeListValue, +} + +// sizeDouble returns the size of wire encoding a float64 pointer as a Double. +func sizeDouble(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + + return f.tagsize + protowire.SizeFixed64() +} + +// appendDouble wire encodes a float64 pointer as a Double. +func appendDouble(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Float64() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v)) + return b, nil +} + +// consumeDouble wire decodes a float64 pointer as a Double. +func consumeDouble(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *p.Float64() = math.Float64frombits(v) + out.n = n + return out, nil +} + +var coderDouble = pointerCoderFuncs{ + size: sizeDouble, + marshal: appendDouble, + unmarshal: consumeDouble, + merge: mergeFloat64, +} + +// sizeDoubleNoZero returns the size of wire encoding a float64 pointer as a Double. +// The zero value is not encoded. +func sizeDoubleNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Float64() + if v == 0 && !math.Signbit(float64(v)) { + return 0 + } + return f.tagsize + protowire.SizeFixed64() +} + +// appendDoubleNoZero wire encodes a float64 pointer as a Double. +// The zero value is not encoded. +func appendDoubleNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Float64() + if v == 0 && !math.Signbit(float64(v)) { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v)) + return b, nil +} + +var coderDoubleNoZero = pointerCoderFuncs{ + size: sizeDoubleNoZero, + marshal: appendDoubleNoZero, + unmarshal: consumeDouble, + merge: mergeFloat64NoZero, +} + +// sizeDoublePtr returns the size of wire encoding a *float64 pointer as a Double. +// It panics if the pointer is nil. +func sizeDoublePtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return f.tagsize + protowire.SizeFixed64() +} + +// appendDoublePtr wire encodes a *float64 pointer as a Double. +// It panics if the pointer is nil. +func appendDoublePtr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.Float64Ptr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v)) + return b, nil +} + +// consumeDoublePtr wire decodes a *float64 pointer as a Double. 
+func consumeDoublePtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + vp := p.Float64Ptr() + if *vp == nil { + *vp = new(float64) + } + **vp = math.Float64frombits(v) + out.n = n + return out, nil +} + +var coderDoublePtr = pointerCoderFuncs{ + size: sizeDoublePtr, + marshal: appendDoublePtr, + unmarshal: consumeDoublePtr, + merge: mergeFloat64Ptr, +} + +// sizeDoubleSlice returns the size of wire encoding a []float64 pointer as a repeated Double. +func sizeDoubleSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Float64Slice() + size = len(s) * (f.tagsize + protowire.SizeFixed64()) + return size +} + +// appendDoubleSlice encodes a []float64 pointer as a repeated Double. +func appendDoubleSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Float64Slice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v)) + } + return b, nil +} + +// consumeDoubleSlice wire decodes a []float64 pointer as a repeated Double. +func consumeDoubleSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.Float64Slice() + if wtyp == protowire.BytesType { + s := *sp + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + s = append(s, math.Float64frombits(v)) + b = b[n:] + } + *sp = s + out.n = n + return out, nil + } + if wtyp != protowire.Fixed64Type { + return out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, math.Float64frombits(v)) + out.n = n + return out, nil +} + +var coderDoubleSlice = pointerCoderFuncs{ + size: sizeDoubleSlice, + marshal: appendDoubleSlice, + unmarshal: consumeDoubleSlice, + merge: mergeFloat64Slice, +} + +// sizeDoublePackedSlice returns the size of wire encoding a []float64 pointer as a packed repeated Double. +func sizeDoublePackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.Float64Slice() + if len(s) == 0 { + return 0 + } + n := len(s) * protowire.SizeFixed64() + return f.tagsize + protowire.SizeBytes(n) +} + +// appendDoublePackedSlice encodes a []float64 pointer as a packed repeated Double. +func appendDoublePackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.Float64Slice() + if len(s) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := len(s) * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for _, v := range s { + b = protowire.AppendFixed64(b, math.Float64bits(v)) + } + return b, nil +} + +var coderDoublePackedSlice = pointerCoderFuncs{ + size: sizeDoublePackedSlice, + marshal: appendDoublePackedSlice, + unmarshal: consumeDoubleSlice, + merge: mergeFloat64Slice, +} + +// sizeDoubleValue returns the size of wire encoding a float64 value as a Double. +func sizeDoubleValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeFixed64() +} + +// appendDoubleValue encodes a float64 value as a Double. 
+func appendDoubleValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v.Float())) + return b, nil +} + +// consumeDoubleValue decodes a float64 value as a Double. +func consumeDoubleValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfFloat64(math.Float64frombits(v)), out, nil +} + +var coderDoubleValue = valueCoderFuncs{ + size: sizeDoubleValue, + marshal: appendDoubleValue, + unmarshal: consumeDoubleValue, + merge: mergeScalarValue, +} + +// sizeDoubleSliceValue returns the size of wire encoding a []float64 value as a repeated Double. +func sizeDoubleSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + size = list.Len() * (tagsize + protowire.SizeFixed64()) + return size +} + +// appendDoubleSliceValue encodes a []float64 value as a repeated Double. +func appendDoubleSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendFixed64(b, math.Float64bits(v.Float())) + } + return b, nil +} + +// consumeDoubleSliceValue wire decodes a []float64 value as a repeated Double. +func consumeDoubleSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + for len(b) > 0 { + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) + b = b[n:] + } + out.n = n + return listv, out, nil + } + if wtyp != protowire.Fixed64Type { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) + out.n = n + return listv, out, nil +} + +var coderDoubleSliceValue = valueCoderFuncs{ + size: sizeDoubleSliceValue, + marshal: appendDoubleSliceValue, + unmarshal: consumeDoubleSliceValue, + merge: mergeListValue, +} + +// sizeDoublePackedSliceValue returns the size of wire encoding a []float64 value as a packed repeated Double. +func sizeDoublePackedSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return 0 + } + n := llen * protowire.SizeFixed64() + return tagsize + protowire.SizeBytes(n) +} + +// appendDoublePackedSliceValue encodes a []float64 value as a packed repeated Double. 
+func appendDoublePackedSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + llen := list.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, wiretag) + n := llen * protowire.SizeFixed64() + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + v := list.Get(i) + b = protowire.AppendFixed64(b, math.Float64bits(v.Float())) + } + return b, nil +} + +var coderDoublePackedSliceValue = valueCoderFuncs{ + size: sizeDoublePackedSliceValue, + marshal: appendDoublePackedSliceValue, + unmarshal: consumeDoubleSliceValue, + merge: mergeListValue, +} + +// sizeString returns the size of wire encoding a string pointer as a String. +func sizeString(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.String() + return f.tagsize + protowire.SizeBytes(len(v)) +} + +// appendString wire encodes a string pointer as a String. +func appendString(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.String() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + return b, nil +} + +// consumeString wire decodes a string pointer as a String. +func consumeString(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + *p.String() = string(v) + out.n = n + return out, nil +} + +var coderString = pointerCoderFuncs{ + size: sizeString, + marshal: appendString, + unmarshal: consumeString, + merge: mergeString, +} + +// appendStringValidateUTF8 wire encodes a string pointer as a String. +func appendStringValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.String() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + if !utf8.ValidString(v) { + return b, errInvalidUTF8{} + } + return b, nil +} + +// consumeStringValidateUTF8 wire decodes a string pointer as a String. +func consumeStringValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + *p.String() = string(v) + out.n = n + return out, nil +} + +var coderStringValidateUTF8 = pointerCoderFuncs{ + size: sizeString, + marshal: appendStringValidateUTF8, + unmarshal: consumeStringValidateUTF8, + merge: mergeString, +} + +// sizeStringNoZero returns the size of wire encoding a string pointer as a String. +// The zero value is not encoded. +func sizeStringNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.String() + if len(v) == 0 { + return 0 + } + return f.tagsize + protowire.SizeBytes(len(v)) +} + +// appendStringNoZero wire encodes a string pointer as a String. +// The zero value is not encoded. 
+func appendStringNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.String() + if len(v) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + return b, nil +} + +var coderStringNoZero = pointerCoderFuncs{ + size: sizeStringNoZero, + marshal: appendStringNoZero, + unmarshal: consumeString, + merge: mergeStringNoZero, +} + +// appendStringNoZeroValidateUTF8 wire encodes a string pointer as a String. +// The zero value is not encoded. +func appendStringNoZeroValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.String() + if len(v) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + if !utf8.ValidString(v) { + return b, errInvalidUTF8{} + } + return b, nil +} + +var coderStringNoZeroValidateUTF8 = pointerCoderFuncs{ + size: sizeStringNoZero, + marshal: appendStringNoZeroValidateUTF8, + unmarshal: consumeStringValidateUTF8, + merge: mergeStringNoZero, +} + +// sizeStringPtr returns the size of wire encoding a *string pointer as a String. +// It panics if the pointer is nil. +func sizeStringPtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := **p.StringPtr() + return f.tagsize + protowire.SizeBytes(len(v)) +} + +// appendStringPtr wire encodes a *string pointer as a String. +// It panics if the pointer is nil. +func appendStringPtr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.StringPtr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + return b, nil +} + +// consumeStringPtr wire decodes a *string pointer as a String. +func consumeStringPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + vp := p.StringPtr() + if *vp == nil { + *vp = new(string) + } + **vp = string(v) + out.n = n + return out, nil +} + +var coderStringPtr = pointerCoderFuncs{ + size: sizeStringPtr, + marshal: appendStringPtr, + unmarshal: consumeStringPtr, + merge: mergeStringPtr, +} + +// appendStringPtrValidateUTF8 wire encodes a *string pointer as a String. +// It panics if the pointer is nil. +func appendStringPtrValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := **p.StringPtr() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + if !utf8.ValidString(v) { + return b, errInvalidUTF8{} + } + return b, nil +} + +// consumeStringPtrValidateUTF8 wire decodes a *string pointer as a String. +func consumeStringPtrValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + vp := p.StringPtr() + if *vp == nil { + *vp = new(string) + } + **vp = string(v) + out.n = n + return out, nil +} + +var coderStringPtrValidateUTF8 = pointerCoderFuncs{ + size: sizeStringPtr, + marshal: appendStringPtrValidateUTF8, + unmarshal: consumeStringPtrValidateUTF8, + merge: mergeStringPtr, +} + +// sizeStringSlice returns the size of wire encoding a []string pointer as a repeated String. 
+func sizeStringSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.StringSlice() + for _, v := range s { + size += f.tagsize + protowire.SizeBytes(len(v)) + } + return size +} + +// appendStringSlice encodes a []string pointer as a repeated String. +func appendStringSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.StringSlice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + } + return b, nil +} + +// consumeStringSlice wire decodes a []string pointer as a repeated String. +func consumeStringSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.StringSlice() + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, string(v)) + out.n = n + return out, nil +} + +var coderStringSlice = pointerCoderFuncs{ + size: sizeStringSlice, + marshal: appendStringSlice, + unmarshal: consumeStringSlice, + merge: mergeStringSlice, +} + +// appendStringSliceValidateUTF8 encodes a []string pointer as a repeated String. +func appendStringSliceValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.StringSlice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendString(b, v) + if !utf8.ValidString(v) { + return b, errInvalidUTF8{} + } + } + return b, nil +} + +// consumeStringSliceValidateUTF8 wire decodes a []string pointer as a repeated String. +func consumeStringSliceValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + sp := p.StringSlice() + *sp = append(*sp, string(v)) + out.n = n + return out, nil +} + +var coderStringSliceValidateUTF8 = pointerCoderFuncs{ + size: sizeStringSlice, + marshal: appendStringSliceValidateUTF8, + unmarshal: consumeStringSliceValidateUTF8, + merge: mergeStringSlice, +} + +// sizeStringValue returns the size of wire encoding a string value as a String. +func sizeStringValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeBytes(len(v.String())) +} + +// appendStringValue encodes a string value as a String. +func appendStringValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendString(b, v.String()) + return b, nil +} + +// consumeStringValue decodes a string value as a String. +func consumeStringValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfString(string(v)), out, nil +} + +var coderStringValue = valueCoderFuncs{ + size: sizeStringValue, + marshal: appendStringValue, + unmarshal: consumeStringValue, + merge: mergeScalarValue, +} + +// appendStringValueValidateUTF8 encodes a string value as a String. 
+func appendStringValueValidateUTF8(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendString(b, v.String()) + if !utf8.ValidString(v.String()) { + return b, errInvalidUTF8{} + } + return b, nil +} + +// consumeStringValueValidateUTF8 decodes a string value as a String. +func consumeStringValueValidateUTF8(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + if !utf8.Valid(v) { + return protoreflect.Value{}, out, errInvalidUTF8{} + } + out.n = n + return protoreflect.ValueOfString(string(v)), out, nil +} + +var coderStringValueValidateUTF8 = valueCoderFuncs{ + size: sizeStringValue, + marshal: appendStringValueValidateUTF8, + unmarshal: consumeStringValueValidateUTF8, + merge: mergeScalarValue, +} + +// sizeStringSliceValue returns the size of wire encoding a []string value as a repeated String. +func sizeStringSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeBytes(len(v.String())) + } + return size +} + +// appendStringSliceValue encodes a []string value as a repeated String. +func appendStringSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendString(b, v.String()) + } + return b, nil +} + +// consumeStringSliceValue wire decodes a []string value as a repeated String. +func consumeStringSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp != protowire.BytesType { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfString(string(v))) + out.n = n + return listv, out, nil +} + +var coderStringSliceValue = valueCoderFuncs{ + size: sizeStringSliceValue, + marshal: appendStringSliceValue, + unmarshal: consumeStringSliceValue, + merge: mergeListValue, +} + +// sizeBytes returns the size of wire encoding a []byte pointer as a Bytes. +func sizeBytes(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Bytes() + return f.tagsize + protowire.SizeBytes(len(v)) +} + +// appendBytes wire encodes a []byte pointer as a Bytes. +func appendBytes(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bytes() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + return b, nil +} + +// consumeBytes wire decodes a []byte pointer as a Bytes. +func consumeBytes(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + *p.Bytes() = append(emptyBuf[:], v...) 
+ out.n = n + return out, nil +} + +var coderBytes = pointerCoderFuncs{ + size: sizeBytes, + marshal: appendBytes, + unmarshal: consumeBytes, + merge: mergeBytes, +} + +// appendBytesValidateUTF8 wire encodes a []byte pointer as a Bytes. +func appendBytesValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bytes() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + if !utf8.Valid(v) { + return b, errInvalidUTF8{} + } + return b, nil +} + +// consumeBytesValidateUTF8 wire decodes a []byte pointer as a Bytes. +func consumeBytesValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + *p.Bytes() = append(emptyBuf[:], v...) + out.n = n + return out, nil +} + +var coderBytesValidateUTF8 = pointerCoderFuncs{ + size: sizeBytes, + marshal: appendBytesValidateUTF8, + unmarshal: consumeBytesValidateUTF8, + merge: mergeBytes, +} + +// sizeBytesNoZero returns the size of wire encoding a []byte pointer as a Bytes. +// The zero value is not encoded. +func sizeBytesNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + v := *p.Bytes() + if len(v) == 0 { + return 0 + } + return f.tagsize + protowire.SizeBytes(len(v)) +} + +// appendBytesNoZero wire encodes a []byte pointer as a Bytes. +// The zero value is not encoded. +func appendBytesNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bytes() + if len(v) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + return b, nil +} + +// consumeBytesNoZero wire decodes a []byte pointer as a Bytes. +// The zero value is not decoded. +func consumeBytesNoZero(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + *p.Bytes() = append(([]byte)(nil), v...) + out.n = n + return out, nil +} + +var coderBytesNoZero = pointerCoderFuncs{ + size: sizeBytesNoZero, + marshal: appendBytesNoZero, + unmarshal: consumeBytesNoZero, + merge: mergeBytesNoZero, +} + +// appendBytesNoZeroValidateUTF8 wire encodes a []byte pointer as a Bytes. +// The zero value is not encoded. +func appendBytesNoZeroValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := *p.Bytes() + if len(v) == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + if !utf8.Valid(v) { + return b, errInvalidUTF8{} + } + return b, nil +} + +// consumeBytesNoZeroValidateUTF8 wire decodes a []byte pointer as a Bytes. +func consumeBytesNoZeroValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + *p.Bytes() = append(([]byte)(nil), v...) 
+ out.n = n + return out, nil +} + +var coderBytesNoZeroValidateUTF8 = pointerCoderFuncs{ + size: sizeBytesNoZero, + marshal: appendBytesNoZeroValidateUTF8, + unmarshal: consumeBytesNoZeroValidateUTF8, + merge: mergeBytesNoZero, +} + +// sizeBytesSlice returns the size of wire encoding a [][]byte pointer as a repeated Bytes. +func sizeBytesSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := *p.BytesSlice() + for _, v := range s { + size += f.tagsize + protowire.SizeBytes(len(v)) + } + return size +} + +// appendBytesSlice encodes a [][]byte pointer as a repeated Bytes. +func appendBytesSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.BytesSlice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + } + return b, nil +} + +// consumeBytesSlice wire decodes a [][]byte pointer as a repeated Bytes. +func consumeBytesSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + sp := p.BytesSlice() + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + *sp = append(*sp, append(emptyBuf[:], v...)) + out.n = n + return out, nil +} + +var coderBytesSlice = pointerCoderFuncs{ + size: sizeBytesSlice, + marshal: appendBytesSlice, + unmarshal: consumeBytesSlice, + merge: mergeBytesSlice, +} + +// appendBytesSliceValidateUTF8 encodes a [][]byte pointer as a repeated Bytes. +func appendBytesSliceValidateUTF8(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := *p.BytesSlice() + for _, v := range s { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendBytes(b, v) + if !utf8.Valid(v) { + return b, errInvalidUTF8{} + } + } + return b, nil +} + +// consumeBytesSliceValidateUTF8 wire decodes a [][]byte pointer as a repeated Bytes. +func consumeBytesSliceValidateUTF8(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + if !utf8.Valid(v) { + return out, errInvalidUTF8{} + } + sp := p.BytesSlice() + *sp = append(*sp, append(emptyBuf[:], v...)) + out.n = n + return out, nil +} + +var coderBytesSliceValidateUTF8 = pointerCoderFuncs{ + size: sizeBytesSlice, + marshal: appendBytesSliceValidateUTF8, + unmarshal: consumeBytesSliceValidateUTF8, + merge: mergeBytesSlice, +} + +// sizeBytesValue returns the size of wire encoding a []byte value as a Bytes. +func sizeBytesValue(v protoreflect.Value, tagsize int, opts marshalOptions) int { + return tagsize + protowire.SizeBytes(len(v.Bytes())) +} + +// appendBytesValue encodes a []byte value as a Bytes. +func appendBytesValue(b []byte, v protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendBytes(b, v.Bytes()) + return b, nil +} + +// consumeBytesValue decodes a []byte value as a Bytes. 
+func consumeBytesValue(b []byte, _ protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + out.n = n + return protoreflect.ValueOfBytes(append(emptyBuf[:], v...)), out, nil +} + +var coderBytesValue = valueCoderFuncs{ + size: sizeBytesValue, + marshal: appendBytesValue, + unmarshal: consumeBytesValue, + merge: mergeBytesValue, +} + +// sizeBytesSliceValue returns the size of wire encoding a [][]byte value as a repeated Bytes. +func sizeBytesSliceValue(listv protoreflect.Value, tagsize int, opts marshalOptions) (size int) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + size += tagsize + protowire.SizeBytes(len(v.Bytes())) + } + return size +} + +// appendBytesSliceValue encodes a [][]byte value as a repeated Bytes. +func appendBytesSliceValue(b []byte, listv protoreflect.Value, wiretag uint64, opts marshalOptions) ([]byte, error) { + list := listv.List() + for i, llen := 0, list.Len(); i < llen; i++ { + v := list.Get(i) + b = protowire.AppendVarint(b, wiretag) + b = protowire.AppendBytes(b, v.Bytes()) + } + return b, nil +} + +// consumeBytesSliceValue wire decodes a [][]byte value as a repeated Bytes. +func consumeBytesSliceValue(b []byte, listv protoreflect.Value, _ protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (_ protoreflect.Value, out unmarshalOutput, err error) { + list := listv.List() + if wtyp != protowire.BytesType { + return protoreflect.Value{}, out, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return protoreflect.Value{}, out, errDecode + } + list.Append(protoreflect.ValueOfBytes(append(emptyBuf[:], v...))) + out.n = n + return listv, out, nil +} + +var coderBytesSliceValue = valueCoderFuncs{ + size: sizeBytesSliceValue, + marshal: appendBytesSliceValue, + unmarshal: consumeBytesSliceValue, + merge: mergeBytesListValue, +} + +// We append to an empty array rather than a nil []byte to get non-nil zero-length byte slices. +var emptyBuf [0]byte + +var wireTypes = map[protoreflect.Kind]protowire.Type{ + protoreflect.BoolKind: protowire.VarintType, + protoreflect.EnumKind: protowire.VarintType, + protoreflect.Int32Kind: protowire.VarintType, + protoreflect.Sint32Kind: protowire.VarintType, + protoreflect.Uint32Kind: protowire.VarintType, + protoreflect.Int64Kind: protowire.VarintType, + protoreflect.Sint64Kind: protowire.VarintType, + protoreflect.Uint64Kind: protowire.VarintType, + protoreflect.Sfixed32Kind: protowire.Fixed32Type, + protoreflect.Fixed32Kind: protowire.Fixed32Type, + protoreflect.FloatKind: protowire.Fixed32Type, + protoreflect.Sfixed64Kind: protowire.Fixed64Type, + protoreflect.Fixed64Kind: protowire.Fixed64Type, + protoreflect.DoubleKind: protowire.Fixed64Type, + protoreflect.StringKind: protowire.BytesType, + protoreflect.BytesKind: protowire.BytesType, + protoreflect.MessageKind: protowire.BytesType, + protoreflect.GroupKind: protowire.StartGroupType, +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_map.go b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go new file mode 100644 index 00000000..c1245fef --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_map.go @@ -0,0 +1,388 @@ +// Copyright 2019 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package impl
+
+import (
+ "reflect"
+ "sort"
+
+ "google.golang.org/protobuf/encoding/protowire"
+ "google.golang.org/protobuf/internal/genid"
+ pref "google.golang.org/protobuf/reflect/protoreflect"
+)
+
+type mapInfo struct {
+ goType reflect.Type
+ keyWiretag uint64
+ valWiretag uint64
+ keyFuncs valueCoderFuncs
+ valFuncs valueCoderFuncs
+ keyZero pref.Value
+ keyKind pref.Kind
+ conv *mapConverter
+}
+
+func encoderFuncsForMap(fd pref.FieldDescriptor, ft reflect.Type) (valueMessage *MessageInfo, funcs pointerCoderFuncs) {
+ // TODO: Consider generating specialized map coders.
+ keyField := fd.MapKey()
+ valField := fd.MapValue()
+ keyWiretag := protowire.EncodeTag(1, wireTypes[keyField.Kind()])
+ valWiretag := protowire.EncodeTag(2, wireTypes[valField.Kind()])
+ keyFuncs := encoderFuncsForValue(keyField)
+ valFuncs := encoderFuncsForValue(valField)
+ conv := newMapConverter(ft, fd)
+
+ mapi := &mapInfo{
+ goType: ft,
+ keyWiretag: keyWiretag,
+ valWiretag: valWiretag,
+ keyFuncs: keyFuncs,
+ valFuncs: valFuncs,
+ keyZero: keyField.Default(),
+ keyKind: keyField.Kind(),
+ conv: conv,
+ }
+ if valField.Kind() == pref.MessageKind {
+ valueMessage = getMessageInfo(ft.Elem())
+ }
+
+ funcs = pointerCoderFuncs{
+ size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int {
+ return sizeMap(p.AsValueOf(ft).Elem(), mapi, f, opts)
+ },
+ marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) {
+ return appendMap(b, p.AsValueOf(ft).Elem(), mapi, f, opts)
+ },
+ unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) {
+ mp := p.AsValueOf(ft)
+ if mp.Elem().IsNil() {
+ mp.Elem().Set(reflect.MakeMap(mapi.goType))
+ }
+ if f.mi == nil {
+ return consumeMap(b, mp.Elem(), wtyp, mapi, f, opts)
+ } else {
+ return consumeMapOfMessage(b, mp.Elem(), wtyp, mapi, f, opts)
+ }
+ },
+ }
+ switch valField.Kind() {
+ case pref.MessageKind:
+ funcs.merge = mergeMapOfMessage
+ case pref.BytesKind:
+ funcs.merge = mergeMapOfBytes
+ default:
+ funcs.merge = mergeMap
+ }
+ if valFuncs.isInit != nil {
+ funcs.isInit = func(p pointer, f *coderFieldInfo) error {
+ return isInitMap(p.AsValueOf(ft).Elem(), mapi, f)
+ }
+ }
+ return valueMessage, funcs
+}
+
+const (
+ mapKeyTagSize = 1 // field 1, tag size 1.
+ mapValTagSize = 1 // field 2, tag size 1.
+) + +func sizeMap(mapv reflect.Value, mapi *mapInfo, f *coderFieldInfo, opts marshalOptions) int { + if mapv.Len() == 0 { + return 0 + } + n := 0 + iter := mapRange(mapv) + for iter.Next() { + key := mapi.conv.keyConv.PBValueOf(iter.Key()).MapKey() + keySize := mapi.keyFuncs.size(key.Value(), mapKeyTagSize, opts) + var valSize int + value := mapi.conv.valConv.PBValueOf(iter.Value()) + if f.mi == nil { + valSize = mapi.valFuncs.size(value, mapValTagSize, opts) + } else { + p := pointerOfValue(iter.Value()) + valSize += mapValTagSize + valSize += protowire.SizeBytes(f.mi.sizePointer(p, opts)) + } + n += f.tagsize + protowire.SizeBytes(keySize+valSize) + } + return n +} + +func consumeMap(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + var ( + key = mapi.keyZero + val = mapi.conv.valConv.New() + ) + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return out, errDecode + } + if num > protowire.MaxValidNumber { + return out, errDecode + } + b = b[n:] + err := errUnknown + switch num { + case genid.MapEntry_Key_field_number: + var v pref.Value + var o unmarshalOutput + v, o, err = mapi.keyFuncs.unmarshal(b, key, num, wtyp, opts) + if err != nil { + break + } + key = v + n = o.n + case genid.MapEntry_Value_field_number: + var v pref.Value + var o unmarshalOutput + v, o, err = mapi.valFuncs.unmarshal(b, val, num, wtyp, opts) + if err != nil { + break + } + val = v + n = o.n + } + if err == errUnknown { + n = protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return out, errDecode + } + } else if err != nil { + return out, err + } + b = b[n:] + } + mapv.SetMapIndex(mapi.conv.keyConv.GoValueOf(key), mapi.conv.valConv.GoValueOf(val)) + out.n = n + return out, nil +} + +func consumeMapOfMessage(b []byte, mapv reflect.Value, wtyp protowire.Type, mapi *mapInfo, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.BytesType { + return out, errUnknown + } + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + var ( + key = mapi.keyZero + val = reflect.New(f.mi.GoReflectType.Elem()) + ) + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return out, errDecode + } + if num > protowire.MaxValidNumber { + return out, errDecode + } + b = b[n:] + err := errUnknown + switch num { + case 1: + var v pref.Value + var o unmarshalOutput + v, o, err = mapi.keyFuncs.unmarshal(b, key, num, wtyp, opts) + if err != nil { + break + } + key = v + n = o.n + case 2: + if wtyp != protowire.BytesType { + break + } + var v []byte + v, n = protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + } + var o unmarshalOutput + o, err = f.mi.unmarshalPointer(v, pointerOfValue(val), 0, opts) + if o.initialized { + // Consider this map item initialized so long as we see + // an initialized value. 
+ out.initialized = true + } + } + if err == errUnknown { + n = protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return out, errDecode + } + } else if err != nil { + return out, err + } + b = b[n:] + } + mapv.SetMapIndex(mapi.conv.keyConv.GoValueOf(key), val) + out.n = n + return out, nil +} + +func appendMapItem(b []byte, keyrv, valrv reflect.Value, mapi *mapInfo, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + if f.mi == nil { + key := mapi.conv.keyConv.PBValueOf(keyrv).MapKey() + val := mapi.conv.valConv.PBValueOf(valrv) + size := 0 + size += mapi.keyFuncs.size(key.Value(), mapKeyTagSize, opts) + size += mapi.valFuncs.size(val, mapValTagSize, opts) + b = protowire.AppendVarint(b, uint64(size)) + b, err := mapi.keyFuncs.marshal(b, key.Value(), mapi.keyWiretag, opts) + if err != nil { + return nil, err + } + return mapi.valFuncs.marshal(b, val, mapi.valWiretag, opts) + } else { + key := mapi.conv.keyConv.PBValueOf(keyrv).MapKey() + val := pointerOfValue(valrv) + valSize := f.mi.sizePointer(val, opts) + size := 0 + size += mapi.keyFuncs.size(key.Value(), mapKeyTagSize, opts) + size += mapValTagSize + protowire.SizeBytes(valSize) + b = protowire.AppendVarint(b, uint64(size)) + b, err := mapi.keyFuncs.marshal(b, key.Value(), mapi.keyWiretag, opts) + if err != nil { + return nil, err + } + b = protowire.AppendVarint(b, mapi.valWiretag) + b = protowire.AppendVarint(b, uint64(valSize)) + return f.mi.marshalAppendPointer(b, val, opts) + } +} + +func appendMap(b []byte, mapv reflect.Value, mapi *mapInfo, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + if mapv.Len() == 0 { + return b, nil + } + if opts.Deterministic() { + return appendMapDeterministic(b, mapv, mapi, f, opts) + } + iter := mapRange(mapv) + for iter.Next() { + var err error + b = protowire.AppendVarint(b, f.wiretag) + b, err = appendMapItem(b, iter.Key(), iter.Value(), mapi, f, opts) + if err != nil { + return b, err + } + } + return b, nil +} + +func appendMapDeterministic(b []byte, mapv reflect.Value, mapi *mapInfo, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + keys := mapv.MapKeys() + sort.Slice(keys, func(i, j int) bool { + switch keys[i].Kind() { + case reflect.Bool: + return !keys[i].Bool() && keys[j].Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return keys[i].Int() < keys[j].Int() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return keys[i].Uint() < keys[j].Uint() + case reflect.Float32, reflect.Float64: + return keys[i].Float() < keys[j].Float() + case reflect.String: + return keys[i].String() < keys[j].String() + default: + panic("invalid kind: " + keys[i].Kind().String()) + } + }) + for _, key := range keys { + var err error + b = protowire.AppendVarint(b, f.wiretag) + b, err = appendMapItem(b, key, mapv.MapIndex(key), mapi, f, opts) + if err != nil { + return b, err + } + } + return b, nil +} + +func isInitMap(mapv reflect.Value, mapi *mapInfo, f *coderFieldInfo) error { + if mi := f.mi; mi != nil { + mi.init() + if !mi.needsInitCheck { + return nil + } + iter := mapRange(mapv) + for iter.Next() { + val := pointerOfValue(iter.Value()) + if err := mi.checkInitializedPointer(val); err != nil { + return err + } + } + } else { + iter := mapRange(mapv) + for iter.Next() { + val := mapi.conv.valConv.PBValueOf(iter.Value()) + if err := mapi.valFuncs.isInit(val); err != nil { + return err + } + } + } + return nil +} + +func mergeMap(dst, src pointer, f *coderFieldInfo, opts 
mergeOptions) { + dstm := dst.AsValueOf(f.ft).Elem() + srcm := src.AsValueOf(f.ft).Elem() + if srcm.Len() == 0 { + return + } + if dstm.IsNil() { + dstm.Set(reflect.MakeMap(f.ft)) + } + iter := mapRange(srcm) + for iter.Next() { + dstm.SetMapIndex(iter.Key(), iter.Value()) + } +} + +func mergeMapOfBytes(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + dstm := dst.AsValueOf(f.ft).Elem() + srcm := src.AsValueOf(f.ft).Elem() + if srcm.Len() == 0 { + return + } + if dstm.IsNil() { + dstm.Set(reflect.MakeMap(f.ft)) + } + iter := mapRange(srcm) + for iter.Next() { + dstm.SetMapIndex(iter.Key(), reflect.ValueOf(append(emptyBuf[:], iter.Value().Bytes()...))) + } +} + +func mergeMapOfMessage(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + dstm := dst.AsValueOf(f.ft).Elem() + srcm := src.AsValueOf(f.ft).Elem() + if srcm.Len() == 0 { + return + } + if dstm.IsNil() { + dstm.Set(reflect.MakeMap(f.ft)) + } + iter := mapRange(srcm) + for iter.Next() { + val := reflect.New(f.ft.Elem().Elem()) + if f.mi != nil { + f.mi.mergePointer(pointerOfValue(val), pointerOfValue(iter.Value()), opts) + } else { + opts.Merge(asMessage(val), asMessage(iter.Value())) + } + dstm.SetMapIndex(iter.Key(), val) + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_map_go111.go b/vendor/google.golang.org/protobuf/internal/impl/codec_map_go111.go new file mode 100644 index 00000000..2706bb67 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_map_go111.go @@ -0,0 +1,37 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.12 + +package impl + +import "reflect" + +type mapIter struct { + v reflect.Value + keys []reflect.Value +} + +// mapRange provides a less-efficient equivalent to +// the Go 1.12 reflect.Value.MapRange method. +func mapRange(v reflect.Value) *mapIter { + return &mapIter{v: v} +} + +func (i *mapIter) Next() bool { + if i.keys == nil { + i.keys = i.v.MapKeys() + } else { + i.keys = i.keys[1:] + } + return len(i.keys) > 0 +} + +func (i *mapIter) Key() reflect.Value { + return i.keys[0] +} + +func (i *mapIter) Value() reflect.Value { + return i.v.MapIndex(i.keys[0]) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_map_go112.go b/vendor/google.golang.org/protobuf/internal/impl/codec_map_go112.go new file mode 100644 index 00000000..1533ef60 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_map_go112.go @@ -0,0 +1,11 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.12 + +package impl + +import "reflect" + +func mapRange(v reflect.Value) *reflect.MapIter { return v.MapRange() } diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_message.go b/vendor/google.golang.org/protobuf/internal/impl/codec_message.go new file mode 100644 index 00000000..cd40527f --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_message.go @@ -0,0 +1,217 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package impl + +import ( + "fmt" + "reflect" + "sort" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/order" + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// coderMessageInfo contains per-message information used by the fast-path functions. +// This is a different type from MessageInfo to keep MessageInfo as general-purpose as +// possible. +type coderMessageInfo struct { + methods piface.Methods + + orderedCoderFields []*coderFieldInfo + denseCoderFields []*coderFieldInfo + coderFields map[protowire.Number]*coderFieldInfo + sizecacheOffset offset + unknownOffset offset + unknownPtrKind bool + extensionOffset offset + needsInitCheck bool + isMessageSet bool + numRequiredFields uint8 +} + +type coderFieldInfo struct { + funcs pointerCoderFuncs // fast-path per-field functions + mi *MessageInfo // field's message + ft reflect.Type + validation validationInfo // information used by message validation + num pref.FieldNumber // field number + offset offset // struct field offset + wiretag uint64 // field tag (number + wire type) + tagsize int // size of the varint-encoded tag + isPointer bool // true if IsNil may be called on the struct field + isRequired bool // true if field is required +} + +func (mi *MessageInfo) makeCoderMethods(t reflect.Type, si structInfo) { + mi.sizecacheOffset = invalidOffset + mi.unknownOffset = invalidOffset + mi.extensionOffset = invalidOffset + + if si.sizecacheOffset.IsValid() && si.sizecacheType == sizecacheType { + mi.sizecacheOffset = si.sizecacheOffset + } + if si.unknownOffset.IsValid() && (si.unknownType == unknownFieldsAType || si.unknownType == unknownFieldsBType) { + mi.unknownOffset = si.unknownOffset + mi.unknownPtrKind = si.unknownType.Kind() == reflect.Ptr + } + if si.extensionOffset.IsValid() && si.extensionType == extensionFieldsType { + mi.extensionOffset = si.extensionOffset + } + + mi.coderFields = make(map[protowire.Number]*coderFieldInfo) + fields := mi.Desc.Fields() + preallocFields := make([]coderFieldInfo, fields.Len()) + for i := 0; i < fields.Len(); i++ { + fd := fields.Get(i) + + fs := si.fieldsByNumber[fd.Number()] + isOneof := fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic() + if isOneof { + fs = si.oneofsByName[fd.ContainingOneof().Name()] + } + ft := fs.Type + var wiretag uint64 + if !fd.IsPacked() { + wiretag = protowire.EncodeTag(fd.Number(), wireTypes[fd.Kind()]) + } else { + wiretag = protowire.EncodeTag(fd.Number(), protowire.BytesType) + } + var fieldOffset offset + var funcs pointerCoderFuncs + var childMessage *MessageInfo + switch { + case ft == nil: + // This never occurs for generated message types. + // It implies that a hand-crafted type has missing Go fields + // for specific protobuf message fields. 
+ funcs = pointerCoderFuncs{ + size: func(p pointer, f *coderFieldInfo, opts marshalOptions) int { + return 0 + }, + marshal: func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + return nil, nil + }, + unmarshal: func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) { + panic("missing Go struct field for " + string(fd.FullName())) + }, + isInit: func(p pointer, f *coderFieldInfo) error { + panic("missing Go struct field for " + string(fd.FullName())) + }, + merge: func(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + panic("missing Go struct field for " + string(fd.FullName())) + }, + } + case isOneof: + fieldOffset = offsetOf(fs, mi.Exporter) + case fd.IsWeak(): + fieldOffset = si.weakOffset + funcs = makeWeakMessageFieldCoder(fd) + default: + fieldOffset = offsetOf(fs, mi.Exporter) + childMessage, funcs = fieldCoder(fd, ft) + } + cf := &preallocFields[i] + *cf = coderFieldInfo{ + num: fd.Number(), + offset: fieldOffset, + wiretag: wiretag, + ft: ft, + tagsize: protowire.SizeVarint(wiretag), + funcs: funcs, + mi: childMessage, + validation: newFieldValidationInfo(mi, si, fd, ft), + isPointer: fd.Cardinality() == pref.Repeated || fd.HasPresence(), + isRequired: fd.Cardinality() == pref.Required, + } + mi.orderedCoderFields = append(mi.orderedCoderFields, cf) + mi.coderFields[cf.num] = cf + } + for i, oneofs := 0, mi.Desc.Oneofs(); i < oneofs.Len(); i++ { + if od := oneofs.Get(i); !od.IsSynthetic() { + mi.initOneofFieldCoders(od, si) + } + } + if messageset.IsMessageSet(mi.Desc) { + if !mi.extensionOffset.IsValid() { + panic(fmt.Sprintf("%v: MessageSet with no extensions field", mi.Desc.FullName())) + } + if !mi.unknownOffset.IsValid() { + panic(fmt.Sprintf("%v: MessageSet with no unknown field", mi.Desc.FullName())) + } + mi.isMessageSet = true + } + sort.Slice(mi.orderedCoderFields, func(i, j int) bool { + return mi.orderedCoderFields[i].num < mi.orderedCoderFields[j].num + }) + + var maxDense pref.FieldNumber + for _, cf := range mi.orderedCoderFields { + if cf.num >= 16 && cf.num >= 2*maxDense { + break + } + maxDense = cf.num + } + mi.denseCoderFields = make([]*coderFieldInfo, maxDense+1) + for _, cf := range mi.orderedCoderFields { + if int(cf.num) >= len(mi.denseCoderFields) { + break + } + mi.denseCoderFields[cf.num] = cf + } + + // To preserve compatibility with historic wire output, marshal oneofs last. + if mi.Desc.Oneofs().Len() > 0 { + sort.Slice(mi.orderedCoderFields, func(i, j int) bool { + fi := fields.ByNumber(mi.orderedCoderFields[i].num) + fj := fields.ByNumber(mi.orderedCoderFields[j].num) + return order.LegacyFieldOrder(fi, fj) + }) + } + + mi.needsInitCheck = needsInitCheck(mi.Desc) + if mi.methods.Marshal == nil && mi.methods.Size == nil { + mi.methods.Flags |= piface.SupportMarshalDeterministic + mi.methods.Marshal = mi.marshal + mi.methods.Size = mi.size + } + if mi.methods.Unmarshal == nil { + mi.methods.Flags |= piface.SupportUnmarshalDiscardUnknown + mi.methods.Unmarshal = mi.unmarshal + } + if mi.methods.CheckInitialized == nil { + mi.methods.CheckInitialized = mi.checkInitialized + } + if mi.methods.Merge == nil { + mi.methods.Merge = mi.merge + } +} + +// getUnknownBytes returns a *[]byte for the unknown fields. +// It is the caller's responsibility to check whether the pointer is nil. +// This function is specially designed to be inlineable. 
+func (mi *MessageInfo) getUnknownBytes(p pointer) *[]byte { + if mi.unknownPtrKind { + return *p.Apply(mi.unknownOffset).BytesPtr() + } else { + return p.Apply(mi.unknownOffset).Bytes() + } +} + +// mutableUnknownBytes returns a *[]byte for the unknown fields. +// The returned pointer is guaranteed to not be nil. +func (mi *MessageInfo) mutableUnknownBytes(p pointer) *[]byte { + if mi.unknownPtrKind { + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + *bp = new([]byte) + } + return *bp + } else { + return p.Apply(mi.unknownOffset).Bytes() + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go b/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go new file mode 100644 index 00000000..b7a23faf --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_messageset.go @@ -0,0 +1,123 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "sort" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" +) + +func sizeMessageSet(mi *MessageInfo, p pointer, opts marshalOptions) (size int) { + if !flags.ProtoLegacy { + return 0 + } + + ext := *p.Apply(mi.extensionOffset).Extensions() + for _, x := range ext { + xi := getExtensionFieldInfo(x.Type()) + if xi.funcs.size == nil { + continue + } + num, _ := protowire.DecodeTag(xi.wiretag) + size += messageset.SizeField(num) + size += xi.funcs.size(x.Value(), protowire.SizeTag(messageset.FieldMessage), opts) + } + + if u := mi.getUnknownBytes(p); u != nil { + size += messageset.SizeUnknown(*u) + } + + return size +} + +func marshalMessageSet(mi *MessageInfo, b []byte, p pointer, opts marshalOptions) ([]byte, error) { + if !flags.ProtoLegacy { + return b, errors.New("no support for message_set_wire_format") + } + + ext := *p.Apply(mi.extensionOffset).Extensions() + switch len(ext) { + case 0: + case 1: + // Fast-path for one extension: Don't bother sorting the keys. + for _, x := range ext { + var err error + b, err = marshalMessageSetField(mi, b, x, opts) + if err != nil { + return b, err + } + } + default: + // Sort the keys to provide a deterministic encoding. + // Not sure this is required, but the old code does it. 
+ keys := make([]int, 0, len(ext)) + for k := range ext { + keys = append(keys, int(k)) + } + sort.Ints(keys) + for _, k := range keys { + var err error + b, err = marshalMessageSetField(mi, b, ext[int32(k)], opts) + if err != nil { + return b, err + } + } + } + + if u := mi.getUnknownBytes(p); u != nil { + var err error + b, err = messageset.AppendUnknown(b, *u) + if err != nil { + return b, err + } + } + + return b, nil +} + +func marshalMessageSetField(mi *MessageInfo, b []byte, x ExtensionField, opts marshalOptions) ([]byte, error) { + xi := getExtensionFieldInfo(x.Type()) + num, _ := protowire.DecodeTag(xi.wiretag) + b = messageset.AppendFieldStart(b, num) + b, err := xi.funcs.marshal(b, x.Value(), protowire.EncodeTag(messageset.FieldMessage, protowire.BytesType), opts) + if err != nil { + return b, err + } + b = messageset.AppendFieldEnd(b) + return b, nil +} + +func unmarshalMessageSet(mi *MessageInfo, b []byte, p pointer, opts unmarshalOptions) (out unmarshalOutput, err error) { + if !flags.ProtoLegacy { + return out, errors.New("no support for message_set_wire_format") + } + + ep := p.Apply(mi.extensionOffset).Extensions() + if *ep == nil { + *ep = make(map[int32]ExtensionField) + } + ext := *ep + initialized := true + err = messageset.Unmarshal(b, true, func(num protowire.Number, v []byte) error { + o, err := mi.unmarshalExtension(v, num, protowire.BytesType, ext, opts) + if err == errUnknown { + u := mi.mutableUnknownBytes(p) + *u = protowire.AppendTag(*u, num, protowire.BytesType) + *u = append(*u, v...) + return nil + } + if !o.initialized { + initialized = false + } + return err + }) + out.n = len(b) + out.initialized = initialized + return out, err +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go new file mode 100644 index 00000000..90705e3a --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_reflect.go @@ -0,0 +1,209 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build purego appengine + +package impl + +import ( + "reflect" + + "google.golang.org/protobuf/encoding/protowire" +) + +func sizeEnum(p pointer, f *coderFieldInfo, _ marshalOptions) (size int) { + v := p.v.Elem().Int() + return f.tagsize + protowire.SizeVarint(uint64(v)) +} + +func appendEnum(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + v := p.v.Elem().Int() + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(v)) + return b, nil +} + +func consumeEnum(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, _ unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return out, errDecode + } + p.v.Elem().SetInt(int64(v)) + out.n = n + return out, nil +} + +func mergeEnum(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + dst.v.Elem().Set(src.v.Elem()) +} + +var coderEnum = pointerCoderFuncs{ + size: sizeEnum, + marshal: appendEnum, + unmarshal: consumeEnum, + merge: mergeEnum, +} + +func sizeEnumNoZero(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + if p.v.Elem().Int() == 0 { + return 0 + } + return sizeEnum(p, f, opts) +} + +func appendEnumNoZero(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + if p.v.Elem().Int() == 0 { + return b, nil + } + return appendEnum(b, p, f, opts) +} + +func mergeEnumNoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + if src.v.Elem().Int() != 0 { + dst.v.Elem().Set(src.v.Elem()) + } +} + +var coderEnumNoZero = pointerCoderFuncs{ + size: sizeEnumNoZero, + marshal: appendEnumNoZero, + unmarshal: consumeEnum, + merge: mergeEnumNoZero, +} + +func sizeEnumPtr(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + return sizeEnum(pointer{p.v.Elem()}, f, opts) +} + +func appendEnumPtr(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + return appendEnum(b, pointer{p.v.Elem()}, f, opts) +} + +func consumeEnumPtr(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + if wtyp != protowire.VarintType { + return out, errUnknown + } + if p.v.Elem().IsNil() { + p.v.Elem().Set(reflect.New(p.v.Elem().Type().Elem())) + } + return consumeEnum(b, pointer{p.v.Elem()}, wtyp, f, opts) +} + +func mergeEnumPtr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + if !src.v.Elem().IsNil() { + v := reflect.New(dst.v.Type().Elem().Elem()) + v.Elem().Set(src.v.Elem().Elem()) + dst.v.Elem().Set(v) + } +} + +var coderEnumPtr = pointerCoderFuncs{ + size: sizeEnumPtr, + marshal: appendEnumPtr, + unmarshal: consumeEnumPtr, + merge: mergeEnumPtr, +} + +func sizeEnumSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := p.v.Elem() + for i, llen := 0, s.Len(); i < llen; i++ { + size += protowire.SizeVarint(uint64(s.Index(i).Int())) + f.tagsize + } + return size +} + +func appendEnumSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := p.v.Elem() + for i, llen := 0, s.Len(); i < llen; i++ { + b = protowire.AppendVarint(b, f.wiretag) + b = protowire.AppendVarint(b, uint64(s.Index(i).Int())) + } + return b, nil +} + +func consumeEnumSlice(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (out unmarshalOutput, err error) { + s := p.v.Elem() + if wtyp == protowire.BytesType { + b, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, errDecode + 
} + for len(b) > 0 { + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return out, errDecode + } + rv := reflect.New(s.Type().Elem()).Elem() + rv.SetInt(int64(v)) + s.Set(reflect.Append(s, rv)) + b = b[n:] + } + out.n = n + return out, nil + } + if wtyp != protowire.VarintType { + return out, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return out, errDecode + } + rv := reflect.New(s.Type().Elem()).Elem() + rv.SetInt(int64(v)) + s.Set(reflect.Append(s, rv)) + out.n = n + return out, nil +} + +func mergeEnumSlice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + dst.v.Elem().Set(reflect.AppendSlice(dst.v.Elem(), src.v.Elem())) +} + +var coderEnumSlice = pointerCoderFuncs{ + size: sizeEnumSlice, + marshal: appendEnumSlice, + unmarshal: consumeEnumSlice, + merge: mergeEnumSlice, +} + +func sizeEnumPackedSlice(p pointer, f *coderFieldInfo, opts marshalOptions) (size int) { + s := p.v.Elem() + llen := s.Len() + if llen == 0 { + return 0 + } + n := 0 + for i := 0; i < llen; i++ { + n += protowire.SizeVarint(uint64(s.Index(i).Int())) + } + return f.tagsize + protowire.SizeBytes(n) +} + +func appendEnumPackedSlice(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) { + s := p.v.Elem() + llen := s.Len() + if llen == 0 { + return b, nil + } + b = protowire.AppendVarint(b, f.wiretag) + n := 0 + for i := 0; i < llen; i++ { + n += protowire.SizeVarint(uint64(s.Index(i).Int())) + } + b = protowire.AppendVarint(b, uint64(n)) + for i := 0; i < llen; i++ { + b = protowire.AppendVarint(b, uint64(s.Index(i).Int())) + } + return b, nil +} + +var coderEnumPackedSlice = pointerCoderFuncs{ + size: sizeEnumPackedSlice, + marshal: appendEnumPackedSlice, + unmarshal: consumeEnumSlice, + merge: mergeEnumSlice, +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_tables.go b/vendor/google.golang.org/protobuf/internal/impl/codec_tables.go new file mode 100644 index 00000000..e8997123 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_tables.go @@ -0,0 +1,557 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/strs" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// pointerCoderFuncs is a set of pointer encoding functions. +type pointerCoderFuncs struct { + mi *MessageInfo + size func(p pointer, f *coderFieldInfo, opts marshalOptions) int + marshal func(b []byte, p pointer, f *coderFieldInfo, opts marshalOptions) ([]byte, error) + unmarshal func(b []byte, p pointer, wtyp protowire.Type, f *coderFieldInfo, opts unmarshalOptions) (unmarshalOutput, error) + isInit func(p pointer, f *coderFieldInfo) error + merge func(dst, src pointer, f *coderFieldInfo, opts mergeOptions) +} + +// valueCoderFuncs is a set of protoreflect.Value encoding functions. +type valueCoderFuncs struct { + size func(v pref.Value, tagsize int, opts marshalOptions) int + marshal func(b []byte, v pref.Value, wiretag uint64, opts marshalOptions) ([]byte, error) + unmarshal func(b []byte, v pref.Value, num protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (pref.Value, unmarshalOutput, error) + isInit func(v pref.Value) error + merge func(dst, src pref.Value, opts mergeOptions) pref.Value +} + +// fieldCoder returns pointer functions for a field, used for operating on +// struct fields. 
+func fieldCoder(fd pref.FieldDescriptor, ft reflect.Type) (*MessageInfo, pointerCoderFuncs) { + switch { + case fd.IsMap(): + return encoderFuncsForMap(fd, ft) + case fd.Cardinality() == pref.Repeated && !fd.IsPacked(): + // Repeated fields (not packed). + if ft.Kind() != reflect.Slice { + break + } + ft := ft.Elem() + switch fd.Kind() { + case pref.BoolKind: + if ft.Kind() == reflect.Bool { + return nil, coderBoolSlice + } + case pref.EnumKind: + if ft.Kind() == reflect.Int32 { + return nil, coderEnumSlice + } + case pref.Int32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderInt32Slice + } + case pref.Sint32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSint32Slice + } + case pref.Uint32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderUint32Slice + } + case pref.Int64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderInt64Slice + } + case pref.Sint64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSint64Slice + } + case pref.Uint64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderUint64Slice + } + case pref.Sfixed32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSfixed32Slice + } + case pref.Fixed32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderFixed32Slice + } + case pref.FloatKind: + if ft.Kind() == reflect.Float32 { + return nil, coderFloatSlice + } + case pref.Sfixed64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSfixed64Slice + } + case pref.Fixed64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderFixed64Slice + } + case pref.DoubleKind: + if ft.Kind() == reflect.Float64 { + return nil, coderDoubleSlice + } + case pref.StringKind: + if ft.Kind() == reflect.String && strs.EnforceUTF8(fd) { + return nil, coderStringSliceValidateUTF8 + } + if ft.Kind() == reflect.String { + return nil, coderStringSlice + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 && strs.EnforceUTF8(fd) { + return nil, coderBytesSliceValidateUTF8 + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytesSlice + } + case pref.BytesKind: + if ft.Kind() == reflect.String { + return nil, coderStringSlice + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytesSlice + } + case pref.MessageKind: + return getMessageInfo(ft), makeMessageSliceFieldCoder(fd, ft) + case pref.GroupKind: + return getMessageInfo(ft), makeGroupSliceFieldCoder(fd, ft) + } + case fd.Cardinality() == pref.Repeated && fd.IsPacked(): + // Packed repeated fields. + // + // Only repeated fields of primitive numeric types + // (Varint, Fixed32, or Fixed64 wire type) can be packed. 
+ if ft.Kind() != reflect.Slice { + break + } + ft := ft.Elem() + switch fd.Kind() { + case pref.BoolKind: + if ft.Kind() == reflect.Bool { + return nil, coderBoolPackedSlice + } + case pref.EnumKind: + if ft.Kind() == reflect.Int32 { + return nil, coderEnumPackedSlice + } + case pref.Int32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderInt32PackedSlice + } + case pref.Sint32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSint32PackedSlice + } + case pref.Uint32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderUint32PackedSlice + } + case pref.Int64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderInt64PackedSlice + } + case pref.Sint64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSint64PackedSlice + } + case pref.Uint64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderUint64PackedSlice + } + case pref.Sfixed32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSfixed32PackedSlice + } + case pref.Fixed32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderFixed32PackedSlice + } + case pref.FloatKind: + if ft.Kind() == reflect.Float32 { + return nil, coderFloatPackedSlice + } + case pref.Sfixed64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSfixed64PackedSlice + } + case pref.Fixed64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderFixed64PackedSlice + } + case pref.DoubleKind: + if ft.Kind() == reflect.Float64 { + return nil, coderDoublePackedSlice + } + } + case fd.Kind() == pref.MessageKind: + return getMessageInfo(ft), makeMessageFieldCoder(fd, ft) + case fd.Kind() == pref.GroupKind: + return getMessageInfo(ft), makeGroupFieldCoder(fd, ft) + case fd.Syntax() == pref.Proto3 && fd.ContainingOneof() == nil: + // Populated oneof fields always encode even if set to the zero value, + // which normally are not encoded in proto3. 
+ switch fd.Kind() { + case pref.BoolKind: + if ft.Kind() == reflect.Bool { + return nil, coderBoolNoZero + } + case pref.EnumKind: + if ft.Kind() == reflect.Int32 { + return nil, coderEnumNoZero + } + case pref.Int32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderInt32NoZero + } + case pref.Sint32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSint32NoZero + } + case pref.Uint32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderUint32NoZero + } + case pref.Int64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderInt64NoZero + } + case pref.Sint64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSint64NoZero + } + case pref.Uint64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderUint64NoZero + } + case pref.Sfixed32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSfixed32NoZero + } + case pref.Fixed32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderFixed32NoZero + } + case pref.FloatKind: + if ft.Kind() == reflect.Float32 { + return nil, coderFloatNoZero + } + case pref.Sfixed64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSfixed64NoZero + } + case pref.Fixed64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderFixed64NoZero + } + case pref.DoubleKind: + if ft.Kind() == reflect.Float64 { + return nil, coderDoubleNoZero + } + case pref.StringKind: + if ft.Kind() == reflect.String && strs.EnforceUTF8(fd) { + return nil, coderStringNoZeroValidateUTF8 + } + if ft.Kind() == reflect.String { + return nil, coderStringNoZero + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 && strs.EnforceUTF8(fd) { + return nil, coderBytesNoZeroValidateUTF8 + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytesNoZero + } + case pref.BytesKind: + if ft.Kind() == reflect.String { + return nil, coderStringNoZero + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytesNoZero + } + } + case ft.Kind() == reflect.Ptr: + ft := ft.Elem() + switch fd.Kind() { + case pref.BoolKind: + if ft.Kind() == reflect.Bool { + return nil, coderBoolPtr + } + case pref.EnumKind: + if ft.Kind() == reflect.Int32 { + return nil, coderEnumPtr + } + case pref.Int32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderInt32Ptr + } + case pref.Sint32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSint32Ptr + } + case pref.Uint32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderUint32Ptr + } + case pref.Int64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderInt64Ptr + } + case pref.Sint64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSint64Ptr + } + case pref.Uint64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderUint64Ptr + } + case pref.Sfixed32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSfixed32Ptr + } + case pref.Fixed32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderFixed32Ptr + } + case pref.FloatKind: + if ft.Kind() == reflect.Float32 { + return nil, coderFloatPtr + } + case pref.Sfixed64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSfixed64Ptr + } + case pref.Fixed64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderFixed64Ptr + } + case pref.DoubleKind: + if ft.Kind() == reflect.Float64 { + return nil, coderDoublePtr + } + case pref.StringKind: + if ft.Kind() == reflect.String && strs.EnforceUTF8(fd) { + return nil, coderStringPtrValidateUTF8 + } + if ft.Kind() == reflect.String { + return nil, coderStringPtr + } + case 
pref.BytesKind: + if ft.Kind() == reflect.String { + return nil, coderStringPtr + } + } + default: + switch fd.Kind() { + case pref.BoolKind: + if ft.Kind() == reflect.Bool { + return nil, coderBool + } + case pref.EnumKind: + if ft.Kind() == reflect.Int32 { + return nil, coderEnum + } + case pref.Int32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderInt32 + } + case pref.Sint32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSint32 + } + case pref.Uint32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderUint32 + } + case pref.Int64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderInt64 + } + case pref.Sint64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSint64 + } + case pref.Uint64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderUint64 + } + case pref.Sfixed32Kind: + if ft.Kind() == reflect.Int32 { + return nil, coderSfixed32 + } + case pref.Fixed32Kind: + if ft.Kind() == reflect.Uint32 { + return nil, coderFixed32 + } + case pref.FloatKind: + if ft.Kind() == reflect.Float32 { + return nil, coderFloat + } + case pref.Sfixed64Kind: + if ft.Kind() == reflect.Int64 { + return nil, coderSfixed64 + } + case pref.Fixed64Kind: + if ft.Kind() == reflect.Uint64 { + return nil, coderFixed64 + } + case pref.DoubleKind: + if ft.Kind() == reflect.Float64 { + return nil, coderDouble + } + case pref.StringKind: + if ft.Kind() == reflect.String && strs.EnforceUTF8(fd) { + return nil, coderStringValidateUTF8 + } + if ft.Kind() == reflect.String { + return nil, coderString + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 && strs.EnforceUTF8(fd) { + return nil, coderBytesValidateUTF8 + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytes + } + case pref.BytesKind: + if ft.Kind() == reflect.String { + return nil, coderString + } + if ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 { + return nil, coderBytes + } + } + } + panic(fmt.Sprintf("invalid type: no encoder for %v %v %v/%v", fd.FullName(), fd.Cardinality(), fd.Kind(), ft)) +} + +// encoderFuncsForValue returns value functions for a field, used for +// extension values and map encoding. +func encoderFuncsForValue(fd pref.FieldDescriptor) valueCoderFuncs { + switch { + case fd.Cardinality() == pref.Repeated && !fd.IsPacked(): + switch fd.Kind() { + case pref.BoolKind: + return coderBoolSliceValue + case pref.EnumKind: + return coderEnumSliceValue + case pref.Int32Kind: + return coderInt32SliceValue + case pref.Sint32Kind: + return coderSint32SliceValue + case pref.Uint32Kind: + return coderUint32SliceValue + case pref.Int64Kind: + return coderInt64SliceValue + case pref.Sint64Kind: + return coderSint64SliceValue + case pref.Uint64Kind: + return coderUint64SliceValue + case pref.Sfixed32Kind: + return coderSfixed32SliceValue + case pref.Fixed32Kind: + return coderFixed32SliceValue + case pref.FloatKind: + return coderFloatSliceValue + case pref.Sfixed64Kind: + return coderSfixed64SliceValue + case pref.Fixed64Kind: + return coderFixed64SliceValue + case pref.DoubleKind: + return coderDoubleSliceValue + case pref.StringKind: + // We don't have a UTF-8 validating coder for repeated string fields. + // Value coders are used for extensions and maps. + // Extensions are never proto3, and maps never contain lists. 
+ return coderStringSliceValue + case pref.BytesKind: + return coderBytesSliceValue + case pref.MessageKind: + return coderMessageSliceValue + case pref.GroupKind: + return coderGroupSliceValue + } + case fd.Cardinality() == pref.Repeated && fd.IsPacked(): + switch fd.Kind() { + case pref.BoolKind: + return coderBoolPackedSliceValue + case pref.EnumKind: + return coderEnumPackedSliceValue + case pref.Int32Kind: + return coderInt32PackedSliceValue + case pref.Sint32Kind: + return coderSint32PackedSliceValue + case pref.Uint32Kind: + return coderUint32PackedSliceValue + case pref.Int64Kind: + return coderInt64PackedSliceValue + case pref.Sint64Kind: + return coderSint64PackedSliceValue + case pref.Uint64Kind: + return coderUint64PackedSliceValue + case pref.Sfixed32Kind: + return coderSfixed32PackedSliceValue + case pref.Fixed32Kind: + return coderFixed32PackedSliceValue + case pref.FloatKind: + return coderFloatPackedSliceValue + case pref.Sfixed64Kind: + return coderSfixed64PackedSliceValue + case pref.Fixed64Kind: + return coderFixed64PackedSliceValue + case pref.DoubleKind: + return coderDoublePackedSliceValue + } + default: + switch fd.Kind() { + default: + case pref.BoolKind: + return coderBoolValue + case pref.EnumKind: + return coderEnumValue + case pref.Int32Kind: + return coderInt32Value + case pref.Sint32Kind: + return coderSint32Value + case pref.Uint32Kind: + return coderUint32Value + case pref.Int64Kind: + return coderInt64Value + case pref.Sint64Kind: + return coderSint64Value + case pref.Uint64Kind: + return coderUint64Value + case pref.Sfixed32Kind: + return coderSfixed32Value + case pref.Fixed32Kind: + return coderFixed32Value + case pref.FloatKind: + return coderFloatValue + case pref.Sfixed64Kind: + return coderSfixed64Value + case pref.Fixed64Kind: + return coderFixed64Value + case pref.DoubleKind: + return coderDoubleValue + case pref.StringKind: + if strs.EnforceUTF8(fd) { + return coderStringValueValidateUTF8 + } + return coderStringValue + case pref.BytesKind: + return coderBytesValue + case pref.MessageKind: + return coderMessageValue + case pref.GroupKind: + return coderGroupValue + } + } + panic(fmt.Sprintf("invalid field: no encoder for %v %v %v", fd.FullName(), fd.Cardinality(), fd.Kind())) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/codec_unsafe.go b/vendor/google.golang.org/protobuf/internal/impl/codec_unsafe.go new file mode 100644 index 00000000..e118af1e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/codec_unsafe.go @@ -0,0 +1,17 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego,!appengine + +package impl + +// When using unsafe pointers, we can just treat enum values as int32s. + +var ( + coderEnumNoZero = coderInt32NoZero + coderEnum = coderInt32 + coderEnumPtr = coderInt32Ptr + coderEnumSlice = coderInt32Slice + coderEnumPackedSlice = coderInt32PackedSlice +) diff --git a/vendor/google.golang.org/protobuf/internal/impl/convert.go b/vendor/google.golang.org/protobuf/internal/impl/convert.go new file mode 100644 index 00000000..acd61bb5 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/convert.go @@ -0,0 +1,496 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package impl + +import ( + "fmt" + "reflect" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// unwrapper unwraps the value to the underlying value. +// This is implemented by List and Map. +type unwrapper interface { + protoUnwrap() interface{} +} + +// A Converter coverts to/from Go reflect.Value types and protobuf protoreflect.Value types. +type Converter interface { + // PBValueOf converts a reflect.Value to a protoreflect.Value. + PBValueOf(reflect.Value) pref.Value + + // GoValueOf converts a protoreflect.Value to a reflect.Value. + GoValueOf(pref.Value) reflect.Value + + // IsValidPB returns whether a protoreflect.Value is compatible with this type. + IsValidPB(pref.Value) bool + + // IsValidGo returns whether a reflect.Value is compatible with this type. + IsValidGo(reflect.Value) bool + + // New returns a new field value. + // For scalars, it returns the default value of the field. + // For composite types, it returns a new mutable value. + New() pref.Value + + // Zero returns a new field value. + // For scalars, it returns the default value of the field. + // For composite types, it returns an immutable, empty value. + Zero() pref.Value +} + +// NewConverter matches a Go type with a protobuf field and returns a Converter +// that converts between the two. Enums must be a named int32 kind that +// implements protoreflect.Enum, and messages must be pointer to a named +// struct type that implements protoreflect.ProtoMessage. +// +// This matcher deliberately supports a wider range of Go types than what +// protoc-gen-go historically generated to be able to automatically wrap some +// v1 messages generated by other forks of protoc-gen-go. +func NewConverter(t reflect.Type, fd pref.FieldDescriptor) Converter { + switch { + case fd.IsList(): + return newListConverter(t, fd) + case fd.IsMap(): + return newMapConverter(t, fd) + default: + return newSingularConverter(t, fd) + } + panic(fmt.Sprintf("invalid Go type %v for field %v", t, fd.FullName())) +} + +var ( + boolType = reflect.TypeOf(bool(false)) + int32Type = reflect.TypeOf(int32(0)) + int64Type = reflect.TypeOf(int64(0)) + uint32Type = reflect.TypeOf(uint32(0)) + uint64Type = reflect.TypeOf(uint64(0)) + float32Type = reflect.TypeOf(float32(0)) + float64Type = reflect.TypeOf(float64(0)) + stringType = reflect.TypeOf(string("")) + bytesType = reflect.TypeOf([]byte(nil)) + byteType = reflect.TypeOf(byte(0)) +) + +var ( + boolZero = pref.ValueOfBool(false) + int32Zero = pref.ValueOfInt32(0) + int64Zero = pref.ValueOfInt64(0) + uint32Zero = pref.ValueOfUint32(0) + uint64Zero = pref.ValueOfUint64(0) + float32Zero = pref.ValueOfFloat32(0) + float64Zero = pref.ValueOfFloat64(0) + stringZero = pref.ValueOfString("") + bytesZero = pref.ValueOfBytes(nil) +) + +func newSingularConverter(t reflect.Type, fd pref.FieldDescriptor) Converter { + defVal := func(fd pref.FieldDescriptor, zero pref.Value) pref.Value { + if fd.Cardinality() == pref.Repeated { + // Default isn't defined for repeated fields. 
+ return zero + } + return fd.Default() + } + switch fd.Kind() { + case pref.BoolKind: + if t.Kind() == reflect.Bool { + return &boolConverter{t, defVal(fd, boolZero)} + } + case pref.Int32Kind, pref.Sint32Kind, pref.Sfixed32Kind: + if t.Kind() == reflect.Int32 { + return &int32Converter{t, defVal(fd, int32Zero)} + } + case pref.Int64Kind, pref.Sint64Kind, pref.Sfixed64Kind: + if t.Kind() == reflect.Int64 { + return &int64Converter{t, defVal(fd, int64Zero)} + } + case pref.Uint32Kind, pref.Fixed32Kind: + if t.Kind() == reflect.Uint32 { + return &uint32Converter{t, defVal(fd, uint32Zero)} + } + case pref.Uint64Kind, pref.Fixed64Kind: + if t.Kind() == reflect.Uint64 { + return &uint64Converter{t, defVal(fd, uint64Zero)} + } + case pref.FloatKind: + if t.Kind() == reflect.Float32 { + return &float32Converter{t, defVal(fd, float32Zero)} + } + case pref.DoubleKind: + if t.Kind() == reflect.Float64 { + return &float64Converter{t, defVal(fd, float64Zero)} + } + case pref.StringKind: + if t.Kind() == reflect.String || (t.Kind() == reflect.Slice && t.Elem() == byteType) { + return &stringConverter{t, defVal(fd, stringZero)} + } + case pref.BytesKind: + if t.Kind() == reflect.String || (t.Kind() == reflect.Slice && t.Elem() == byteType) { + return &bytesConverter{t, defVal(fd, bytesZero)} + } + case pref.EnumKind: + // Handle enums, which must be a named int32 type. + if t.Kind() == reflect.Int32 { + return newEnumConverter(t, fd) + } + case pref.MessageKind, pref.GroupKind: + return newMessageConverter(t) + } + panic(fmt.Sprintf("invalid Go type %v for field %v", t, fd.FullName())) +} + +type boolConverter struct { + goType reflect.Type + def pref.Value +} + +func (c *boolConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfBool(v.Bool()) +} +func (c *boolConverter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(v.Bool()).Convert(c.goType) +} +func (c *boolConverter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(bool) + return ok +} +func (c *boolConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *boolConverter) New() pref.Value { return c.def } +func (c *boolConverter) Zero() pref.Value { return c.def } + +type int32Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *int32Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfInt32(int32(v.Int())) +} +func (c *int32Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(int32(v.Int())).Convert(c.goType) +} +func (c *int32Converter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(int32) + return ok +} +func (c *int32Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *int32Converter) New() pref.Value { return c.def } +func (c *int32Converter) Zero() pref.Value { return c.def } + +type int64Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *int64Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfInt64(int64(v.Int())) +} +func (c *int64Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(int64(v.Int())).Convert(c.goType) +} +func (c *int64Converter) IsValidPB(v 
pref.Value) bool { + _, ok := v.Interface().(int64) + return ok +} +func (c *int64Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *int64Converter) New() pref.Value { return c.def } +func (c *int64Converter) Zero() pref.Value { return c.def } + +type uint32Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *uint32Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfUint32(uint32(v.Uint())) +} +func (c *uint32Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(uint32(v.Uint())).Convert(c.goType) +} +func (c *uint32Converter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(uint32) + return ok +} +func (c *uint32Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *uint32Converter) New() pref.Value { return c.def } +func (c *uint32Converter) Zero() pref.Value { return c.def } + +type uint64Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *uint64Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfUint64(uint64(v.Uint())) +} +func (c *uint64Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(uint64(v.Uint())).Convert(c.goType) +} +func (c *uint64Converter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(uint64) + return ok +} +func (c *uint64Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *uint64Converter) New() pref.Value { return c.def } +func (c *uint64Converter) Zero() pref.Value { return c.def } + +type float32Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *float32Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfFloat32(float32(v.Float())) +} +func (c *float32Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(float32(v.Float())).Convert(c.goType) +} +func (c *float32Converter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(float32) + return ok +} +func (c *float32Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *float32Converter) New() pref.Value { return c.def } +func (c *float32Converter) Zero() pref.Value { return c.def } + +type float64Converter struct { + goType reflect.Type + def pref.Value +} + +func (c *float64Converter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfFloat64(float64(v.Float())) +} +func (c *float64Converter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(float64(v.Float())).Convert(c.goType) +} +func (c *float64Converter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(float64) + return ok +} +func (c *float64Converter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *float64Converter) New() pref.Value { return c.def } +func (c *float64Converter) Zero() pref.Value { return c.def } + +type stringConverter struct { + goType reflect.Type + def pref.Value +} + +func (c *stringConverter) PBValueOf(v reflect.Value) pref.Value { + if 
v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfString(v.Convert(stringType).String()) +} +func (c *stringConverter) GoValueOf(v pref.Value) reflect.Value { + // pref.Value.String never panics, so we go through an interface + // conversion here to check the type. + s := v.Interface().(string) + if c.goType.Kind() == reflect.Slice && s == "" { + return reflect.Zero(c.goType) // ensure empty string is []byte(nil) + } + return reflect.ValueOf(s).Convert(c.goType) +} +func (c *stringConverter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(string) + return ok +} +func (c *stringConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *stringConverter) New() pref.Value { return c.def } +func (c *stringConverter) Zero() pref.Value { return c.def } + +type bytesConverter struct { + goType reflect.Type + def pref.Value +} + +func (c *bytesConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + if c.goType.Kind() == reflect.String && v.Len() == 0 { + return pref.ValueOfBytes(nil) // ensure empty string is []byte(nil) + } + return pref.ValueOfBytes(v.Convert(bytesType).Bytes()) +} +func (c *bytesConverter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(v.Bytes()).Convert(c.goType) +} +func (c *bytesConverter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().([]byte) + return ok +} +func (c *bytesConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} +func (c *bytesConverter) New() pref.Value { return c.def } +func (c *bytesConverter) Zero() pref.Value { return c.def } + +type enumConverter struct { + goType reflect.Type + def pref.Value +} + +func newEnumConverter(goType reflect.Type, fd pref.FieldDescriptor) Converter { + var def pref.Value + if fd.Cardinality() == pref.Repeated { + def = pref.ValueOfEnum(fd.Enum().Values().Get(0).Number()) + } else { + def = fd.Default() + } + return &enumConverter{goType, def} +} + +func (c *enumConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfEnum(pref.EnumNumber(v.Int())) +} + +func (c *enumConverter) GoValueOf(v pref.Value) reflect.Value { + return reflect.ValueOf(v.Enum()).Convert(c.goType) +} + +func (c *enumConverter) IsValidPB(v pref.Value) bool { + _, ok := v.Interface().(pref.EnumNumber) + return ok +} + +func (c *enumConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} + +func (c *enumConverter) New() pref.Value { + return c.def +} + +func (c *enumConverter) Zero() pref.Value { + return c.def +} + +type messageConverter struct { + goType reflect.Type +} + +func newMessageConverter(goType reflect.Type) Converter { + return &messageConverter{goType} +} + +func (c *messageConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + if c.isNonPointer() { + if v.CanAddr() { + v = v.Addr() // T => *T + } else { + v = reflect.Zero(reflect.PtrTo(v.Type())) + } + } + if m, ok := v.Interface().(pref.ProtoMessage); ok { + return pref.ValueOfMessage(m.ProtoReflect()) + } + return pref.ValueOfMessage(legacyWrapMessage(v)) +} + +func (c *messageConverter) GoValueOf(v pref.Value) reflect.Value { + m := v.Message() + 
var rv reflect.Value + if u, ok := m.(unwrapper); ok { + rv = reflect.ValueOf(u.protoUnwrap()) + } else { + rv = reflect.ValueOf(m.Interface()) + } + if c.isNonPointer() { + if rv.Type() != reflect.PtrTo(c.goType) { + panic(fmt.Sprintf("invalid type: got %v, want %v", rv.Type(), reflect.PtrTo(c.goType))) + } + if !rv.IsNil() { + rv = rv.Elem() // *T => T + } else { + rv = reflect.Zero(rv.Type().Elem()) + } + } + if rv.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", rv.Type(), c.goType)) + } + return rv +} + +func (c *messageConverter) IsValidPB(v pref.Value) bool { + m := v.Message() + var rv reflect.Value + if u, ok := m.(unwrapper); ok { + rv = reflect.ValueOf(u.protoUnwrap()) + } else { + rv = reflect.ValueOf(m.Interface()) + } + if c.isNonPointer() { + return rv.Type() == reflect.PtrTo(c.goType) + } + return rv.Type() == c.goType +} + +func (c *messageConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} + +func (c *messageConverter) New() pref.Value { + if c.isNonPointer() { + return c.PBValueOf(reflect.New(c.goType).Elem()) + } + return c.PBValueOf(reflect.New(c.goType.Elem())) +} + +func (c *messageConverter) Zero() pref.Value { + return c.PBValueOf(reflect.Zero(c.goType)) +} + +// isNonPointer reports whether the type is a non-pointer type. +// This never occurs for generated message types. +func (c *messageConverter) isNonPointer() bool { + return c.goType.Kind() != reflect.Ptr +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/convert_list.go b/vendor/google.golang.org/protobuf/internal/impl/convert_list.go new file mode 100644 index 00000000..6fccab52 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/convert_list.go @@ -0,0 +1,141 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package impl + +import ( + "fmt" + "reflect" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +func newListConverter(t reflect.Type, fd pref.FieldDescriptor) Converter { + switch { + case t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Slice: + return &listPtrConverter{t, newSingularConverter(t.Elem().Elem(), fd)} + case t.Kind() == reflect.Slice: + return &listConverter{t, newSingularConverter(t.Elem(), fd)} + } + panic(fmt.Sprintf("invalid Go type %v for field %v", t, fd.FullName())) +} + +type listConverter struct { + goType reflect.Type // []T + c Converter +} + +func (c *listConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + pv := reflect.New(c.goType) + pv.Elem().Set(v) + return pref.ValueOfList(&listReflect{pv, c.c}) +} + +func (c *listConverter) GoValueOf(v pref.Value) reflect.Value { + rv := v.List().(*listReflect).v + if rv.IsNil() { + return reflect.Zero(c.goType) + } + return rv.Elem() +} + +func (c *listConverter) IsValidPB(v pref.Value) bool { + list, ok := v.Interface().(*listReflect) + if !ok { + return false + } + return list.v.Type().Elem() == c.goType +} + +func (c *listConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} + +func (c *listConverter) New() pref.Value { + return pref.ValueOfList(&listReflect{reflect.New(c.goType), c.c}) +} + +func (c *listConverter) Zero() pref.Value { + return pref.ValueOfList(&listReflect{reflect.Zero(reflect.PtrTo(c.goType)), c.c}) +} + +type listPtrConverter struct { + goType reflect.Type // *[]T + c Converter +} + +func (c *listPtrConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfList(&listReflect{v, c.c}) +} + +func (c *listPtrConverter) GoValueOf(v pref.Value) reflect.Value { + return v.List().(*listReflect).v +} + +func (c *listPtrConverter) IsValidPB(v pref.Value) bool { + list, ok := v.Interface().(*listReflect) + if !ok { + return false + } + return list.v.Type() == c.goType +} + +func (c *listPtrConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} + +func (c *listPtrConverter) New() pref.Value { + return c.PBValueOf(reflect.New(c.goType.Elem())) +} + +func (c *listPtrConverter) Zero() pref.Value { + return c.PBValueOf(reflect.Zero(c.goType)) +} + +type listReflect struct { + v reflect.Value // *[]T + conv Converter +} + +func (ls *listReflect) Len() int { + if ls.v.IsNil() { + return 0 + } + return ls.v.Elem().Len() +} +func (ls *listReflect) Get(i int) pref.Value { + return ls.conv.PBValueOf(ls.v.Elem().Index(i)) +} +func (ls *listReflect) Set(i int, v pref.Value) { + ls.v.Elem().Index(i).Set(ls.conv.GoValueOf(v)) +} +func (ls *listReflect) Append(v pref.Value) { + ls.v.Elem().Set(reflect.Append(ls.v.Elem(), ls.conv.GoValueOf(v))) +} +func (ls *listReflect) AppendMutable() pref.Value { + if _, ok := ls.conv.(*messageConverter); !ok { + panic("invalid AppendMutable on list with non-message type") + } + v := ls.NewElement() + ls.Append(v) + return v +} +func (ls *listReflect) Truncate(i int) { + ls.v.Elem().Set(ls.v.Elem().Slice(0, i)) +} +func (ls *listReflect) NewElement() pref.Value { + return ls.conv.New() +} +func (ls *listReflect) IsValid() bool { + return !ls.v.IsNil() +} +func (ls *listReflect) protoUnwrap() interface{} { + return ls.v.Interface() +} diff --git 
a/vendor/google.golang.org/protobuf/internal/impl/convert_map.go b/vendor/google.golang.org/protobuf/internal/impl/convert_map.go new file mode 100644 index 00000000..de06b259 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/convert_map.go @@ -0,0 +1,121 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type mapConverter struct { + goType reflect.Type // map[K]V + keyConv, valConv Converter +} + +func newMapConverter(t reflect.Type, fd pref.FieldDescriptor) *mapConverter { + if t.Kind() != reflect.Map { + panic(fmt.Sprintf("invalid Go type %v for field %v", t, fd.FullName())) + } + return &mapConverter{ + goType: t, + keyConv: newSingularConverter(t.Key(), fd.MapKey()), + valConv: newSingularConverter(t.Elem(), fd.MapValue()), + } +} + +func (c *mapConverter) PBValueOf(v reflect.Value) pref.Value { + if v.Type() != c.goType { + panic(fmt.Sprintf("invalid type: got %v, want %v", v.Type(), c.goType)) + } + return pref.ValueOfMap(&mapReflect{v, c.keyConv, c.valConv}) +} + +func (c *mapConverter) GoValueOf(v pref.Value) reflect.Value { + return v.Map().(*mapReflect).v +} + +func (c *mapConverter) IsValidPB(v pref.Value) bool { + mapv, ok := v.Interface().(*mapReflect) + if !ok { + return false + } + return mapv.v.Type() == c.goType +} + +func (c *mapConverter) IsValidGo(v reflect.Value) bool { + return v.IsValid() && v.Type() == c.goType +} + +func (c *mapConverter) New() pref.Value { + return c.PBValueOf(reflect.MakeMap(c.goType)) +} + +func (c *mapConverter) Zero() pref.Value { + return c.PBValueOf(reflect.Zero(c.goType)) +} + +type mapReflect struct { + v reflect.Value // map[K]V + keyConv Converter + valConv Converter +} + +func (ms *mapReflect) Len() int { + return ms.v.Len() +} +func (ms *mapReflect) Has(k pref.MapKey) bool { + rk := ms.keyConv.GoValueOf(k.Value()) + rv := ms.v.MapIndex(rk) + return rv.IsValid() +} +func (ms *mapReflect) Get(k pref.MapKey) pref.Value { + rk := ms.keyConv.GoValueOf(k.Value()) + rv := ms.v.MapIndex(rk) + if !rv.IsValid() { + return pref.Value{} + } + return ms.valConv.PBValueOf(rv) +} +func (ms *mapReflect) Set(k pref.MapKey, v pref.Value) { + rk := ms.keyConv.GoValueOf(k.Value()) + rv := ms.valConv.GoValueOf(v) + ms.v.SetMapIndex(rk, rv) +} +func (ms *mapReflect) Clear(k pref.MapKey) { + rk := ms.keyConv.GoValueOf(k.Value()) + ms.v.SetMapIndex(rk, reflect.Value{}) +} +func (ms *mapReflect) Mutable(k pref.MapKey) pref.Value { + if _, ok := ms.valConv.(*messageConverter); !ok { + panic("invalid Mutable on map with non-message value type") + } + v := ms.Get(k) + if !v.IsValid() { + v = ms.NewValue() + ms.Set(k, v) + } + return v +} +func (ms *mapReflect) Range(f func(pref.MapKey, pref.Value) bool) { + iter := mapRange(ms.v) + for iter.Next() { + k := ms.keyConv.PBValueOf(iter.Key()).MapKey() + v := ms.valConv.PBValueOf(iter.Value()) + if !f(k, v) { + return + } + } +} +func (ms *mapReflect) NewValue() pref.Value { + return ms.valConv.New() +} +func (ms *mapReflect) IsValid() bool { + return !ms.v.IsNil() +} +func (ms *mapReflect) protoUnwrap() interface{} { + return ms.v.Interface() +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/decode.go b/vendor/google.golang.org/protobuf/internal/impl/decode.go new file mode 100644 index 00000000..949dc49a --- /dev/null +++ 
b/vendor/google.golang.org/protobuf/internal/impl/decode.go @@ -0,0 +1,276 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "math/bits" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoiface" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +var errDecode = errors.New("cannot parse invalid wire-format data") + +type unmarshalOptions struct { + flags protoiface.UnmarshalInputFlags + resolver interface { + FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) + FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) + } +} + +func (o unmarshalOptions) Options() proto.UnmarshalOptions { + return proto.UnmarshalOptions{ + Merge: true, + AllowPartial: true, + DiscardUnknown: o.DiscardUnknown(), + Resolver: o.resolver, + } +} + +func (o unmarshalOptions) DiscardUnknown() bool { return o.flags&piface.UnmarshalDiscardUnknown != 0 } + +func (o unmarshalOptions) IsDefault() bool { + return o.flags == 0 && o.resolver == preg.GlobalTypes +} + +var lazyUnmarshalOptions = unmarshalOptions{ + resolver: preg.GlobalTypes, +} + +type unmarshalOutput struct { + n int // number of bytes consumed + initialized bool +} + +// unmarshal is protoreflect.Methods.Unmarshal. +func (mi *MessageInfo) unmarshal(in piface.UnmarshalInput) (piface.UnmarshalOutput, error) { + var p pointer + if ms, ok := in.Message.(*messageState); ok { + p = ms.pointer() + } else { + p = in.Message.(*messageReflectWrapper).pointer() + } + out, err := mi.unmarshalPointer(in.Buf, p, 0, unmarshalOptions{ + flags: in.Flags, + resolver: in.Resolver, + }) + var flags piface.UnmarshalOutputFlags + if out.initialized { + flags |= piface.UnmarshalInitialized + } + return piface.UnmarshalOutput{ + Flags: flags, + }, err +} + +// errUnknown is returned during unmarshaling to indicate a parse error that +// should result in a field being placed in the unknown fields section (for example, +// when the wire type doesn't match) as opposed to the entire unmarshal operation +// failing (for example, when a field extends past the available input). +// +// This is a sentinel error which should never be visible to the user. +var errUnknown = errors.New("unknown") + +func (mi *MessageInfo) unmarshalPointer(b []byte, p pointer, groupTag protowire.Number, opts unmarshalOptions) (out unmarshalOutput, err error) { + mi.init() + if flags.ProtoLegacy && mi.isMessageSet { + return unmarshalMessageSet(mi, b, p, opts) + } + initialized := true + var requiredMask uint64 + var exts *map[int32]ExtensionField + start := len(b) + for len(b) > 0 { + // Parse the tag (field number and wire type). 
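The decode.go hunk continues below with a tag-parsing fast path: one- and two-byte varint tags are decoded inline, and only longer tags fall back to protowire.ConsumeVarint. A standalone sketch of that fast path, not part of the vendored file (decodeTagFast is an invented name):

package main

import "fmt"

// decodeTagFast mirrors the inline fast path used by unmarshalPointer: tags that
// fit in one or two varint bytes are decoded directly; anything longer would be
// handed to a full varint decoder such as protowire.ConsumeVarint.
func decodeTagFast(b []byte) (num, wtyp uint64, n int) {
	var tag uint64
	switch {
	case len(b) >= 1 && b[0] < 0x80:
		tag, n = uint64(b[0]), 1
	case len(b) >= 2 && b[1] < 0x80:
		tag, n = uint64(b[0]&0x7f)|uint64(b[1])<<7, 2
	default:
		return 0, 0, -1 // fall back to a general varint decoder
	}
	return tag >> 3, tag & 7, n
}

func main() {
	// Field number 20 with wire type 2 (length-delimited): tag value 20<<3|2 = 162,
	// which encodes as the two-byte varint 0xa2 0x01.
	num, wtyp, n := decodeTagFast([]byte{0xa2, 0x01})
	fmt.Println(num, wtyp, n) // 20 2 2
}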
+ var tag uint64 + if b[0] < 0x80 { + tag = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + tag = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + tag, n = protowire.ConsumeVarint(b) + if n < 0 { + return out, errDecode + } + b = b[n:] + } + var num protowire.Number + if n := tag >> 3; n < uint64(protowire.MinValidNumber) || n > uint64(protowire.MaxValidNumber) { + return out, errDecode + } else { + num = protowire.Number(n) + } + wtyp := protowire.Type(tag & 7) + + if wtyp == protowire.EndGroupType { + if num != groupTag { + return out, errDecode + } + groupTag = 0 + break + } + + var f *coderFieldInfo + if int(num) < len(mi.denseCoderFields) { + f = mi.denseCoderFields[num] + } else { + f = mi.coderFields[num] + } + var n int + err := errUnknown + switch { + case f != nil: + if f.funcs.unmarshal == nil { + break + } + var o unmarshalOutput + o, err = f.funcs.unmarshal(b, p.Apply(f.offset), wtyp, f, opts) + n = o.n + if err != nil { + break + } + requiredMask |= f.validation.requiredBit + if f.funcs.isInit != nil && !o.initialized { + initialized = false + } + default: + // Possible extension. + if exts == nil && mi.extensionOffset.IsValid() { + exts = p.Apply(mi.extensionOffset).Extensions() + if *exts == nil { + *exts = make(map[int32]ExtensionField) + } + } + if exts == nil { + break + } + var o unmarshalOutput + o, err = mi.unmarshalExtension(b, num, wtyp, *exts, opts) + if err != nil { + break + } + n = o.n + if !o.initialized { + initialized = false + } + } + if err != nil { + if err != errUnknown { + return out, err + } + n = protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return out, errDecode + } + if !opts.DiscardUnknown() && mi.unknownOffset.IsValid() { + u := mi.mutableUnknownBytes(p) + *u = protowire.AppendTag(*u, num, wtyp) + *u = append(*u, b[:n]...) + } + } + b = b[n:] + } + if groupTag != 0 { + return out, errDecode + } + if mi.numRequiredFields > 0 && bits.OnesCount64(requiredMask) != int(mi.numRequiredFields) { + initialized = false + } + if initialized { + out.initialized = true + } + out.n = start - len(b) + return out, nil +} + +func (mi *MessageInfo) unmarshalExtension(b []byte, num protowire.Number, wtyp protowire.Type, exts map[int32]ExtensionField, opts unmarshalOptions) (out unmarshalOutput, err error) { + x := exts[int32(num)] + xt := x.Type() + if xt == nil { + var err error + xt, err = opts.resolver.FindExtensionByNumber(mi.Desc.FullName(), num) + if err != nil { + if err == preg.NotFound { + return out, errUnknown + } + return out, errors.New("%v: unable to resolve extension %v: %v", mi.Desc.FullName(), num, err) + } + } + xi := getExtensionFieldInfo(xt) + if xi.funcs.unmarshal == nil { + return out, errUnknown + } + if flags.LazyUnmarshalExtensions { + if opts.IsDefault() && x.canLazy(xt) { + out, valid := skipExtension(b, xi, num, wtyp, opts) + switch valid { + case ValidationValid: + if out.initialized { + x.appendLazyBytes(xt, xi, num, wtyp, b[:out.n]) + exts[int32(num)] = x + return out, nil + } + case ValidationInvalid: + return out, errDecode + case ValidationUnknown: + } + } + } + ival := x.Value() + if !ival.IsValid() && xi.unmarshalNeedsValue { + // Create a new message, list, or map value to fill in. + // For enums, create a prototype value to let the unmarshal func know the + // concrete type. 
+ ival = xt.New() + } + v, out, err := xi.funcs.unmarshal(b, ival, num, wtyp, opts) + if err != nil { + return out, err + } + if xi.funcs.isInit == nil { + out.initialized = true + } + x.Set(xt, v) + exts[int32(num)] = x + return out, nil +} + +func skipExtension(b []byte, xi *extensionFieldInfo, num protowire.Number, wtyp protowire.Type, opts unmarshalOptions) (out unmarshalOutput, _ ValidationStatus) { + if xi.validation.mi == nil { + return out, ValidationUnknown + } + xi.validation.mi.init() + switch xi.validation.typ { + case validationTypeMessage: + if wtyp != protowire.BytesType { + return out, ValidationUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return out, ValidationUnknown + } + out, st := xi.validation.mi.validate(v, 0, opts) + out.n = n + return out, st + case validationTypeGroup: + if wtyp != protowire.StartGroupType { + return out, ValidationUnknown + } + out, st := xi.validation.mi.validate(b, num, opts) + return out, st + default: + return out, ValidationUnknown + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/encode.go b/vendor/google.golang.org/protobuf/internal/impl/encode.go new file mode 100644 index 00000000..845c67d6 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/encode.go @@ -0,0 +1,201 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "math" + "sort" + "sync/atomic" + + "google.golang.org/protobuf/internal/flags" + proto "google.golang.org/protobuf/proto" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +type marshalOptions struct { + flags piface.MarshalInputFlags +} + +func (o marshalOptions) Options() proto.MarshalOptions { + return proto.MarshalOptions{ + AllowPartial: true, + Deterministic: o.Deterministic(), + UseCachedSize: o.UseCachedSize(), + } +} + +func (o marshalOptions) Deterministic() bool { return o.flags&piface.MarshalDeterministic != 0 } +func (o marshalOptions) UseCachedSize() bool { return o.flags&piface.MarshalUseCachedSize != 0 } + +// size is protoreflect.Methods.Size. 
+func (mi *MessageInfo) size(in piface.SizeInput) piface.SizeOutput { + var p pointer + if ms, ok := in.Message.(*messageState); ok { + p = ms.pointer() + } else { + p = in.Message.(*messageReflectWrapper).pointer() + } + size := mi.sizePointer(p, marshalOptions{ + flags: in.Flags, + }) + return piface.SizeOutput{Size: size} +} + +func (mi *MessageInfo) sizePointer(p pointer, opts marshalOptions) (size int) { + mi.init() + if p.IsNil() { + return 0 + } + if opts.UseCachedSize() && mi.sizecacheOffset.IsValid() { + if size := atomic.LoadInt32(p.Apply(mi.sizecacheOffset).Int32()); size >= 0 { + return int(size) + } + } + return mi.sizePointerSlow(p, opts) +} + +func (mi *MessageInfo) sizePointerSlow(p pointer, opts marshalOptions) (size int) { + if flags.ProtoLegacy && mi.isMessageSet { + size = sizeMessageSet(mi, p, opts) + if mi.sizecacheOffset.IsValid() { + atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size)) + } + return size + } + if mi.extensionOffset.IsValid() { + e := p.Apply(mi.extensionOffset).Extensions() + size += mi.sizeExtensions(e, opts) + } + for _, f := range mi.orderedCoderFields { + if f.funcs.size == nil { + continue + } + fptr := p.Apply(f.offset) + if f.isPointer && fptr.Elem().IsNil() { + continue + } + size += f.funcs.size(fptr, f, opts) + } + if mi.unknownOffset.IsValid() { + if u := mi.getUnknownBytes(p); u != nil { + size += len(*u) + } + } + if mi.sizecacheOffset.IsValid() { + if size > math.MaxInt32 { + // The size is too large for the int32 sizecache field. + // We will need to recompute the size when encoding; + // unfortunately expensive, but better than invalid output. + atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), -1) + } else { + atomic.StoreInt32(p.Apply(mi.sizecacheOffset).Int32(), int32(size)) + } + } + return size +} + +// marshal is protoreflect.Methods.Marshal. +func (mi *MessageInfo) marshal(in piface.MarshalInput) (out piface.MarshalOutput, err error) { + var p pointer + if ms, ok := in.Message.(*messageState); ok { + p = ms.pointer() + } else { + p = in.Message.(*messageReflectWrapper).pointer() + } + b, err := mi.marshalAppendPointer(in.Buf, p, marshalOptions{ + flags: in.Flags, + }) + return piface.MarshalOutput{Buf: b}, err +} + +func (mi *MessageInfo) marshalAppendPointer(b []byte, p pointer, opts marshalOptions) ([]byte, error) { + mi.init() + if p.IsNil() { + return b, nil + } + if flags.ProtoLegacy && mi.isMessageSet { + return marshalMessageSet(mi, b, p, opts) + } + var err error + // The old marshaler encodes extensions at beginning. + if mi.extensionOffset.IsValid() { + e := p.Apply(mi.extensionOffset).Extensions() + // TODO: Special handling for MessageSet? + b, err = mi.appendExtensions(b, e, opts) + if err != nil { + return b, err + } + } + for _, f := range mi.orderedCoderFields { + if f.funcs.marshal == nil { + continue + } + fptr := p.Apply(f.offset) + if f.isPointer && fptr.Elem().IsNil() { + continue + } + b, err = f.funcs.marshal(b, fptr, f, opts) + if err != nil { + return b, err + } + } + if mi.unknownOffset.IsValid() && !mi.isMessageSet { + if u := mi.getUnknownBytes(p); u != nil { + b = append(b, (*u)...) 
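sizePointerSlow above caches the computed size in the message's int32 sizecache field using atomic stores, with -1 as a sentinel when the size does not fit. A small sketch of that caching pattern, not part of the vendored file (cachedSizer and its fields are invented):

package main

import (
	"fmt"
	"math"
	"sync/atomic"
)

// cachedSizer caches its encoded size in an int32 field; -1 means
// "too large for the cache, recompute when encoding".
type cachedSizer struct {
	sizecache int32
	data      []byte
}

func (m *cachedSizer) computeSize() int { return len(m.data) }

func (m *cachedSizer) size(useCached bool) int {
	if useCached {
		if s := atomic.LoadInt32(&m.sizecache); s >= 0 {
			return int(s)
		}
	}
	s := m.computeSize()
	if s > math.MaxInt32 {
		atomic.StoreInt32(&m.sizecache, -1)
	} else {
		atomic.StoreInt32(&m.sizecache, int32(s))
	}
	return s
}

func main() {
	m := &cachedSizer{data: make([]byte, 42)}
	fmt.Println(m.size(false), m.size(true)) // 42 42
}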
+ } + } + return b, nil +} + +func (mi *MessageInfo) sizeExtensions(ext *map[int32]ExtensionField, opts marshalOptions) (n int) { + if ext == nil { + return 0 + } + for _, x := range *ext { + xi := getExtensionFieldInfo(x.Type()) + if xi.funcs.size == nil { + continue + } + n += xi.funcs.size(x.Value(), xi.tagsize, opts) + } + return n +} + +func (mi *MessageInfo) appendExtensions(b []byte, ext *map[int32]ExtensionField, opts marshalOptions) ([]byte, error) { + if ext == nil { + return b, nil + } + + switch len(*ext) { + case 0: + return b, nil + case 1: + // Fast-path for one extension: Don't bother sorting the keys. + var err error + for _, x := range *ext { + xi := getExtensionFieldInfo(x.Type()) + b, err = xi.funcs.marshal(b, x.Value(), xi.wiretag, opts) + } + return b, err + default: + // Sort the keys to provide a deterministic encoding. + // Not sure this is required, but the old code does it. + keys := make([]int, 0, len(*ext)) + for k := range *ext { + keys = append(keys, int(k)) + } + sort.Ints(keys) + var err error + for _, k := range keys { + x := (*ext)[int32(k)] + xi := getExtensionFieldInfo(x.Type()) + b, err = xi.funcs.marshal(b, x.Value(), xi.wiretag, opts) + if err != nil { + return b, err + } + } + return b, nil + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/enum.go b/vendor/google.golang.org/protobuf/internal/impl/enum.go new file mode 100644 index 00000000..8c1eab4b --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/enum.go @@ -0,0 +1,21 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "reflect" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type EnumInfo struct { + GoReflectType reflect.Type // int32 kind + Desc pref.EnumDescriptor +} + +func (t *EnumInfo) New(n pref.EnumNumber) pref.Enum { + return reflect.ValueOf(n).Convert(t.GoReflectType).Interface().(pref.Enum) +} +func (t *EnumInfo) Descriptor() pref.EnumDescriptor { return t.Desc } diff --git a/vendor/google.golang.org/protobuf/internal/impl/extension.go b/vendor/google.golang.org/protobuf/internal/impl/extension.go new file mode 100644 index 00000000..e904fd99 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/extension.go @@ -0,0 +1,156 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "reflect" + "sync" + "sync/atomic" + + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// ExtensionInfo implements ExtensionType. +// +// This type contains a number of exported fields for legacy compatibility. +// The only non-deprecated use of this type is through the methods of the +// ExtensionType interface. +type ExtensionInfo struct { + // An ExtensionInfo may exist in several stages of initialization. + // + // extensionInfoUninitialized: Some or all of the legacy exported + // fields may be set, but none of the unexported fields have been + // initialized. This is the starting state for an ExtensionInfo + // in legacy generated code. + // + // extensionInfoDescInit: The desc field is set, but other unexported fields + // may not be initialized. Legacy exported fields may or may not be set. + // This is the starting state for an ExtensionInfo in newly generated code. 
+ // + // extensionInfoFullInit: The ExtensionInfo is fully initialized. + // This state is only entered after lazy initialization is complete. + init uint32 + mu sync.Mutex + + goType reflect.Type + desc extensionTypeDescriptor + conv Converter + info *extensionFieldInfo // for fast-path method implementations + + // ExtendedType is a typed nil-pointer to the parent message type that + // is being extended. It is possible for this to be unpopulated in v2 + // since the message may no longer implement the MessageV1 interface. + // + // Deprecated: Use the ExtendedType method instead. + ExtendedType piface.MessageV1 + + // ExtensionType is the zero value of the extension type. + // + // For historical reasons, reflect.TypeOf(ExtensionType) and the + // type returned by InterfaceOf may not be identical. + // + // Deprecated: Use InterfaceOf(xt.Zero()) instead. + ExtensionType interface{} + + // Field is the field number of the extension. + // + // Deprecated: Use the Descriptor().Number method instead. + Field int32 + + // Name is the fully qualified name of extension. + // + // Deprecated: Use the Descriptor().FullName method instead. + Name string + + // Tag is the protobuf struct tag used in the v1 API. + // + // Deprecated: Do not use. + Tag string + + // Filename is the proto filename in which the extension is defined. + // + // Deprecated: Use Descriptor().ParentFile().Path() instead. + Filename string +} + +// Stages of initialization: See the ExtensionInfo.init field. +const ( + extensionInfoUninitialized = 0 + extensionInfoDescInit = 1 + extensionInfoFullInit = 2 +) + +func InitExtensionInfo(xi *ExtensionInfo, xd pref.ExtensionDescriptor, goType reflect.Type) { + xi.goType = goType + xi.desc = extensionTypeDescriptor{xd, xi} + xi.init = extensionInfoDescInit +} + +func (xi *ExtensionInfo) New() pref.Value { + return xi.lazyInit().New() +} +func (xi *ExtensionInfo) Zero() pref.Value { + return xi.lazyInit().Zero() +} +func (xi *ExtensionInfo) ValueOf(v interface{}) pref.Value { + return xi.lazyInit().PBValueOf(reflect.ValueOf(v)) +} +func (xi *ExtensionInfo) InterfaceOf(v pref.Value) interface{} { + return xi.lazyInit().GoValueOf(v).Interface() +} +func (xi *ExtensionInfo) IsValidValue(v pref.Value) bool { + return xi.lazyInit().IsValidPB(v) +} +func (xi *ExtensionInfo) IsValidInterface(v interface{}) bool { + return xi.lazyInit().IsValidGo(reflect.ValueOf(v)) +} +func (xi *ExtensionInfo) TypeDescriptor() pref.ExtensionTypeDescriptor { + if atomic.LoadUint32(&xi.init) < extensionInfoDescInit { + xi.lazyInitSlow() + } + return &xi.desc +} + +func (xi *ExtensionInfo) lazyInit() Converter { + if atomic.LoadUint32(&xi.init) < extensionInfoFullInit { + xi.lazyInitSlow() + } + return xi.conv +} + +func (xi *ExtensionInfo) lazyInitSlow() { + xi.mu.Lock() + defer xi.mu.Unlock() + + if xi.init == extensionInfoFullInit { + return + } + defer atomic.StoreUint32(&xi.init, extensionInfoFullInit) + + if xi.desc.ExtensionDescriptor == nil { + xi.initFromLegacy() + } + if !xi.desc.ExtensionDescriptor.IsPlaceholder() { + if xi.ExtensionType == nil { + xi.initToLegacy() + } + xi.conv = NewConverter(xi.goType, xi.desc.ExtensionDescriptor) + xi.info = makeExtensionFieldInfo(xi.desc.ExtensionDescriptor) + xi.info.validation = newValidationInfo(xi.desc.ExtensionDescriptor, xi.goType) + } +} + +type extensionTypeDescriptor struct { + pref.ExtensionDescriptor + xi *ExtensionInfo +} + +func (xtd *extensionTypeDescriptor) Type() pref.ExtensionType { + return xtd.xi +} +func (xtd *extensionTypeDescriptor) 
Descriptor() pref.ExtensionDescriptor { + return xtd.ExtensionDescriptor +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_enum.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_enum.go new file mode 100644 index 00000000..f7d7ffb5 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_enum.go @@ -0,0 +1,219 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + "strings" + "sync" + + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// legacyEnumName returns the name of enums used in legacy code. +// It is neither the protobuf full name nor the qualified Go name, +// but rather an odd hybrid of both. +func legacyEnumName(ed pref.EnumDescriptor) string { + var protoPkg string + enumName := string(ed.FullName()) + if fd := ed.ParentFile(); fd != nil { + protoPkg = string(fd.Package()) + enumName = strings.TrimPrefix(enumName, protoPkg+".") + } + if protoPkg == "" { + return strs.GoCamelCase(enumName) + } + return protoPkg + "." + strs.GoCamelCase(enumName) +} + +// legacyWrapEnum wraps v as a protoreflect.Enum, +// where v must be a int32 kind and not implement the v2 API already. +func legacyWrapEnum(v reflect.Value) pref.Enum { + et := legacyLoadEnumType(v.Type()) + return et.New(pref.EnumNumber(v.Int())) +} + +var legacyEnumTypeCache sync.Map // map[reflect.Type]protoreflect.EnumType + +// legacyLoadEnumType dynamically loads a protoreflect.EnumType for t, +// where t must be an int32 kind and not implement the v2 API already. +func legacyLoadEnumType(t reflect.Type) pref.EnumType { + // Fast-path: check if a EnumType is cached for this concrete type. + if et, ok := legacyEnumTypeCache.Load(t); ok { + return et.(pref.EnumType) + } + + // Slow-path: derive enum descriptor and initialize EnumType. 
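legacyLoadEnumType and the other legacy caches in these files share one sync.Map idiom: a fast-path Load, slow-path construction, then LoadOrStore so that concurrent callers converge on a single cached value. A condensed sketch of the idiom, not part of the vendored file (the string "descriptor" here is a stand-in for the real descriptor types):

package main

import (
	"fmt"
	"reflect"
	"sync"
)

var descCache sync.Map // map[reflect.Type]string

// loadDesc derives a stand-in descriptor for t, caching the result. If two
// goroutines race, LoadOrStore guarantees both end up with the same value.
func loadDesc(t reflect.Type) string {
	if d, ok := descCache.Load(t); ok { // fast path
		return d.(string)
	}
	d := "descriptor for " + t.String() // slow path: derive it
	if d, ok := descCache.LoadOrStore(t, d); ok {
		return d.(string) // another goroutine stored first; use its value
	}
	return d
}

func main() {
	t := reflect.TypeOf(int32(0))
	fmt.Println(loadDesc(t))
	fmt.Println(loadDesc(t)) // served from the cache
}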
+ var et pref.EnumType + ed := LegacyLoadEnumDesc(t) + et = &legacyEnumType{ + desc: ed, + goType: t, + } + if et, ok := legacyEnumTypeCache.LoadOrStore(t, et); ok { + return et.(pref.EnumType) + } + return et +} + +type legacyEnumType struct { + desc pref.EnumDescriptor + goType reflect.Type + m sync.Map // map[protoreflect.EnumNumber]proto.Enum +} + +func (t *legacyEnumType) New(n pref.EnumNumber) pref.Enum { + if e, ok := t.m.Load(n); ok { + return e.(pref.Enum) + } + e := &legacyEnumWrapper{num: n, pbTyp: t, goTyp: t.goType} + t.m.Store(n, e) + return e +} +func (t *legacyEnumType) Descriptor() pref.EnumDescriptor { + return t.desc +} + +type legacyEnumWrapper struct { + num pref.EnumNumber + pbTyp pref.EnumType + goTyp reflect.Type +} + +func (e *legacyEnumWrapper) Descriptor() pref.EnumDescriptor { + return e.pbTyp.Descriptor() +} +func (e *legacyEnumWrapper) Type() pref.EnumType { + return e.pbTyp +} +func (e *legacyEnumWrapper) Number() pref.EnumNumber { + return e.num +} +func (e *legacyEnumWrapper) ProtoReflect() pref.Enum { + return e +} +func (e *legacyEnumWrapper) protoUnwrap() interface{} { + v := reflect.New(e.goTyp).Elem() + v.SetInt(int64(e.num)) + return v.Interface() +} + +var ( + _ pref.Enum = (*legacyEnumWrapper)(nil) + _ unwrapper = (*legacyEnumWrapper)(nil) +) + +var legacyEnumDescCache sync.Map // map[reflect.Type]protoreflect.EnumDescriptor + +// LegacyLoadEnumDesc returns an EnumDescriptor derived from the Go type, +// which must be an int32 kind and not implement the v2 API already. +// +// This is exported for testing purposes. +func LegacyLoadEnumDesc(t reflect.Type) pref.EnumDescriptor { + // Fast-path: check if an EnumDescriptor is cached for this concrete type. + if ed, ok := legacyEnumDescCache.Load(t); ok { + return ed.(pref.EnumDescriptor) + } + + // Slow-path: initialize EnumDescriptor from the raw descriptor. + ev := reflect.Zero(t).Interface() + if _, ok := ev.(pref.Enum); ok { + panic(fmt.Sprintf("%v already implements proto.Enum", t)) + } + edV1, ok := ev.(enumV1) + if !ok { + return aberrantLoadEnumDesc(t) + } + b, idxs := edV1.EnumDescriptor() + + var ed pref.EnumDescriptor + if len(idxs) == 1 { + ed = legacyLoadFileDesc(b).Enums().Get(idxs[0]) + } else { + md := legacyLoadFileDesc(b).Messages().Get(idxs[0]) + for _, i := range idxs[1 : len(idxs)-1] { + md = md.Messages().Get(i) + } + ed = md.Enums().Get(idxs[len(idxs)-1]) + } + if ed, ok := legacyEnumDescCache.LoadOrStore(t, ed); ok { + return ed.(protoreflect.EnumDescriptor) + } + return ed +} + +var aberrantEnumDescCache sync.Map // map[reflect.Type]protoreflect.EnumDescriptor + +// aberrantLoadEnumDesc returns an EnumDescriptor derived from the Go type, +// which must not implement protoreflect.Enum or enumV1. +// +// If the type does not implement enumV1, then there is no reliable +// way to derive the original protobuf type information. +// We are unable to use the global enum registry since it is +// unfortunately keyed by the protobuf full name, which we also do not know. +// Thus, this produces some bogus enum descriptor based on the Go type name. +func aberrantLoadEnumDesc(t reflect.Type) pref.EnumDescriptor { + // Fast-path: check if an EnumDescriptor is cached for this concrete type. + if ed, ok := aberrantEnumDescCache.Load(t); ok { + return ed.(pref.EnumDescriptor) + } + + // Slow-path: construct a bogus, but unique EnumDescriptor. 
+ ed := &filedesc.Enum{L2: new(filedesc.EnumL2)} + ed.L0.FullName = AberrantDeriveFullName(t) // e.g., github_com.user.repo.MyEnum + ed.L0.ParentFile = filedesc.SurrogateProto3 + ed.L2.Values.List = append(ed.L2.Values.List, filedesc.EnumValue{}) + + // TODO: Use the presence of a UnmarshalJSON method to determine proto2? + + vd := &ed.L2.Values.List[0] + vd.L0.FullName = ed.L0.FullName + "_UNKNOWN" // e.g., github_com.user.repo.MyEnum_UNKNOWN + vd.L0.ParentFile = ed.L0.ParentFile + vd.L0.Parent = ed + + // TODO: We could use the String method to obtain some enum value names by + // starting at 0 and print the enum until it produces invalid identifiers. + // An exhaustive query is clearly impractical, but can be best-effort. + + if ed, ok := aberrantEnumDescCache.LoadOrStore(t, ed); ok { + return ed.(pref.EnumDescriptor) + } + return ed +} + +// AberrantDeriveFullName derives a fully qualified protobuf name for the given Go type +// The provided name is not guaranteed to be stable nor universally unique. +// It should be sufficiently unique within a program. +// +// This is exported for testing purposes. +func AberrantDeriveFullName(t reflect.Type) pref.FullName { + sanitize := func(r rune) rune { + switch { + case r == '/': + return '.' + case 'a' <= r && r <= 'z', 'A' <= r && r <= 'Z', '0' <= r && r <= '9': + return r + default: + return '_' + } + } + prefix := strings.Map(sanitize, t.PkgPath()) + suffix := strings.Map(sanitize, t.Name()) + if suffix == "" { + suffix = fmt.Sprintf("UnknownX%X", reflect.ValueOf(t).Pointer()) + } + + ss := append(strings.Split(prefix, "."), suffix) + for i, s := range ss { + if s == "" || ('0' <= s[0] && s[0] <= '9') { + ss[i] = "x" + s + } + } + return pref.FullName(strings.Join(ss, ".")) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go new file mode 100644 index 00000000..e3fb0b57 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_export.go @@ -0,0 +1,92 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "encoding/binary" + "encoding/json" + "hash/crc32" + "math" + "reflect" + + "google.golang.org/protobuf/internal/errors" + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// These functions exist to support exported APIs in generated protobufs. +// While these are deprecated, they cannot be removed for compatibility reasons. + +// LegacyEnumName returns the name of enums used in legacy code. +func (Export) LegacyEnumName(ed pref.EnumDescriptor) string { + return legacyEnumName(ed) +} + +// LegacyMessageTypeOf returns the protoreflect.MessageType for m, +// with name used as the message name if necessary. +func (Export) LegacyMessageTypeOf(m piface.MessageV1, name pref.FullName) pref.MessageType { + if mv := (Export{}).protoMessageV2Of(m); mv != nil { + return mv.ProtoReflect().Type() + } + return legacyLoadMessageType(reflect.TypeOf(m), name) +} + +// UnmarshalJSONEnum unmarshals an enum from a JSON-encoded input. +// The input can either be a string representing the enum value by name, +// or a number representing the enum number itself. 
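AberrantDeriveFullName above turns a Go import path and type name into a synthetic protobuf full name: '/' becomes '.', other non-alphanumeric runes become '_', and any segment that is empty or starts with a digit gets an "x" prefix. A standalone sketch reproducing that mapping, not part of the vendored file (deriveFullName is an invented name and omits the empty-type-name fallback):

package main

import (
	"fmt"
	"strings"
)

// deriveFullName mirrors the sanitizing logic of AberrantDeriveFullName for a
// given Go package path and type name.
func deriveFullName(pkgPath, typeName string) string {
	sanitize := func(r rune) rune {
		switch {
		case r == '/':
			return '.'
		case 'a' <= r && r <= 'z', 'A' <= r && r <= 'Z', '0' <= r && r <= '9':
			return r
		default:
			return '_'
		}
	}
	ss := append(strings.Split(strings.Map(sanitize, pkgPath), "."), strings.Map(sanitize, typeName))
	for i, s := range ss {
		if s == "" || ('0' <= s[0] && s[0] <= '9') {
			ss[i] = "x" + s
		}
	}
	return strings.Join(ss, ".")
}

func main() {
	fmt.Println(deriveFullName("github.com/user/repo", "MyEnum"))
	// Output: github_com.user.repo.MyEnum
}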
+func (Export) UnmarshalJSONEnum(ed pref.EnumDescriptor, b []byte) (pref.EnumNumber, error) { + if b[0] == '"' { + var name pref.Name + if err := json.Unmarshal(b, &name); err != nil { + return 0, errors.New("invalid input for enum %v: %s", ed.FullName(), b) + } + ev := ed.Values().ByName(name) + if ev == nil { + return 0, errors.New("invalid value for enum %v: %s", ed.FullName(), name) + } + return ev.Number(), nil + } else { + var num pref.EnumNumber + if err := json.Unmarshal(b, &num); err != nil { + return 0, errors.New("invalid input for enum %v: %s", ed.FullName(), b) + } + return num, nil + } +} + +// CompressGZIP compresses the input as a GZIP-encoded file. +// The current implementation does no compression. +func (Export) CompressGZIP(in []byte) (out []byte) { + // RFC 1952, section 2.3.1. + var gzipHeader = [10]byte{0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff} + + // RFC 1951, section 3.2.4. + var blockHeader [5]byte + const maxBlockSize = math.MaxUint16 + numBlocks := 1 + len(in)/maxBlockSize + + // RFC 1952, section 2.3.1. + var gzipFooter [8]byte + binary.LittleEndian.PutUint32(gzipFooter[0:4], crc32.ChecksumIEEE(in)) + binary.LittleEndian.PutUint32(gzipFooter[4:8], uint32(len(in))) + + // Encode the input without compression using raw DEFLATE blocks. + out = make([]byte, 0, len(gzipHeader)+len(blockHeader)*numBlocks+len(in)+len(gzipFooter)) + out = append(out, gzipHeader[:]...) + for blockHeader[0] == 0 { + blockSize := maxBlockSize + if blockSize > len(in) { + blockHeader[0] = 0x01 // final bit per RFC 1951, section 3.2.3. + blockSize = len(in) + } + binary.LittleEndian.PutUint16(blockHeader[1:3], uint16(blockSize)^0x0000) + binary.LittleEndian.PutUint16(blockHeader[3:5], uint16(blockSize)^0xffff) + out = append(out, blockHeader[:]...) + out = append(out, in[:blockSize]...) + in = in[blockSize:] + } + out = append(out, gzipFooter[:]...) + return out +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go new file mode 100644 index 00000000..49e72316 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_extension.go @@ -0,0 +1,176 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "reflect" + + "google.golang.org/protobuf/internal/descopts" + "google.golang.org/protobuf/internal/encoding/messageset" + ptag "google.golang.org/protobuf/internal/encoding/tag" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/pragma" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +func (xi *ExtensionInfo) initToLegacy() { + xd := xi.desc + var parent piface.MessageV1 + messageName := xd.ContainingMessage().FullName() + if mt, _ := preg.GlobalTypes.FindMessageByName(messageName); mt != nil { + // Create a new parent message and unwrap it if possible. + mv := mt.New().Interface() + t := reflect.TypeOf(mv) + if mv, ok := mv.(unwrapper); ok { + t = reflect.TypeOf(mv.protoUnwrap()) + } + + // Check whether the message implements the legacy v1 Message interface. 
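CompressGZIP above builds the GZIP container by hand, wrapping the input in stored (uncompressed) DEFLATE blocks instead of actually compressing it. The standard library produces an equivalent, though not byte-identical, stream via gzip.NoCompression, which is a quick way to sanity-check the framing; the snippet below is illustrative and not part of the vendored file:

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
)

func main() {
	in := []byte("raw FileDescriptorProto bytes")

	// gzip.NoCompression yields a GZIP stream whose DEFLATE payload consists of
	// stored blocks, the same framing CompressGZIP writes by hand.
	var buf bytes.Buffer
	zw, err := gzip.NewWriterLevel(&buf, gzip.NoCompression)
	if err != nil {
		panic(err)
	}
	if _, err := zw.Write(in); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil {
		panic(err)
	}

	// Round-trip to confirm the container is valid.
	zr, err := gzip.NewReader(&buf)
	if err != nil {
		panic(err)
	}
	out, err := ioutil.ReadAll(zr)
	if err != nil {
		panic(err)
	}
	fmt.Println(bytes.Equal(in, out)) // true
}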
+ mz := reflect.Zero(t).Interface() + if mz, ok := mz.(piface.MessageV1); ok { + parent = mz + } + } + + // Determine the v1 extension type, which is unfortunately not the same as + // the v2 ExtensionType.GoType. + extType := xi.goType + switch extType.Kind() { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + extType = reflect.PtrTo(extType) // T -> *T for singular scalar fields + } + + // Reconstruct the legacy enum full name. + var enumName string + if xd.Kind() == pref.EnumKind { + enumName = legacyEnumName(xd.Enum()) + } + + // Derive the proto file that the extension was declared within. + var filename string + if fd := xd.ParentFile(); fd != nil { + filename = fd.Path() + } + + // For MessageSet extensions, the name used is the parent message. + name := xd.FullName() + if messageset.IsMessageSetExtension(xd) { + name = name.Parent() + } + + xi.ExtendedType = parent + xi.ExtensionType = reflect.Zero(extType).Interface() + xi.Field = int32(xd.Number()) + xi.Name = string(name) + xi.Tag = ptag.Marshal(xd, enumName) + xi.Filename = filename +} + +// initFromLegacy initializes an ExtensionInfo from +// the contents of the deprecated exported fields of the type. +func (xi *ExtensionInfo) initFromLegacy() { + // The v1 API returns "type incomplete" descriptors where only the + // field number is specified. In such a case, use a placeholder. + if xi.ExtendedType == nil || xi.ExtensionType == nil { + xd := placeholderExtension{ + name: pref.FullName(xi.Name), + number: pref.FieldNumber(xi.Field), + } + xi.desc = extensionTypeDescriptor{xd, xi} + return + } + + // Resolve enum or message dependencies. + var ed pref.EnumDescriptor + var md pref.MessageDescriptor + t := reflect.TypeOf(xi.ExtensionType) + isOptional := t.Kind() == reflect.Ptr && t.Elem().Kind() != reflect.Struct + isRepeated := t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 + if isOptional || isRepeated { + t = t.Elem() + } + switch v := reflect.Zero(t).Interface().(type) { + case pref.Enum: + ed = v.Descriptor() + case enumV1: + ed = LegacyLoadEnumDesc(t) + case pref.ProtoMessage: + md = v.ProtoReflect().Descriptor() + case messageV1: + md = LegacyLoadMessageDesc(t) + } + + // Derive basic field information from the struct tag. + var evs pref.EnumValueDescriptors + if ed != nil { + evs = ed.Values() + } + fd := ptag.Unmarshal(xi.Tag, t, evs).(*filedesc.Field) + + // Construct a v2 ExtensionType. + xd := &filedesc.Extension{L2: new(filedesc.ExtensionL2)} + xd.L0.ParentFile = filedesc.SurrogateProto2 + xd.L0.FullName = pref.FullName(xi.Name) + xd.L1.Number = pref.FieldNumber(xi.Field) + xd.L1.Cardinality = fd.L1.Cardinality + xd.L1.Kind = fd.L1.Kind + xd.L2.IsPacked = fd.L1.IsPacked + xd.L2.Default = fd.L1.Default + xd.L1.Extendee = Export{}.MessageDescriptorOf(xi.ExtendedType) + xd.L2.Enum = ed + xd.L2.Message = md + + // Derive real extension field name for MessageSets. 
+ if messageset.IsMessageSet(xd.L1.Extendee) && md.FullName() == xd.L0.FullName { + xd.L0.FullName = xd.L0.FullName.Append(messageset.ExtensionName) + } + + tt := reflect.TypeOf(xi.ExtensionType) + if isOptional { + tt = tt.Elem() + } + xi.goType = tt + xi.desc = extensionTypeDescriptor{xd, xi} +} + +type placeholderExtension struct { + name pref.FullName + number pref.FieldNumber +} + +func (x placeholderExtension) ParentFile() pref.FileDescriptor { return nil } +func (x placeholderExtension) Parent() pref.Descriptor { return nil } +func (x placeholderExtension) Index() int { return 0 } +func (x placeholderExtension) Syntax() pref.Syntax { return 0 } +func (x placeholderExtension) Name() pref.Name { return x.name.Name() } +func (x placeholderExtension) FullName() pref.FullName { return x.name } +func (x placeholderExtension) IsPlaceholder() bool { return true } +func (x placeholderExtension) Options() pref.ProtoMessage { return descopts.Field } +func (x placeholderExtension) Number() pref.FieldNumber { return x.number } +func (x placeholderExtension) Cardinality() pref.Cardinality { return 0 } +func (x placeholderExtension) Kind() pref.Kind { return 0 } +func (x placeholderExtension) HasJSONName() bool { return false } +func (x placeholderExtension) JSONName() string { return "[" + string(x.name) + "]" } +func (x placeholderExtension) TextName() string { return "[" + string(x.name) + "]" } +func (x placeholderExtension) HasPresence() bool { return false } +func (x placeholderExtension) HasOptionalKeyword() bool { return false } +func (x placeholderExtension) IsExtension() bool { return true } +func (x placeholderExtension) IsWeak() bool { return false } +func (x placeholderExtension) IsPacked() bool { return false } +func (x placeholderExtension) IsList() bool { return false } +func (x placeholderExtension) IsMap() bool { return false } +func (x placeholderExtension) MapKey() pref.FieldDescriptor { return nil } +func (x placeholderExtension) MapValue() pref.FieldDescriptor { return nil } +func (x placeholderExtension) HasDefault() bool { return false } +func (x placeholderExtension) Default() pref.Value { return pref.Value{} } +func (x placeholderExtension) DefaultEnumValue() pref.EnumValueDescriptor { return nil } +func (x placeholderExtension) ContainingOneof() pref.OneofDescriptor { return nil } +func (x placeholderExtension) ContainingMessage() pref.MessageDescriptor { return nil } +func (x placeholderExtension) Enum() pref.EnumDescriptor { return nil } +func (x placeholderExtension) Message() pref.MessageDescriptor { return nil } +func (x placeholderExtension) ProtoType(pref.FieldDescriptor) { return } +func (x placeholderExtension) ProtoInternal(pragma.DoNotImplement) { return } diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_file.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_file.go new file mode 100644 index 00000000..9ab09108 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_file.go @@ -0,0 +1,81 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package impl + +import ( + "bytes" + "compress/gzip" + "io/ioutil" + "sync" + + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +// Every enum and message type generated by protoc-gen-go since commit 2fc053c5 +// on February 25th, 2016 has had a method to get the raw descriptor. +// Types that were not generated by protoc-gen-go or were generated prior +// to that version are not supported. +// +// The []byte returned is the encoded form of a FileDescriptorProto message +// compressed using GZIP. The []int is the path from the top-level file +// to the specific message or enum declaration. +type ( + enumV1 interface { + EnumDescriptor() ([]byte, []int) + } + messageV1 interface { + Descriptor() ([]byte, []int) + } +) + +var legacyFileDescCache sync.Map // map[*byte]protoreflect.FileDescriptor + +// legacyLoadFileDesc unmarshals b as a compressed FileDescriptorProto message. +// +// This assumes that b is immutable and that b does not refer to part of a +// concatenated series of GZIP files (which would require shenanigans that +// rely on the concatenation properties of both protobufs and GZIP). +// File descriptors generated by protoc-gen-go do not rely on that property. +func legacyLoadFileDesc(b []byte) protoreflect.FileDescriptor { + // Fast-path: check whether we already have a cached file descriptor. + if fd, ok := legacyFileDescCache.Load(&b[0]); ok { + return fd.(protoreflect.FileDescriptor) + } + + // Slow-path: decompress and unmarshal the file descriptor proto. + zr, err := gzip.NewReader(bytes.NewReader(b)) + if err != nil { + panic(err) + } + b2, err := ioutil.ReadAll(zr) + if err != nil { + panic(err) + } + + fd := filedesc.Builder{ + RawDescriptor: b2, + FileRegistry: resolverOnly{protoregistry.GlobalFiles}, // do not register back to global registry + }.Build().File + if fd, ok := legacyFileDescCache.LoadOrStore(&b[0], fd); ok { + return fd.(protoreflect.FileDescriptor) + } + return fd +} + +type resolverOnly struct { + reg *protoregistry.Files +} + +func (r resolverOnly) FindFileByPath(path string) (protoreflect.FileDescriptor, error) { + return r.reg.FindFileByPath(path) +} +func (r resolverOnly) FindDescriptorByName(name protoreflect.FullName) (protoreflect.Descriptor, error) { + return r.reg.FindDescriptorByName(name) +} +func (resolverOnly) RegisterFile(protoreflect.FileDescriptor) error { + return nil +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go b/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go new file mode 100644 index 00000000..3759b010 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/legacy_message.go @@ -0,0 +1,558 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package impl + +import ( + "fmt" + "reflect" + "strings" + "sync" + + "google.golang.org/protobuf/internal/descopts" + ptag "google.golang.org/protobuf/internal/encoding/tag" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// legacyWrapMessage wraps v as a protoreflect.Message, +// where v must be a *struct kind and not implement the v2 API already. +func legacyWrapMessage(v reflect.Value) pref.Message { + t := v.Type() + if t.Kind() != reflect.Ptr || t.Elem().Kind() != reflect.Struct { + return aberrantMessage{v: v} + } + mt := legacyLoadMessageInfo(t, "") + return mt.MessageOf(v.Interface()) +} + +// legacyLoadMessageType dynamically loads a protoreflect.Type for t, +// where t must be not implement the v2 API already. +// The provided name is used if it cannot be determined from the message. +func legacyLoadMessageType(t reflect.Type, name pref.FullName) protoreflect.MessageType { + if t.Kind() != reflect.Ptr || t.Elem().Kind() != reflect.Struct { + return aberrantMessageType{t} + } + return legacyLoadMessageInfo(t, name) +} + +var legacyMessageTypeCache sync.Map // map[reflect.Type]*MessageInfo + +// legacyLoadMessageInfo dynamically loads a *MessageInfo for t, +// where t must be a *struct kind and not implement the v2 API already. +// The provided name is used if it cannot be determined from the message. +func legacyLoadMessageInfo(t reflect.Type, name pref.FullName) *MessageInfo { + // Fast-path: check if a MessageInfo is cached for this concrete type. + if mt, ok := legacyMessageTypeCache.Load(t); ok { + return mt.(*MessageInfo) + } + + // Slow-path: derive message descriptor and initialize MessageInfo. + mi := &MessageInfo{ + Desc: legacyLoadMessageDesc(t, name), + GoReflectType: t, + } + + var hasMarshal, hasUnmarshal bool + v := reflect.Zero(t).Interface() + if _, hasMarshal = v.(legacyMarshaler); hasMarshal { + mi.methods.Marshal = legacyMarshal + + // We have no way to tell whether the type's Marshal method + // supports deterministic serialization or not, but this + // preserves the v1 implementation's behavior of always + // calling Marshal methods when present. + mi.methods.Flags |= piface.SupportMarshalDeterministic + } + if _, hasUnmarshal = v.(legacyUnmarshaler); hasUnmarshal { + mi.methods.Unmarshal = legacyUnmarshal + } + if _, hasMerge := v.(legacyMerger); hasMerge || (hasMarshal && hasUnmarshal) { + mi.methods.Merge = legacyMerge + } + + if mi, ok := legacyMessageTypeCache.LoadOrStore(t, mi); ok { + return mi.(*MessageInfo) + } + return mi +} + +var legacyMessageDescCache sync.Map // map[reflect.Type]protoreflect.MessageDescriptor + +// LegacyLoadMessageDesc returns an MessageDescriptor derived from the Go type, +// which should be a *struct kind and must not implement the v2 API already. +// +// This is exported for testing purposes. +func LegacyLoadMessageDesc(t reflect.Type) pref.MessageDescriptor { + return legacyLoadMessageDesc(t, "") +} +func legacyLoadMessageDesc(t reflect.Type, name pref.FullName) pref.MessageDescriptor { + // Fast-path: check if a MessageDescriptor is cached for this concrete type. 
+ if mi, ok := legacyMessageDescCache.Load(t); ok { + return mi.(pref.MessageDescriptor) + } + + // Slow-path: initialize MessageDescriptor from the raw descriptor. + mv := reflect.Zero(t).Interface() + if _, ok := mv.(pref.ProtoMessage); ok { + panic(fmt.Sprintf("%v already implements proto.Message", t)) + } + mdV1, ok := mv.(messageV1) + if !ok { + return aberrantLoadMessageDesc(t, name) + } + + // If this is a dynamic message type where there isn't a 1-1 mapping between + // Go and protobuf types, calling the Descriptor method on the zero value of + // the message type isn't likely to work. If it panics, swallow the panic and + // continue as if the Descriptor method wasn't present. + b, idxs := func() ([]byte, []int) { + defer func() { + recover() + }() + return mdV1.Descriptor() + }() + if b == nil { + return aberrantLoadMessageDesc(t, name) + } + + // If the Go type has no fields, then this might be a proto3 empty message + // from before the size cache was added. If there are any fields, check to + // see that at least one of them looks like something we generated. + if t.Elem().Kind() == reflect.Struct { + if nfield := t.Elem().NumField(); nfield > 0 { + hasProtoField := false + for i := 0; i < nfield; i++ { + f := t.Elem().Field(i) + if f.Tag.Get("protobuf") != "" || f.Tag.Get("protobuf_oneof") != "" || strings.HasPrefix(f.Name, "XXX_") { + hasProtoField = true + break + } + } + if !hasProtoField { + return aberrantLoadMessageDesc(t, name) + } + } + } + + md := legacyLoadFileDesc(b).Messages().Get(idxs[0]) + for _, i := range idxs[1:] { + md = md.Messages().Get(i) + } + if name != "" && md.FullName() != name { + panic(fmt.Sprintf("mismatching message name: got %v, want %v", md.FullName(), name)) + } + if md, ok := legacyMessageDescCache.LoadOrStore(t, md); ok { + return md.(protoreflect.MessageDescriptor) + } + return md +} + +var ( + aberrantMessageDescLock sync.Mutex + aberrantMessageDescCache map[reflect.Type]protoreflect.MessageDescriptor +) + +// aberrantLoadMessageDesc returns an MessageDescriptor derived from the Go type, +// which must not implement protoreflect.ProtoMessage or messageV1. +// +// This is a best-effort derivation of the message descriptor using the protobuf +// tags on the struct fields. +func aberrantLoadMessageDesc(t reflect.Type, name pref.FullName) pref.MessageDescriptor { + aberrantMessageDescLock.Lock() + defer aberrantMessageDescLock.Unlock() + if aberrantMessageDescCache == nil { + aberrantMessageDescCache = make(map[reflect.Type]protoreflect.MessageDescriptor) + } + return aberrantLoadMessageDescReentrant(t, name) +} +func aberrantLoadMessageDescReentrant(t reflect.Type, name pref.FullName) pref.MessageDescriptor { + // Fast-path: check if an MessageDescriptor is cached for this concrete type. + if md, ok := aberrantMessageDescCache[t]; ok { + return md + } + + // Slow-path: construct a descriptor from the Go struct type (best-effort). + // Cache the MessageDescriptor early on so that we can resolve internal + // cyclic references. + md := &filedesc.Message{L2: new(filedesc.MessageL2)} + md.L0.FullName = aberrantDeriveMessageName(t, name) + md.L0.ParentFile = filedesc.SurrogateProto2 + aberrantMessageDescCache[t] = md + + if t.Kind() != reflect.Ptr || t.Elem().Kind() != reflect.Struct { + return md + } + + // Try to determine if the message is using proto3 by checking scalars. 
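The loop that follows inspects the protobuf struct tags of legacy generated types, switching the surrogate parent file to proto3 when a scalar field or an explicit proto3 marker appears in a tag. A small sketch of reading those tags with reflection, not part of the vendored file (legacyMsg is a hypothetical struct shaped like old generated code):

package main

import (
	"fmt"
	"reflect"
	"strings"
)

// legacyMsg stands in for a legacy generated message; the tag layout mirrors
// what aberrantLoadMessageDescReentrant inspects.
type legacyMsg struct {
	Name string `protobuf:"bytes,1,opt,name=name,proto3"`
	Id   int32  `protobuf:"varint,2,opt,name=id,proto3"`
}

func main() {
	t := reflect.TypeOf(legacyMsg{})
	for i := 0; i < t.NumField(); i++ {
		tag := t.Field(i).Tag.Get("protobuf")
		isProto3 := false
		for _, s := range strings.Split(tag, ",") {
			if s == "proto3" {
				isProto3 = true
			}
		}
		fmt.Println(t.Field(i).Name, tag != "", isProto3)
	}
	// Output:
	// Name true true
	// Id true true
}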
+ for i := 0; i < t.Elem().NumField(); i++ { + f := t.Elem().Field(i) + if tag := f.Tag.Get("protobuf"); tag != "" { + switch f.Type.Kind() { + case reflect.Bool, reflect.Int32, reflect.Int64, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String: + md.L0.ParentFile = filedesc.SurrogateProto3 + } + for _, s := range strings.Split(tag, ",") { + if s == "proto3" { + md.L0.ParentFile = filedesc.SurrogateProto3 + } + } + } + } + + // Obtain a list of oneof wrapper types. + var oneofWrappers []reflect.Type + for _, method := range []string{"XXX_OneofFuncs", "XXX_OneofWrappers"} { + if fn, ok := t.MethodByName(method); ok { + for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) { + if vs, ok := v.Interface().([]interface{}); ok { + for _, v := range vs { + oneofWrappers = append(oneofWrappers, reflect.TypeOf(v)) + } + } + } + } + } + + // Obtain a list of the extension ranges. + if fn, ok := t.MethodByName("ExtensionRangeArray"); ok { + vs := fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))})[0] + for i := 0; i < vs.Len(); i++ { + v := vs.Index(i) + md.L2.ExtensionRanges.List = append(md.L2.ExtensionRanges.List, [2]pref.FieldNumber{ + pref.FieldNumber(v.FieldByName("Start").Int()), + pref.FieldNumber(v.FieldByName("End").Int() + 1), + }) + md.L2.ExtensionRangeOptions = append(md.L2.ExtensionRangeOptions, nil) + } + } + + // Derive the message fields by inspecting the struct fields. + for i := 0; i < t.Elem().NumField(); i++ { + f := t.Elem().Field(i) + if tag := f.Tag.Get("protobuf"); tag != "" { + tagKey := f.Tag.Get("protobuf_key") + tagVal := f.Tag.Get("protobuf_val") + aberrantAppendField(md, f.Type, tag, tagKey, tagVal) + } + if tag := f.Tag.Get("protobuf_oneof"); tag != "" { + n := len(md.L2.Oneofs.List) + md.L2.Oneofs.List = append(md.L2.Oneofs.List, filedesc.Oneof{}) + od := &md.L2.Oneofs.List[n] + od.L0.FullName = md.FullName().Append(pref.Name(tag)) + od.L0.ParentFile = md.L0.ParentFile + od.L0.Parent = md + od.L0.Index = n + + for _, t := range oneofWrappers { + if t.Implements(f.Type) { + f := t.Elem().Field(0) + if tag := f.Tag.Get("protobuf"); tag != "" { + aberrantAppendField(md, f.Type, tag, "", "") + fd := &md.L2.Fields.List[len(md.L2.Fields.List)-1] + fd.L1.ContainingOneof = od + od.L1.Fields.List = append(od.L1.Fields.List, fd) + } + } + } + } + } + + return md +} + +func aberrantDeriveMessageName(t reflect.Type, name pref.FullName) pref.FullName { + if name.IsValid() { + return name + } + func() { + defer func() { recover() }() // swallow possible nil panics + if m, ok := reflect.Zero(t).Interface().(interface{ XXX_MessageName() string }); ok { + name = pref.FullName(m.XXX_MessageName()) + } + }() + if name.IsValid() { + return name + } + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + return AberrantDeriveFullName(t) +} + +func aberrantAppendField(md *filedesc.Message, goType reflect.Type, tag, tagKey, tagVal string) { + t := goType + isOptional := t.Kind() == reflect.Ptr && t.Elem().Kind() != reflect.Struct + isRepeated := t.Kind() == reflect.Slice && t.Elem().Kind() != reflect.Uint8 + if isOptional || isRepeated { + t = t.Elem() + } + fd := ptag.Unmarshal(tag, t, placeholderEnumValues{}).(*filedesc.Field) + + // Append field descriptor to the message. 
+ n := len(md.L2.Fields.List) + md.L2.Fields.List = append(md.L2.Fields.List, *fd) + fd = &md.L2.Fields.List[n] + fd.L0.FullName = md.FullName().Append(fd.Name()) + fd.L0.ParentFile = md.L0.ParentFile + fd.L0.Parent = md + fd.L0.Index = n + + if fd.L1.IsWeak || fd.L1.HasPacked { + fd.L1.Options = func() pref.ProtoMessage { + opts := descopts.Field.ProtoReflect().New() + if fd.L1.IsWeak { + opts.Set(opts.Descriptor().Fields().ByName("weak"), protoreflect.ValueOfBool(true)) + } + if fd.L1.HasPacked { + opts.Set(opts.Descriptor().Fields().ByName("packed"), protoreflect.ValueOfBool(fd.L1.IsPacked)) + } + return opts.Interface() + } + } + + // Populate Enum and Message. + if fd.Enum() == nil && fd.Kind() == pref.EnumKind { + switch v := reflect.Zero(t).Interface().(type) { + case pref.Enum: + fd.L1.Enum = v.Descriptor() + default: + fd.L1.Enum = LegacyLoadEnumDesc(t) + } + } + if fd.Message() == nil && (fd.Kind() == pref.MessageKind || fd.Kind() == pref.GroupKind) { + switch v := reflect.Zero(t).Interface().(type) { + case pref.ProtoMessage: + fd.L1.Message = v.ProtoReflect().Descriptor() + case messageV1: + fd.L1.Message = LegacyLoadMessageDesc(t) + default: + if t.Kind() == reflect.Map { + n := len(md.L1.Messages.List) + md.L1.Messages.List = append(md.L1.Messages.List, filedesc.Message{L2: new(filedesc.MessageL2)}) + md2 := &md.L1.Messages.List[n] + md2.L0.FullName = md.FullName().Append(pref.Name(strs.MapEntryName(string(fd.Name())))) + md2.L0.ParentFile = md.L0.ParentFile + md2.L0.Parent = md + md2.L0.Index = n + + md2.L1.IsMapEntry = true + md2.L2.Options = func() pref.ProtoMessage { + opts := descopts.Message.ProtoReflect().New() + opts.Set(opts.Descriptor().Fields().ByName("map_entry"), protoreflect.ValueOfBool(true)) + return opts.Interface() + } + + aberrantAppendField(md2, t.Key(), tagKey, "", "") + aberrantAppendField(md2, t.Elem(), tagVal, "", "") + + fd.L1.Message = md2 + break + } + fd.L1.Message = aberrantLoadMessageDescReentrant(t, "") + } + } +} + +type placeholderEnumValues struct { + protoreflect.EnumValueDescriptors +} + +func (placeholderEnumValues) ByNumber(n pref.EnumNumber) pref.EnumValueDescriptor { + return filedesc.PlaceholderEnumValue(pref.FullName(fmt.Sprintf("UNKNOWN_%d", n))) +} + +// legacyMarshaler is the proto.Marshaler interface superseded by protoiface.Methoder. +type legacyMarshaler interface { + Marshal() ([]byte, error) +} + +// legacyUnmarshaler is the proto.Unmarshaler interface superseded by protoiface.Methoder. +type legacyUnmarshaler interface { + Unmarshal([]byte) error +} + +// legacyMerger is the proto.Merger interface superseded by protoiface.Methoder. +type legacyMerger interface { + Merge(protoiface.MessageV1) +} + +var aberrantProtoMethods = &piface.Methods{ + Marshal: legacyMarshal, + Unmarshal: legacyUnmarshal, + Merge: legacyMerge, + + // We have no way to tell whether the type's Marshal method + // supports deterministic serialization or not, but this + // preserves the v1 implementation's behavior of always + // calling Marshal methods when present. + Flags: piface.SupportMarshalDeterministic, +} + +func legacyMarshal(in piface.MarshalInput) (piface.MarshalOutput, error) { + v := in.Message.(unwrapper).protoUnwrap() + marshaler, ok := v.(legacyMarshaler) + if !ok { + return piface.MarshalOutput{}, errors.New("%T does not implement Marshal", v) + } + out, err := marshaler.Marshal() + if in.Buf != nil { + out = append(in.Buf, out...) 
+ } + return piface.MarshalOutput{ + Buf: out, + }, err +} + +func legacyUnmarshal(in piface.UnmarshalInput) (piface.UnmarshalOutput, error) { + v := in.Message.(unwrapper).protoUnwrap() + unmarshaler, ok := v.(legacyUnmarshaler) + if !ok { + return piface.UnmarshalOutput{}, errors.New("%T does not implement Unmarshal", v) + } + return piface.UnmarshalOutput{}, unmarshaler.Unmarshal(in.Buf) +} + +func legacyMerge(in piface.MergeInput) piface.MergeOutput { + // Check whether this supports the legacy merger. + dstv := in.Destination.(unwrapper).protoUnwrap() + merger, ok := dstv.(legacyMerger) + if ok { + merger.Merge(Export{}.ProtoMessageV1Of(in.Source)) + return piface.MergeOutput{Flags: piface.MergeComplete} + } + + // If legacy merger is unavailable, implement merge in terms of + // a marshal and unmarshal operation. + srcv := in.Source.(unwrapper).protoUnwrap() + marshaler, ok := srcv.(legacyMarshaler) + if !ok { + return piface.MergeOutput{} + } + dstv = in.Destination.(unwrapper).protoUnwrap() + unmarshaler, ok := dstv.(legacyUnmarshaler) + if !ok { + return piface.MergeOutput{} + } + b, err := marshaler.Marshal() + if err != nil { + return piface.MergeOutput{} + } + err = unmarshaler.Unmarshal(b) + if err != nil { + return piface.MergeOutput{} + } + return piface.MergeOutput{Flags: piface.MergeComplete} +} + +// aberrantMessageType implements MessageType for all types other than pointer-to-struct. +type aberrantMessageType struct { + t reflect.Type +} + +func (mt aberrantMessageType) New() pref.Message { + if mt.t.Kind() == reflect.Ptr { + return aberrantMessage{reflect.New(mt.t.Elem())} + } + return aberrantMessage{reflect.Zero(mt.t)} +} +func (mt aberrantMessageType) Zero() pref.Message { + return aberrantMessage{reflect.Zero(mt.t)} +} +func (mt aberrantMessageType) GoType() reflect.Type { + return mt.t +} +func (mt aberrantMessageType) Descriptor() pref.MessageDescriptor { + return LegacyLoadMessageDesc(mt.t) +} + +// aberrantMessage implements Message for all types other than pointer-to-struct. +// +// When the underlying type implements legacyMarshaler or legacyUnmarshaler, +// the aberrant Message can be marshaled or unmarshaled. Otherwise, there is +// not much that can be done with values of this type. +type aberrantMessage struct { + v reflect.Value +} + +// Reset implements the v1 proto.Message.Reset method. 
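When a legacy type has no Merge method, legacyMerge above falls back to marshaling the source and unmarshaling into the destination, relying on unmarshal having merge semantics. A toy illustration of that fallback, not part of the vendored file (blob and its wire format are invented for the example):

package main

import (
	"fmt"
	"strings"
)

// blob is a toy message: its "wire format" is a comma-separated list, and
// Unmarshal appends (merges) rather than replacing, like protobuf unmarshal.
type blob struct{ items []string }

func (b *blob) Marshal() ([]byte, error) { return []byte(strings.Join(b.items, ",")), nil }

func (b *blob) Unmarshal(p []byte) error {
	if len(p) > 0 {
		b.items = append(b.items, strings.Split(string(p), ",")...)
	}
	return nil
}

// mergeViaRoundTrip is the same fallback legacyMerge uses: marshal the source,
// then unmarshal the bytes into the destination.
func mergeViaRoundTrip(dst, src *blob) error {
	p, err := src.Marshal()
	if err != nil {
		return err
	}
	return dst.Unmarshal(p)
}

func main() {
	dst := &blob{items: []string{"a"}}
	src := &blob{items: []string{"b", "c"}}
	if err := mergeViaRoundTrip(dst, src); err != nil {
		panic(err)
	}
	fmt.Println(dst.items) // [a b c]
}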
+func (m aberrantMessage) Reset() { + if mr, ok := m.v.Interface().(interface{ Reset() }); ok { + mr.Reset() + return + } + if m.v.Kind() == reflect.Ptr && !m.v.IsNil() { + m.v.Elem().Set(reflect.Zero(m.v.Type().Elem())) + } +} + +func (m aberrantMessage) ProtoReflect() pref.Message { + return m +} + +func (m aberrantMessage) Descriptor() pref.MessageDescriptor { + return LegacyLoadMessageDesc(m.v.Type()) +} +func (m aberrantMessage) Type() pref.MessageType { + return aberrantMessageType{m.v.Type()} +} +func (m aberrantMessage) New() pref.Message { + if m.v.Type().Kind() == reflect.Ptr { + return aberrantMessage{reflect.New(m.v.Type().Elem())} + } + return aberrantMessage{reflect.Zero(m.v.Type())} +} +func (m aberrantMessage) Interface() pref.ProtoMessage { + return m +} +func (m aberrantMessage) Range(f func(pref.FieldDescriptor, pref.Value) bool) { + return +} +func (m aberrantMessage) Has(pref.FieldDescriptor) bool { + return false +} +func (m aberrantMessage) Clear(pref.FieldDescriptor) { + panic("invalid Message.Clear on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) Get(fd pref.FieldDescriptor) pref.Value { + if fd.Default().IsValid() { + return fd.Default() + } + panic("invalid Message.Get on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) Set(pref.FieldDescriptor, pref.Value) { + panic("invalid Message.Set on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) Mutable(pref.FieldDescriptor) pref.Value { + panic("invalid Message.Mutable on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) NewField(pref.FieldDescriptor) pref.Value { + panic("invalid Message.NewField on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) WhichOneof(pref.OneofDescriptor) pref.FieldDescriptor { + panic("invalid Message.WhichOneof descriptor on " + string(m.Descriptor().FullName())) +} +func (m aberrantMessage) GetUnknown() pref.RawFields { + return nil +} +func (m aberrantMessage) SetUnknown(pref.RawFields) { + // SetUnknown discards its input on messages which don't support unknown field storage. +} +func (m aberrantMessage) IsValid() bool { + if m.v.Kind() == reflect.Ptr { + return !m.v.IsNil() + } + return false +} +func (m aberrantMessage) ProtoMethods() *piface.Methods { + return aberrantProtoMethods +} +func (m aberrantMessage) protoUnwrap() interface{} { + return m.v.Interface() +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/merge.go b/vendor/google.golang.org/protobuf/internal/impl/merge.go new file mode 100644 index 00000000..c65bbc04 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/merge.go @@ -0,0 +1,176 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + + "google.golang.org/protobuf/proto" + pref "google.golang.org/protobuf/reflect/protoreflect" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +type mergeOptions struct{} + +func (o mergeOptions) Merge(dst, src proto.Message) { + proto.Merge(dst, src) +} + +// merge is protoreflect.Methods.Merge. 
+func (mi *MessageInfo) merge(in piface.MergeInput) piface.MergeOutput { + dp, ok := mi.getPointer(in.Destination) + if !ok { + return piface.MergeOutput{} + } + sp, ok := mi.getPointer(in.Source) + if !ok { + return piface.MergeOutput{} + } + mi.mergePointer(dp, sp, mergeOptions{}) + return piface.MergeOutput{Flags: piface.MergeComplete} +} + +func (mi *MessageInfo) mergePointer(dst, src pointer, opts mergeOptions) { + mi.init() + if dst.IsNil() { + panic(fmt.Sprintf("invalid value: merging into nil message")) + } + if src.IsNil() { + return + } + for _, f := range mi.orderedCoderFields { + if f.funcs.merge == nil { + continue + } + sfptr := src.Apply(f.offset) + if f.isPointer && sfptr.Elem().IsNil() { + continue + } + f.funcs.merge(dst.Apply(f.offset), sfptr, f, opts) + } + if mi.extensionOffset.IsValid() { + sext := src.Apply(mi.extensionOffset).Extensions() + dext := dst.Apply(mi.extensionOffset).Extensions() + if *dext == nil { + *dext = make(map[int32]ExtensionField) + } + for num, sx := range *sext { + xt := sx.Type() + xi := getExtensionFieldInfo(xt) + if xi.funcs.merge == nil { + continue + } + dx := (*dext)[num] + var dv pref.Value + if dx.Type() == sx.Type() { + dv = dx.Value() + } + if !dv.IsValid() && xi.unmarshalNeedsValue { + dv = xt.New() + } + dv = xi.funcs.merge(dv, sx.Value(), opts) + dx.Set(sx.Type(), dv) + (*dext)[num] = dx + } + } + if mi.unknownOffset.IsValid() { + su := mi.getUnknownBytes(src) + if su != nil && len(*su) > 0 { + du := mi.mutableUnknownBytes(dst) + *du = append(*du, *su...) + } + } +} + +func mergeScalarValue(dst, src pref.Value, opts mergeOptions) pref.Value { + return src +} + +func mergeBytesValue(dst, src pref.Value, opts mergeOptions) pref.Value { + return pref.ValueOfBytes(append(emptyBuf[:], src.Bytes()...)) +} + +func mergeListValue(dst, src pref.Value, opts mergeOptions) pref.Value { + dstl := dst.List() + srcl := src.List() + for i, llen := 0, srcl.Len(); i < llen; i++ { + dstl.Append(srcl.Get(i)) + } + return dst +} + +func mergeBytesListValue(dst, src pref.Value, opts mergeOptions) pref.Value { + dstl := dst.List() + srcl := src.List() + for i, llen := 0, srcl.Len(); i < llen; i++ { + sb := srcl.Get(i).Bytes() + db := append(emptyBuf[:], sb...) 
+ dstl.Append(pref.ValueOfBytes(db)) + } + return dst +} + +func mergeMessageListValue(dst, src pref.Value, opts mergeOptions) pref.Value { + dstl := dst.List() + srcl := src.List() + for i, llen := 0, srcl.Len(); i < llen; i++ { + sm := srcl.Get(i).Message() + dm := proto.Clone(sm.Interface()).ProtoReflect() + dstl.Append(pref.ValueOfMessage(dm)) + } + return dst +} + +func mergeMessageValue(dst, src pref.Value, opts mergeOptions) pref.Value { + opts.Merge(dst.Message().Interface(), src.Message().Interface()) + return dst +} + +func mergeMessage(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + if f.mi != nil { + if dst.Elem().IsNil() { + dst.SetPointer(pointerOfValue(reflect.New(f.mi.GoReflectType.Elem()))) + } + f.mi.mergePointer(dst.Elem(), src.Elem(), opts) + } else { + dm := dst.AsValueOf(f.ft).Elem() + sm := src.AsValueOf(f.ft).Elem() + if dm.IsNil() { + dm.Set(reflect.New(f.ft.Elem())) + } + opts.Merge(asMessage(dm), asMessage(sm)) + } +} + +func mergeMessageSlice(dst, src pointer, f *coderFieldInfo, opts mergeOptions) { + for _, sp := range src.PointerSlice() { + dm := reflect.New(f.ft.Elem().Elem()) + if f.mi != nil { + f.mi.mergePointer(pointerOfValue(dm), sp, opts) + } else { + opts.Merge(asMessage(dm), asMessage(sp.AsValueOf(f.ft.Elem().Elem()))) + } + dst.AppendPointerSlice(pointerOfValue(dm)) + } +} + +func mergeBytes(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Bytes() = append(emptyBuf[:], *src.Bytes()...) +} + +func mergeBytesNoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Bytes() + if len(v) > 0 { + *dst.Bytes() = append(emptyBuf[:], v...) + } +} + +func mergeBytesSlice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.BytesSlice() + for _, v := range *src.BytesSlice() { + *ds = append(*ds, append(emptyBuf[:], v...)) + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/merge_gen.go b/vendor/google.golang.org/protobuf/internal/impl/merge_gen.go new file mode 100644 index 00000000..8816c274 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/merge_gen.go @@ -0,0 +1,209 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. + +package impl + +import () + +func mergeBool(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Bool() = *src.Bool() +} + +func mergeBoolNoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Bool() + if v != false { + *dst.Bool() = v + } +} + +func mergeBoolPtr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.BoolPtr() + if p != nil { + v := *p + *dst.BoolPtr() = &v + } +} + +func mergeBoolSlice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.BoolSlice() + ss := src.BoolSlice() + *ds = append(*ds, *ss...) +} + +func mergeInt32(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Int32() = *src.Int32() +} + +func mergeInt32NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Int32() + if v != 0 { + *dst.Int32() = v + } +} + +func mergeInt32Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Int32Ptr() + if p != nil { + v := *p + *dst.Int32Ptr() = &v + } +} + +func mergeInt32Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Int32Slice() + ss := src.Int32Slice() + *ds = append(*ds, *ss...) 
+} + +func mergeUint32(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Uint32() = *src.Uint32() +} + +func mergeUint32NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Uint32() + if v != 0 { + *dst.Uint32() = v + } +} + +func mergeUint32Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Uint32Ptr() + if p != nil { + v := *p + *dst.Uint32Ptr() = &v + } +} + +func mergeUint32Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Uint32Slice() + ss := src.Uint32Slice() + *ds = append(*ds, *ss...) +} + +func mergeInt64(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Int64() = *src.Int64() +} + +func mergeInt64NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Int64() + if v != 0 { + *dst.Int64() = v + } +} + +func mergeInt64Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Int64Ptr() + if p != nil { + v := *p + *dst.Int64Ptr() = &v + } +} + +func mergeInt64Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Int64Slice() + ss := src.Int64Slice() + *ds = append(*ds, *ss...) +} + +func mergeUint64(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Uint64() = *src.Uint64() +} + +func mergeUint64NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Uint64() + if v != 0 { + *dst.Uint64() = v + } +} + +func mergeUint64Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Uint64Ptr() + if p != nil { + v := *p + *dst.Uint64Ptr() = &v + } +} + +func mergeUint64Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Uint64Slice() + ss := src.Uint64Slice() + *ds = append(*ds, *ss...) +} + +func mergeFloat32(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Float32() = *src.Float32() +} + +func mergeFloat32NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Float32() + if v != 0 { + *dst.Float32() = v + } +} + +func mergeFloat32Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Float32Ptr() + if p != nil { + v := *p + *dst.Float32Ptr() = &v + } +} + +func mergeFloat32Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Float32Slice() + ss := src.Float32Slice() + *ds = append(*ds, *ss...) +} + +func mergeFloat64(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.Float64() = *src.Float64() +} + +func mergeFloat64NoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.Float64() + if v != 0 { + *dst.Float64() = v + } +} + +func mergeFloat64Ptr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.Float64Ptr() + if p != nil { + v := *p + *dst.Float64Ptr() = &v + } +} + +func mergeFloat64Slice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.Float64Slice() + ss := src.Float64Slice() + *ds = append(*ds, *ss...) +} + +func mergeString(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + *dst.String() = *src.String() +} + +func mergeStringNoZero(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + v := *src.String() + if v != "" { + *dst.String() = v + } +} + +func mergeStringPtr(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + p := *src.StringPtr() + if p != nil { + v := *p + *dst.StringPtr() = &v + } +} + +func mergeStringSlice(dst, src pointer, _ *coderFieldInfo, _ mergeOptions) { + ds := dst.StringSlice() + ss := src.StringSlice() + *ds = append(*ds, *ss...) 
+} diff --git a/vendor/google.golang.org/protobuf/internal/impl/message.go b/vendor/google.golang.org/protobuf/internal/impl/message.go new file mode 100644 index 00000000..a104e28e --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/message.go @@ -0,0 +1,276 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "sync/atomic" + + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/reflect/protoreflect" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" +) + +// MessageInfo provides protobuf related functionality for a given Go type +// that represents a message. A given instance of MessageInfo is tied to +// exactly one Go type, which must be a pointer to a struct type. +// +// The exported fields must be populated before any methods are called +// and cannot be mutated after set. +type MessageInfo struct { + // GoReflectType is the underlying message Go type and must be populated. + GoReflectType reflect.Type // pointer to struct + + // Desc is the underlying message descriptor type and must be populated. + Desc pref.MessageDescriptor + + // Exporter must be provided in a purego environment in order to provide + // access to unexported fields. + Exporter exporter + + // OneofWrappers is list of pointers to oneof wrapper struct types. + OneofWrappers []interface{} + + initMu sync.Mutex // protects all unexported fields + initDone uint32 + + reflectMessageInfo // for reflection implementation + coderMessageInfo // for fast-path method implementations +} + +// exporter is a function that returns a reference to the ith field of v, +// where v is a pointer to a struct. It returns nil if it does not support +// exporting the requested field (e.g., already exported). +type exporter func(v interface{}, i int) interface{} + +// getMessageInfo returns the MessageInfo for any message type that +// is generated by our implementation of protoc-gen-go (for v2 and on). +// If it is unable to obtain a MessageInfo, it returns nil. +func getMessageInfo(mt reflect.Type) *MessageInfo { + m, ok := reflect.Zero(mt).Interface().(pref.ProtoMessage) + if !ok { + return nil + } + mr, ok := m.ProtoReflect().(interface{ ProtoMessageInfo() *MessageInfo }) + if !ok { + return nil + } + return mr.ProtoMessageInfo() +} + +func (mi *MessageInfo) init() { + // This function is called in the hot path. Inline the sync.Once logic, + // since allocating a closure for Once.Do is expensive. + // Keep init small to ensure that it can be inlined. + if atomic.LoadUint32(&mi.initDone) == 0 { + mi.initOnce() + } +} + +func (mi *MessageInfo) initOnce() { + mi.initMu.Lock() + defer mi.initMu.Unlock() + if mi.initDone == 1 { + return + } + + t := mi.GoReflectType + if t.Kind() != reflect.Ptr && t.Elem().Kind() != reflect.Struct { + panic(fmt.Sprintf("got %v, want *struct kind", t)) + } + t = t.Elem() + + si := mi.makeStructInfo(t) + mi.makeReflectFuncs(t, si) + mi.makeCoderMethods(t, si) + + atomic.StoreUint32(&mi.initDone, 1) +} + +// getPointer returns the pointer for a message, which should be of +// the type of the MessageInfo. If the message is of a different type, +// it returns ok==false. 
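The init/initOnce pair above hand-inlines sync.Once so that the atomic fast path stays small enough to inline at call sites; getPointer follows below. A standalone sketch of the same double-checked pattern, with purely illustrative names (not from the vendored code):

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

type lazy struct {
	mu   sync.Mutex
	done uint32
	v    int
}

func (l *lazy) get() int {
	// Fast path: a single atomic load, cheap enough to inline.
	if atomic.LoadUint32(&l.done) == 0 {
		l.slowInit()
	}
	return l.v
}

func (l *lazy) slowInit() {
	l.mu.Lock()
	defer l.mu.Unlock()
	if l.done == 1 {
		return
	}
	l.v = 42 // expensive initialization would go here
	atomic.StoreUint32(&l.done, 1)
}

func main() {
	var l lazy
	fmt.Println(l.get(), l.get()) // 42 42, initialized exactly once
}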
+func (mi *MessageInfo) getPointer(m pref.Message) (p pointer, ok bool) { + switch m := m.(type) { + case *messageState: + return m.pointer(), m.messageInfo() == mi + case *messageReflectWrapper: + return m.pointer(), m.messageInfo() == mi + } + return pointer{}, false +} + +type ( + SizeCache = int32 + WeakFields = map[int32]protoreflect.ProtoMessage + UnknownFields = unknownFieldsA // TODO: switch to unknownFieldsB + unknownFieldsA = []byte + unknownFieldsB = *[]byte + ExtensionFields = map[int32]ExtensionField +) + +var ( + sizecacheType = reflect.TypeOf(SizeCache(0)) + weakFieldsType = reflect.TypeOf(WeakFields(nil)) + unknownFieldsAType = reflect.TypeOf(unknownFieldsA(nil)) + unknownFieldsBType = reflect.TypeOf(unknownFieldsB(nil)) + extensionFieldsType = reflect.TypeOf(ExtensionFields(nil)) +) + +type structInfo struct { + sizecacheOffset offset + sizecacheType reflect.Type + weakOffset offset + weakType reflect.Type + unknownOffset offset + unknownType reflect.Type + extensionOffset offset + extensionType reflect.Type + + fieldsByNumber map[pref.FieldNumber]reflect.StructField + oneofsByName map[pref.Name]reflect.StructField + oneofWrappersByType map[reflect.Type]pref.FieldNumber + oneofWrappersByNumber map[pref.FieldNumber]reflect.Type +} + +func (mi *MessageInfo) makeStructInfo(t reflect.Type) structInfo { + si := structInfo{ + sizecacheOffset: invalidOffset, + weakOffset: invalidOffset, + unknownOffset: invalidOffset, + extensionOffset: invalidOffset, + + fieldsByNumber: map[pref.FieldNumber]reflect.StructField{}, + oneofsByName: map[pref.Name]reflect.StructField{}, + oneofWrappersByType: map[reflect.Type]pref.FieldNumber{}, + oneofWrappersByNumber: map[pref.FieldNumber]reflect.Type{}, + } + +fieldLoop: + for i := 0; i < t.NumField(); i++ { + switch f := t.Field(i); f.Name { + case genid.SizeCache_goname, genid.SizeCacheA_goname: + if f.Type == sizecacheType { + si.sizecacheOffset = offsetOf(f, mi.Exporter) + si.sizecacheType = f.Type + } + case genid.WeakFields_goname, genid.WeakFieldsA_goname: + if f.Type == weakFieldsType { + si.weakOffset = offsetOf(f, mi.Exporter) + si.weakType = f.Type + } + case genid.UnknownFields_goname, genid.UnknownFieldsA_goname: + if f.Type == unknownFieldsAType || f.Type == unknownFieldsBType { + si.unknownOffset = offsetOf(f, mi.Exporter) + si.unknownType = f.Type + } + case genid.ExtensionFields_goname, genid.ExtensionFieldsA_goname, genid.ExtensionFieldsB_goname: + if f.Type == extensionFieldsType { + si.extensionOffset = offsetOf(f, mi.Exporter) + si.extensionType = f.Type + } + default: + for _, s := range strings.Split(f.Tag.Get("protobuf"), ",") { + if len(s) > 0 && strings.Trim(s, "0123456789") == "" { + n, _ := strconv.ParseUint(s, 10, 64) + si.fieldsByNumber[pref.FieldNumber(n)] = f + continue fieldLoop + } + } + if s := f.Tag.Get("protobuf_oneof"); len(s) > 0 { + si.oneofsByName[pref.Name(s)] = f + continue fieldLoop + } + } + } + + // Derive a mapping of oneof wrappers to fields. 
+ oneofWrappers := mi.OneofWrappers + for _, method := range []string{"XXX_OneofFuncs", "XXX_OneofWrappers"} { + if fn, ok := reflect.PtrTo(t).MethodByName(method); ok { + for _, v := range fn.Func.Call([]reflect.Value{reflect.Zero(fn.Type.In(0))}) { + if vs, ok := v.Interface().([]interface{}); ok { + oneofWrappers = vs + } + } + } + } + for _, v := range oneofWrappers { + tf := reflect.TypeOf(v).Elem() + f := tf.Field(0) + for _, s := range strings.Split(f.Tag.Get("protobuf"), ",") { + if len(s) > 0 && strings.Trim(s, "0123456789") == "" { + n, _ := strconv.ParseUint(s, 10, 64) + si.oneofWrappersByType[tf] = pref.FieldNumber(n) + si.oneofWrappersByNumber[pref.FieldNumber(n)] = tf + break + } + } + } + + return si +} + +func (mi *MessageInfo) New() protoreflect.Message { + return mi.MessageOf(reflect.New(mi.GoReflectType.Elem()).Interface()) +} +func (mi *MessageInfo) Zero() protoreflect.Message { + return mi.MessageOf(reflect.Zero(mi.GoReflectType).Interface()) +} +func (mi *MessageInfo) Descriptor() protoreflect.MessageDescriptor { + return mi.Desc +} +func (mi *MessageInfo) Enum(i int) protoreflect.EnumType { + mi.init() + fd := mi.Desc.Fields().Get(i) + return Export{}.EnumTypeOf(mi.fieldTypes[fd.Number()]) +} +func (mi *MessageInfo) Message(i int) protoreflect.MessageType { + mi.init() + fd := mi.Desc.Fields().Get(i) + switch { + case fd.IsWeak(): + mt, _ := preg.GlobalTypes.FindMessageByName(fd.Message().FullName()) + return mt + case fd.IsMap(): + return mapEntryType{fd.Message(), mi.fieldTypes[fd.Number()]} + default: + return Export{}.MessageTypeOf(mi.fieldTypes[fd.Number()]) + } +} + +type mapEntryType struct { + desc protoreflect.MessageDescriptor + valType interface{} // zero value of enum or message type +} + +func (mt mapEntryType) New() protoreflect.Message { + return nil +} +func (mt mapEntryType) Zero() protoreflect.Message { + return nil +} +func (mt mapEntryType) Descriptor() protoreflect.MessageDescriptor { + return mt.desc +} +func (mt mapEntryType) Enum(i int) protoreflect.EnumType { + fd := mt.desc.Fields().Get(i) + if fd.Enum() == nil { + return nil + } + return Export{}.EnumTypeOf(mt.valType) +} +func (mt mapEntryType) Message(i int) protoreflect.MessageType { + fd := mt.desc.Fields().Get(i) + if fd.Message() == nil { + return nil + } + return Export{}.MessageTypeOf(mt.valType) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go new file mode 100644 index 00000000..9488b726 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/message_reflect.go @@ -0,0 +1,465 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "reflect" + + "google.golang.org/protobuf/internal/detrand" + "google.golang.org/protobuf/internal/pragma" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type reflectMessageInfo struct { + fields map[pref.FieldNumber]*fieldInfo + oneofs map[pref.Name]*oneofInfo + + // fieldTypes contains the zero value of an enum or message field. + // For lists, it contains the element type. + // For maps, it contains the entry value type. + fieldTypes map[pref.FieldNumber]interface{} + + // denseFields is a subset of fields where: + // 0 < fieldDesc.Number() < len(denseFields) + // It provides faster access to the fieldInfo, but may be incomplete. 
+ denseFields []*fieldInfo + + // rangeInfos is a list of all fields (not belonging to a oneof) and oneofs. + rangeInfos []interface{} // either *fieldInfo or *oneofInfo + + getUnknown func(pointer) pref.RawFields + setUnknown func(pointer, pref.RawFields) + extensionMap func(pointer) *extensionMap + + nilMessage atomicNilMessage +} + +// makeReflectFuncs generates the set of functions to support reflection. +func (mi *MessageInfo) makeReflectFuncs(t reflect.Type, si structInfo) { + mi.makeKnownFieldsFunc(si) + mi.makeUnknownFieldsFunc(t, si) + mi.makeExtensionFieldsFunc(t, si) + mi.makeFieldTypes(si) +} + +// makeKnownFieldsFunc generates functions for operations that can be performed +// on each protobuf message field. It takes in a reflect.Type representing the +// Go struct and matches message fields with struct fields. +// +// This code assumes that the struct is well-formed and panics if there are +// any discrepancies. +func (mi *MessageInfo) makeKnownFieldsFunc(si structInfo) { + mi.fields = map[pref.FieldNumber]*fieldInfo{} + md := mi.Desc + fds := md.Fields() + for i := 0; i < fds.Len(); i++ { + fd := fds.Get(i) + fs := si.fieldsByNumber[fd.Number()] + isOneof := fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic() + if isOneof { + fs = si.oneofsByName[fd.ContainingOneof().Name()] + } + var fi fieldInfo + switch { + case fs.Type == nil: + fi = fieldInfoForMissing(fd) // never occurs for officially generated message types + case isOneof: + fi = fieldInfoForOneof(fd, fs, mi.Exporter, si.oneofWrappersByNumber[fd.Number()]) + case fd.IsMap(): + fi = fieldInfoForMap(fd, fs, mi.Exporter) + case fd.IsList(): + fi = fieldInfoForList(fd, fs, mi.Exporter) + case fd.IsWeak(): + fi = fieldInfoForWeakMessage(fd, si.weakOffset) + case fd.Message() != nil: + fi = fieldInfoForMessage(fd, fs, mi.Exporter) + default: + fi = fieldInfoForScalar(fd, fs, mi.Exporter) + } + mi.fields[fd.Number()] = &fi + } + + mi.oneofs = map[pref.Name]*oneofInfo{} + for i := 0; i < md.Oneofs().Len(); i++ { + od := md.Oneofs().Get(i) + mi.oneofs[od.Name()] = makeOneofInfo(od, si, mi.Exporter) + } + + mi.denseFields = make([]*fieldInfo, fds.Len()*2) + for i := 0; i < fds.Len(); i++ { + if fd := fds.Get(i); int(fd.Number()) < len(mi.denseFields) { + mi.denseFields[fd.Number()] = mi.fields[fd.Number()] + } + } + + for i := 0; i < fds.Len(); { + fd := fds.Get(i) + if od := fd.ContainingOneof(); od != nil && !od.IsSynthetic() { + mi.rangeInfos = append(mi.rangeInfos, mi.oneofs[od.Name()]) + i += od.Fields().Len() + } else { + mi.rangeInfos = append(mi.rangeInfos, mi.fields[fd.Number()]) + i++ + } + } + + // Introduce instability to iteration order, but keep it deterministic. + if len(mi.rangeInfos) > 1 && detrand.Bool() { + i := detrand.Intn(len(mi.rangeInfos) - 1) + mi.rangeInfos[i], mi.rangeInfos[i+1] = mi.rangeInfos[i+1], mi.rangeInfos[i] + } +} + +func (mi *MessageInfo) makeUnknownFieldsFunc(t reflect.Type, si structInfo) { + switch { + case si.unknownOffset.IsValid() && si.unknownType == unknownFieldsAType: + // Handle as []byte. + mi.getUnknown = func(p pointer) pref.RawFields { + if p.IsNil() { + return nil + } + return *p.Apply(mi.unknownOffset).Bytes() + } + mi.setUnknown = func(p pointer, b pref.RawFields) { + if p.IsNil() { + panic("invalid SetUnknown on nil Message") + } + *p.Apply(mi.unknownOffset).Bytes() = b + } + case si.unknownOffset.IsValid() && si.unknownType == unknownFieldsBType: + // Handle as *[]byte. 
+ mi.getUnknown = func(p pointer) pref.RawFields { + if p.IsNil() { + return nil + } + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + return nil + } + return **bp + } + mi.setUnknown = func(p pointer, b pref.RawFields) { + if p.IsNil() { + panic("invalid SetUnknown on nil Message") + } + bp := p.Apply(mi.unknownOffset).BytesPtr() + if *bp == nil { + *bp = new([]byte) + } + **bp = b + } + default: + mi.getUnknown = func(pointer) pref.RawFields { + return nil + } + mi.setUnknown = func(p pointer, _ pref.RawFields) { + if p.IsNil() { + panic("invalid SetUnknown on nil Message") + } + } + } +} + +func (mi *MessageInfo) makeExtensionFieldsFunc(t reflect.Type, si structInfo) { + if si.extensionOffset.IsValid() { + mi.extensionMap = func(p pointer) *extensionMap { + if p.IsNil() { + return (*extensionMap)(nil) + } + v := p.Apply(si.extensionOffset).AsValueOf(extensionFieldsType) + return (*extensionMap)(v.Interface().(*map[int32]ExtensionField)) + } + } else { + mi.extensionMap = func(pointer) *extensionMap { + return (*extensionMap)(nil) + } + } +} +func (mi *MessageInfo) makeFieldTypes(si structInfo) { + md := mi.Desc + fds := md.Fields() + for i := 0; i < fds.Len(); i++ { + var ft reflect.Type + fd := fds.Get(i) + fs := si.fieldsByNumber[fd.Number()] + isOneof := fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic() + if isOneof { + fs = si.oneofsByName[fd.ContainingOneof().Name()] + } + var isMessage bool + switch { + case fs.Type == nil: + continue // never occurs for officially generated message types + case isOneof: + if fd.Enum() != nil || fd.Message() != nil { + ft = si.oneofWrappersByNumber[fd.Number()].Field(0).Type + } + case fd.IsMap(): + if fd.MapValue().Enum() != nil || fd.MapValue().Message() != nil { + ft = fs.Type.Elem() + } + isMessage = fd.MapValue().Message() != nil + case fd.IsList(): + if fd.Enum() != nil || fd.Message() != nil { + ft = fs.Type.Elem() + } + isMessage = fd.Message() != nil + case fd.Enum() != nil: + ft = fs.Type + if fd.HasPresence() && ft.Kind() == reflect.Ptr { + ft = ft.Elem() + } + case fd.Message() != nil: + ft = fs.Type + if fd.IsWeak() { + ft = nil + } + isMessage = true + } + if isMessage && ft != nil && ft.Kind() != reflect.Ptr { + ft = reflect.PtrTo(ft) // never occurs for officially generated message types + } + if ft != nil { + if mi.fieldTypes == nil { + mi.fieldTypes = make(map[pref.FieldNumber]interface{}) + } + mi.fieldTypes[fd.Number()] = reflect.Zero(ft).Interface() + } + } +} + +type extensionMap map[int32]ExtensionField + +func (m *extensionMap) Range(f func(pref.FieldDescriptor, pref.Value) bool) { + if m != nil { + for _, x := range *m { + xd := x.Type().TypeDescriptor() + v := x.Value() + if xd.IsList() && v.List().Len() == 0 { + continue + } + if !f(xd, v) { + return + } + } + } +} +func (m *extensionMap) Has(xt pref.ExtensionType) (ok bool) { + if m == nil { + return false + } + xd := xt.TypeDescriptor() + x, ok := (*m)[int32(xd.Number())] + if !ok { + return false + } + switch { + case xd.IsList(): + return x.Value().List().Len() > 0 + case xd.IsMap(): + return x.Value().Map().Len() > 0 + case xd.Message() != nil: + return x.Value().Message().IsValid() + } + return true +} +func (m *extensionMap) Clear(xt pref.ExtensionType) { + delete(*m, int32(xt.TypeDescriptor().Number())) +} +func (m *extensionMap) Get(xt pref.ExtensionType) pref.Value { + xd := xt.TypeDescriptor() + if m != nil { + if x, ok := (*m)[int32(xd.Number())]; ok { + return x.Value() + } + } + return xt.Zero() +} +func (m *extensionMap) 
Set(xt pref.ExtensionType, v pref.Value) { + xd := xt.TypeDescriptor() + isValid := true + switch { + case !xt.IsValidValue(v): + isValid = false + case xd.IsList(): + isValid = v.List().IsValid() + case xd.IsMap(): + isValid = v.Map().IsValid() + case xd.Message() != nil: + isValid = v.Message().IsValid() + } + if !isValid { + panic(fmt.Sprintf("%v: assigning invalid value", xt.TypeDescriptor().FullName())) + } + + if *m == nil { + *m = make(map[int32]ExtensionField) + } + var x ExtensionField + x.Set(xt, v) + (*m)[int32(xd.Number())] = x +} +func (m *extensionMap) Mutable(xt pref.ExtensionType) pref.Value { + xd := xt.TypeDescriptor() + if xd.Kind() != pref.MessageKind && xd.Kind() != pref.GroupKind && !xd.IsList() && !xd.IsMap() { + panic("invalid Mutable on field with non-composite type") + } + if x, ok := (*m)[int32(xd.Number())]; ok { + return x.Value() + } + v := xt.New() + m.Set(xt, v) + return v +} + +// MessageState is a data structure that is nested as the first field in a +// concrete message. It provides a way to implement the ProtoReflect method +// in an allocation-free way without needing to have a shadow Go type generated +// for every message type. This technique only works using unsafe. +// +// +// Example generated code: +// +// type M struct { +// state protoimpl.MessageState +// +// Field1 int32 +// Field2 string +// Field3 *BarMessage +// ... +// } +// +// func (m *M) ProtoReflect() protoreflect.Message { +// mi := &file_fizz_buzz_proto_msgInfos[5] +// if protoimpl.UnsafeEnabled && m != nil { +// ms := protoimpl.X.MessageStateOf(Pointer(m)) +// if ms.LoadMessageInfo() == nil { +// ms.StoreMessageInfo(mi) +// } +// return ms +// } +// return mi.MessageOf(m) +// } +// +// The MessageState type holds a *MessageInfo, which must be atomically set to +// the message info associated with a given message instance. +// By unsafely converting a *M into a *MessageState, the MessageState object +// has access to all the information needed to implement protobuf reflection. +// It has access to the message info as its first field, and a pointer to the +// MessageState is identical to a pointer to the concrete message value. +// +// +// Requirements: +// • The type M must implement protoreflect.ProtoMessage. +// • The address of m must not be nil. +// • The address of m and the address of m.state must be equal, +// even though they are different Go types. +type MessageState struct { + pragma.NoUnkeyedLiterals + pragma.DoNotCompare + pragma.DoNotCopy + + atomicMessageInfo *MessageInfo +} + +type messageState MessageState + +var ( + _ pref.Message = (*messageState)(nil) + _ unwrapper = (*messageState)(nil) +) + +// messageDataType is a tuple of a pointer to the message data and +// a pointer to the message type. It is a generalized way of providing a +// reflective view over a message instance. The disadvantage of this approach +// is the need to allocate this tuple of 16B. +type messageDataType struct { + p pointer + mi *MessageInfo +} + +type ( + messageReflectWrapper messageDataType + messageIfaceWrapper messageDataType +) + +var ( + _ pref.Message = (*messageReflectWrapper)(nil) + _ unwrapper = (*messageReflectWrapper)(nil) + _ pref.ProtoMessage = (*messageIfaceWrapper)(nil) + _ unwrapper = (*messageIfaceWrapper)(nil) +) + +// MessageOf returns a reflective view over a message. The input must be a +// pointer to a named Go struct. If the provided type has a ProtoReflect method, +// it must be implemented by calling this method. 
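The MessageState comment above hinges on the concrete message and its first field sharing an address; MessageOf follows below. A toy illustration of that aliasing idea with hypothetical types (this is a sketch, not the vendored implementation):

package main

import (
	"fmt"
	"unsafe"
)

type header struct{ note string }

// M mirrors the generated-code shape: the header is the first field, so a *M
// and a pointer to its header refer to the same address.
type M struct {
	state  header
	Field1 int32
}

func main() {
	m := &M{Field1: 7}
	h := (*header)(unsafe.Pointer(m)) // alias the first field
	h.note = "attached"
	fmt.Println(m.state.note, m.Field1) // attached 7
}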
+func (mi *MessageInfo) MessageOf(m interface{}) pref.Message { + if reflect.TypeOf(m) != mi.GoReflectType { + panic(fmt.Sprintf("type mismatch: got %T, want %v", m, mi.GoReflectType)) + } + p := pointerOfIface(m) + if p.IsNil() { + return mi.nilMessage.Init(mi) + } + return &messageReflectWrapper{p, mi} +} + +func (m *messageReflectWrapper) pointer() pointer { return m.p } +func (m *messageReflectWrapper) messageInfo() *MessageInfo { return m.mi } + +// Reset implements the v1 proto.Message.Reset method. +func (m *messageIfaceWrapper) Reset() { + if mr, ok := m.protoUnwrap().(interface{ Reset() }); ok { + mr.Reset() + return + } + rv := reflect.ValueOf(m.protoUnwrap()) + if rv.Kind() == reflect.Ptr && !rv.IsNil() { + rv.Elem().Set(reflect.Zero(rv.Type().Elem())) + } +} +func (m *messageIfaceWrapper) ProtoReflect() pref.Message { + return (*messageReflectWrapper)(m) +} +func (m *messageIfaceWrapper) protoUnwrap() interface{} { + return m.p.AsIfaceOf(m.mi.GoReflectType.Elem()) +} + +// checkField verifies that the provided field descriptor is valid. +// Exactly one of the returned values is populated. +func (mi *MessageInfo) checkField(fd pref.FieldDescriptor) (*fieldInfo, pref.ExtensionType) { + var fi *fieldInfo + if n := fd.Number(); 0 < n && int(n) < len(mi.denseFields) { + fi = mi.denseFields[n] + } else { + fi = mi.fields[n] + } + if fi != nil { + if fi.fieldDesc != fd { + if got, want := fd.FullName(), fi.fieldDesc.FullName(); got != want { + panic(fmt.Sprintf("mismatching field: got %v, want %v", got, want)) + } + panic(fmt.Sprintf("mismatching field: %v", fd.FullName())) + } + return fi, nil + } + + if fd.IsExtension() { + if got, want := fd.ContainingMessage().FullName(), mi.Desc.FullName(); got != want { + // TODO: Should this be exact containing message descriptor match? + panic(fmt.Sprintf("extension %v has mismatching containing message: got %v, want %v", fd.FullName(), got, want)) + } + if !mi.Desc.ExtensionRanges().Has(fd.Number()) { + panic(fmt.Sprintf("extension %v extends %v outside the extension range", fd.FullName(), mi.Desc.FullName())) + } + xtd, ok := fd.(pref.ExtensionTypeDescriptor) + if !ok { + panic(fmt.Sprintf("extension %v does not implement protoreflect.ExtensionTypeDescriptor", fd.FullName())) + } + return nil, xtd.Type() + } + panic(fmt.Sprintf("field %v is invalid", fd.FullName())) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/message_reflect_field.go b/vendor/google.golang.org/protobuf/internal/impl/message_reflect_field.go new file mode 100644 index 00000000..343cf872 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/message_reflect_field.go @@ -0,0 +1,543 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "math" + "reflect" + "sync" + + "google.golang.org/protobuf/internal/flags" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" +) + +type fieldInfo struct { + fieldDesc pref.FieldDescriptor + + // These fields are used for protobuf reflection support. + has func(pointer) bool + clear func(pointer) + get func(pointer) pref.Value + set func(pointer, pref.Value) + mutable func(pointer) pref.Value + newMessage func() pref.Message + newField func() pref.Value +} + +func fieldInfoForMissing(fd pref.FieldDescriptor) fieldInfo { + // This never occurs for generated message types. 
+ // It implies that a hand-crafted type has missing Go fields + // for specific protobuf message fields. + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + return false + }, + clear: func(p pointer) { + panic("missing Go struct field for " + string(fd.FullName())) + }, + get: func(p pointer) pref.Value { + return fd.Default() + }, + set: func(p pointer, v pref.Value) { + panic("missing Go struct field for " + string(fd.FullName())) + }, + mutable: func(p pointer) pref.Value { + panic("missing Go struct field for " + string(fd.FullName())) + }, + newMessage: func() pref.Message { + panic("missing Go struct field for " + string(fd.FullName())) + }, + newField: func() pref.Value { + if v := fd.Default(); v.IsValid() { + return v + } + panic("missing Go struct field for " + string(fd.FullName())) + }, + } +} + +func fieldInfoForOneof(fd pref.FieldDescriptor, fs reflect.StructField, x exporter, ot reflect.Type) fieldInfo { + ft := fs.Type + if ft.Kind() != reflect.Interface { + panic(fmt.Sprintf("field %v has invalid type: got %v, want interface kind", fd.FullName(), ft)) + } + if ot.Kind() != reflect.Struct { + panic(fmt.Sprintf("field %v has invalid type: got %v, want struct kind", fd.FullName(), ot)) + } + if !reflect.PtrTo(ot).Implements(ft) { + panic(fmt.Sprintf("field %v has invalid type: %v does not implement %v", fd.FullName(), ot, ft)) + } + conv := NewConverter(ot.Field(0).Type, fd) + isMessage := fd.Message() != nil + + // TODO: Implement unsafe fast path? + fieldOffset := offsetOf(fs, x) + return fieldInfo{ + // NOTE: The logic below intentionally assumes that oneof fields are + // well-formatted. That is, the oneof interface never contains a + // typed nil pointer to one of the wrapper structs. + + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() || rv.Elem().Type().Elem() != ot || rv.Elem().IsNil() { + return false + } + return true + }, + clear: func(p pointer) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() || rv.Elem().Type().Elem() != ot { + // NOTE: We intentionally don't check for rv.Elem().IsNil() + // so that (*OneofWrapperType)(nil) gets cleared to nil. 
+ return + } + rv.Set(reflect.Zero(rv.Type())) + }, + get: func(p pointer) pref.Value { + if p.IsNil() { + return conv.Zero() + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() || rv.Elem().Type().Elem() != ot || rv.Elem().IsNil() { + return conv.Zero() + } + rv = rv.Elem().Elem().Field(0) + return conv.PBValueOf(rv) + }, + set: func(p pointer, v pref.Value) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() || rv.Elem().Type().Elem() != ot || rv.Elem().IsNil() { + rv.Set(reflect.New(ot)) + } + rv = rv.Elem().Elem().Field(0) + rv.Set(conv.GoValueOf(v)) + }, + mutable: func(p pointer) pref.Value { + if !isMessage { + panic(fmt.Sprintf("field %v with invalid Mutable call on field with non-composite type", fd.FullName())) + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() || rv.Elem().Type().Elem() != ot || rv.Elem().IsNil() { + rv.Set(reflect.New(ot)) + } + rv = rv.Elem().Elem().Field(0) + if rv.Kind() == reflect.Ptr && rv.IsNil() { + rv.Set(conv.GoValueOf(pref.ValueOfMessage(conv.New().Message()))) + } + return conv.PBValueOf(rv) + }, + newMessage: func() pref.Message { + return conv.New().Message() + }, + newField: func() pref.Value { + return conv.New() + }, + } +} + +func fieldInfoForMap(fd pref.FieldDescriptor, fs reflect.StructField, x exporter) fieldInfo { + ft := fs.Type + if ft.Kind() != reflect.Map { + panic(fmt.Sprintf("field %v has invalid type: got %v, want map kind", fd.FullName(), ft)) + } + conv := NewConverter(ft, fd) + + // TODO: Implement unsafe fast path? + fieldOffset := offsetOf(fs, x) + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + return rv.Len() > 0 + }, + clear: func(p pointer) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + rv.Set(reflect.Zero(rv.Type())) + }, + get: func(p pointer) pref.Value { + if p.IsNil() { + return conv.Zero() + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.Len() == 0 { + return conv.Zero() + } + return conv.PBValueOf(rv) + }, + set: func(p pointer, v pref.Value) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + pv := conv.GoValueOf(v) + if pv.IsNil() { + panic(fmt.Sprintf("map field %v cannot be set with read-only value", fd.FullName())) + } + rv.Set(pv) + }, + mutable: func(p pointer) pref.Value { + v := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if v.IsNil() { + v.Set(reflect.MakeMap(fs.Type)) + } + return conv.PBValueOf(v) + }, + newField: func() pref.Value { + return conv.New() + }, + } +} + +func fieldInfoForList(fd pref.FieldDescriptor, fs reflect.StructField, x exporter) fieldInfo { + ft := fs.Type + if ft.Kind() != reflect.Slice { + panic(fmt.Sprintf("field %v has invalid type: got %v, want slice kind", fd.FullName(), ft)) + } + conv := NewConverter(reflect.PtrTo(ft), fd) + + // TODO: Implement unsafe fast path? 
+ fieldOffset := offsetOf(fs, x) + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + return rv.Len() > 0 + }, + clear: func(p pointer) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + rv.Set(reflect.Zero(rv.Type())) + }, + get: func(p pointer) pref.Value { + if p.IsNil() { + return conv.Zero() + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type) + if rv.Elem().Len() == 0 { + return conv.Zero() + } + return conv.PBValueOf(rv) + }, + set: func(p pointer, v pref.Value) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + pv := conv.GoValueOf(v) + if pv.IsNil() { + panic(fmt.Sprintf("list field %v cannot be set with read-only value", fd.FullName())) + } + rv.Set(pv.Elem()) + }, + mutable: func(p pointer) pref.Value { + v := p.Apply(fieldOffset).AsValueOf(fs.Type) + return conv.PBValueOf(v) + }, + newField: func() pref.Value { + return conv.New() + }, + } +} + +var ( + nilBytes = reflect.ValueOf([]byte(nil)) + emptyBytes = reflect.ValueOf([]byte{}) +) + +func fieldInfoForScalar(fd pref.FieldDescriptor, fs reflect.StructField, x exporter) fieldInfo { + ft := fs.Type + nullable := fd.HasPresence() + isBytes := ft.Kind() == reflect.Slice && ft.Elem().Kind() == reflect.Uint8 + if nullable { + if ft.Kind() != reflect.Ptr && ft.Kind() != reflect.Slice { + // This never occurs for generated message types. + // Despite the protobuf type system specifying presence, + // the Go field type cannot represent it. + nullable = false + } + if ft.Kind() == reflect.Ptr { + ft = ft.Elem() + } + } + conv := NewConverter(ft, fd) + + // TODO: Implement unsafe fast path? + fieldOffset := offsetOf(fs, x) + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if nullable { + return !rv.IsNil() + } + switch rv.Kind() { + case reflect.Bool: + return rv.Bool() + case reflect.Int32, reflect.Int64: + return rv.Int() != 0 + case reflect.Uint32, reflect.Uint64: + return rv.Uint() != 0 + case reflect.Float32, reflect.Float64: + return rv.Float() != 0 || math.Signbit(rv.Float()) + case reflect.String, reflect.Slice: + return rv.Len() > 0 + default: + panic(fmt.Sprintf("field %v has invalid type: %v", fd.FullName(), rv.Type())) // should never happen + } + }, + clear: func(p pointer) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + rv.Set(reflect.Zero(rv.Type())) + }, + get: func(p pointer) pref.Value { + if p.IsNil() { + return conv.Zero() + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if nullable { + if rv.IsNil() { + return conv.Zero() + } + if rv.Kind() == reflect.Ptr { + rv = rv.Elem() + } + } + return conv.PBValueOf(rv) + }, + set: func(p pointer, v pref.Value) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if nullable && rv.Kind() == reflect.Ptr { + if rv.IsNil() { + rv.Set(reflect.New(ft)) + } + rv = rv.Elem() + } + rv.Set(conv.GoValueOf(v)) + if isBytes && rv.Len() == 0 { + if nullable { + rv.Set(emptyBytes) // preserve presence + } else { + rv.Set(nilBytes) // do not preserve presence + } + } + }, + newField: func() pref.Value { + return conv.New() + }, + } +} + +func fieldInfoForWeakMessage(fd pref.FieldDescriptor, weakOffset offset) fieldInfo { + if !flags.ProtoLegacy { + panic("no support for proto1 weak fields") + } + + var once sync.Once + var messageType pref.MessageType + lazyInit := func() { + once.Do(func() { + messageName := fd.Message().FullName() + 
messageType, _ = preg.GlobalTypes.FindMessageByName(messageName) + if messageType == nil { + panic(fmt.Sprintf("weak message %v for field %v is not linked in", messageName, fd.FullName())) + } + }) + } + + num := fd.Number() + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + _, ok := p.Apply(weakOffset).WeakFields().get(num) + return ok + }, + clear: func(p pointer) { + p.Apply(weakOffset).WeakFields().clear(num) + }, + get: func(p pointer) pref.Value { + lazyInit() + if p.IsNil() { + return pref.ValueOfMessage(messageType.Zero()) + } + m, ok := p.Apply(weakOffset).WeakFields().get(num) + if !ok { + return pref.ValueOfMessage(messageType.Zero()) + } + return pref.ValueOfMessage(m.ProtoReflect()) + }, + set: func(p pointer, v pref.Value) { + lazyInit() + m := v.Message() + if m.Descriptor() != messageType.Descriptor() { + if got, want := m.Descriptor().FullName(), messageType.Descriptor().FullName(); got != want { + panic(fmt.Sprintf("field %v has mismatching message descriptor: got %v, want %v", fd.FullName(), got, want)) + } + panic(fmt.Sprintf("field %v has mismatching message descriptor: %v", fd.FullName(), m.Descriptor().FullName())) + } + p.Apply(weakOffset).WeakFields().set(num, m.Interface()) + }, + mutable: func(p pointer) pref.Value { + lazyInit() + fs := p.Apply(weakOffset).WeakFields() + m, ok := fs.get(num) + if !ok { + m = messageType.New().Interface() + fs.set(num, m) + } + return pref.ValueOfMessage(m.ProtoReflect()) + }, + newMessage: func() pref.Message { + lazyInit() + return messageType.New() + }, + newField: func() pref.Value { + lazyInit() + return pref.ValueOfMessage(messageType.New()) + }, + } +} + +func fieldInfoForMessage(fd pref.FieldDescriptor, fs reflect.StructField, x exporter) fieldInfo { + ft := fs.Type + conv := NewConverter(ft, fd) + + // TODO: Implement unsafe fast path? 
+ fieldOffset := offsetOf(fs, x) + return fieldInfo{ + fieldDesc: fd, + has: func(p pointer) bool { + if p.IsNil() { + return false + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if fs.Type.Kind() != reflect.Ptr { + return !isZero(rv) + } + return !rv.IsNil() + }, + clear: func(p pointer) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + rv.Set(reflect.Zero(rv.Type())) + }, + get: func(p pointer) pref.Value { + if p.IsNil() { + return conv.Zero() + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + return conv.PBValueOf(rv) + }, + set: func(p pointer, v pref.Value) { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + rv.Set(conv.GoValueOf(v)) + if fs.Type.Kind() == reflect.Ptr && rv.IsNil() { + panic(fmt.Sprintf("field %v has invalid nil pointer", fd.FullName())) + } + }, + mutable: func(p pointer) pref.Value { + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if fs.Type.Kind() == reflect.Ptr && rv.IsNil() { + rv.Set(conv.GoValueOf(conv.New())) + } + return conv.PBValueOf(rv) + }, + newMessage: func() pref.Message { + return conv.New().Message() + }, + newField: func() pref.Value { + return conv.New() + }, + } +} + +type oneofInfo struct { + oneofDesc pref.OneofDescriptor + which func(pointer) pref.FieldNumber +} + +func makeOneofInfo(od pref.OneofDescriptor, si structInfo, x exporter) *oneofInfo { + oi := &oneofInfo{oneofDesc: od} + if od.IsSynthetic() { + fs := si.fieldsByNumber[od.Fields().Get(0).Number()] + fieldOffset := offsetOf(fs, x) + oi.which = func(p pointer) pref.FieldNumber { + if p.IsNil() { + return 0 + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() { // valid on either *T or []byte + return 0 + } + return od.Fields().Get(0).Number() + } + } else { + fs := si.oneofsByName[od.Name()] + fieldOffset := offsetOf(fs, x) + oi.which = func(p pointer) pref.FieldNumber { + if p.IsNil() { + return 0 + } + rv := p.Apply(fieldOffset).AsValueOf(fs.Type).Elem() + if rv.IsNil() { + return 0 + } + rv = rv.Elem() + if rv.IsNil() { + return 0 + } + return si.oneofWrappersByType[rv.Type().Elem()] + } + } + return oi +} + +// isZero is identical to reflect.Value.IsZero. +// TODO: Remove this when Go1.13 is the minimally supported Go version. 
+func isZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return math.Float64bits(v.Float()) == 0 + case reflect.Complex64, reflect.Complex128: + c := v.Complex() + return math.Float64bits(real(c)) == 0 && math.Float64bits(imag(c)) == 0 + case reflect.Array: + for i := 0; i < v.Len(); i++ { + if !isZero(v.Index(i)) { + return false + } + } + return true + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice, reflect.UnsafePointer: + return v.IsNil() + case reflect.String: + return v.Len() == 0 + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + if !isZero(v.Field(i)) { + return false + } + } + return true + default: + panic(&reflect.ValueError{"reflect.Value.IsZero", v.Kind()}) + } +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/message_reflect_gen.go b/vendor/google.golang.org/protobuf/internal/impl/message_reflect_gen.go new file mode 100644 index 00000000..741d6e5b --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/message_reflect_gen.go @@ -0,0 +1,249 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. + +package impl + +import ( + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +func (m *messageState) Descriptor() protoreflect.MessageDescriptor { + return m.messageInfo().Desc +} +func (m *messageState) Type() protoreflect.MessageType { + return m.messageInfo() +} +func (m *messageState) New() protoreflect.Message { + return m.messageInfo().New() +} +func (m *messageState) Interface() protoreflect.ProtoMessage { + return m.protoUnwrap().(protoreflect.ProtoMessage) +} +func (m *messageState) protoUnwrap() interface{} { + return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem()) +} +func (m *messageState) ProtoMethods() *protoiface.Methods { + m.messageInfo().init() + return &m.messageInfo().methods +} + +// ProtoMessageInfo is a pseudo-internal API for allowing the v1 code +// to be able to retrieve a v2 MessageInfo struct. +// +// WARNING: This method is exempt from the compatibility promise and +// may be removed in the future without warning. 
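The isZero helper above mirrors reflect.Value.IsZero, which the standard library provides since Go 1.13; ProtoMessageInfo follows below. A quick, non-authoritative check of the stdlib equivalent:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	fmt.Println(reflect.ValueOf(0).IsZero())           // true
	fmt.Println(reflect.ValueOf([]byte(nil)).IsZero()) // true: nil slice
	fmt.Println(reflect.ValueOf("x").IsZero())         // false
}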
+func (m *messageState) ProtoMessageInfo() *MessageInfo { + return m.messageInfo() +} + +func (m *messageState) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) { + m.messageInfo().init() + for _, ri := range m.messageInfo().rangeInfos { + switch ri := ri.(type) { + case *fieldInfo: + if ri.has(m.pointer()) { + if !f(ri.fieldDesc, ri.get(m.pointer())) { + return + } + } + case *oneofInfo: + if n := ri.which(m.pointer()); n > 0 { + fi := m.messageInfo().fields[n] + if !f(fi.fieldDesc, fi.get(m.pointer())) { + return + } + } + } + } + m.messageInfo().extensionMap(m.pointer()).Range(f) +} +func (m *messageState) Has(fd protoreflect.FieldDescriptor) bool { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.has(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Has(xt) + } +} +func (m *messageState) Clear(fd protoreflect.FieldDescriptor) { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + fi.clear(m.pointer()) + } else { + m.messageInfo().extensionMap(m.pointer()).Clear(xt) + } +} +func (m *messageState) Get(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.get(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Get(xt) + } +} +func (m *messageState) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + fi.set(m.pointer(), v) + } else { + m.messageInfo().extensionMap(m.pointer()).Set(xt, v) + } +} +func (m *messageState) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.mutable(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Mutable(xt) + } +} +func (m *messageState) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.newField() + } else { + return xt.New() + } +} +func (m *messageState) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor { + m.messageInfo().init() + if oi := m.messageInfo().oneofs[od.Name()]; oi != nil && oi.oneofDesc == od { + return od.Fields().ByNumber(oi.which(m.pointer())) + } + panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName())) +} +func (m *messageState) GetUnknown() protoreflect.RawFields { + m.messageInfo().init() + return m.messageInfo().getUnknown(m.pointer()) +} +func (m *messageState) SetUnknown(b protoreflect.RawFields) { + m.messageInfo().init() + m.messageInfo().setUnknown(m.pointer(), b) +} +func (m *messageState) IsValid() bool { + return !m.pointer().IsNil() +} + +func (m *messageReflectWrapper) Descriptor() protoreflect.MessageDescriptor { + return m.messageInfo().Desc +} +func (m *messageReflectWrapper) Type() protoreflect.MessageType { + return m.messageInfo() +} +func (m *messageReflectWrapper) New() protoreflect.Message { + return m.messageInfo().New() +} +func (m *messageReflectWrapper) Interface() protoreflect.ProtoMessage { + if m, ok := m.protoUnwrap().(protoreflect.ProtoMessage); ok { + return m + } + return (*messageIfaceWrapper)(m) +} +func (m *messageReflectWrapper) protoUnwrap() interface{} { + return m.pointer().AsIfaceOf(m.messageInfo().GoReflectType.Elem()) +} +func (m 
*messageReflectWrapper) ProtoMethods() *protoiface.Methods { + m.messageInfo().init() + return &m.messageInfo().methods +} + +// ProtoMessageInfo is a pseudo-internal API for allowing the v1 code +// to be able to retrieve a v2 MessageInfo struct. +// +// WARNING: This method is exempt from the compatibility promise and +// may be removed in the future without warning. +func (m *messageReflectWrapper) ProtoMessageInfo() *MessageInfo { + return m.messageInfo() +} + +func (m *messageReflectWrapper) Range(f func(protoreflect.FieldDescriptor, protoreflect.Value) bool) { + m.messageInfo().init() + for _, ri := range m.messageInfo().rangeInfos { + switch ri := ri.(type) { + case *fieldInfo: + if ri.has(m.pointer()) { + if !f(ri.fieldDesc, ri.get(m.pointer())) { + return + } + } + case *oneofInfo: + if n := ri.which(m.pointer()); n > 0 { + fi := m.messageInfo().fields[n] + if !f(fi.fieldDesc, fi.get(m.pointer())) { + return + } + } + } + } + m.messageInfo().extensionMap(m.pointer()).Range(f) +} +func (m *messageReflectWrapper) Has(fd protoreflect.FieldDescriptor) bool { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.has(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Has(xt) + } +} +func (m *messageReflectWrapper) Clear(fd protoreflect.FieldDescriptor) { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + fi.clear(m.pointer()) + } else { + m.messageInfo().extensionMap(m.pointer()).Clear(xt) + } +} +func (m *messageReflectWrapper) Get(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.get(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Get(xt) + } +} +func (m *messageReflectWrapper) Set(fd protoreflect.FieldDescriptor, v protoreflect.Value) { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + fi.set(m.pointer(), v) + } else { + m.messageInfo().extensionMap(m.pointer()).Set(xt, v) + } +} +func (m *messageReflectWrapper) Mutable(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.mutable(m.pointer()) + } else { + return m.messageInfo().extensionMap(m.pointer()).Mutable(xt) + } +} +func (m *messageReflectWrapper) NewField(fd protoreflect.FieldDescriptor) protoreflect.Value { + m.messageInfo().init() + if fi, xt := m.messageInfo().checkField(fd); fi != nil { + return fi.newField() + } else { + return xt.New() + } +} +func (m *messageReflectWrapper) WhichOneof(od protoreflect.OneofDescriptor) protoreflect.FieldDescriptor { + m.messageInfo().init() + if oi := m.messageInfo().oneofs[od.Name()]; oi != nil && oi.oneofDesc == od { + return od.Fields().ByNumber(oi.which(m.pointer())) + } + panic("invalid oneof descriptor " + string(od.FullName()) + " for message " + string(m.Descriptor().FullName())) +} +func (m *messageReflectWrapper) GetUnknown() protoreflect.RawFields { + m.messageInfo().init() + return m.messageInfo().getUnknown(m.pointer()) +} +func (m *messageReflectWrapper) SetUnknown(b protoreflect.RawFields) { + m.messageInfo().init() + m.messageInfo().setUnknown(m.pointer(), b) +} +func (m *messageReflectWrapper) IsValid() bool { + return !m.pointer().IsNil() +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go b/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go new file mode 100644 index 
00000000..9e3ed821 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/pointer_reflect.go @@ -0,0 +1,178 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego appengine + +package impl + +import ( + "fmt" + "reflect" + "sync" +) + +const UnsafeEnabled = false + +// Pointer is an opaque pointer type. +type Pointer interface{} + +// offset represents the offset to a struct field, accessible from a pointer. +// The offset is the field index into a struct. +type offset struct { + index int + export exporter +} + +// offsetOf returns a field offset for the struct field. +func offsetOf(f reflect.StructField, x exporter) offset { + if len(f.Index) != 1 { + panic("embedded structs are not supported") + } + if f.PkgPath == "" { + return offset{index: f.Index[0]} // field is already exported + } + if x == nil { + panic("exporter must be provided for unexported field") + } + return offset{index: f.Index[0], export: x} +} + +// IsValid reports whether the offset is valid. +func (f offset) IsValid() bool { return f.index >= 0 } + +// invalidOffset is an invalid field offset. +var invalidOffset = offset{index: -1} + +// zeroOffset is a noop when calling pointer.Apply. +var zeroOffset = offset{index: 0} + +// pointer is an abstract representation of a pointer to a struct or field. +type pointer struct{ v reflect.Value } + +// pointerOf returns p as a pointer. +func pointerOf(p Pointer) pointer { + return pointerOfIface(p) +} + +// pointerOfValue returns v as a pointer. +func pointerOfValue(v reflect.Value) pointer { + return pointer{v: v} +} + +// pointerOfIface returns the pointer portion of an interface. +func pointerOfIface(v interface{}) pointer { + return pointer{v: reflect.ValueOf(v)} +} + +// IsNil reports whether the pointer is nil. +func (p pointer) IsNil() bool { + return p.v.IsNil() +} + +// Apply adds an offset to the pointer to derive a new pointer +// to a specified field. The current pointer must be pointing at a struct. +func (p pointer) Apply(f offset) pointer { + if f.export != nil { + if v := reflect.ValueOf(f.export(p.v.Interface(), f.index)); v.IsValid() { + return pointer{v: v} + } + } + return pointer{v: p.v.Elem().Field(f.index).Addr()} +} + +// AsValueOf treats p as a pointer to an object of type t and returns the value. +// It is equivalent to reflect.ValueOf(p.AsIfaceOf(t)) +func (p pointer) AsValueOf(t reflect.Type) reflect.Value { + if got := p.v.Type().Elem(); got != t { + panic(fmt.Sprintf("invalid type: got %v, want %v", got, t)) + } + return p.v +} + +// AsIfaceOf treats p as a pointer to an object of type t and returns the value. 
+// It is equivalent to p.AsValueOf(t).Interface() +func (p pointer) AsIfaceOf(t reflect.Type) interface{} { + return p.AsValueOf(t).Interface() +} + +func (p pointer) Bool() *bool { return p.v.Interface().(*bool) } +func (p pointer) BoolPtr() **bool { return p.v.Interface().(**bool) } +func (p pointer) BoolSlice() *[]bool { return p.v.Interface().(*[]bool) } +func (p pointer) Int32() *int32 { return p.v.Interface().(*int32) } +func (p pointer) Int32Ptr() **int32 { return p.v.Interface().(**int32) } +func (p pointer) Int32Slice() *[]int32 { return p.v.Interface().(*[]int32) } +func (p pointer) Int64() *int64 { return p.v.Interface().(*int64) } +func (p pointer) Int64Ptr() **int64 { return p.v.Interface().(**int64) } +func (p pointer) Int64Slice() *[]int64 { return p.v.Interface().(*[]int64) } +func (p pointer) Uint32() *uint32 { return p.v.Interface().(*uint32) } +func (p pointer) Uint32Ptr() **uint32 { return p.v.Interface().(**uint32) } +func (p pointer) Uint32Slice() *[]uint32 { return p.v.Interface().(*[]uint32) } +func (p pointer) Uint64() *uint64 { return p.v.Interface().(*uint64) } +func (p pointer) Uint64Ptr() **uint64 { return p.v.Interface().(**uint64) } +func (p pointer) Uint64Slice() *[]uint64 { return p.v.Interface().(*[]uint64) } +func (p pointer) Float32() *float32 { return p.v.Interface().(*float32) } +func (p pointer) Float32Ptr() **float32 { return p.v.Interface().(**float32) } +func (p pointer) Float32Slice() *[]float32 { return p.v.Interface().(*[]float32) } +func (p pointer) Float64() *float64 { return p.v.Interface().(*float64) } +func (p pointer) Float64Ptr() **float64 { return p.v.Interface().(**float64) } +func (p pointer) Float64Slice() *[]float64 { return p.v.Interface().(*[]float64) } +func (p pointer) String() *string { return p.v.Interface().(*string) } +func (p pointer) StringPtr() **string { return p.v.Interface().(**string) } +func (p pointer) StringSlice() *[]string { return p.v.Interface().(*[]string) } +func (p pointer) Bytes() *[]byte { return p.v.Interface().(*[]byte) } +func (p pointer) BytesPtr() **[]byte { return p.v.Interface().(**[]byte) } +func (p pointer) BytesSlice() *[][]byte { return p.v.Interface().(*[][]byte) } +func (p pointer) WeakFields() *weakFields { return (*weakFields)(p.v.Interface().(*WeakFields)) } +func (p pointer) Extensions() *map[int32]ExtensionField { + return p.v.Interface().(*map[int32]ExtensionField) +} + +func (p pointer) Elem() pointer { + return pointer{v: p.v.Elem()} +} + +// PointerSlice copies []*T from p as a new []pointer. +// This behavior differs from the implementation in pointer_unsafe.go. +func (p pointer) PointerSlice() []pointer { + // TODO: reconsider this + if p.v.IsNil() { + return nil + } + n := p.v.Elem().Len() + s := make([]pointer, n) + for i := 0; i < n; i++ { + s[i] = pointer{v: p.v.Elem().Index(i)} + } + return s +} + +// AppendPointerSlice appends v to p, which must be a []*T. +func (p pointer) AppendPointerSlice(v pointer) { + sp := p.v.Elem() + sp.Set(reflect.Append(sp, v.v)) +} + +// SetPointer sets *p to v. 
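// A minimal illustration (not part of the vendored protobuf sources) of the
// reflect-based trick used by the purego accessors above: a field address
// obtained through reflect can be asserted back to a concrete pointer type.
// The struct and field names below are invented for the demo.
package main

import (
	"fmt"
	"reflect"
)

type demoMessage struct {
	Count int32
	Name  string
}

func main() {
	m := &demoMessage{}

	// Hold the message behind reflect.Value, the way the purego pointer type
	// wraps reflect.Value instead of unsafe.Pointer.
	v := reflect.ValueOf(m)

	// Take the address of field 0 (Count) and assert it to *int32,
	// analogous to pointer.Int32() above.
	countPtr := v.Elem().Field(0).Addr().Interface().(*int32)
	*countPtr = 42

	namePtr := v.Elem().Field(1).Addr().Interface().(*string)
	*namePtr = "hello"

	fmt.Println(m.Count, m.Name) // 42 hello
}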
+func (p pointer) SetPointer(v pointer) { + p.v.Elem().Set(v.v) +} + +func (Export) MessageStateOf(p Pointer) *messageState { panic("not supported") } +func (ms *messageState) pointer() pointer { panic("not supported") } +func (ms *messageState) messageInfo() *MessageInfo { panic("not supported") } +func (ms *messageState) LoadMessageInfo() *MessageInfo { panic("not supported") } +func (ms *messageState) StoreMessageInfo(mi *MessageInfo) { panic("not supported") } + +type atomicNilMessage struct { + once sync.Once + m messageReflectWrapper +} + +func (m *atomicNilMessage) Init(mi *MessageInfo) *messageReflectWrapper { + m.once.Do(func() { + m.m.p = pointerOfIface(reflect.Zero(mi.GoReflectType).Interface()) + m.m.mi = mi + }) + return &m.m +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go b/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go new file mode 100644 index 00000000..9ecf23a8 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/pointer_unsafe.go @@ -0,0 +1,174 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego,!appengine + +package impl + +import ( + "reflect" + "sync/atomic" + "unsafe" +) + +const UnsafeEnabled = true + +// Pointer is an opaque pointer type. +type Pointer unsafe.Pointer + +// offset represents the offset to a struct field, accessible from a pointer. +// The offset is the byte offset to the field from the start of the struct. +type offset uintptr + +// offsetOf returns a field offset for the struct field. +func offsetOf(f reflect.StructField, x exporter) offset { + return offset(f.Offset) +} + +// IsValid reports whether the offset is valid. +func (f offset) IsValid() bool { return f != invalidOffset } + +// invalidOffset is an invalid field offset. +var invalidOffset = ^offset(0) + +// zeroOffset is a noop when calling pointer.Apply. +var zeroOffset = offset(0) + +// pointer is a pointer to a message struct or field. +type pointer struct{ p unsafe.Pointer } + +// pointerOf returns p as a pointer. +func pointerOf(p Pointer) pointer { + return pointer{p: unsafe.Pointer(p)} +} + +// pointerOfValue returns v as a pointer. +func pointerOfValue(v reflect.Value) pointer { + return pointer{p: unsafe.Pointer(v.Pointer())} +} + +// pointerOfIface returns the pointer portion of an interface. +func pointerOfIface(v interface{}) pointer { + type ifaceHeader struct { + Type unsafe.Pointer + Data unsafe.Pointer + } + return pointer{p: (*ifaceHeader)(unsafe.Pointer(&v)).Data} +} + +// IsNil reports whether the pointer is nil. +func (p pointer) IsNil() bool { + return p.p == nil +} + +// Apply adds an offset to the pointer to derive a new pointer +// to a specified field. The pointer must be valid and pointing at a struct. +func (p pointer) Apply(f offset) pointer { + if p.IsNil() { + panic("invalid nil pointer") + } + return pointer{p: unsafe.Pointer(uintptr(p.p) + uintptr(f))} +} + +// AsValueOf treats p as a pointer to an object of type t and returns the value. +// It is equivalent to reflect.ValueOf(p.AsIfaceOf(t)) +func (p pointer) AsValueOf(t reflect.Type) reflect.Value { + return reflect.NewAt(t, p.p) +} + +// AsIfaceOf treats p as a pointer to an object of type t and returns the value. +// It is equivalent to p.AsValueOf(t).Interface() +func (p pointer) AsIfaceOf(t reflect.Type) interface{} { + // TODO: Use tricky unsafe magic to directly create ifaceHeader. 
+ return p.AsValueOf(t).Interface() +} + +func (p pointer) Bool() *bool { return (*bool)(p.p) } +func (p pointer) BoolPtr() **bool { return (**bool)(p.p) } +func (p pointer) BoolSlice() *[]bool { return (*[]bool)(p.p) } +func (p pointer) Int32() *int32 { return (*int32)(p.p) } +func (p pointer) Int32Ptr() **int32 { return (**int32)(p.p) } +func (p pointer) Int32Slice() *[]int32 { return (*[]int32)(p.p) } +func (p pointer) Int64() *int64 { return (*int64)(p.p) } +func (p pointer) Int64Ptr() **int64 { return (**int64)(p.p) } +func (p pointer) Int64Slice() *[]int64 { return (*[]int64)(p.p) } +func (p pointer) Uint32() *uint32 { return (*uint32)(p.p) } +func (p pointer) Uint32Ptr() **uint32 { return (**uint32)(p.p) } +func (p pointer) Uint32Slice() *[]uint32 { return (*[]uint32)(p.p) } +func (p pointer) Uint64() *uint64 { return (*uint64)(p.p) } +func (p pointer) Uint64Ptr() **uint64 { return (**uint64)(p.p) } +func (p pointer) Uint64Slice() *[]uint64 { return (*[]uint64)(p.p) } +func (p pointer) Float32() *float32 { return (*float32)(p.p) } +func (p pointer) Float32Ptr() **float32 { return (**float32)(p.p) } +func (p pointer) Float32Slice() *[]float32 { return (*[]float32)(p.p) } +func (p pointer) Float64() *float64 { return (*float64)(p.p) } +func (p pointer) Float64Ptr() **float64 { return (**float64)(p.p) } +func (p pointer) Float64Slice() *[]float64 { return (*[]float64)(p.p) } +func (p pointer) String() *string { return (*string)(p.p) } +func (p pointer) StringPtr() **string { return (**string)(p.p) } +func (p pointer) StringSlice() *[]string { return (*[]string)(p.p) } +func (p pointer) Bytes() *[]byte { return (*[]byte)(p.p) } +func (p pointer) BytesPtr() **[]byte { return (**[]byte)(p.p) } +func (p pointer) BytesSlice() *[][]byte { return (*[][]byte)(p.p) } +func (p pointer) WeakFields() *weakFields { return (*weakFields)(p.p) } +func (p pointer) Extensions() *map[int32]ExtensionField { return (*map[int32]ExtensionField)(p.p) } + +func (p pointer) Elem() pointer { + return pointer{p: *(*unsafe.Pointer)(p.p)} +} + +// PointerSlice loads []*T from p as a []pointer. +// The value returned is aliased with the original slice. +// This behavior differs from the implementation in pointer_reflect.go. +func (p pointer) PointerSlice() []pointer { + // Super-tricky - p should point to a []*T where T is a + // message type. We load it as []pointer. + return *(*[]pointer)(p.p) +} + +// AppendPointerSlice appends v to p, which must be a []*T. +func (p pointer) AppendPointerSlice(v pointer) { + *(*[]pointer)(p.p) = append(*(*[]pointer)(p.p), v) +} + +// SetPointer sets *p to v. +func (p pointer) SetPointer(v pointer) { + *(*unsafe.Pointer)(p.p) = (unsafe.Pointer)(v.p) +} + +// Static check that MessageState does not exceed the size of a pointer. +const _ = uint(unsafe.Sizeof(unsafe.Pointer(nil)) - unsafe.Sizeof(MessageState{})) + +func (Export) MessageStateOf(p Pointer) *messageState { + // Super-tricky - see documentation on MessageState. + return (*messageState)(unsafe.Pointer(p)) +} +func (ms *messageState) pointer() pointer { + // Super-tricky - see documentation on MessageState. 
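// A minimal illustration (not part of the vendored protobuf sources) of the
// unsafe fast path above: record a struct field's byte offset once, then add
// it to the base pointer to reach the field, as offsetOf and pointer.Apply do.
// Assumes Go 1.17+ for unsafe.Add; the demo struct is invented.
package main

import (
	"fmt"
	"reflect"
	"unsafe"
)

type demoMessage struct {
	ID   int64
	Name string
}

func main() {
	m := &demoMessage{ID: 7}

	// The byte offset of Name within demoMessage, computed via reflection.
	nameField, _ := reflect.TypeOf(demoMessage{}).FieldByName("Name")
	off := nameField.Offset

	// Apply the offset to the base pointer and view the result as *string,
	// the unsafe analogue of pointer.Apply followed by pointer.String().
	base := unsafe.Pointer(m)
	namePtr := (*string)(unsafe.Add(base, off))
	*namePtr = "set via offset"

	fmt.Println(m.ID, m.Name) // 7 set via offset
}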
+ return pointer{p: unsafe.Pointer(ms)} +} +func (ms *messageState) messageInfo() *MessageInfo { + mi := ms.LoadMessageInfo() + if mi == nil { + panic("invalid nil message info; this suggests memory corruption due to a race or shallow copy on the message struct") + } + return mi +} +func (ms *messageState) LoadMessageInfo() *MessageInfo { + return (*MessageInfo)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(&ms.atomicMessageInfo)))) +} +func (ms *messageState) StoreMessageInfo(mi *MessageInfo) { + atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(&ms.atomicMessageInfo)), unsafe.Pointer(mi)) +} + +type atomicNilMessage struct{ p unsafe.Pointer } // p is a *messageReflectWrapper + +func (m *atomicNilMessage) Init(mi *MessageInfo) *messageReflectWrapper { + if p := atomic.LoadPointer(&m.p); p != nil { + return (*messageReflectWrapper)(p) + } + w := &messageReflectWrapper{mi: mi} + atomic.CompareAndSwapPointer(&m.p, nil, (unsafe.Pointer)(w)) + return (*messageReflectWrapper)(atomic.LoadPointer(&m.p)) +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/validate.go b/vendor/google.golang.org/protobuf/internal/impl/validate.go new file mode 100644 index 00000000..08cfb605 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/validate.go @@ -0,0 +1,576 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + "math" + "math/bits" + "reflect" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/strs" + pref "google.golang.org/protobuf/reflect/protoreflect" + preg "google.golang.org/protobuf/reflect/protoregistry" + piface "google.golang.org/protobuf/runtime/protoiface" +) + +// ValidationStatus is the result of validating the wire-format encoding of a message. +type ValidationStatus int + +const ( + // ValidationUnknown indicates that unmarshaling the message might succeed or fail. + // The validator was unable to render a judgement. + // + // The only causes of this status are an aberrant message type appearing somewhere + // in the message or a failure in the extension resolver. + ValidationUnknown ValidationStatus = iota + 1 + + // ValidationInvalid indicates that unmarshaling the message will fail. + ValidationInvalid + + // ValidationValid indicates that unmarshaling the message will succeed. + ValidationValid +) + +func (v ValidationStatus) String() string { + switch v { + case ValidationUnknown: + return "ValidationUnknown" + case ValidationInvalid: + return "ValidationInvalid" + case ValidationValid: + return "ValidationValid" + default: + return fmt.Sprintf("ValidationStatus(%d)", int(v)) + } +} + +// Validate determines whether the contents of the buffer are a valid wire encoding +// of the message type. +// +// This function is exposed for testing. 
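// A minimal illustration (not part of the vendored protobuf sources) of the
// lazy, race-safe publication pattern behind atomicNilMessage.Init and
// LoadMessageInfo/StoreMessageInfo above, written with the generic
// sync/atomic.Pointer type available since Go 1.19. Names are invented.
package main

import (
	"fmt"
	"sync/atomic"
)

type demoInfo struct{ name string }

// lazyInfo publishes a *demoInfo at most once; racing callers all end up
// observing the same value.
type lazyInfo struct {
	p atomic.Pointer[demoInfo]
}

func (l *lazyInfo) get(name string) *demoInfo {
	if v := l.p.Load(); v != nil {
		return v
	}
	// Only one CompareAndSwap can win; losers simply reload the winner.
	l.p.CompareAndSwap(nil, &demoInfo{name: name})
	return l.p.Load()
}

func main() {
	var l lazyInfo
	a := l.get("first")
	b := l.get("second")
	fmt.Println(a == b, a.name) // true first
}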
+func Validate(mt pref.MessageType, in piface.UnmarshalInput) (out piface.UnmarshalOutput, _ ValidationStatus) { + mi, ok := mt.(*MessageInfo) + if !ok { + return out, ValidationUnknown + } + if in.Resolver == nil { + in.Resolver = preg.GlobalTypes + } + o, st := mi.validate(in.Buf, 0, unmarshalOptions{ + flags: in.Flags, + resolver: in.Resolver, + }) + if o.initialized { + out.Flags |= piface.UnmarshalInitialized + } + return out, st +} + +type validationInfo struct { + mi *MessageInfo + typ validationType + keyType, valType validationType + + // For non-required fields, requiredBit is 0. + // + // For required fields, requiredBit's nth bit is set, where n is a + // unique index in the range [0, MessageInfo.numRequiredFields). + // + // If there are more than 64 required fields, requiredBit is 0. + requiredBit uint64 +} + +type validationType uint8 + +const ( + validationTypeOther validationType = iota + validationTypeMessage + validationTypeGroup + validationTypeMap + validationTypeRepeatedVarint + validationTypeRepeatedFixed32 + validationTypeRepeatedFixed64 + validationTypeVarint + validationTypeFixed32 + validationTypeFixed64 + validationTypeBytes + validationTypeUTF8String + validationTypeMessageSetItem +) + +func newFieldValidationInfo(mi *MessageInfo, si structInfo, fd pref.FieldDescriptor, ft reflect.Type) validationInfo { + var vi validationInfo + switch { + case fd.ContainingOneof() != nil && !fd.ContainingOneof().IsSynthetic(): + switch fd.Kind() { + case pref.MessageKind: + vi.typ = validationTypeMessage + if ot, ok := si.oneofWrappersByNumber[fd.Number()]; ok { + vi.mi = getMessageInfo(ot.Field(0).Type) + } + case pref.GroupKind: + vi.typ = validationTypeGroup + if ot, ok := si.oneofWrappersByNumber[fd.Number()]; ok { + vi.mi = getMessageInfo(ot.Field(0).Type) + } + case pref.StringKind: + if strs.EnforceUTF8(fd) { + vi.typ = validationTypeUTF8String + } + } + default: + vi = newValidationInfo(fd, ft) + } + if fd.Cardinality() == pref.Required { + // Avoid overflow. The required field check is done with a 64-bit mask, with + // any message containing more than 64 required fields always reported as + // potentially uninitialized, so it is not important to get a precise count + // of the required fields past 64. 
+ if mi.numRequiredFields < math.MaxUint8 { + mi.numRequiredFields++ + vi.requiredBit = 1 << (mi.numRequiredFields - 1) + } + } + return vi +} + +func newValidationInfo(fd pref.FieldDescriptor, ft reflect.Type) validationInfo { + var vi validationInfo + switch { + case fd.IsList(): + switch fd.Kind() { + case pref.MessageKind: + vi.typ = validationTypeMessage + if ft.Kind() == reflect.Slice { + vi.mi = getMessageInfo(ft.Elem()) + } + case pref.GroupKind: + vi.typ = validationTypeGroup + if ft.Kind() == reflect.Slice { + vi.mi = getMessageInfo(ft.Elem()) + } + case pref.StringKind: + vi.typ = validationTypeBytes + if strs.EnforceUTF8(fd) { + vi.typ = validationTypeUTF8String + } + default: + switch wireTypes[fd.Kind()] { + case protowire.VarintType: + vi.typ = validationTypeRepeatedVarint + case protowire.Fixed32Type: + vi.typ = validationTypeRepeatedFixed32 + case protowire.Fixed64Type: + vi.typ = validationTypeRepeatedFixed64 + } + } + case fd.IsMap(): + vi.typ = validationTypeMap + switch fd.MapKey().Kind() { + case pref.StringKind: + if strs.EnforceUTF8(fd) { + vi.keyType = validationTypeUTF8String + } + } + switch fd.MapValue().Kind() { + case pref.MessageKind: + vi.valType = validationTypeMessage + if ft.Kind() == reflect.Map { + vi.mi = getMessageInfo(ft.Elem()) + } + case pref.StringKind: + if strs.EnforceUTF8(fd) { + vi.valType = validationTypeUTF8String + } + } + default: + switch fd.Kind() { + case pref.MessageKind: + vi.typ = validationTypeMessage + if !fd.IsWeak() { + vi.mi = getMessageInfo(ft) + } + case pref.GroupKind: + vi.typ = validationTypeGroup + vi.mi = getMessageInfo(ft) + case pref.StringKind: + vi.typ = validationTypeBytes + if strs.EnforceUTF8(fd) { + vi.typ = validationTypeUTF8String + } + default: + switch wireTypes[fd.Kind()] { + case protowire.VarintType: + vi.typ = validationTypeVarint + case protowire.Fixed32Type: + vi.typ = validationTypeFixed32 + case protowire.Fixed64Type: + vi.typ = validationTypeFixed64 + case protowire.BytesType: + vi.typ = validationTypeBytes + } + } + } + return vi +} + +func (mi *MessageInfo) validate(b []byte, groupTag protowire.Number, opts unmarshalOptions) (out unmarshalOutput, result ValidationStatus) { + mi.init() + type validationState struct { + typ validationType + keyType, valType validationType + endGroup protowire.Number + mi *MessageInfo + tail []byte + requiredMask uint64 + } + + // Pre-allocate some slots to avoid repeated slice reallocation. + states := make([]validationState, 0, 16) + states = append(states, validationState{ + typ: validationTypeMessage, + mi: mi, + }) + if groupTag > 0 { + states[0].typ = validationTypeGroup + states[0].endGroup = groupTag + } + initialized := true + start := len(b) +State: + for len(states) > 0 { + st := &states[len(states)-1] + for len(b) > 0 { + // Parse the tag (field number and wire type). 
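// A minimal illustration (not part of the vendored protobuf sources) of the
// required-field bookkeeping above: each of up to 64 required fields gets one
// bit (requiredBit), observed fields are OR-ed into a mask, and a popcount at
// the end tells whether every required field was seen. Field names are invented.
package main

import (
	"fmt"
	"math/bits"
)

func main() {
	const numRequired = 3 // pretend the message declares 3 required fields
	requiredBit := func(i uint) uint64 { return uint64(1) << i }

	var seen uint64
	seen |= requiredBit(0) // required field "a" appeared on the wire
	seen |= requiredBit(2) // required field "c" appeared on the wire

	// Mirrors the bits.OnesCount64(st.requiredMask) check performed after a
	// message or group has been fully walked.
	initialized := bits.OnesCount64(seen) == numRequired
	fmt.Println(initialized) // false: required field "b" never appeared
}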
+ var tag uint64 + if b[0] < 0x80 { + tag = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + tag = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + tag, n = protowire.ConsumeVarint(b) + if n < 0 { + return out, ValidationInvalid + } + b = b[n:] + } + var num protowire.Number + if n := tag >> 3; n < uint64(protowire.MinValidNumber) || n > uint64(protowire.MaxValidNumber) { + return out, ValidationInvalid + } else { + num = protowire.Number(n) + } + wtyp := protowire.Type(tag & 7) + + if wtyp == protowire.EndGroupType { + if st.endGroup == num { + goto PopState + } + return out, ValidationInvalid + } + var vi validationInfo + switch { + case st.typ == validationTypeMap: + switch num { + case genid.MapEntry_Key_field_number: + vi.typ = st.keyType + case genid.MapEntry_Value_field_number: + vi.typ = st.valType + vi.mi = st.mi + vi.requiredBit = 1 + } + case flags.ProtoLegacy && st.mi.isMessageSet: + switch num { + case messageset.FieldItem: + vi.typ = validationTypeMessageSetItem + } + default: + var f *coderFieldInfo + if int(num) < len(st.mi.denseCoderFields) { + f = st.mi.denseCoderFields[num] + } else { + f = st.mi.coderFields[num] + } + if f != nil { + vi = f.validation + if vi.typ == validationTypeMessage && vi.mi == nil { + // Probable weak field. + // + // TODO: Consider storing the results of this lookup somewhere + // rather than recomputing it on every validation. + fd := st.mi.Desc.Fields().ByNumber(num) + if fd == nil || !fd.IsWeak() { + break + } + messageName := fd.Message().FullName() + messageType, err := preg.GlobalTypes.FindMessageByName(messageName) + switch err { + case nil: + vi.mi, _ = messageType.(*MessageInfo) + case preg.NotFound: + vi.typ = validationTypeBytes + default: + return out, ValidationUnknown + } + } + break + } + // Possible extension field. + // + // TODO: We should return ValidationUnknown when: + // 1. The resolver is not frozen. (More extensions may be added to it.) + // 2. The resolver returns preg.NotFound. + // In this case, a type added to the resolver in the future could cause + // unmarshaling to begin failing. Supporting this requires some way to + // determine if the resolver is frozen. + xt, err := opts.resolver.FindExtensionByNumber(st.mi.Desc.FullName(), num) + if err != nil && err != preg.NotFound { + return out, ValidationUnknown + } + if err == nil { + vi = getExtensionFieldInfo(xt).validation + } + } + if vi.requiredBit != 0 { + // Check that the field has a compatible wire type. + // We only need to consider non-repeated field types, + // since repeated fields (and maps) can never be required. 
+ ok := false + switch vi.typ { + case validationTypeVarint: + ok = wtyp == protowire.VarintType + case validationTypeFixed32: + ok = wtyp == protowire.Fixed32Type + case validationTypeFixed64: + ok = wtyp == protowire.Fixed64Type + case validationTypeBytes, validationTypeUTF8String, validationTypeMessage: + ok = wtyp == protowire.BytesType + case validationTypeGroup: + ok = wtyp == protowire.StartGroupType + } + if ok { + st.requiredMask |= vi.requiredBit + } + } + + switch wtyp { + case protowire.VarintType: + if len(b) >= 10 { + switch { + case b[0] < 0x80: + b = b[1:] + case b[1] < 0x80: + b = b[2:] + case b[2] < 0x80: + b = b[3:] + case b[3] < 0x80: + b = b[4:] + case b[4] < 0x80: + b = b[5:] + case b[5] < 0x80: + b = b[6:] + case b[6] < 0x80: + b = b[7:] + case b[7] < 0x80: + b = b[8:] + case b[8] < 0x80: + b = b[9:] + case b[9] < 0x80 && b[9] < 2: + b = b[10:] + default: + return out, ValidationInvalid + } + } else { + switch { + case len(b) > 0 && b[0] < 0x80: + b = b[1:] + case len(b) > 1 && b[1] < 0x80: + b = b[2:] + case len(b) > 2 && b[2] < 0x80: + b = b[3:] + case len(b) > 3 && b[3] < 0x80: + b = b[4:] + case len(b) > 4 && b[4] < 0x80: + b = b[5:] + case len(b) > 5 && b[5] < 0x80: + b = b[6:] + case len(b) > 6 && b[6] < 0x80: + b = b[7:] + case len(b) > 7 && b[7] < 0x80: + b = b[8:] + case len(b) > 8 && b[8] < 0x80: + b = b[9:] + case len(b) > 9 && b[9] < 2: + b = b[10:] + default: + return out, ValidationInvalid + } + } + continue State + case protowire.BytesType: + var size uint64 + if len(b) >= 1 && b[0] < 0x80 { + size = uint64(b[0]) + b = b[1:] + } else if len(b) >= 2 && b[1] < 128 { + size = uint64(b[0]&0x7f) + uint64(b[1])<<7 + b = b[2:] + } else { + var n int + size, n = protowire.ConsumeVarint(b) + if n < 0 { + return out, ValidationInvalid + } + b = b[n:] + } + if size > uint64(len(b)) { + return out, ValidationInvalid + } + v := b[:size] + b = b[size:] + switch vi.typ { + case validationTypeMessage: + if vi.mi == nil { + return out, ValidationUnknown + } + vi.mi.init() + fallthrough + case validationTypeMap: + if vi.mi != nil { + vi.mi.init() + } + states = append(states, validationState{ + typ: vi.typ, + keyType: vi.keyType, + valType: vi.valType, + mi: vi.mi, + tail: b, + }) + b = v + continue State + case validationTypeRepeatedVarint: + // Packed field. + for len(v) > 0 { + _, n := protowire.ConsumeVarint(v) + if n < 0 { + return out, ValidationInvalid + } + v = v[n:] + } + case validationTypeRepeatedFixed32: + // Packed field. + if len(v)%4 != 0 { + return out, ValidationInvalid + } + case validationTypeRepeatedFixed64: + // Packed field. 
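// A minimal illustration (not part of the vendored protobuf sources) of the
// wire format being validated above, using only the public
// google.golang.org/protobuf/encoding/protowire package: a tag packs the
// field number and wire type as num<<3|type, so field 5 with the bytes wire
// type is the single byte 0x2a, which the one-byte fast path above handles.
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// Encode field number 5 as a length-delimited (bytes) field.
	var b []byte
	b = protowire.AppendTag(b, 5, protowire.BytesType)
	b = protowire.AppendBytes(b, []byte("hi"))

	num, typ, n := protowire.ConsumeTag(b)
	if n < 0 {
		panic(protowire.ParseError(n))
	}
	val, m := protowire.ConsumeBytes(b[n:])
	if m < 0 {
		panic(protowire.ParseError(m))
	}
	fmt.Printf("tag=%#x field=%d bytes=%v value=%q\n",
		b[0], num, typ == protowire.BytesType, val) // tag=0x2a field=5 bytes=true value="hi"
}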
+ if len(v)%8 != 0 { + return out, ValidationInvalid + } + case validationTypeUTF8String: + if !utf8.Valid(v) { + return out, ValidationInvalid + } + } + case protowire.Fixed32Type: + if len(b) < 4 { + return out, ValidationInvalid + } + b = b[4:] + case protowire.Fixed64Type: + if len(b) < 8 { + return out, ValidationInvalid + } + b = b[8:] + case protowire.StartGroupType: + switch { + case vi.typ == validationTypeGroup: + if vi.mi == nil { + return out, ValidationUnknown + } + vi.mi.init() + states = append(states, validationState{ + typ: validationTypeGroup, + mi: vi.mi, + endGroup: num, + }) + continue State + case flags.ProtoLegacy && vi.typ == validationTypeMessageSetItem: + typeid, v, n, err := messageset.ConsumeFieldValue(b, false) + if err != nil { + return out, ValidationInvalid + } + xt, err := opts.resolver.FindExtensionByNumber(st.mi.Desc.FullName(), typeid) + switch { + case err == preg.NotFound: + b = b[n:] + case err != nil: + return out, ValidationUnknown + default: + xvi := getExtensionFieldInfo(xt).validation + if xvi.mi != nil { + xvi.mi.init() + } + states = append(states, validationState{ + typ: xvi.typ, + mi: xvi.mi, + tail: b[n:], + }) + b = v + continue State + } + default: + n := protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return out, ValidationInvalid + } + b = b[n:] + } + default: + return out, ValidationInvalid + } + } + if st.endGroup != 0 { + return out, ValidationInvalid + } + if len(b) != 0 { + return out, ValidationInvalid + } + b = st.tail + PopState: + numRequiredFields := 0 + switch st.typ { + case validationTypeMessage, validationTypeGroup: + numRequiredFields = int(st.mi.numRequiredFields) + case validationTypeMap: + // If this is a map field with a message value that contains + // required fields, require that the value be present. + if st.mi != nil && st.mi.numRequiredFields > 0 { + numRequiredFields = 1 + } + } + // If there are more than 64 required fields, this check will + // always fail and we will report that the message is potentially + // uninitialized. + if numRequiredFields > 0 && bits.OnesCount64(st.requiredMask) != numRequiredFields { + initialized = false + } + states = states[:len(states)-1] + } + out.n = start - len(b) + if initialized { + out.initialized = true + } + return out, ValidationValid +} diff --git a/vendor/google.golang.org/protobuf/internal/impl/weak.go b/vendor/google.golang.org/protobuf/internal/impl/weak.go new file mode 100644 index 00000000..009cbefd --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/impl/weak.go @@ -0,0 +1,74 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package impl + +import ( + "fmt" + + pref "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +// weakFields adds methods to the exported WeakFields type for internal use. +// +// The exported type is an alias to an unnamed type, so methods can't be +// defined directly on it. 
+type weakFields WeakFields + +func (w weakFields) get(num pref.FieldNumber) (pref.ProtoMessage, bool) { + m, ok := w[int32(num)] + return m, ok +} + +func (w *weakFields) set(num pref.FieldNumber, m pref.ProtoMessage) { + if *w == nil { + *w = make(weakFields) + } + (*w)[int32(num)] = m +} + +func (w *weakFields) clear(num pref.FieldNumber) { + delete(*w, int32(num)) +} + +func (Export) HasWeak(w WeakFields, num pref.FieldNumber) bool { + _, ok := w[int32(num)] + return ok +} + +func (Export) ClearWeak(w *WeakFields, num pref.FieldNumber) { + delete(*w, int32(num)) +} + +func (Export) GetWeak(w WeakFields, num pref.FieldNumber, name pref.FullName) pref.ProtoMessage { + if m, ok := w[int32(num)]; ok { + return m + } + mt, _ := protoregistry.GlobalTypes.FindMessageByName(name) + if mt == nil { + panic(fmt.Sprintf("message %v for weak field is not linked in", name)) + } + return mt.Zero().Interface() +} + +func (Export) SetWeak(w *WeakFields, num pref.FieldNumber, name pref.FullName, m pref.ProtoMessage) { + if m != nil { + mt, _ := protoregistry.GlobalTypes.FindMessageByName(name) + if mt == nil { + panic(fmt.Sprintf("message %v for weak field is not linked in", name)) + } + if mt != m.ProtoReflect().Type() { + panic(fmt.Sprintf("invalid message type for weak field: got %T, want %T", m, mt.Zero().Interface())) + } + } + if m == nil || !m.ProtoReflect().IsValid() { + delete(*w, int32(num)) + return + } + if *w == nil { + *w = make(weakFields) + } + (*w)[int32(num)] = m +} diff --git a/vendor/google.golang.org/protobuf/internal/order/order.go b/vendor/google.golang.org/protobuf/internal/order/order.go new file mode 100644 index 00000000..2a24953f --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/order/order.go @@ -0,0 +1,89 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package order + +import ( + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// FieldOrder specifies the ordering to visit message fields. +// It is a function that reports whether x is ordered before y. +type FieldOrder func(x, y pref.FieldDescriptor) bool + +var ( + // AnyFieldOrder specifies no specific field ordering. + AnyFieldOrder FieldOrder = nil + + // LegacyFieldOrder sorts fields in the same ordering as emitted by + // wire serialization in the github.com/golang/protobuf implementation. + LegacyFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool { + ox, oy := x.ContainingOneof(), y.ContainingOneof() + inOneof := func(od pref.OneofDescriptor) bool { + return od != nil && !od.IsSynthetic() + } + + // Extension fields sort before non-extension fields. + if x.IsExtension() != y.IsExtension() { + return x.IsExtension() && !y.IsExtension() + } + // Fields not within a oneof sort before those within a oneof. + if inOneof(ox) != inOneof(oy) { + return !inOneof(ox) && inOneof(oy) + } + // Fields in disjoint oneof sets are sorted by declaration index. + if ox != nil && oy != nil && ox != oy { + return ox.Index() < oy.Index() + } + // Fields sorted by field number. + return x.Number() < y.Number() + } + + // NumberFieldOrder sorts fields by their field number. + NumberFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool { + return x.Number() < y.Number() + } + + // IndexNameFieldOrder sorts non-extension fields before extension fields. + // Non-extensions are sorted according to their declaration index. + // Extensions are sorted according to their full name. 
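// A minimal illustration (not part of the vendored protobuf sources) of the
// registry lookup that GetWeak/SetWeak above rely on: a message type can only
// be resolved by full name if its generated package is linked into the binary,
// which is exactly what the "not linked in" panic above guards against.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/known/emptypb" // links google.protobuf.Empty into the binary
)

func main() {
	mt, err := protoregistry.GlobalTypes.FindMessageByName("google.protobuf.Empty")
	if err != nil {
		panic(err) // would be protoregistry.NotFound without the blank import above
	}
	msg := mt.New().Interface() // a fresh instance behind the proto.Message interface
	fmt.Printf("%v %T\n", mt.Descriptor().FullName(), msg) // google.protobuf.Empty *emptypb.Empty
}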
+	IndexNameFieldOrder FieldOrder = func(x, y pref.FieldDescriptor) bool {
+		// Non-extension fields sort before extension fields.
+		if x.IsExtension() != y.IsExtension() {
+			return !x.IsExtension() && y.IsExtension()
+		}
+		// Extensions sorted by fullname.
+		if x.IsExtension() && y.IsExtension() {
+			return x.FullName() < y.FullName()
+		}
+		// Non-extensions sorted by declaration index.
+		return x.Index() < y.Index()
+	}
+)
+
+// KeyOrder specifies the ordering to visit map entries.
+// It is a function that reports whether x is ordered before y.
+type KeyOrder func(x, y pref.MapKey) bool
+
+var (
+	// AnyKeyOrder specifies no specific key ordering.
+	AnyKeyOrder KeyOrder = nil
+
+	// GenericKeyOrder sorts false before true, numeric keys in ascending order,
+	// and strings in lexicographical ordering according to UTF-8 codepoints.
+	GenericKeyOrder KeyOrder = func(x, y pref.MapKey) bool {
+		switch x.Interface().(type) {
+		case bool:
+			return !x.Bool() && y.Bool()
+		case int32, int64:
+			return x.Int() < y.Int()
+		case uint32, uint64:
+			return x.Uint() < y.Uint()
+		case string:
+			return x.String() < y.String()
+		default:
+			panic("invalid map key type")
+		}
+	}
+)
diff --git a/vendor/google.golang.org/protobuf/internal/order/range.go b/vendor/google.golang.org/protobuf/internal/order/range.go
new file mode 100644
index 00000000..c8090e0c
--- /dev/null
+++ b/vendor/google.golang.org/protobuf/internal/order/range.go
@@ -0,0 +1,115 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package order provides ordered access to messages and maps.
+package order
+
+import (
+	"sort"
+	"sync"
+
+	pref "google.golang.org/protobuf/reflect/protoreflect"
+)
+
+type messageField struct {
+	fd pref.FieldDescriptor
+	v  pref.Value
+}
+
+var messageFieldPool = sync.Pool{
+	New: func() interface{} { return new([]messageField) },
+}
+
+type (
+	// FieldRanger is an interface for visiting all fields in a message.
+	// The protoreflect.Message type implements this interface.
+	FieldRanger interface{ Range(VisitField) }
+	// VisitField is called every time a message field is visited.
+	VisitField = func(pref.FieldDescriptor, pref.Value) bool
+)
+
+// RangeFields iterates over the fields of fs according to the specified order.
+func RangeFields(fs FieldRanger, less FieldOrder, fn VisitField) {
+	if less == nil {
+		fs.Range(fn)
+		return
+	}
+
+	// Obtain a pre-allocated scratch buffer.
+	p := messageFieldPool.Get().(*[]messageField)
+	fields := (*p)[:0]
+	defer func() {
+		if cap(fields) < 1024 {
+			*p = fields
+			messageFieldPool.Put(p)
+		}
+	}()
+
+	// Collect all fields in the message and sort them.
+	fs.Range(func(fd pref.FieldDescriptor, v pref.Value) bool {
+		fields = append(fields, messageField{fd, v})
+		return true
+	})
+	sort.Slice(fields, func(i, j int) bool {
+		return less(fields[i].fd, fields[j].fd)
+	})
+
+	// Visit the fields in the specified ordering.
+	for _, f := range fields {
+		if !fn(f.fd, f.v) {
+			return
+		}
+	}
+}
+
+type mapEntry struct {
+	k pref.MapKey
+	v pref.Value
+}
+
+var mapEntryPool = sync.Pool{
+	New: func() interface{} { return new([]mapEntry) },
+}
+
+type (
+	// EntryRanger is an interface for visiting all fields in a message.
+	// The protoreflect.Map type implements this interface.
+	EntryRanger interface{ Range(VisitEntry) }
+	// VisitEntry is called every time a map entry is visited.
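// A minimal illustration (not part of the vendored protobuf sources) of what
// RangeFields above does, written against the public protoreflect API:
// Range visits populated fields in an unspecified order, so the fields are
// collected and sorted, here by field number (as NumberFieldOrder does).
package main

import (
	"fmt"
	"sort"
	"time"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	m := durationpb.New(90*time.Second + 250*time.Millisecond).ProtoReflect()

	type field struct {
		fd protoreflect.FieldDescriptor
		v  protoreflect.Value
	}
	var fields []field
	m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool {
		fields = append(fields, field{fd, v})
		return true
	})
	sort.Slice(fields, func(i, j int) bool {
		return fields[i].fd.Number() < fields[j].fd.Number()
	})

	for _, f := range fields {
		fmt.Println(f.fd.Name(), f.v.Interface())
	}
	// seconds 90
	// nanos 250000000
}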
+	VisitEntry = func(pref.MapKey, pref.Value) bool
+)
+
+// RangeEntries iterates over the entries of es according to the specified order.
+func RangeEntries(es EntryRanger, less KeyOrder, fn VisitEntry) {
+	if less == nil {
+		es.Range(fn)
+		return
+	}
+
+	// Obtain a pre-allocated scratch buffer.
+	p := mapEntryPool.Get().(*[]mapEntry)
+	entries := (*p)[:0]
+	defer func() {
+		if cap(entries) < 1024 {
+			*p = entries
+			mapEntryPool.Put(p)
+		}
+	}()
+
+	// Collect all entries in the map and sort them.
+	es.Range(func(k pref.MapKey, v pref.Value) bool {
+		entries = append(entries, mapEntry{k, v})
+		return true
+	})
+	sort.Slice(entries, func(i, j int) bool {
+		return less(entries[i].k, entries[j].k)
+	})
+
+	// Visit the entries in the specified ordering.
+	for _, e := range entries {
+		if !fn(e.k, e.v) {
+			return
+		}
+	}
+}
diff --git a/vendor/google.golang.org/protobuf/internal/pragma/pragma.go b/vendor/google.golang.org/protobuf/internal/pragma/pragma.go
new file mode 100644
index 00000000..49dc4fcd
--- /dev/null
+++ b/vendor/google.golang.org/protobuf/internal/pragma/pragma.go
@@ -0,0 +1,29 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package pragma provides types that can be embedded into a struct to
+// statically enforce or prevent certain language properties.
+package pragma
+
+import "sync"
+
+// NoUnkeyedLiterals can be embedded in a struct to prevent unkeyed literals.
+type NoUnkeyedLiterals struct{}
+
+// DoNotImplement can be embedded in an interface to prevent trivial
+// implementations of the interface.
+//
+// This is useful to prevent unauthorized implementations of an interface
+// so that it can be extended in the future for any protobuf language changes.
+type DoNotImplement interface{ ProtoInternal(DoNotImplement) }
+
+// DoNotCompare can be embedded in a struct to prevent comparability.
+type DoNotCompare [0]func()
+
+// DoNotCopy can be embedded in a struct to help prevent shallow copies.
+// This does not rely on a Go language feature, but rather a special case
+// within the vet checker.
+//
+// See https://golang.org/issues/8005.
+type DoNotCopy [0]sync.Mutex
diff --git a/vendor/google.golang.org/protobuf/internal/set/ints.go b/vendor/google.golang.org/protobuf/internal/set/ints.go
new file mode 100644
index 00000000..d3d7f89a
--- /dev/null
+++ b/vendor/google.golang.org/protobuf/internal/set/ints.go
@@ -0,0 +1,58 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package set provides simple set data structures for uint64s.
+package set
+
+import "math/bits"
+
+// int64s represents a set of integers within the range of 0..63.
+type int64s uint64
+
+func (bs *int64s) Len() int {
+	return bits.OnesCount64(uint64(*bs))
+}
+func (bs *int64s) Has(n uint64) bool {
+	return uint64(*bs)&(uint64(1)<<n) > 0
+}
+func (bs *int64s) Set(n uint64) {
+	*(*uint64)(bs) |= uint64(1) << n
+}
+func (bs *int64s) Clear(n uint64) {
+	*(*uint64)(bs) &^= uint64(1) << n
+}
+
+// Ints represents a set of integers within the range of 0..math.MaxUint64.
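// A minimal illustration (not part of the vendored protobuf sources) of the
// pooled scratch-buffer pattern used by messageFieldPool/mapEntryPool above:
// the pool stores a *[]T so the slice header is reused across calls, and only
// modest buffers are returned to the pool so one large call cannot pin a big
// allocation forever. The sum helper is invented for the demo.
package main

import (
	"fmt"
	"sync"
)

var intScratchPool = sync.Pool{
	New: func() interface{} { return new([]int) },
}

func sumFirstN(n int) int {
	p := intScratchPool.Get().(*[]int)
	buf := (*p)[:0]
	defer func() {
		if cap(buf) < 1024 {
			*p = buf
			intScratchPool.Put(p)
		}
	}()

	for i := 1; i <= n; i++ {
		buf = append(buf, i)
	}
	total := 0
	for _, v := range buf {
		total += v
	}
	return total
}

func main() {
	fmt.Println(sumFirstN(10)) // 55
}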
+type Ints struct { + lo int64s + hi map[uint64]struct{} +} + +func (bs *Ints) Len() int { + return bs.lo.Len() + len(bs.hi) +} +func (bs *Ints) Has(n uint64) bool { + if n < 64 { + return bs.lo.Has(n) + } + _, ok := bs.hi[n] + return ok +} +func (bs *Ints) Set(n uint64) { + if n < 64 { + bs.lo.Set(n) + return + } + if bs.hi == nil { + bs.hi = make(map[uint64]struct{}) + } + bs.hi[n] = struct{}{} +} +func (bs *Ints) Clear(n uint64) { + if n < 64 { + bs.lo.Clear(n) + return + } + delete(bs.hi, n) +} diff --git a/vendor/google.golang.org/protobuf/internal/strs/strings.go b/vendor/google.golang.org/protobuf/internal/strs/strings.go new file mode 100644 index 00000000..0b74e765 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/strs/strings.go @@ -0,0 +1,196 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package strs provides string manipulation functionality specific to protobuf. +package strs + +import ( + "go/token" + "strings" + "unicode" + "unicode/utf8" + + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// EnforceUTF8 reports whether to enforce strict UTF-8 validation. +func EnforceUTF8(fd protoreflect.FieldDescriptor) bool { + if flags.ProtoLegacy { + if fd, ok := fd.(interface{ EnforceUTF8() bool }); ok { + return fd.EnforceUTF8() + } + } + return fd.Syntax() == protoreflect.Proto3 +} + +// GoCamelCase camel-cases a protobuf name for use as a Go identifier. +// +// If there is an interior underscore followed by a lower case letter, +// drop the underscore and convert the letter to upper case. +func GoCamelCase(s string) string { + // Invariant: if the next letter is lower case, it must be converted + // to upper case. + // That is, we process a word at a time, where words are marked by _ or + // upper case letter. Digits are treated as words. + var b []byte + for i := 0; i < len(s); i++ { + c := s[i] + switch { + case c == '.' && i+1 < len(s) && isASCIILower(s[i+1]): + // Skip over '.' in ".{{lowercase}}". + case c == '.': + b = append(b, '_') // convert '.' to '_' + case c == '_' && (i == 0 || s[i-1] == '.'): + // Convert initial '_' to ensure we start with a capital letter. + // Do the same for '_' after '.' to match historic behavior. + b = append(b, 'X') // convert '_' to 'X' + case c == '_' && i+1 < len(s) && isASCIILower(s[i+1]): + // Skip over '_' in "_{{lowercase}}". + case isASCIIDigit(c): + b = append(b, c) + default: + // Assume we have a letter now - if not, it's a bogus identifier. + // The next word is a sequence of characters that must start upper case. + if isASCIILower(c) { + c -= 'a' - 'A' // convert lowercase to uppercase + } + b = append(b, c) + + // Accept lower case sequence that follows. + for ; i+1 < len(s) && isASCIILower(s[i+1]); i++ { + b = append(b, s[i+1]) + } + } + } + return string(b) +} + +// GoSanitized converts a string to a valid Go identifier. +func GoSanitized(s string) string { + // Sanitize the input to the set of valid characters, + // which must be '_' or be in the Unicode L or N categories. + s = strings.Map(func(r rune) rune { + if unicode.IsLetter(r) || unicode.IsDigit(r) { + return r + } + return '_' + }, s) + + // Prepend '_' in the event of a Go keyword conflict or if + // the identifier is invalid (does not start in the Unicode L category). 
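// A deliberately simplified illustration (not part of the vendored protobuf
// sources) of the camel-casing rule implemented by GoCamelCase above: drop an
// interior underscore and upper-case the letter that follows. The real
// function also handles '.', leading underscores, and digit boundaries.
package main

import "fmt"

func simpleCamel(s string) string {
	var b []byte
	upper := true // the first letter becomes upper case, as in an exported Go name
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c == '_' {
			upper = true
			continue
		}
		if upper && 'a' <= c && c <= 'z' {
			c -= 'a' - 'A'
		}
		upper = false
		b = append(b, c)
	}
	return string(b)
}

func main() {
	fmt.Println(simpleCamel("field_name")) // FieldName
	fmt.Println(simpleCamel("user_id_2"))  // UserId2
}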
+ r, _ := utf8.DecodeRuneInString(s) + if token.Lookup(s).IsKeyword() || !unicode.IsLetter(r) { + return "_" + s + } + return s +} + +// JSONCamelCase converts a snake_case identifier to a camelCase identifier, +// according to the protobuf JSON specification. +func JSONCamelCase(s string) string { + var b []byte + var wasUnderscore bool + for i := 0; i < len(s); i++ { // proto identifiers are always ASCII + c := s[i] + if c != '_' { + if wasUnderscore && isASCIILower(c) { + c -= 'a' - 'A' // convert to uppercase + } + b = append(b, c) + } + wasUnderscore = c == '_' + } + return string(b) +} + +// JSONSnakeCase converts a camelCase identifier to a snake_case identifier, +// according to the protobuf JSON specification. +func JSONSnakeCase(s string) string { + var b []byte + for i := 0; i < len(s); i++ { // proto identifiers are always ASCII + c := s[i] + if isASCIIUpper(c) { + b = append(b, '_') + c += 'a' - 'A' // convert to lowercase + } + b = append(b, c) + } + return string(b) +} + +// MapEntryName derives the name of the map entry message given the field name. +// See protoc v3.8.0: src/google/protobuf/descriptor.cc:254-276,6057 +func MapEntryName(s string) string { + var b []byte + upperNext := true + for _, c := range s { + switch { + case c == '_': + upperNext = true + case upperNext: + b = append(b, byte(unicode.ToUpper(c))) + upperNext = false + default: + b = append(b, byte(c)) + } + } + b = append(b, "Entry"...) + return string(b) +} + +// EnumValueName derives the camel-cased enum value name. +// See protoc v3.8.0: src/google/protobuf/descriptor.cc:297-313 +func EnumValueName(s string) string { + var b []byte + upperNext := true + for _, c := range s { + switch { + case c == '_': + upperNext = true + case upperNext: + b = append(b, byte(unicode.ToUpper(c))) + upperNext = false + default: + b = append(b, byte(unicode.ToLower(c))) + upperNext = false + } + } + return string(b) +} + +// TrimEnumPrefix trims the enum name prefix from an enum value name, +// where the prefix is all lowercase without underscores. +// See protoc v3.8.0: src/google/protobuf/descriptor.cc:330-375 +func TrimEnumPrefix(s, prefix string) string { + s0 := s // original input + for len(s) > 0 && len(prefix) > 0 { + if s[0] == '_' { + s = s[1:] + continue + } + if unicode.ToLower(rune(s[0])) != rune(prefix[0]) { + return s0 // no prefix match + } + s, prefix = s[1:], prefix[1:] + } + if len(prefix) > 0 { + return s0 // no prefix match + } + s = strings.TrimLeft(s, "_") + if len(s) == 0 { + return s0 // avoid returning empty string + } + return s +} + +func isASCIILower(c byte) bool { + return 'a' <= c && c <= 'z' +} +func isASCIIUpper(c byte) bool { + return 'A' <= c && c <= 'Z' +} +func isASCIIDigit(c byte) bool { + return '0' <= c && c <= '9' +} diff --git a/vendor/google.golang.org/protobuf/internal/strs/strings_pure.go b/vendor/google.golang.org/protobuf/internal/strs/strings_pure.go new file mode 100644 index 00000000..85e074c9 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/strs/strings_pure.go @@ -0,0 +1,27 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build purego appengine + +package strs + +import pref "google.golang.org/protobuf/reflect/protoreflect" + +func UnsafeString(b []byte) string { + return string(b) +} + +func UnsafeBytes(s string) []byte { + return []byte(s) +} + +type Builder struct{} + +func (*Builder) AppendFullName(prefix pref.FullName, name pref.Name) pref.FullName { + return prefix.Append(name) +} + +func (*Builder) MakeString(b []byte) string { + return string(b) +} diff --git a/vendor/google.golang.org/protobuf/internal/strs/strings_unsafe.go b/vendor/google.golang.org/protobuf/internal/strs/strings_unsafe.go new file mode 100644 index 00000000..2160c701 --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/strs/strings_unsafe.go @@ -0,0 +1,94 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego,!appengine + +package strs + +import ( + "unsafe" + + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +type ( + stringHeader struct { + Data unsafe.Pointer + Len int + } + sliceHeader struct { + Data unsafe.Pointer + Len int + Cap int + } +) + +// UnsafeString returns an unsafe string reference of b. +// The caller must treat the input slice as immutable. +// +// WARNING: Use carefully. The returned result must not leak to the end user +// unless the input slice is provably immutable. +func UnsafeString(b []byte) (s string) { + src := (*sliceHeader)(unsafe.Pointer(&b)) + dst := (*stringHeader)(unsafe.Pointer(&s)) + dst.Data = src.Data + dst.Len = src.Len + return s +} + +// UnsafeBytes returns an unsafe bytes slice reference of s. +// The caller must treat returned slice as immutable. +// +// WARNING: Use carefully. The returned result must not leak to the end user. +func UnsafeBytes(s string) (b []byte) { + src := (*stringHeader)(unsafe.Pointer(&s)) + dst := (*sliceHeader)(unsafe.Pointer(&b)) + dst.Data = src.Data + dst.Len = src.Len + dst.Cap = src.Len + return b +} + +// Builder builds a set of strings with shared lifetime. +// This differs from strings.Builder, which is for building a single string. +type Builder struct { + buf []byte +} + +// AppendFullName is equivalent to protoreflect.FullName.Append, +// but optimized for large batches where each name has a shared lifetime. +func (sb *Builder) AppendFullName(prefix pref.FullName, name pref.Name) pref.FullName { + n := len(prefix) + len(".") + len(name) + if len(prefix) == 0 { + n -= len(".") + } + sb.grow(n) + sb.buf = append(sb.buf, prefix...) + sb.buf = append(sb.buf, '.') + sb.buf = append(sb.buf, name...) + return pref.FullName(sb.last(n)) +} + +// MakeString is equivalent to string(b), but optimized for large batches +// with a shared lifetime. +func (sb *Builder) MakeString(b []byte) string { + sb.grow(len(b)) + sb.buf = append(sb.buf, b...) + return sb.last(len(b)) +} + +func (sb *Builder) grow(n int) { + if cap(sb.buf)-len(sb.buf) >= n { + return + } + + // Unlike strings.Builder, we do not need to copy over the contents + // of the old buffer since our builder provides no API for + // retrieving previously created strings. 
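// A minimal illustration (not part of the vendored protobuf sources) of the
// zero-copy conversions that UnsafeString and UnsafeBytes above build by hand
// with stringHeader/sliceHeader. Since Go 1.20 the standard library exposes
// the same operations; the same caveat applies, namely that the bytes must
// not be mutated while the string view is alive.
package main

import (
	"fmt"
	"unsafe"
)

func main() {
	b := []byte("treated as immutable")

	s := unsafe.String(unsafe.SliceData(b), len(b))    // []byte -> string, no copy
	back := unsafe.Slice(unsafe.StringData(s), len(s)) // string -> []byte, no copy

	fmt.Println(s == string(b), len(back)) // true 20
}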
+ sb.buf = make([]byte, 2*(cap(sb.buf)+n)) +} + +func (sb *Builder) last(n int) string { + return UnsafeString(sb.buf[len(sb.buf)-n:]) +} diff --git a/vendor/google.golang.org/protobuf/internal/version/version.go b/vendor/google.golang.org/protobuf/internal/version/version.go new file mode 100644 index 00000000..5879131d --- /dev/null +++ b/vendor/google.golang.org/protobuf/internal/version/version.go @@ -0,0 +1,79 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package version records versioning information about this module. +package version + +import ( + "fmt" + "strings" +) + +// These constants determine the current version of this module. +// +// +// For our release process, we enforce the following rules: +// * Tagged releases use a tag that is identical to String. +// * Tagged releases never reference a commit where the String +// contains "devel". +// * The set of all commits in this repository where String +// does not contain "devel" must have a unique String. +// +// +// Steps for tagging a new release: +// 1. Create a new CL. +// +// 2. Update Minor, Patch, and/or PreRelease as necessary. +// PreRelease must not contain the string "devel". +// +// 3. Since the last released minor version, have there been any changes to +// generator that relies on new functionality in the runtime? +// If yes, then increment RequiredGenerated. +// +// 4. Since the last released minor version, have there been any changes to +// the runtime that removes support for old .pb.go source code? +// If yes, then increment SupportMinimum. +// +// 5. Send out the CL for review and submit it. +// Note that the next CL in step 8 must be submitted after this CL +// without any other CLs in-between. +// +// 6. Tag a new version, where the tag is is the current String. +// +// 7. Write release notes for all notable changes +// between this release and the last release. +// +// 8. Create a new CL. +// +// 9. Update PreRelease to include the string "devel". +// For example: "" -> "devel" or "rc.1" -> "rc.1.devel" +// +// 10. Send out the CL for review and submit it. +const ( + Major = 1 + Minor = 26 + Patch = 0 + PreRelease = "" +) + +// String formats the version string for this module in semver format. +// +// Examples: +// v1.20.1 +// v1.21.0-rc.1 +func String() string { + v := fmt.Sprintf("v%d.%d.%d", Major, Minor, Patch) + if PreRelease != "" { + v += "-" + PreRelease + + // TODO: Add metadata about the commit or build hash. + // See https://golang.org/issue/29814 + // See https://golang.org/issue/33533 + var metadata string + if strings.Contains(PreRelease, "devel") && metadata != "" { + v += "+" + metadata + } + } + return v +} diff --git a/vendor/google.golang.org/protobuf/proto/checkinit.go b/vendor/google.golang.org/protobuf/proto/checkinit.go new file mode 100644 index 00000000..3e9a6a2f --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/checkinit.go @@ -0,0 +1,71 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +// CheckInitialized returns an error if any required fields in m are not set. 
+func CheckInitialized(m Message) error { + // Treat a nil message interface as an "untyped" empty message, + // which we assume to have no required fields. + if m == nil { + return nil + } + + return checkInitialized(m.ProtoReflect()) +} + +// CheckInitialized returns an error if any required fields in m are not set. +func checkInitialized(m protoreflect.Message) error { + if methods := protoMethods(m); methods != nil && methods.CheckInitialized != nil { + _, err := methods.CheckInitialized(protoiface.CheckInitializedInput{ + Message: m, + }) + return err + } + return checkInitializedSlow(m) +} + +func checkInitializedSlow(m protoreflect.Message) error { + md := m.Descriptor() + fds := md.Fields() + for i, nums := 0, md.RequiredNumbers(); i < nums.Len(); i++ { + fd := fds.ByNumber(nums.Get(i)) + if !m.Has(fd) { + return errors.RequiredNotSet(string(fd.FullName())) + } + } + var err error + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + switch { + case fd.IsList(): + if fd.Message() == nil { + return true + } + for i, list := 0, v.List(); i < list.Len() && err == nil; i++ { + err = checkInitialized(list.Get(i).Message()) + } + case fd.IsMap(): + if fd.MapValue().Message() == nil { + return true + } + v.Map().Range(func(key protoreflect.MapKey, v protoreflect.Value) bool { + err = checkInitialized(v.Message()) + return err == nil + }) + default: + if fd.Message() == nil { + return true + } + err = checkInitialized(v.Message()) + } + return err == nil + }) + return err +} diff --git a/vendor/google.golang.org/protobuf/proto/decode.go b/vendor/google.golang.org/protobuf/proto/decode.go new file mode 100644 index 00000000..49f9b8c8 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/decode.go @@ -0,0 +1,278 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + "google.golang.org/protobuf/runtime/protoiface" +) + +// UnmarshalOptions configures the unmarshaler. +// +// Example usage: +// err := UnmarshalOptions{DiscardUnknown: true}.Unmarshal(b, m) +type UnmarshalOptions struct { + pragma.NoUnkeyedLiterals + + // Merge merges the input into the destination message. + // The default behavior is to always reset the message before unmarshaling, + // unless Merge is specified. + Merge bool + + // AllowPartial accepts input for messages that will result in missing + // required fields. If AllowPartial is false (the default), Unmarshal will + // return an error if there are any missing required fields. + AllowPartial bool + + // If DiscardUnknown is set, unknown fields are ignored. + DiscardUnknown bool + + // Resolver is used for looking up types when unmarshaling extension fields. + // If nil, this defaults to using protoregistry.GlobalTypes. 
+ Resolver interface { + FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) + FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) + } +} + +// Unmarshal parses the wire-format message in b and places the result in m. +// The provided message must be mutable (e.g., a non-nil pointer to a message). +func Unmarshal(b []byte, m Message) error { + _, err := UnmarshalOptions{}.unmarshal(b, m.ProtoReflect()) + return err +} + +// Unmarshal parses the wire-format message in b and places the result in m. +// The provided message must be mutable (e.g., a non-nil pointer to a message). +func (o UnmarshalOptions) Unmarshal(b []byte, m Message) error { + _, err := o.unmarshal(b, m.ProtoReflect()) + return err +} + +// UnmarshalState parses a wire-format message and places the result in m. +// +// This method permits fine-grained control over the unmarshaler. +// Most users should use Unmarshal instead. +func (o UnmarshalOptions) UnmarshalState(in protoiface.UnmarshalInput) (protoiface.UnmarshalOutput, error) { + return o.unmarshal(in.Buf, in.Message) +} + +// unmarshal is a centralized function that all unmarshal operations go through. +// For profiling purposes, avoid changing the name of this function or +// introducing other code paths for unmarshal that do not go through this. +func (o UnmarshalOptions) unmarshal(b []byte, m protoreflect.Message) (out protoiface.UnmarshalOutput, err error) { + if o.Resolver == nil { + o.Resolver = protoregistry.GlobalTypes + } + if !o.Merge { + Reset(m.Interface()) + } + allowPartial := o.AllowPartial + o.Merge = true + o.AllowPartial = true + methods := protoMethods(m) + if methods != nil && methods.Unmarshal != nil && + !(o.DiscardUnknown && methods.Flags&protoiface.SupportUnmarshalDiscardUnknown == 0) { + in := protoiface.UnmarshalInput{ + Message: m, + Buf: b, + Resolver: o.Resolver, + } + if o.DiscardUnknown { + in.Flags |= protoiface.UnmarshalDiscardUnknown + } + out, err = methods.Unmarshal(in) + } else { + err = o.unmarshalMessageSlow(b, m) + } + if err != nil { + return out, err + } + if allowPartial || (out.Flags&protoiface.UnmarshalInitialized != 0) { + return out, nil + } + return out, checkInitialized(m) +} + +func (o UnmarshalOptions) unmarshalMessage(b []byte, m protoreflect.Message) error { + _, err := o.unmarshal(b, m) + return err +} + +func (o UnmarshalOptions) unmarshalMessageSlow(b []byte, m protoreflect.Message) error { + md := m.Descriptor() + if messageset.IsMessageSet(md) { + return o.unmarshalMessageSet(b, m) + } + fields := md.Fields() + for len(b) > 0 { + // Parse the tag (field number and wire type). + num, wtyp, tagLen := protowire.ConsumeTag(b) + if tagLen < 0 { + return errDecode + } + if num > protowire.MaxValidNumber { + return errDecode + } + + // Find the field descriptor for this field number. + fd := fields.ByNumber(num) + if fd == nil && md.ExtensionRanges().Has(num) { + extType, err := o.Resolver.FindExtensionByNumber(md.FullName(), num) + if err != nil && err != protoregistry.NotFound { + return errors.New("%v: unable to resolve extension %v: %v", md.FullName(), num, err) + } + if extType != nil { + fd = extType.TypeDescriptor() + } + } + var err error + if fd == nil { + err = errUnknown + } else if flags.ProtoLegacy { + if fd.IsWeak() && fd.Message().IsPlaceholder() { + err = errUnknown // weak referent is not linked in + } + } + + // Parse the field value. 
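// A minimal illustration (not part of the vendored protobuf sources) of the
// public API that fronts the unmarshal path above: round-trip a well-known
// wrapper message and unmarshal it with DiscardUnknown set, the option whose
// handling is visible in unmarshalMessageSlow.
package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/known/wrapperspb"
)

func main() {
	in := wrapperspb.String("hello")
	b, err := proto.Marshal(in)
	if err != nil {
		panic(err)
	}

	out := &wrapperspb.StringValue{}
	// DiscardUnknown drops unknown fields instead of retaining them in the
	// message's unknown-field set.
	if err := (proto.UnmarshalOptions{DiscardUnknown: true}).Unmarshal(b, out); err != nil {
		panic(err)
	}
	fmt.Println(out.GetValue()) // hello
}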
+ var valLen int + switch { + case err != nil: + case fd.IsList(): + valLen, err = o.unmarshalList(b[tagLen:], wtyp, m.Mutable(fd).List(), fd) + case fd.IsMap(): + valLen, err = o.unmarshalMap(b[tagLen:], wtyp, m.Mutable(fd).Map(), fd) + default: + valLen, err = o.unmarshalSingular(b[tagLen:], wtyp, m, fd) + } + if err != nil { + if err != errUnknown { + return err + } + valLen = protowire.ConsumeFieldValue(num, wtyp, b[tagLen:]) + if valLen < 0 { + return errDecode + } + if !o.DiscardUnknown { + m.SetUnknown(append(m.GetUnknown(), b[:tagLen+valLen]...)) + } + } + b = b[tagLen+valLen:] + } + return nil +} + +func (o UnmarshalOptions) unmarshalSingular(b []byte, wtyp protowire.Type, m protoreflect.Message, fd protoreflect.FieldDescriptor) (n int, err error) { + v, n, err := o.unmarshalScalar(b, wtyp, fd) + if err != nil { + return 0, err + } + switch fd.Kind() { + case protoreflect.GroupKind, protoreflect.MessageKind: + m2 := m.Mutable(fd).Message() + if err := o.unmarshalMessage(v.Bytes(), m2); err != nil { + return n, err + } + default: + // Non-message scalars replace the previous value. + m.Set(fd, v) + } + return n, nil +} + +func (o UnmarshalOptions) unmarshalMap(b []byte, wtyp protowire.Type, mapv protoreflect.Map, fd protoreflect.FieldDescriptor) (n int, err error) { + if wtyp != protowire.BytesType { + return 0, errUnknown + } + b, n = protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + var ( + keyField = fd.MapKey() + valField = fd.MapValue() + key protoreflect.Value + val protoreflect.Value + haveKey bool + haveVal bool + ) + switch valField.Kind() { + case protoreflect.GroupKind, protoreflect.MessageKind: + val = mapv.NewValue() + } + // Map entries are represented as a two-element message with fields + // containing the key and value. + for len(b) > 0 { + num, wtyp, n := protowire.ConsumeTag(b) + if n < 0 { + return 0, errDecode + } + if num > protowire.MaxValidNumber { + return 0, errDecode + } + b = b[n:] + err = errUnknown + switch num { + case genid.MapEntry_Key_field_number: + key, n, err = o.unmarshalScalar(b, wtyp, keyField) + if err != nil { + break + } + haveKey = true + case genid.MapEntry_Value_field_number: + var v protoreflect.Value + v, n, err = o.unmarshalScalar(b, wtyp, valField) + if err != nil { + break + } + switch valField.Kind() { + case protoreflect.GroupKind, protoreflect.MessageKind: + if err := o.unmarshalMessage(v.Bytes(), val.Message()); err != nil { + return 0, err + } + default: + val = v + } + haveVal = true + } + if err == errUnknown { + n = protowire.ConsumeFieldValue(num, wtyp, b) + if n < 0 { + return 0, errDecode + } + } else if err != nil { + return 0, err + } + b = b[n:] + } + // Every map entry should have entries for key and value, but this is not strictly required. + if !haveKey { + key = keyField.Default() + } + if !haveVal { + switch valField.Kind() { + case protoreflect.GroupKind, protoreflect.MessageKind: + default: + val = valField.Default() + } + } + mapv.Set(key.MapKey(), val) + return n, nil +} + +// errUnknown is used internally to indicate fields which should be added +// to the unknown field set of a message. It is never returned from an exported +// function. 
+var errUnknown = errors.New("BUG: internal error (unknown)") + +var errDecode = errors.New("cannot parse invalid wire-format data") diff --git a/vendor/google.golang.org/protobuf/proto/decode_gen.go b/vendor/google.golang.org/protobuf/proto/decode_gen.go new file mode 100644 index 00000000..301eeb20 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/decode_gen.go @@ -0,0 +1,603 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. + +package proto + +import ( + "math" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// unmarshalScalar decodes a value of the given kind. +// +// Message values are decoded into a []byte which aliases the input data. +func (o UnmarshalOptions) unmarshalScalar(b []byte, wtyp protowire.Type, fd protoreflect.FieldDescriptor) (val protoreflect.Value, n int, err error) { + switch fd.Kind() { + case protoreflect.BoolKind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfBool(protowire.DecodeBool(v)), n, nil + case protoreflect.EnumKind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfEnum(protoreflect.EnumNumber(v)), n, nil + case protoreflect.Int32Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt32(int32(v)), n, nil + case protoreflect.Sint32Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32))), n, nil + case protoreflect.Uint32Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfUint32(uint32(v)), n, nil + case protoreflect.Int64Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt64(int64(v)), n, nil + case protoreflect.Sint64Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt64(protowire.DecodeZigZag(v)), n, nil + case protoreflect.Uint64Kind: + if wtyp != protowire.VarintType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfUint64(v), n, nil + case protoreflect.Sfixed32Kind: + if wtyp != protowire.Fixed32Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt32(int32(v)), n, nil + case protoreflect.Fixed32Kind: + if wtyp != protowire.Fixed32Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return val, 0, errDecode + } + return 
protoreflect.ValueOfUint32(uint32(v)), n, nil + case protoreflect.FloatKind: + if wtyp != protowire.Fixed32Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v))), n, nil + case protoreflect.Sfixed64Kind: + if wtyp != protowire.Fixed64Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfInt64(int64(v)), n, nil + case protoreflect.Fixed64Kind: + if wtyp != protowire.Fixed64Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfUint64(v), n, nil + case protoreflect.DoubleKind: + if wtyp != protowire.Fixed64Type { + return val, 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfFloat64(math.Float64frombits(v)), n, nil + case protoreflect.StringKind: + if wtyp != protowire.BytesType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return val, 0, errDecode + } + if strs.EnforceUTF8(fd) && !utf8.Valid(v) { + return protoreflect.Value{}, 0, errors.InvalidUTF8(string(fd.FullName())) + } + return protoreflect.ValueOfString(string(v)), n, nil + case protoreflect.BytesKind: + if wtyp != protowire.BytesType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfBytes(append(emptyBuf[:], v...)), n, nil + case protoreflect.MessageKind: + if wtyp != protowire.BytesType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfBytes(v), n, nil + case protoreflect.GroupKind: + if wtyp != protowire.StartGroupType { + return val, 0, errUnknown + } + v, n := protowire.ConsumeGroup(fd.Number(), b) + if n < 0 { + return val, 0, errDecode + } + return protoreflect.ValueOfBytes(v), n, nil + default: + return val, 0, errUnknown + } +} + +func (o UnmarshalOptions) unmarshalList(b []byte, wtyp protowire.Type, list protoreflect.List, fd protoreflect.FieldDescriptor) (n int, err error) { + switch fd.Kind() { + case protoreflect.BoolKind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfBool(protowire.DecodeBool(v))) + return n, nil + case protoreflect.EnumKind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfEnum(protoreflect.EnumNumber(v))) + return n, nil + case protoreflect.Int32Kind: + if wtyp == protowire.BytesType { + buf, n := 
protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfInt32(int32(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + return n, nil + case protoreflect.Sint32Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(protowire.DecodeZigZag(v & math.MaxUint32)))) + return n, nil + case protoreflect.Uint32Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfUint32(uint32(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + return n, nil + case protoreflect.Int64Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfInt64(int64(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + return n, nil + case protoreflect.Sint64Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt64(protowire.DecodeZigZag(v))) + return n, nil + case protoreflect.Uint64Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeVarint(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfUint64(v)) + } + return n, nil + } + if wtyp != protowire.VarintType { + return 0, errUnknown + } + v, n := protowire.ConsumeVarint(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + return n, nil + case protoreflect.Sfixed32Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed32(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + 
list.Append(protoreflect.ValueOfInt32(int32(v))) + } + return n, nil + } + if wtyp != protowire.Fixed32Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt32(int32(v))) + return n, nil + case protoreflect.Fixed32Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed32(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfUint32(uint32(v))) + } + return n, nil + } + if wtyp != protowire.Fixed32Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfUint32(uint32(v))) + return n, nil + case protoreflect.FloatKind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed32(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) + } + return n, nil + } + if wtyp != protowire.Fixed32Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed32(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfFloat32(math.Float32frombits(uint32(v)))) + return n, nil + case protoreflect.Sfixed64Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed64(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfInt64(int64(v))) + } + return n, nil + } + if wtyp != protowire.Fixed64Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfInt64(int64(v))) + return n, nil + case protoreflect.Fixed64Kind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed64(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfUint64(v)) + } + return n, nil + } + if wtyp != protowire.Fixed64Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfUint64(v)) + return n, nil + case protoreflect.DoubleKind: + if wtyp == protowire.BytesType { + buf, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + for len(buf) > 0 { + v, n := protowire.ConsumeFixed64(buf) + if n < 0 { + return 0, errDecode + } + buf = buf[n:] + list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) + } + return n, nil + } + if wtyp != protowire.Fixed64Type { + return 0, errUnknown + } + v, n := protowire.ConsumeFixed64(b) + if n < 0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfFloat64(math.Float64frombits(v))) + return n, nil + case protoreflect.StringKind: + if wtyp != protowire.BytesType { + return 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + if strs.EnforceUTF8(fd) && !utf8.Valid(v) { + return 0, errors.InvalidUTF8(string(fd.FullName())) + } + list.Append(protoreflect.ValueOfString(string(v))) + return n, nil + case protoreflect.BytesKind: + if wtyp != protowire.BytesType { + return 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 
0 { + return 0, errDecode + } + list.Append(protoreflect.ValueOfBytes(append(emptyBuf[:], v...))) + return n, nil + case protoreflect.MessageKind: + if wtyp != protowire.BytesType { + return 0, errUnknown + } + v, n := protowire.ConsumeBytes(b) + if n < 0 { + return 0, errDecode + } + m := list.NewElement() + if err := o.unmarshalMessage(v, m.Message()); err != nil { + return 0, err + } + list.Append(m) + return n, nil + case protoreflect.GroupKind: + if wtyp != protowire.StartGroupType { + return 0, errUnknown + } + v, n := protowire.ConsumeGroup(fd.Number(), b) + if n < 0 { + return 0, errDecode + } + m := list.NewElement() + if err := o.unmarshalMessage(v, m.Message()); err != nil { + return 0, err + } + list.Append(m) + return n, nil + default: + return 0, errUnknown + } +} + +// We append to an empty array rather than a nil []byte to get non-nil zero-length byte slices. +var emptyBuf [0]byte diff --git a/vendor/google.golang.org/protobuf/proto/doc.go b/vendor/google.golang.org/protobuf/proto/doc.go new file mode 100644 index 00000000..c52d8c4a --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/doc.go @@ -0,0 +1,94 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package proto provides functions operating on protocol buffer messages. +// +// For documentation on protocol buffers in general, see: +// +// https://developers.google.com/protocol-buffers +// +// For a tutorial on using protocol buffers with Go, see: +// +// https://developers.google.com/protocol-buffers/docs/gotutorial +// +// For a guide to generated Go protocol buffer code, see: +// +// https://developers.google.com/protocol-buffers/docs/reference/go-generated +// +// +// Binary serialization +// +// This package contains functions to convert to and from the wire format, +// an efficient binary serialization of protocol buffers. +// +// • Size reports the size of a message in the wire format. +// +// • Marshal converts a message to the wire format. +// The MarshalOptions type provides more control over wire marshaling. +// +// • Unmarshal converts a message from the wire format. +// The UnmarshalOptions type provides more control over wire unmarshaling. +// +// +// Basic message operations +// +// • Clone makes a deep copy of a message. +// +// • Merge merges the content of a message into another. +// +// • Equal compares two messages. For more control over comparisons +// and detailed reporting of differences, see package +// "google.golang.org/protobuf/testing/protocmp". +// +// • Reset clears the content of a message. +// +// • CheckInitialized reports whether all required fields in a message are set. +// +// +// Optional scalar constructors +// +// The API for some generated messages represents optional scalar fields +// as pointers to a value. For example, an optional string field has the +// Go type *string. +// +// • Bool, Int32, Int64, Uint32, Uint64, Float32, Float64, and String +// take a value and return a pointer to a new instance of it, +// to simplify construction of optional field values. +// +// Generated enum types usually have an Enum method which performs the +// same operation. +// +// Optional scalar fields are only supported in proto2. +// +// +// Extension accessors +// +// • HasExtension, GetExtension, SetExtension, and ClearExtension +// access extension field values in a protocol buffer message. +// +// Extension fields are only supported in proto2. 
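For reference, a minimal round-trip sketch of the Marshal/Unmarshal calls and optional-scalar helpers described in the doc.go overview above; it uses the well-known durationpb type purely so the snippet compiles without a project-specific generated message:

    package main

    import (
    	"fmt"
    	"log"
    	"time"

    	"google.golang.org/protobuf/proto"
    	"google.golang.org/protobuf/types/known/durationpb"
    )

    func main() {
    	// Marshal a message to the binary wire format.
    	src := durationpb.New(90 * time.Second)
    	b, err := proto.Marshal(src)
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Unmarshal back into a freshly allocated message.
    	dst := &durationpb.Duration{}
    	if err := proto.Unmarshal(b, dst); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(dst.AsDuration()) // 1m30s

    	// proto.String and friends build pointers for proto2 optional scalars.
    	name := proto.String("example") // *string
    	_ = name
    }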
+// +// +// Related packages +// +// • Package "google.golang.org/protobuf/encoding/protojson" converts messages to +// and from JSON. +// +// • Package "google.golang.org/protobuf/encoding/prototext" converts messages to +// and from the text format. +// +// • Package "google.golang.org/protobuf/reflect/protoreflect" provides a +// reflection interface for protocol buffer data types. +// +// • Package "google.golang.org/protobuf/testing/protocmp" provides features +// to compare protocol buffer messages with the "github.com/google/go-cmp/cmp" +// package. +// +// • Package "google.golang.org/protobuf/types/dynamicpb" provides a dynamic +// message type, suitable for working with messages where the protocol buffer +// type is only known at runtime. +// +// This module contains additional packages for more specialized use cases. +// Consult the individual package documentation for details. +package proto diff --git a/vendor/google.golang.org/protobuf/proto/encode.go b/vendor/google.golang.org/protobuf/proto/encode.go new file mode 100644 index 00000000..d18239c2 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/encode.go @@ -0,0 +1,319 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/order" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +// MarshalOptions configures the marshaler. +// +// Example usage: +// b, err := MarshalOptions{Deterministic: true}.Marshal(m) +type MarshalOptions struct { + pragma.NoUnkeyedLiterals + + // AllowPartial allows messages that have missing required fields to marshal + // without returning an error. If AllowPartial is false (the default), + // Marshal will return an error if there are any missing required fields. + AllowPartial bool + + // Deterministic controls whether the same message will always be + // serialized to the same bytes within the same binary. + // + // Setting this option guarantees that repeated serialization of + // the same message will return the same bytes, and that different + // processes of the same binary (which may be executing on different + // machines) will serialize equal messages to the same bytes. + // It has no effect on the resulting size of the encoded message compared + // to a non-deterministic marshal. + // + // Note that the deterministic serialization is NOT canonical across + // languages. It is not guaranteed to remain stable over time. It is + // unstable across different builds with schema changes due to unknown + // fields. Users who need canonical serialization (e.g., persistent + // storage in a canonical form, fingerprinting, etc.) must define + // their own canonicalization specification and implement their own + // serializer rather than relying on this API. + // + // If deterministic serialization is requested, map entries will be + // sorted by keys in lexographical order. This is an implementation + // detail and subject to change. + Deterministic bool + + // UseCachedSize indicates that the result of a previous Size call + // may be reused. + // + // Setting this option asserts that: + // + // 1. 
Size has previously been called on this message with identical + // options (except for UseCachedSize itself). + // + // 2. The message and all its submessages have not changed in any + // way since the Size call. + // + // If either of these invariants is violated, + // the results are undefined and may include panics or corrupted output. + // + // Implementations MAY take this option into account to provide + // better performance, but there is no guarantee that they will do so. + // There is absolutely no guarantee that Size followed by Marshal with + // UseCachedSize set will perform equivalently to Marshal alone. + UseCachedSize bool +} + +// Marshal returns the wire-format encoding of m. +func Marshal(m Message) ([]byte, error) { + // Treat nil message interface as an empty message; nothing to output. + if m == nil { + return nil, nil + } + + out, err := MarshalOptions{}.marshal(nil, m.ProtoReflect()) + if len(out.Buf) == 0 && err == nil { + out.Buf = emptyBytesForMessage(m) + } + return out.Buf, err +} + +// Marshal returns the wire-format encoding of m. +func (o MarshalOptions) Marshal(m Message) ([]byte, error) { + // Treat nil message interface as an empty message; nothing to output. + if m == nil { + return nil, nil + } + + out, err := o.marshal(nil, m.ProtoReflect()) + if len(out.Buf) == 0 && err == nil { + out.Buf = emptyBytesForMessage(m) + } + return out.Buf, err +} + +// emptyBytesForMessage returns a nil buffer if and only if m is invalid, +// otherwise it returns a non-nil empty buffer. +// +// This is to assist the edge-case where user-code does the following: +// m1.OptionalBytes, _ = proto.Marshal(m2) +// where they expect the proto2 "optional_bytes" field to be populated +// if any only if m2 is a valid message. +func emptyBytesForMessage(m Message) []byte { + if m == nil || !m.ProtoReflect().IsValid() { + return nil + } + return emptyBuf[:] +} + +// MarshalAppend appends the wire-format encoding of m to b, +// returning the result. +func (o MarshalOptions) MarshalAppend(b []byte, m Message) ([]byte, error) { + // Treat nil message interface as an empty message; nothing to append. + if m == nil { + return b, nil + } + + out, err := o.marshal(b, m.ProtoReflect()) + return out.Buf, err +} + +// MarshalState returns the wire-format encoding of a message. +// +// This method permits fine-grained control over the marshaler. +// Most users should use Marshal instead. +func (o MarshalOptions) MarshalState(in protoiface.MarshalInput) (protoiface.MarshalOutput, error) { + return o.marshal(in.Buf, in.Message) +} + +// marshal is a centralized function that all marshal operations go through. +// For profiling purposes, avoid changing the name of this function or +// introducing other code paths for marshal that do not go through this. 
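A short sketch of how the MarshalOptions above are typically combined with Size and MarshalAppend; structpb is used only as a convenient message with a map field, and UseCachedSize is enabled strictly after a Size call with otherwise identical options, as the comment above requires:

    package main

    import (
    	"fmt"
    	"log"

    	"google.golang.org/protobuf/proto"
    	"google.golang.org/protobuf/types/known/structpb"
    )

    func main() {
    	m, err := structpb.NewStruct(map[string]interface{}{"b": 2, "a": 1})
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Deterministic marshaling sorts map entries, so repeated calls in the
    	// same binary produce identical bytes.
    	opts := proto.MarshalOptions{Deterministic: true}

    	// Size the message first, then append into a preallocated buffer.
    	// UseCachedSize is only safe here because Size was just called with the
    	// same options and the message has not been mutated since.
    	size := opts.Size(m)
    	opts.UseCachedSize = true
    	buf, err := opts.MarshalAppend(make([]byte, 0, size), m)
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(len(buf) == size) // true
    }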
+func (o MarshalOptions) marshal(b []byte, m protoreflect.Message) (out protoiface.MarshalOutput, err error) { + allowPartial := o.AllowPartial + o.AllowPartial = true + if methods := protoMethods(m); methods != nil && methods.Marshal != nil && + !(o.Deterministic && methods.Flags&protoiface.SupportMarshalDeterministic == 0) { + in := protoiface.MarshalInput{ + Message: m, + Buf: b, + } + if o.Deterministic { + in.Flags |= protoiface.MarshalDeterministic + } + if o.UseCachedSize { + in.Flags |= protoiface.MarshalUseCachedSize + } + if methods.Size != nil { + sout := methods.Size(protoiface.SizeInput{ + Message: m, + Flags: in.Flags, + }) + if cap(b) < len(b)+sout.Size { + in.Buf = make([]byte, len(b), growcap(cap(b), len(b)+sout.Size)) + copy(in.Buf, b) + } + in.Flags |= protoiface.MarshalUseCachedSize + } + out, err = methods.Marshal(in) + } else { + out.Buf, err = o.marshalMessageSlow(b, m) + } + if err != nil { + return out, err + } + if allowPartial { + return out, nil + } + return out, checkInitialized(m) +} + +func (o MarshalOptions) marshalMessage(b []byte, m protoreflect.Message) ([]byte, error) { + out, err := o.marshal(b, m) + return out.Buf, err +} + +// growcap scales up the capacity of a slice. +// +// Given a slice with a current capacity of oldcap and a desired +// capacity of wantcap, growcap returns a new capacity >= wantcap. +// +// The algorithm is mostly identical to the one used by append as of Go 1.14. +func growcap(oldcap, wantcap int) (newcap int) { + if wantcap > oldcap*2 { + newcap = wantcap + } else if oldcap < 1024 { + // The Go 1.14 runtime takes this case when len(s) < 1024, + // not when cap(s) < 1024. The difference doesn't seem + // significant here. + newcap = oldcap * 2 + } else { + newcap = oldcap + for 0 < newcap && newcap < wantcap { + newcap += newcap / 4 + } + if newcap <= 0 { + newcap = wantcap + } + } + return newcap +} + +func (o MarshalOptions) marshalMessageSlow(b []byte, m protoreflect.Message) ([]byte, error) { + if messageset.IsMessageSet(m.Descriptor()) { + return o.marshalMessageSet(b, m) + } + fieldOrder := order.AnyFieldOrder + if o.Deterministic { + // TODO: This should use a more natural ordering like NumberFieldOrder, + // but doing so breaks golden tests that make invalid assumption about + // output stability of this implementation. + fieldOrder = order.LegacyFieldOrder + } + var err error + order.RangeFields(m, fieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + b, err = o.marshalField(b, fd, v) + return err == nil + }) + if err != nil { + return b, err + } + b = append(b, m.GetUnknown()...) 
+ return b, nil +} + +func (o MarshalOptions) marshalField(b []byte, fd protoreflect.FieldDescriptor, value protoreflect.Value) ([]byte, error) { + switch { + case fd.IsList(): + return o.marshalList(b, fd, value.List()) + case fd.IsMap(): + return o.marshalMap(b, fd, value.Map()) + default: + b = protowire.AppendTag(b, fd.Number(), wireTypes[fd.Kind()]) + return o.marshalSingular(b, fd, value) + } +} + +func (o MarshalOptions) marshalList(b []byte, fd protoreflect.FieldDescriptor, list protoreflect.List) ([]byte, error) { + if fd.IsPacked() && list.Len() > 0 { + b = protowire.AppendTag(b, fd.Number(), protowire.BytesType) + b, pos := appendSpeculativeLength(b) + for i, llen := 0, list.Len(); i < llen; i++ { + var err error + b, err = o.marshalSingular(b, fd, list.Get(i)) + if err != nil { + return b, err + } + } + b = finishSpeculativeLength(b, pos) + return b, nil + } + + kind := fd.Kind() + for i, llen := 0, list.Len(); i < llen; i++ { + var err error + b = protowire.AppendTag(b, fd.Number(), wireTypes[kind]) + b, err = o.marshalSingular(b, fd, list.Get(i)) + if err != nil { + return b, err + } + } + return b, nil +} + +func (o MarshalOptions) marshalMap(b []byte, fd protoreflect.FieldDescriptor, mapv protoreflect.Map) ([]byte, error) { + keyf := fd.MapKey() + valf := fd.MapValue() + keyOrder := order.AnyKeyOrder + if o.Deterministic { + keyOrder = order.GenericKeyOrder + } + var err error + order.RangeEntries(mapv, keyOrder, func(key protoreflect.MapKey, value protoreflect.Value) bool { + b = protowire.AppendTag(b, fd.Number(), protowire.BytesType) + var pos int + b, pos = appendSpeculativeLength(b) + + b, err = o.marshalField(b, keyf, key.Value()) + if err != nil { + return false + } + b, err = o.marshalField(b, valf, value) + if err != nil { + return false + } + b = finishSpeculativeLength(b, pos) + return true + }) + return b, err +} + +// When encoding length-prefixed fields, we speculatively set aside some number of bytes +// for the length, encode the data, and then encode the length (shifting the data if necessary +// to make room). +const speculativeLength = 1 + +func appendSpeculativeLength(b []byte) ([]byte, int) { + pos := len(b) + b = append(b, "\x00\x00\x00\x00"[:speculativeLength]...) + return b, pos +} + +func finishSpeculativeLength(b []byte, pos int) []byte { + mlen := len(b) - pos - speculativeLength + msiz := protowire.SizeVarint(uint64(mlen)) + if msiz != speculativeLength { + for i := 0; i < msiz-speculativeLength; i++ { + b = append(b, 0) + } + copy(b[pos+msiz:], b[pos+speculativeLength:]) + b = b[:pos+msiz+mlen] + } + protowire.AppendVarint(b[:pos], uint64(mlen)) + return b +} diff --git a/vendor/google.golang.org/protobuf/proto/encode_gen.go b/vendor/google.golang.org/protobuf/proto/encode_gen.go new file mode 100644 index 00000000..185dacfb --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/encode_gen.go @@ -0,0 +1,97 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. 
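As an aside on the speculative-length technique above: the sketch below encodes a length-delimited field the straightforward way with the public protowire helpers, which requires the complete payload before its length can be written. appendSpeculativeLength/finishSpeculativeLength exist to avoid that separate buffering step by reserving a single length byte and shifting the payload only when the final varint length needs more bytes. Nothing beyond the public protowire API is assumed here:

    package main

    import (
    	"fmt"

    	"google.golang.org/protobuf/encoding/protowire"
    )

    func main() {
    	// Encode field number 1 as a length-delimited (bytes) field the simple
    	// way: the payload must already be complete so its length can be
    	// written up front.
    	payload := []byte("hello")
    	var b []byte
    	b = protowire.AppendTag(b, 1, protowire.BytesType)
    	b = protowire.AppendBytes(b, payload) // varint length + payload
    	fmt.Printf("% x\n", b)                // 0a 05 68 65 6c 6c 6f
    }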
+ +package proto + +import ( + "math" + "unicode/utf8" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/reflect/protoreflect" +) + +var wireTypes = map[protoreflect.Kind]protowire.Type{ + protoreflect.BoolKind: protowire.VarintType, + protoreflect.EnumKind: protowire.VarintType, + protoreflect.Int32Kind: protowire.VarintType, + protoreflect.Sint32Kind: protowire.VarintType, + protoreflect.Uint32Kind: protowire.VarintType, + protoreflect.Int64Kind: protowire.VarintType, + protoreflect.Sint64Kind: protowire.VarintType, + protoreflect.Uint64Kind: protowire.VarintType, + protoreflect.Sfixed32Kind: protowire.Fixed32Type, + protoreflect.Fixed32Kind: protowire.Fixed32Type, + protoreflect.FloatKind: protowire.Fixed32Type, + protoreflect.Sfixed64Kind: protowire.Fixed64Type, + protoreflect.Fixed64Kind: protowire.Fixed64Type, + protoreflect.DoubleKind: protowire.Fixed64Type, + protoreflect.StringKind: protowire.BytesType, + protoreflect.BytesKind: protowire.BytesType, + protoreflect.MessageKind: protowire.BytesType, + protoreflect.GroupKind: protowire.StartGroupType, +} + +func (o MarshalOptions) marshalSingular(b []byte, fd protoreflect.FieldDescriptor, v protoreflect.Value) ([]byte, error) { + switch fd.Kind() { + case protoreflect.BoolKind: + b = protowire.AppendVarint(b, protowire.EncodeBool(v.Bool())) + case protoreflect.EnumKind: + b = protowire.AppendVarint(b, uint64(v.Enum())) + case protoreflect.Int32Kind: + b = protowire.AppendVarint(b, uint64(int32(v.Int()))) + case protoreflect.Sint32Kind: + b = protowire.AppendVarint(b, protowire.EncodeZigZag(int64(int32(v.Int())))) + case protoreflect.Uint32Kind: + b = protowire.AppendVarint(b, uint64(uint32(v.Uint()))) + case protoreflect.Int64Kind: + b = protowire.AppendVarint(b, uint64(v.Int())) + case protoreflect.Sint64Kind: + b = protowire.AppendVarint(b, protowire.EncodeZigZag(v.Int())) + case protoreflect.Uint64Kind: + b = protowire.AppendVarint(b, v.Uint()) + case protoreflect.Sfixed32Kind: + b = protowire.AppendFixed32(b, uint32(v.Int())) + case protoreflect.Fixed32Kind: + b = protowire.AppendFixed32(b, uint32(v.Uint())) + case protoreflect.FloatKind: + b = protowire.AppendFixed32(b, math.Float32bits(float32(v.Float()))) + case protoreflect.Sfixed64Kind: + b = protowire.AppendFixed64(b, uint64(v.Int())) + case protoreflect.Fixed64Kind: + b = protowire.AppendFixed64(b, v.Uint()) + case protoreflect.DoubleKind: + b = protowire.AppendFixed64(b, math.Float64bits(v.Float())) + case protoreflect.StringKind: + if strs.EnforceUTF8(fd) && !utf8.ValidString(v.String()) { + return b, errors.InvalidUTF8(string(fd.FullName())) + } + b = protowire.AppendString(b, v.String()) + case protoreflect.BytesKind: + b = protowire.AppendBytes(b, v.Bytes()) + case protoreflect.MessageKind: + var pos int + var err error + b, pos = appendSpeculativeLength(b) + b, err = o.marshalMessage(b, v.Message()) + if err != nil { + return b, err + } + b = finishSpeculativeLength(b, pos) + case protoreflect.GroupKind: + var err error + b, err = o.marshalMessage(b, v.Message()) + if err != nil { + return b, err + } + b = protowire.AppendVarint(b, protowire.EncodeTag(fd.Number(), protowire.EndGroupType)) + default: + return b, errors.New("invalid kind %v", fd.Kind()) + } + return b, nil +} diff --git a/vendor/google.golang.org/protobuf/proto/equal.go b/vendor/google.golang.org/protobuf/proto/equal.go new file mode 100644 index 00000000..4dba2b96 --- /dev/null 
+++ b/vendor/google.golang.org/protobuf/proto/equal.go @@ -0,0 +1,167 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "bytes" + "math" + "reflect" + + "google.golang.org/protobuf/encoding/protowire" + pref "google.golang.org/protobuf/reflect/protoreflect" +) + +// Equal reports whether two messages are equal. +// If two messages marshal to the same bytes under deterministic serialization, +// then Equal is guaranteed to report true. +// +// Two messages are equal if they belong to the same message descriptor, +// have the same set of populated known and extension field values, +// and the same set of unknown fields values. If either of the top-level +// messages are invalid, then Equal reports true only if both are invalid. +// +// Scalar values are compared with the equivalent of the == operator in Go, +// except bytes values which are compared using bytes.Equal and +// floating point values which specially treat NaNs as equal. +// Message values are compared by recursively calling Equal. +// Lists are equal if each element value is also equal. +// Maps are equal if they have the same set of keys, where the pair of values +// for each key is also equal. +func Equal(x, y Message) bool { + if x == nil || y == nil { + return x == nil && y == nil + } + mx := x.ProtoReflect() + my := y.ProtoReflect() + if mx.IsValid() != my.IsValid() { + return false + } + return equalMessage(mx, my) +} + +// equalMessage compares two messages. +func equalMessage(mx, my pref.Message) bool { + if mx.Descriptor() != my.Descriptor() { + return false + } + + nx := 0 + equal := true + mx.Range(func(fd pref.FieldDescriptor, vx pref.Value) bool { + nx++ + vy := my.Get(fd) + equal = my.Has(fd) && equalField(fd, vx, vy) + return equal + }) + if !equal { + return false + } + ny := 0 + my.Range(func(fd pref.FieldDescriptor, vx pref.Value) bool { + ny++ + return true + }) + if nx != ny { + return false + } + + return equalUnknown(mx.GetUnknown(), my.GetUnknown()) +} + +// equalField compares two fields. +func equalField(fd pref.FieldDescriptor, x, y pref.Value) bool { + switch { + case fd.IsList(): + return equalList(fd, x.List(), y.List()) + case fd.IsMap(): + return equalMap(fd, x.Map(), y.Map()) + default: + return equalValue(fd, x, y) + } +} + +// equalMap compares two maps. +func equalMap(fd pref.FieldDescriptor, x, y pref.Map) bool { + if x.Len() != y.Len() { + return false + } + equal := true + x.Range(func(k pref.MapKey, vx pref.Value) bool { + vy := y.Get(k) + equal = y.Has(k) && equalValue(fd.MapValue(), vx, vy) + return equal + }) + return equal +} + +// equalList compares two lists. +func equalList(fd pref.FieldDescriptor, x, y pref.List) bool { + if x.Len() != y.Len() { + return false + } + for i := x.Len() - 1; i >= 0; i-- { + if !equalValue(fd, x.Get(i), y.Get(i)) { + return false + } + } + return true +} + +// equalValue compares two singular values. 
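A small illustration of the NaN handling documented for Equal above; wrapperspb.DoubleValue is used only so the example is self-contained:

    package main

    import (
    	"fmt"
    	"math"

    	"google.golang.org/protobuf/proto"
    	"google.golang.org/protobuf/types/known/wrapperspb"
    )

    func main() {
    	x := wrapperspb.Double(math.NaN())
    	y := wrapperspb.Double(math.NaN())

    	fmt.Println(x.GetValue() == y.GetValue()) // false: NaN != NaN in Go
    	fmt.Println(proto.Equal(x, y))            // true: Equal treats NaNs as equal
    }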
+func equalValue(fd pref.FieldDescriptor, x, y pref.Value) bool { + switch fd.Kind() { + case pref.BoolKind: + return x.Bool() == y.Bool() + case pref.EnumKind: + return x.Enum() == y.Enum() + case pref.Int32Kind, pref.Sint32Kind, + pref.Int64Kind, pref.Sint64Kind, + pref.Sfixed32Kind, pref.Sfixed64Kind: + return x.Int() == y.Int() + case pref.Uint32Kind, pref.Uint64Kind, + pref.Fixed32Kind, pref.Fixed64Kind: + return x.Uint() == y.Uint() + case pref.FloatKind, pref.DoubleKind: + fx := x.Float() + fy := y.Float() + if math.IsNaN(fx) || math.IsNaN(fy) { + return math.IsNaN(fx) && math.IsNaN(fy) + } + return fx == fy + case pref.StringKind: + return x.String() == y.String() + case pref.BytesKind: + return bytes.Equal(x.Bytes(), y.Bytes()) + case pref.MessageKind, pref.GroupKind: + return equalMessage(x.Message(), y.Message()) + default: + return x.Interface() == y.Interface() + } +} + +// equalUnknown compares unknown fields by direct comparison on the raw bytes +// of each individual field number. +func equalUnknown(x, y pref.RawFields) bool { + if len(x) != len(y) { + return false + } + if bytes.Equal([]byte(x), []byte(y)) { + return true + } + + mx := make(map[pref.FieldNumber]pref.RawFields) + my := make(map[pref.FieldNumber]pref.RawFields) + for len(x) > 0 { + fnum, _, n := protowire.ConsumeField(x) + mx[fnum] = append(mx[fnum], x[:n]...) + x = x[n:] + } + for len(y) > 0 { + fnum, _, n := protowire.ConsumeField(y) + my[fnum] = append(my[fnum], y[:n]...) + y = y[n:] + } + return reflect.DeepEqual(mx, my) +} diff --git a/vendor/google.golang.org/protobuf/proto/extension.go b/vendor/google.golang.org/protobuf/proto/extension.go new file mode 100644 index 00000000..5f293cda --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/extension.go @@ -0,0 +1,92 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" +) + +// HasExtension reports whether an extension field is populated. +// It returns false if m is invalid or if xt does not extend m. +func HasExtension(m Message, xt protoreflect.ExtensionType) bool { + // Treat nil message interface as an empty message; no populated fields. + if m == nil { + return false + } + + // As a special-case, we reports invalid or mismatching descriptors + // as always not being populated (since they aren't). + if xt == nil || m.ProtoReflect().Descriptor() != xt.TypeDescriptor().ContainingMessage() { + return false + } + + return m.ProtoReflect().Has(xt.TypeDescriptor()) +} + +// ClearExtension clears an extension field such that subsequent +// HasExtension calls return false. +// It panics if m is invalid or if xt does not extend m. +func ClearExtension(m Message, xt protoreflect.ExtensionType) { + m.ProtoReflect().Clear(xt.TypeDescriptor()) +} + +// GetExtension retrieves the value for an extension field. +// If the field is unpopulated, it returns the default value for +// scalars and an immutable, empty value for lists or messages. +// It panics if xt does not extend m. +func GetExtension(m Message, xt protoreflect.ExtensionType) interface{} { + // Treat nil message interface as an empty message; return the default. + if m == nil { + return xt.InterfaceOf(xt.Zero()) + } + + return xt.InterfaceOf(m.ProtoReflect().Get(xt.TypeDescriptor())) +} + +// SetExtension stores the value of an extension field. 
+// It panics if m is invalid, xt does not extend m, or if type of v +// is invalid for the specified extension field. +func SetExtension(m Message, xt protoreflect.ExtensionType, v interface{}) { + xd := xt.TypeDescriptor() + pv := xt.ValueOf(v) + + // Specially treat an invalid list, map, or message as clear. + isValid := true + switch { + case xd.IsList(): + isValid = pv.List().IsValid() + case xd.IsMap(): + isValid = pv.Map().IsValid() + case xd.Message() != nil: + isValid = pv.Message().IsValid() + } + if !isValid { + m.ProtoReflect().Clear(xd) + return + } + + m.ProtoReflect().Set(xd, pv) +} + +// RangeExtensions iterates over every populated extension field in m in an +// undefined order, calling f for each extension type and value encountered. +// It returns immediately if f returns false. +// While iterating, mutating operations may only be performed +// on the current extension field. +func RangeExtensions(m Message, f func(protoreflect.ExtensionType, interface{}) bool) { + // Treat nil message interface as an empty message; nothing to range over. + if m == nil { + return + } + + m.ProtoReflect().Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + if fd.IsExtension() { + xt := fd.(protoreflect.ExtensionTypeDescriptor).Type() + vi := xt.InterfaceOf(v) + return f(xt, vi) + } + return true + }) +} diff --git a/vendor/google.golang.org/protobuf/proto/merge.go b/vendor/google.golang.org/protobuf/proto/merge.go new file mode 100644 index 00000000..d761ab33 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/merge.go @@ -0,0 +1,139 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "fmt" + + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +// Merge merges src into dst, which must be a message with the same descriptor. +// +// Populated scalar fields in src are copied to dst, while populated +// singular messages in src are merged into dst by recursively calling Merge. +// The elements of every list field in src is appended to the corresponded +// list fields in dst. The entries of every map field in src is copied into +// the corresponding map field in dst, possibly replacing existing entries. +// The unknown fields of src are appended to the unknown fields of dst. +// +// It is semantically equivalent to unmarshaling the encoded form of src +// into dst with the UnmarshalOptions.Merge option specified. +func Merge(dst, src Message) { + // TODO: Should nil src be treated as semantically equivalent to a + // untyped, read-only, empty message? What about a nil dst? + + dstMsg, srcMsg := dst.ProtoReflect(), src.ProtoReflect() + if dstMsg.Descriptor() != srcMsg.Descriptor() { + if got, want := dstMsg.Descriptor().FullName(), srcMsg.Descriptor().FullName(); got != want { + panic(fmt.Sprintf("descriptor mismatch: %v != %v", got, want)) + } + panic("descriptor mismatch") + } + mergeOptions{}.mergeMessage(dstMsg, srcMsg) +} + +// Clone returns a deep copy of m. +// If the top-level message is invalid, it returns an invalid message as well. 
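A sketch of the extension accessors described above. examplepb.SomeMessage and examplepb.E_Priority are hypothetical generated proto2 types (they do not exist in this repository), and the fragment assumes the usual proto, protoreflect, and fmt imports:

    // Hypothetical: examplepb.E_Priority is a generated proto2 extension
    // (extend SomeMessage { optional int32 priority = 1000; }); it is shown
    // only to illustrate the accessors and does not exist in this repository.
    msg := &examplepb.SomeMessage{}

    proto.SetExtension(msg, examplepb.E_Priority, int32(5))
    if proto.HasExtension(msg, examplepb.E_Priority) {
    	p := proto.GetExtension(msg, examplepb.E_Priority).(int32)
    	fmt.Println(p) // 5
    }

    // Iterate over every populated extension on the message.
    proto.RangeExtensions(msg, func(xt protoreflect.ExtensionType, v interface{}) bool {
    	fmt.Println(xt.TypeDescriptor().FullName(), v)
    	return true
    })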
+func Clone(m Message) Message { + // NOTE: Most usages of Clone assume the following properties: + // t := reflect.TypeOf(m) + // t == reflect.TypeOf(m.ProtoReflect().New().Interface()) + // t == reflect.TypeOf(m.ProtoReflect().Type().Zero().Interface()) + // + // Embedding protobuf messages breaks this since the parent type will have + // a forwarded ProtoReflect method, but the Interface method will return + // the underlying embedded message type. + if m == nil { + return nil + } + src := m.ProtoReflect() + if !src.IsValid() { + return src.Type().Zero().Interface() + } + dst := src.New() + mergeOptions{}.mergeMessage(dst, src) + return dst.Interface() +} + +// mergeOptions provides a namespace for merge functions, and can be +// exported in the future if we add user-visible merge options. +type mergeOptions struct{} + +func (o mergeOptions) mergeMessage(dst, src protoreflect.Message) { + methods := protoMethods(dst) + if methods != nil && methods.Merge != nil { + in := protoiface.MergeInput{ + Destination: dst, + Source: src, + } + out := methods.Merge(in) + if out.Flags&protoiface.MergeComplete != 0 { + return + } + } + + if !dst.IsValid() { + panic(fmt.Sprintf("cannot merge into invalid %v message", dst.Descriptor().FullName())) + } + + src.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + switch { + case fd.IsList(): + o.mergeList(dst.Mutable(fd).List(), v.List(), fd) + case fd.IsMap(): + o.mergeMap(dst.Mutable(fd).Map(), v.Map(), fd.MapValue()) + case fd.Message() != nil: + o.mergeMessage(dst.Mutable(fd).Message(), v.Message()) + case fd.Kind() == protoreflect.BytesKind: + dst.Set(fd, o.cloneBytes(v)) + default: + dst.Set(fd, v) + } + return true + }) + + if len(src.GetUnknown()) > 0 { + dst.SetUnknown(append(dst.GetUnknown(), src.GetUnknown()...)) + } +} + +func (o mergeOptions) mergeList(dst, src protoreflect.List, fd protoreflect.FieldDescriptor) { + // Merge semantics appends to the end of the existing list. + for i, n := 0, src.Len(); i < n; i++ { + switch v := src.Get(i); { + case fd.Message() != nil: + dstv := dst.NewElement() + o.mergeMessage(dstv.Message(), v.Message()) + dst.Append(dstv) + case fd.Kind() == protoreflect.BytesKind: + dst.Append(o.cloneBytes(v)) + default: + dst.Append(v) + } + } +} + +func (o mergeOptions) mergeMap(dst, src protoreflect.Map, fd protoreflect.FieldDescriptor) { + // Merge semantics replaces, rather than merges into existing entries. + src.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { + switch { + case fd.Message() != nil: + dstv := dst.NewValue() + o.mergeMessage(dstv.Message(), v.Message()) + dst.Set(k, dstv) + case fd.Kind() == protoreflect.BytesKind: + dst.Set(k, o.cloneBytes(v)) + default: + dst.Set(k, v) + } + return true + }) +} + +func (o mergeOptions) cloneBytes(v protoreflect.Value) protoreflect.Value { + return protoreflect.ValueOfBytes(append([]byte{}, v.Bytes()...)) +} diff --git a/vendor/google.golang.org/protobuf/proto/messageset.go b/vendor/google.golang.org/protobuf/proto/messageset.go new file mode 100644 index 00000000..312d5d45 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/messageset.go @@ -0,0 +1,93 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package proto + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/order" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" +) + +func (o MarshalOptions) sizeMessageSet(m protoreflect.Message) (size int) { + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + size += messageset.SizeField(fd.Number()) + size += protowire.SizeTag(messageset.FieldMessage) + size += protowire.SizeBytes(o.size(v.Message())) + return true + }) + size += messageset.SizeUnknown(m.GetUnknown()) + return size +} + +func (o MarshalOptions) marshalMessageSet(b []byte, m protoreflect.Message) ([]byte, error) { + if !flags.ProtoLegacy { + return b, errors.New("no support for message_set_wire_format") + } + fieldOrder := order.AnyFieldOrder + if o.Deterministic { + fieldOrder = order.NumberFieldOrder + } + var err error + order.RangeFields(m, fieldOrder, func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + b, err = o.marshalMessageSetField(b, fd, v) + return err == nil + }) + if err != nil { + return b, err + } + return messageset.AppendUnknown(b, m.GetUnknown()) +} + +func (o MarshalOptions) marshalMessageSetField(b []byte, fd protoreflect.FieldDescriptor, value protoreflect.Value) ([]byte, error) { + b = messageset.AppendFieldStart(b, fd.Number()) + b = protowire.AppendTag(b, messageset.FieldMessage, protowire.BytesType) + b = protowire.AppendVarint(b, uint64(o.Size(value.Message().Interface()))) + b, err := o.marshalMessage(b, value.Message()) + if err != nil { + return b, err + } + b = messageset.AppendFieldEnd(b) + return b, nil +} + +func (o UnmarshalOptions) unmarshalMessageSet(b []byte, m protoreflect.Message) error { + if !flags.ProtoLegacy { + return errors.New("no support for message_set_wire_format") + } + return messageset.Unmarshal(b, false, func(num protowire.Number, v []byte) error { + err := o.unmarshalMessageSetField(m, num, v) + if err == errUnknown { + unknown := m.GetUnknown() + unknown = protowire.AppendTag(unknown, num, protowire.BytesType) + unknown = protowire.AppendBytes(unknown, v) + m.SetUnknown(unknown) + return nil + } + return err + }) +} + +func (o UnmarshalOptions) unmarshalMessageSetField(m protoreflect.Message, num protowire.Number, v []byte) error { + md := m.Descriptor() + if !md.ExtensionRanges().Has(num) { + return errUnknown + } + xt, err := o.Resolver.FindExtensionByNumber(md.FullName(), num) + if err == protoregistry.NotFound { + return errUnknown + } + if err != nil { + return errors.New("%v: unable to resolve extension %v: %v", md.FullName(), num, err) + } + xd := xt.TypeDescriptor() + if err := o.unmarshalMessage(v, m.Mutable(xd).Message()); err != nil { + return err + } + return nil +} diff --git a/vendor/google.golang.org/protobuf/proto/proto.go b/vendor/google.golang.org/protobuf/proto/proto.go new file mode 100644 index 00000000..1f0d183b --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/proto.go @@ -0,0 +1,43 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
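A brief sketch of the Merge and Clone semantics implemented in merge.go above: map entries are copied from src into dst (replacing existing keys), and Clone produces a deep copy that shares no memory with the original. structpb is used only to have a message with a map field handy:

    package main

    import (
    	"fmt"
    	"log"

    	"google.golang.org/protobuf/proto"
    	"google.golang.org/protobuf/types/known/structpb"
    )

    func main() {
    	dst, err := structpb.NewStruct(map[string]interface{}{"a": 1})
    	if err != nil {
    		log.Fatal(err)
    	}
    	src, err := structpb.NewStruct(map[string]interface{}{"b": 2})
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Map entries from src are copied into dst.
    	proto.Merge(dst, src)
    	fmt.Println(len(dst.GetFields())) // 2

    	// Clone returns a deep copy; mutating it leaves the original untouched.
    	cp := proto.Clone(dst).(*structpb.Struct)
    	cp.Fields["a"] = structpb.NewNumberValue(42)
    	fmt.Println(dst.GetFields()["a"].GetNumberValue()) // still 1
    }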
+ +package proto + +import ( + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// Message is the top-level interface that all messages must implement. +// It provides access to a reflective view of a message. +// Any implementation of this interface may be used with all functions in the +// protobuf module that accept a Message, except where otherwise specified. +// +// This is the v2 interface definition for protobuf messages. +// The v1 interface definition is "github.com/golang/protobuf/proto".Message. +// +// To convert a v1 message to a v2 message, +// use "github.com/golang/protobuf/proto".MessageV2. +// To convert a v2 message to a v1 message, +// use "github.com/golang/protobuf/proto".MessageV1. +type Message = protoreflect.ProtoMessage + +// Error matches all errors produced by packages in the protobuf module. +// +// That is, errors.Is(err, Error) reports whether an error is produced +// by this module. +var Error error + +func init() { + Error = errors.Error +} + +// MessageName returns the full name of m. +// If m is nil, it returns an empty string. +func MessageName(m Message) protoreflect.FullName { + if m == nil { + return "" + } + return m.ProtoReflect().Descriptor().FullName() +} diff --git a/vendor/google.golang.org/protobuf/proto/proto_methods.go b/vendor/google.golang.org/protobuf/proto/proto_methods.go new file mode 100644 index 00000000..d8dd604f --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/proto_methods.go @@ -0,0 +1,19 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The protoreflect build tag disables use of fast-path methods. +// +build !protoreflect + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +const hasProtoMethods = true + +func protoMethods(m protoreflect.Message) *protoiface.Methods { + return m.ProtoMethods() +} diff --git a/vendor/google.golang.org/protobuf/proto/proto_reflect.go b/vendor/google.golang.org/protobuf/proto/proto_reflect.go new file mode 100644 index 00000000..b103d432 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/proto_reflect.go @@ -0,0 +1,19 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The protoreflect build tag disables use of fast-path methods. +// +build protoreflect + +package proto + +import ( + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +const hasProtoMethods = false + +func protoMethods(m protoreflect.Message) *protoiface.Methods { + return nil +} diff --git a/vendor/google.golang.org/protobuf/proto/reset.go b/vendor/google.golang.org/protobuf/proto/reset.go new file mode 100644 index 00000000..3d7f8943 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/reset.go @@ -0,0 +1,43 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "fmt" + + "google.golang.org/protobuf/reflect/protoreflect" +) + +// Reset clears every field in the message. +// The resulting message shares no observable memory with its previous state +// other than the memory for the message itself. 
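Per the doc comment on Error above, errors produced anywhere in the module match proto.Error under errors.Is; a minimal check, reusing durationpb for a concrete message type:

    package main

    import (
    	"errors"
    	"fmt"

    	"google.golang.org/protobuf/proto"
    	"google.golang.org/protobuf/types/known/durationpb"
    )

    func main() {
    	d := &durationpb.Duration{}
    	fmt.Println(proto.MessageName(d)) // google.protobuf.Duration

    	// 0xff is not a valid tag, so Unmarshal fails with a module error.
    	err := proto.Unmarshal([]byte{0xff}, d)
    	fmt.Println(errors.Is(err, proto.Error)) // true
    }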
+func Reset(m Message) { + if mr, ok := m.(interface{ Reset() }); ok && hasProtoMethods { + mr.Reset() + return + } + resetMessage(m.ProtoReflect()) +} + +func resetMessage(m protoreflect.Message) { + if !m.IsValid() { + panic(fmt.Sprintf("cannot reset invalid %v message", m.Descriptor().FullName())) + } + + // Clear all known fields. + fds := m.Descriptor().Fields() + for i := 0; i < fds.Len(); i++ { + m.Clear(fds.Get(i)) + } + + // Clear extension fields. + m.Range(func(fd protoreflect.FieldDescriptor, _ protoreflect.Value) bool { + m.Clear(fd) + return true + }) + + // Clear unknown fields. + m.SetUnknown(nil) +} diff --git a/vendor/google.golang.org/protobuf/proto/size.go b/vendor/google.golang.org/protobuf/proto/size.go new file mode 100644 index 00000000..554b9c6c --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/size.go @@ -0,0 +1,97 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/runtime/protoiface" +) + +// Size returns the size in bytes of the wire-format encoding of m. +func Size(m Message) int { + return MarshalOptions{}.Size(m) +} + +// Size returns the size in bytes of the wire-format encoding of m. +func (o MarshalOptions) Size(m Message) int { + // Treat a nil message interface as an empty message; nothing to output. + if m == nil { + return 0 + } + + return o.size(m.ProtoReflect()) +} + +// size is a centralized function that all size operations go through. +// For profiling purposes, avoid changing the name of this function or +// introducing other code paths for size that do not go through this. +func (o MarshalOptions) size(m protoreflect.Message) (size int) { + methods := protoMethods(m) + if methods != nil && methods.Size != nil { + out := methods.Size(protoiface.SizeInput{ + Message: m, + }) + return out.Size + } + if methods != nil && methods.Marshal != nil { + // This is not efficient, but we don't have any choice. + // This case is mainly used for legacy types with a Marshal method. 
+ out, _ := methods.Marshal(protoiface.MarshalInput{ + Message: m, + }) + return len(out.Buf) + } + return o.sizeMessageSlow(m) +} + +func (o MarshalOptions) sizeMessageSlow(m protoreflect.Message) (size int) { + if messageset.IsMessageSet(m.Descriptor()) { + return o.sizeMessageSet(m) + } + m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { + size += o.sizeField(fd, v) + return true + }) + size += len(m.GetUnknown()) + return size +} + +func (o MarshalOptions) sizeField(fd protoreflect.FieldDescriptor, value protoreflect.Value) (size int) { + num := fd.Number() + switch { + case fd.IsList(): + return o.sizeList(num, fd, value.List()) + case fd.IsMap(): + return o.sizeMap(num, fd, value.Map()) + default: + return protowire.SizeTag(num) + o.sizeSingular(num, fd.Kind(), value) + } +} + +func (o MarshalOptions) sizeList(num protowire.Number, fd protoreflect.FieldDescriptor, list protoreflect.List) (size int) { + if fd.IsPacked() && list.Len() > 0 { + content := 0 + for i, llen := 0, list.Len(); i < llen; i++ { + content += o.sizeSingular(num, fd.Kind(), list.Get(i)) + } + return protowire.SizeTag(num) + protowire.SizeBytes(content) + } + + for i, llen := 0, list.Len(); i < llen; i++ { + size += protowire.SizeTag(num) + o.sizeSingular(num, fd.Kind(), list.Get(i)) + } + return size +} + +func (o MarshalOptions) sizeMap(num protowire.Number, fd protoreflect.FieldDescriptor, mapv protoreflect.Map) (size int) { + mapv.Range(func(key protoreflect.MapKey, value protoreflect.Value) bool { + size += protowire.SizeTag(num) + size += protowire.SizeBytes(o.sizeField(fd.MapKey(), key.Value()) + o.sizeField(fd.MapValue(), value)) + return true + }) + return size +} diff --git a/vendor/google.golang.org/protobuf/proto/size_gen.go b/vendor/google.golang.org/protobuf/proto/size_gen.go new file mode 100644 index 00000000..3cf61a82 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/size_gen.go @@ -0,0 +1,55 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-types. DO NOT EDIT. 
+ +package proto + +import ( + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/reflect/protoreflect" +) + +func (o MarshalOptions) sizeSingular(num protowire.Number, kind protoreflect.Kind, v protoreflect.Value) int { + switch kind { + case protoreflect.BoolKind: + return protowire.SizeVarint(protowire.EncodeBool(v.Bool())) + case protoreflect.EnumKind: + return protowire.SizeVarint(uint64(v.Enum())) + case protoreflect.Int32Kind: + return protowire.SizeVarint(uint64(int32(v.Int()))) + case protoreflect.Sint32Kind: + return protowire.SizeVarint(protowire.EncodeZigZag(int64(int32(v.Int())))) + case protoreflect.Uint32Kind: + return protowire.SizeVarint(uint64(uint32(v.Uint()))) + case protoreflect.Int64Kind: + return protowire.SizeVarint(uint64(v.Int())) + case protoreflect.Sint64Kind: + return protowire.SizeVarint(protowire.EncodeZigZag(v.Int())) + case protoreflect.Uint64Kind: + return protowire.SizeVarint(v.Uint()) + case protoreflect.Sfixed32Kind: + return protowire.SizeFixed32() + case protoreflect.Fixed32Kind: + return protowire.SizeFixed32() + case protoreflect.FloatKind: + return protowire.SizeFixed32() + case protoreflect.Sfixed64Kind: + return protowire.SizeFixed64() + case protoreflect.Fixed64Kind: + return protowire.SizeFixed64() + case protoreflect.DoubleKind: + return protowire.SizeFixed64() + case protoreflect.StringKind: + return protowire.SizeBytes(len(v.String())) + case protoreflect.BytesKind: + return protowire.SizeBytes(len(v.Bytes())) + case protoreflect.MessageKind: + return protowire.SizeBytes(o.size(v.Message())) + case protoreflect.GroupKind: + return protowire.SizeGroup(num, o.size(v.Message())) + default: + return 0 + } +} diff --git a/vendor/google.golang.org/protobuf/proto/wrappers.go b/vendor/google.golang.org/protobuf/proto/wrappers.go new file mode 100644 index 00000000..653b12c3 --- /dev/null +++ b/vendor/google.golang.org/protobuf/proto/wrappers.go @@ -0,0 +1,29 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package proto + +// Bool stores v in a new bool value and returns a pointer to it. +func Bool(v bool) *bool { return &v } + +// Int32 stores v in a new int32 value and returns a pointer to it. +func Int32(v int32) *int32 { return &v } + +// Int64 stores v in a new int64 value and returns a pointer to it. +func Int64(v int64) *int64 { return &v } + +// Float32 stores v in a new float32 value and returns a pointer to it. +func Float32(v float32) *float32 { return &v } + +// Float64 stores v in a new float64 value and returns a pointer to it. +func Float64(v float64) *float64 { return &v } + +// Uint32 stores v in a new uint32 value and returns a pointer to it. +func Uint32(v uint32) *uint32 { return &v } + +// Uint64 stores v in a new uint64 value and returns a pointer to it. +func Uint64(v uint64) *uint64 { return &v } + +// String stores v in a new string value and returns a pointer to it. +func String(v string) *string { return &v } diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go new file mode 100644 index 00000000..e4dfb120 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go @@ -0,0 +1,276 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
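Editor's note: wrappers.go above adds the familiar scalar-pointer helpers (proto.Bool, proto.Int32, proto.String, and friends). They are typically used to populate optional fields whose generated form is a pointer, as in descriptorpb. A brief sketch with arbitrary field values, not taken from the vendored sources:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	// Optional scalar fields are generated as pointers, so the wrapper
	// helpers avoid declaring a temporary variable per field.
	field := &descriptorpb.FieldDescriptorProto{
		Name:   proto.String("id"),
		Number: proto.Int32(1),
		Label:  descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
		Type:   descriptorpb.FieldDescriptorProto_TYPE_INT64.Enum(),
	}
	fmt.Println(field.GetName(), field.GetNumber()) // id 1
}
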
+ +// Package protodesc provides functionality for converting +// FileDescriptorProto messages to/from protoreflect.FileDescriptor values. +// +// The google.protobuf.FileDescriptorProto is a protobuf message that describes +// the type information for a .proto file in a form that is easily serializable. +// The protoreflect.FileDescriptor is a more structured representation of +// the FileDescriptorProto message where references and remote dependencies +// can be directly followed. +package protodesc + +import ( + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + + "google.golang.org/protobuf/types/descriptorpb" +) + +// Resolver is the resolver used by NewFile to resolve dependencies. +// The enums and messages provided must belong to some parent file, +// which is also registered. +// +// It is implemented by protoregistry.Files. +type Resolver interface { + FindFileByPath(string) (protoreflect.FileDescriptor, error) + FindDescriptorByName(protoreflect.FullName) (protoreflect.Descriptor, error) +} + +// FileOptions configures the construction of file descriptors. +type FileOptions struct { + pragma.NoUnkeyedLiterals + + // AllowUnresolvable configures New to permissively allow unresolvable + // file, enum, or message dependencies. Unresolved dependencies are replaced + // by placeholder equivalents. + // + // The following dependencies may be left unresolved: + // • Resolving an imported file. + // • Resolving the type for a message field or extension field. + // If the kind of the field is unknown, then a placeholder is used for both + // the Enum and Message accessors on the protoreflect.FieldDescriptor. + // • Resolving an enum value set as the default for an optional enum field. + // If unresolvable, the protoreflect.FieldDescriptor.Default is set to the + // first value in the associated enum (or zero if the also enum dependency + // is also unresolvable). The protoreflect.FieldDescriptor.DefaultEnumValue + // is populated with a placeholder. + // • Resolving the extended message type for an extension field. + // • Resolving the input or output message type for a service method. + // + // If the unresolved dependency uses a relative name, + // then the placeholder will contain an invalid FullName with a "*." prefix, + // indicating that the starting prefix of the full name is unknown. + AllowUnresolvable bool +} + +// NewFile creates a new protoreflect.FileDescriptor from the provided +// file descriptor message. See FileOptions.New for more information. +func NewFile(fd *descriptorpb.FileDescriptorProto, r Resolver) (protoreflect.FileDescriptor, error) { + return FileOptions{}.New(fd, r) +} + +// NewFiles creates a new protoregistry.Files from the provided +// FileDescriptorSet message. See FileOptions.NewFiles for more information. +func NewFiles(fd *descriptorpb.FileDescriptorSet) (*protoregistry.Files, error) { + return FileOptions{}.NewFiles(fd) +} + +// New creates a new protoreflect.FileDescriptor from the provided +// file descriptor message. The file must represent a valid proto file according +// to protobuf semantics. The returned descriptor is a deep copy of the input. +// +// Any imported files, enum types, or message types referenced in the file are +// resolved using the provided registry. 
When looking up an import file path, +// the path must be unique. The newly created file descriptor is not registered +// back into the provided file registry. +func (o FileOptions) New(fd *descriptorpb.FileDescriptorProto, r Resolver) (protoreflect.FileDescriptor, error) { + if r == nil { + r = (*protoregistry.Files)(nil) // empty resolver + } + + // Handle the file descriptor content. + f := &filedesc.File{L2: &filedesc.FileL2{}} + switch fd.GetSyntax() { + case "proto2", "": + f.L1.Syntax = protoreflect.Proto2 + case "proto3": + f.L1.Syntax = protoreflect.Proto3 + default: + return nil, errors.New("invalid syntax: %q", fd.GetSyntax()) + } + f.L1.Path = fd.GetName() + if f.L1.Path == "" { + return nil, errors.New("file path must be populated") + } + f.L1.Package = protoreflect.FullName(fd.GetPackage()) + if !f.L1.Package.IsValid() && f.L1.Package != "" { + return nil, errors.New("invalid package: %q", f.L1.Package) + } + if opts := fd.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.FileOptions) + f.L2.Options = func() protoreflect.ProtoMessage { return opts } + } + + f.L2.Imports = make(filedesc.FileImports, len(fd.GetDependency())) + for _, i := range fd.GetPublicDependency() { + if !(0 <= i && int(i) < len(f.L2.Imports)) || f.L2.Imports[i].IsPublic { + return nil, errors.New("invalid or duplicate public import index: %d", i) + } + f.L2.Imports[i].IsPublic = true + } + for _, i := range fd.GetWeakDependency() { + if !(0 <= i && int(i) < len(f.L2.Imports)) || f.L2.Imports[i].IsWeak { + return nil, errors.New("invalid or duplicate weak import index: %d", i) + } + f.L2.Imports[i].IsWeak = true + } + imps := importSet{f.Path(): true} + for i, path := range fd.GetDependency() { + imp := &f.L2.Imports[i] + f, err := r.FindFileByPath(path) + if err == protoregistry.NotFound && (o.AllowUnresolvable || imp.IsWeak) { + f = filedesc.PlaceholderFile(path) + } else if err != nil { + return nil, errors.New("could not resolve import %q: %v", path, err) + } + imp.FileDescriptor = f + + if imps[imp.Path()] { + return nil, errors.New("already imported %q", path) + } + imps[imp.Path()] = true + } + for i := range fd.GetDependency() { + imp := &f.L2.Imports[i] + imps.importPublic(imp.Imports()) + } + + // Handle source locations. + f.L2.Locations.File = f + for _, loc := range fd.GetSourceCodeInfo().GetLocation() { + var l protoreflect.SourceLocation + // TODO: Validate that the path points to an actual declaration? + l.Path = protoreflect.SourcePath(loc.GetPath()) + s := loc.GetSpan() + switch len(s) { + case 3: + l.StartLine, l.StartColumn, l.EndLine, l.EndColumn = int(s[0]), int(s[1]), int(s[0]), int(s[2]) + case 4: + l.StartLine, l.StartColumn, l.EndLine, l.EndColumn = int(s[0]), int(s[1]), int(s[2]), int(s[3]) + default: + return nil, errors.New("invalid span: %v", s) + } + // TODO: Validate that the span information is sensible? + // See https://github.com/protocolbuffers/protobuf/issues/6378. + if false && (l.EndLine < l.StartLine || l.StartLine < 0 || l.StartColumn < 0 || l.EndColumn < 0 || + (l.StartLine == l.EndLine && l.EndColumn <= l.StartColumn)) { + return nil, errors.New("invalid span: %v", s) + } + l.LeadingDetachedComments = loc.GetLeadingDetachedComments() + l.LeadingComments = loc.GetLeadingComments() + l.TrailingComments = loc.GetTrailingComments() + f.L2.Locations.List = append(f.L2.Locations.List, l) + } + + // Step 1: Allocate and derive the names for all declarations. 
+ // This copies all fields from the descriptor proto except: + // google.protobuf.FieldDescriptorProto.type_name + // google.protobuf.FieldDescriptorProto.default_value + // google.protobuf.FieldDescriptorProto.oneof_index + // google.protobuf.FieldDescriptorProto.extendee + // google.protobuf.MethodDescriptorProto.input + // google.protobuf.MethodDescriptorProto.output + var err error + sb := new(strs.Builder) + r1 := make(descsByName) + if f.L1.Enums.List, err = r1.initEnumDeclarations(fd.GetEnumType(), f, sb); err != nil { + return nil, err + } + if f.L1.Messages.List, err = r1.initMessagesDeclarations(fd.GetMessageType(), f, sb); err != nil { + return nil, err + } + if f.L1.Extensions.List, err = r1.initExtensionDeclarations(fd.GetExtension(), f, sb); err != nil { + return nil, err + } + if f.L1.Services.List, err = r1.initServiceDeclarations(fd.GetService(), f, sb); err != nil { + return nil, err + } + + // Step 2: Resolve every dependency reference not handled by step 1. + r2 := &resolver{local: r1, remote: r, imports: imps, allowUnresolvable: o.AllowUnresolvable} + if err := r2.resolveMessageDependencies(f.L1.Messages.List, fd.GetMessageType()); err != nil { + return nil, err + } + if err := r2.resolveExtensionDependencies(f.L1.Extensions.List, fd.GetExtension()); err != nil { + return nil, err + } + if err := r2.resolveServiceDependencies(f.L1.Services.List, fd.GetService()); err != nil { + return nil, err + } + + // Step 3: Validate every enum, message, and extension declaration. + if err := validateEnumDeclarations(f.L1.Enums.List, fd.GetEnumType()); err != nil { + return nil, err + } + if err := validateMessageDeclarations(f.L1.Messages.List, fd.GetMessageType()); err != nil { + return nil, err + } + if err := validateExtensionDeclarations(f.L1.Extensions.List, fd.GetExtension()); err != nil { + return nil, err + } + + return f, nil +} + +type importSet map[string]bool + +func (is importSet) importPublic(imps protoreflect.FileImports) { + for i := 0; i < imps.Len(); i++ { + if imp := imps.Get(i); imp.IsPublic { + is[imp.Path()] = true + is.importPublic(imp.Imports()) + } + } +} + +// NewFiles creates a new protoregistry.Files from the provided +// FileDescriptorSet message. The descriptor set must include only +// valid files according to protobuf semantics. The returned descriptors +// are a deep copy of the input. +func (o FileOptions) NewFiles(fds *descriptorpb.FileDescriptorSet) (*protoregistry.Files, error) { + files := make(map[string]*descriptorpb.FileDescriptorProto) + for _, fd := range fds.File { + if _, ok := files[fd.GetName()]; ok { + return nil, errors.New("file appears multiple times: %q", fd.GetName()) + } + files[fd.GetName()] = fd + } + r := &protoregistry.Files{} + for _, fd := range files { + if err := o.addFileDeps(r, fd, files); err != nil { + return nil, err + } + } + return r, nil +} +func (o FileOptions) addFileDeps(r *protoregistry.Files, fd *descriptorpb.FileDescriptorProto, files map[string]*descriptorpb.FileDescriptorProto) error { + // Set the entry to nil while descending into a file's dependencies to detect cycles. + files[fd.GetName()] = nil + for _, dep := range fd.Dependency { + depfd, ok := files[dep] + if depfd == nil { + if ok { + return errors.New("import cycle in file: %q", dep) + } + continue + } + if err := o.addFileDeps(r, depfd, files); err != nil { + return err + } + } + // Delete the entry once dependencies are processed. 
+ delete(files, fd.GetName()) + f, err := o.New(fd, r) + if err != nil { + return err + } + return r.RegisterFile(f) +} diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go new file mode 100644 index 00000000..37efda1a --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_init.go @@ -0,0 +1,248 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protodesc + +import ( + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + + "google.golang.org/protobuf/types/descriptorpb" +) + +type descsByName map[protoreflect.FullName]protoreflect.Descriptor + +func (r descsByName) initEnumDeclarations(eds []*descriptorpb.EnumDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (es []filedesc.Enum, err error) { + es = make([]filedesc.Enum, len(eds)) // allocate up-front to ensure stable pointers + for i, ed := range eds { + e := &es[i] + e.L2 = new(filedesc.EnumL2) + if e.L0, err = r.makeBase(e, parent, ed.GetName(), i, sb); err != nil { + return nil, err + } + if opts := ed.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.EnumOptions) + e.L2.Options = func() protoreflect.ProtoMessage { return opts } + } + for _, s := range ed.GetReservedName() { + e.L2.ReservedNames.List = append(e.L2.ReservedNames.List, protoreflect.Name(s)) + } + for _, rr := range ed.GetReservedRange() { + e.L2.ReservedRanges.List = append(e.L2.ReservedRanges.List, [2]protoreflect.EnumNumber{ + protoreflect.EnumNumber(rr.GetStart()), + protoreflect.EnumNumber(rr.GetEnd()), + }) + } + if e.L2.Values.List, err = r.initEnumValuesFromDescriptorProto(ed.GetValue(), e, sb); err != nil { + return nil, err + } + } + return es, nil +} + +func (r descsByName) initEnumValuesFromDescriptorProto(vds []*descriptorpb.EnumValueDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (vs []filedesc.EnumValue, err error) { + vs = make([]filedesc.EnumValue, len(vds)) // allocate up-front to ensure stable pointers + for i, vd := range vds { + v := &vs[i] + if v.L0, err = r.makeBase(v, parent, vd.GetName(), i, sb); err != nil { + return nil, err + } + if opts := vd.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.EnumValueOptions) + v.L1.Options = func() protoreflect.ProtoMessage { return opts } + } + v.L1.Number = protoreflect.EnumNumber(vd.GetNumber()) + } + return vs, nil +} + +func (r descsByName) initMessagesDeclarations(mds []*descriptorpb.DescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ms []filedesc.Message, err error) { + ms = make([]filedesc.Message, len(mds)) // allocate up-front to ensure stable pointers + for i, md := range mds { + m := &ms[i] + m.L2 = new(filedesc.MessageL2) + if m.L0, err = r.makeBase(m, parent, md.GetName(), i, sb); err != nil { + return nil, err + } + if opts := md.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.MessageOptions) + m.L2.Options = func() protoreflect.ProtoMessage { return opts } + m.L1.IsMapEntry = opts.GetMapEntry() + m.L1.IsMessageSet = opts.GetMessageSetWireFormat() + } + for _, s := range md.GetReservedName() { + m.L2.ReservedNames.List = append(m.L2.ReservedNames.List, protoreflect.Name(s)) 
+ } + for _, rr := range md.GetReservedRange() { + m.L2.ReservedRanges.List = append(m.L2.ReservedRanges.List, [2]protoreflect.FieldNumber{ + protoreflect.FieldNumber(rr.GetStart()), + protoreflect.FieldNumber(rr.GetEnd()), + }) + } + for _, xr := range md.GetExtensionRange() { + m.L2.ExtensionRanges.List = append(m.L2.ExtensionRanges.List, [2]protoreflect.FieldNumber{ + protoreflect.FieldNumber(xr.GetStart()), + protoreflect.FieldNumber(xr.GetEnd()), + }) + var optsFunc func() protoreflect.ProtoMessage + if opts := xr.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.ExtensionRangeOptions) + optsFunc = func() protoreflect.ProtoMessage { return opts } + } + m.L2.ExtensionRangeOptions = append(m.L2.ExtensionRangeOptions, optsFunc) + } + if m.L2.Fields.List, err = r.initFieldsFromDescriptorProto(md.GetField(), m, sb); err != nil { + return nil, err + } + if m.L2.Oneofs.List, err = r.initOneofsFromDescriptorProto(md.GetOneofDecl(), m, sb); err != nil { + return nil, err + } + if m.L1.Enums.List, err = r.initEnumDeclarations(md.GetEnumType(), m, sb); err != nil { + return nil, err + } + if m.L1.Messages.List, err = r.initMessagesDeclarations(md.GetNestedType(), m, sb); err != nil { + return nil, err + } + if m.L1.Extensions.List, err = r.initExtensionDeclarations(md.GetExtension(), m, sb); err != nil { + return nil, err + } + } + return ms, nil +} + +func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (fs []filedesc.Field, err error) { + fs = make([]filedesc.Field, len(fds)) // allocate up-front to ensure stable pointers + for i, fd := range fds { + f := &fs[i] + if f.L0, err = r.makeBase(f, parent, fd.GetName(), i, sb); err != nil { + return nil, err + } + f.L1.IsProto3Optional = fd.GetProto3Optional() + if opts := fd.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.FieldOptions) + f.L1.Options = func() protoreflect.ProtoMessage { return opts } + f.L1.IsWeak = opts.GetWeak() + f.L1.HasPacked = opts.Packed != nil + f.L1.IsPacked = opts.GetPacked() + } + f.L1.Number = protoreflect.FieldNumber(fd.GetNumber()) + f.L1.Cardinality = protoreflect.Cardinality(fd.GetLabel()) + if fd.Type != nil { + f.L1.Kind = protoreflect.Kind(fd.GetType()) + } + if fd.JsonName != nil { + f.L1.StringName.InitJSON(fd.GetJsonName()) + } + } + return fs, nil +} + +func (r descsByName) initOneofsFromDescriptorProto(ods []*descriptorpb.OneofDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (os []filedesc.Oneof, err error) { + os = make([]filedesc.Oneof, len(ods)) // allocate up-front to ensure stable pointers + for i, od := range ods { + o := &os[i] + if o.L0, err = r.makeBase(o, parent, od.GetName(), i, sb); err != nil { + return nil, err + } + if opts := od.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.OneofOptions) + o.L1.Options = func() protoreflect.ProtoMessage { return opts } + } + } + return os, nil +} + +func (r descsByName) initExtensionDeclarations(xds []*descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (xs []filedesc.Extension, err error) { + xs = make([]filedesc.Extension, len(xds)) // allocate up-front to ensure stable pointers + for i, xd := range xds { + x := &xs[i] + x.L2 = new(filedesc.ExtensionL2) + if x.L0, err = r.makeBase(x, parent, xd.GetName(), i, sb); err != nil { + return nil, err + } + if opts := xd.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.FieldOptions) + 
x.L2.Options = func() protoreflect.ProtoMessage { return opts } + x.L2.IsPacked = opts.GetPacked() + } + x.L1.Number = protoreflect.FieldNumber(xd.GetNumber()) + x.L1.Cardinality = protoreflect.Cardinality(xd.GetLabel()) + if xd.Type != nil { + x.L1.Kind = protoreflect.Kind(xd.GetType()) + } + if xd.JsonName != nil { + x.L2.StringName.InitJSON(xd.GetJsonName()) + } + } + return xs, nil +} + +func (r descsByName) initServiceDeclarations(sds []*descriptorpb.ServiceDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ss []filedesc.Service, err error) { + ss = make([]filedesc.Service, len(sds)) // allocate up-front to ensure stable pointers + for i, sd := range sds { + s := &ss[i] + s.L2 = new(filedesc.ServiceL2) + if s.L0, err = r.makeBase(s, parent, sd.GetName(), i, sb); err != nil { + return nil, err + } + if opts := sd.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.ServiceOptions) + s.L2.Options = func() protoreflect.ProtoMessage { return opts } + } + if s.L2.Methods.List, err = r.initMethodsFromDescriptorProto(sd.GetMethod(), s, sb); err != nil { + return nil, err + } + } + return ss, nil +} + +func (r descsByName) initMethodsFromDescriptorProto(mds []*descriptorpb.MethodDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (ms []filedesc.Method, err error) { + ms = make([]filedesc.Method, len(mds)) // allocate up-front to ensure stable pointers + for i, md := range mds { + m := &ms[i] + if m.L0, err = r.makeBase(m, parent, md.GetName(), i, sb); err != nil { + return nil, err + } + if opts := md.GetOptions(); opts != nil { + opts = proto.Clone(opts).(*descriptorpb.MethodOptions) + m.L1.Options = func() protoreflect.ProtoMessage { return opts } + } + m.L1.IsStreamingClient = md.GetClientStreaming() + m.L1.IsStreamingServer = md.GetServerStreaming() + } + return ms, nil +} + +func (r descsByName) makeBase(child, parent protoreflect.Descriptor, name string, idx int, sb *strs.Builder) (filedesc.BaseL0, error) { + if !protoreflect.Name(name).IsValid() { + return filedesc.BaseL0{}, errors.New("descriptor %q has an invalid nested name: %q", parent.FullName(), name) + } + + // Derive the full name of the child. + // Note that enum values are a sibling to the enum parent in the namespace. + var fullName protoreflect.FullName + if _, ok := parent.(protoreflect.EnumDescriptor); ok { + fullName = sb.AppendFullName(parent.FullName().Parent(), protoreflect.Name(name)) + } else { + fullName = sb.AppendFullName(parent.FullName(), protoreflect.Name(name)) + } + if _, ok := r[fullName]; ok { + return filedesc.BaseL0{}, errors.New("descriptor %q already declared", fullName) + } + r[fullName] = child + + // TODO: Verify that the full name does not already exist in the resolver? + // This is not as critical since most usages of NewFile will register + // the created file back into the registry, which will perform this check. + + return filedesc.BaseL0{ + FullName: fullName, + ParentFile: parent.ParentFile().(*filedesc.File), + Parent: parent, + Index: idx, + }, nil +} diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_resolve.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_resolve.go new file mode 100644 index 00000000..cebb36cd --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_resolve.go @@ -0,0 +1,286 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
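Editor's note: desc_init.go above implements step 1 (allocating declarations and deriving names); the resolve and validate steps follow in desc_resolve.go and desc_validate.go below. A minimal end-to-end sketch of the pipeline via protodesc.NewFile, assuming a self-contained proto3 file with no imports (names are arbitrary, and a nil resolver suffices because nothing has to be resolved remotely); this snippet is illustrative, not part of the vendored diff.

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fdp := &descriptorpb.FileDescriptorProto{
		Name:    proto.String("example.proto"), // arbitrary path
		Package: proto.String("example"),
		Syntax:  proto.String("proto3"),
		MessageType: []*descriptorpb.DescriptorProto{{
			Name: proto.String("Greeting"),
			Field: []*descriptorpb.FieldDescriptorProto{{
				Name:   proto.String("text"),
				Number: proto.Int32(1),
				Label:  descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
				Type:   descriptorpb.FieldDescriptorProto_TYPE_STRING.Enum(),
			}},
		}},
	}

	fd, err := protodesc.NewFile(fdp, nil) // nil resolver: no dependencies
	if err != nil {
		panic(err)
	}
	fmt.Println(fd.Messages().Get(0).Fields().Get(0).FullName()) // example.Greeting.text
}
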
+ +package protodesc + +import ( + "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/reflect/protoregistry" + + "google.golang.org/protobuf/types/descriptorpb" +) + +// resolver is a wrapper around a local registry of declarations within the file +// and the remote resolver. The remote resolver is restricted to only return +// descriptors that have been imported. +type resolver struct { + local descsByName + remote Resolver + imports importSet + + allowUnresolvable bool +} + +func (r *resolver) resolveMessageDependencies(ms []filedesc.Message, mds []*descriptorpb.DescriptorProto) (err error) { + for i, md := range mds { + m := &ms[i] + for j, fd := range md.GetField() { + f := &m.L2.Fields.List[j] + if f.L1.Cardinality == protoreflect.Required { + m.L2.RequiredNumbers.List = append(m.L2.RequiredNumbers.List, f.L1.Number) + } + if fd.OneofIndex != nil { + k := int(fd.GetOneofIndex()) + if !(0 <= k && k < len(md.GetOneofDecl())) { + return errors.New("message field %q has an invalid oneof index: %d", f.FullName(), k) + } + o := &m.L2.Oneofs.List[k] + f.L1.ContainingOneof = o + o.L1.Fields.List = append(o.L1.Fields.List, f) + } + + if f.L1.Kind, f.L1.Enum, f.L1.Message, err = r.findTarget(f.Kind(), f.Parent().FullName(), partialName(fd.GetTypeName()), f.IsWeak()); err != nil { + return errors.New("message field %q cannot resolve type: %v", f.FullName(), err) + } + if fd.DefaultValue != nil { + v, ev, err := unmarshalDefault(fd.GetDefaultValue(), f, r.allowUnresolvable) + if err != nil { + return errors.New("message field %q has invalid default: %v", f.FullName(), err) + } + f.L1.Default = filedesc.DefaultValue(v, ev) + } + } + + if err := r.resolveMessageDependencies(m.L1.Messages.List, md.GetNestedType()); err != nil { + return err + } + if err := r.resolveExtensionDependencies(m.L1.Extensions.List, md.GetExtension()); err != nil { + return err + } + } + return nil +} + +func (r *resolver) resolveExtensionDependencies(xs []filedesc.Extension, xds []*descriptorpb.FieldDescriptorProto) (err error) { + for i, xd := range xds { + x := &xs[i] + if x.L1.Extendee, err = r.findMessageDescriptor(x.Parent().FullName(), partialName(xd.GetExtendee()), false); err != nil { + return errors.New("extension field %q cannot resolve extendee: %v", x.FullName(), err) + } + if x.L1.Kind, x.L2.Enum, x.L2.Message, err = r.findTarget(x.Kind(), x.Parent().FullName(), partialName(xd.GetTypeName()), false); err != nil { + return errors.New("extension field %q cannot resolve type: %v", x.FullName(), err) + } + if xd.DefaultValue != nil { + v, ev, err := unmarshalDefault(xd.GetDefaultValue(), x, r.allowUnresolvable) + if err != nil { + return errors.New("extension field %q has invalid default: %v", x.FullName(), err) + } + x.L2.Default = filedesc.DefaultValue(v, ev) + } + } + return nil +} + +func (r *resolver) resolveServiceDependencies(ss []filedesc.Service, sds []*descriptorpb.ServiceDescriptorProto) (err error) { + for i, sd := range sds { + s := &ss[i] + for j, md := range sd.GetMethod() { + m := &s.L2.Methods.List[j] + m.L1.Input, err = r.findMessageDescriptor(m.Parent().FullName(), partialName(md.GetInputType()), false) + if err != nil { + return errors.New("service method %q cannot resolve input: %v", m.FullName(), err) + } + m.L1.Output, err = r.findMessageDescriptor(s.FullName(), partialName(md.GetOutputType()), false) + if err != 
nil { + return errors.New("service method %q cannot resolve output: %v", m.FullName(), err) + } + } + } + return nil +} + +// findTarget finds an enum or message descriptor if k is an enum, message, +// group, or unknown. If unknown, and the name could be resolved, the kind +// returned kind is set based on the type of the resolved descriptor. +func (r *resolver) findTarget(k protoreflect.Kind, scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.Kind, protoreflect.EnumDescriptor, protoreflect.MessageDescriptor, error) { + switch k { + case protoreflect.EnumKind: + ed, err := r.findEnumDescriptor(scope, ref, isWeak) + if err != nil { + return 0, nil, nil, err + } + return k, ed, nil, nil + case protoreflect.MessageKind, protoreflect.GroupKind: + md, err := r.findMessageDescriptor(scope, ref, isWeak) + if err != nil { + return 0, nil, nil, err + } + return k, nil, md, nil + case 0: + // Handle unspecified kinds (possible with parsers that operate + // on a per-file basis without knowledge of dependencies). + d, err := r.findDescriptor(scope, ref) + if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) { + return k, filedesc.PlaceholderEnum(ref.FullName()), filedesc.PlaceholderMessage(ref.FullName()), nil + } else if err == protoregistry.NotFound { + return 0, nil, nil, errors.New("%q not found", ref.FullName()) + } else if err != nil { + return 0, nil, nil, err + } + switch d := d.(type) { + case protoreflect.EnumDescriptor: + return protoreflect.EnumKind, d, nil, nil + case protoreflect.MessageDescriptor: + return protoreflect.MessageKind, nil, d, nil + default: + return 0, nil, nil, errors.New("unknown kind") + } + default: + if ref != "" { + return 0, nil, nil, errors.New("target name cannot be specified for %v", k) + } + if !k.IsValid() { + return 0, nil, nil, errors.New("invalid kind: %d", k) + } + return k, nil, nil, nil + } +} + +// findDescriptor finds the descriptor by name, +// which may be a relative name within some scope. +// +// Suppose the scope was "fizz.buzz" and the reference was "Foo.Bar", +// then the following full names are searched: +// * fizz.buzz.Foo.Bar +// * fizz.Foo.Bar +// * Foo.Bar +func (r *resolver) findDescriptor(scope protoreflect.FullName, ref partialName) (protoreflect.Descriptor, error) { + if !ref.IsValid() { + return nil, errors.New("invalid name reference: %q", ref) + } + if ref.IsFull() { + scope, ref = "", ref[1:] + } + var foundButNotImported protoreflect.Descriptor + for { + // Derive the full name to search. + s := protoreflect.FullName(ref) + if scope != "" { + s = scope + "." + s + } + + // Check the current file for the descriptor. + if d, ok := r.local[s]; ok { + return d, nil + } + + // Check the remote registry for the descriptor. + d, err := r.remote.FindDescriptorByName(s) + if err == nil { + // Only allow descriptors covered by one of the imports. + if r.imports[d.ParentFile().Path()] { + return d, nil + } + foundButNotImported = d + } else if err != protoregistry.NotFound { + return nil, errors.Wrap(err, "%q", s) + } + + // Continue on at a higher level of scoping. 
+ if scope == "" { + if d := foundButNotImported; d != nil { + return nil, errors.New("resolved %q, but %q is not imported", d.FullName(), d.ParentFile().Path()) + } + return nil, protoregistry.NotFound + } + scope = scope.Parent() + } +} + +func (r *resolver) findEnumDescriptor(scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.EnumDescriptor, error) { + d, err := r.findDescriptor(scope, ref) + if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) { + return filedesc.PlaceholderEnum(ref.FullName()), nil + } else if err == protoregistry.NotFound { + return nil, errors.New("%q not found", ref.FullName()) + } else if err != nil { + return nil, err + } + ed, ok := d.(protoreflect.EnumDescriptor) + if !ok { + return nil, errors.New("resolved %q, but it is not an enum", d.FullName()) + } + return ed, nil +} + +func (r *resolver) findMessageDescriptor(scope protoreflect.FullName, ref partialName, isWeak bool) (protoreflect.MessageDescriptor, error) { + d, err := r.findDescriptor(scope, ref) + if err == protoregistry.NotFound && (r.allowUnresolvable || isWeak) { + return filedesc.PlaceholderMessage(ref.FullName()), nil + } else if err == protoregistry.NotFound { + return nil, errors.New("%q not found", ref.FullName()) + } else if err != nil { + return nil, err + } + md, ok := d.(protoreflect.MessageDescriptor) + if !ok { + return nil, errors.New("resolved %q, but it is not an message", d.FullName()) + } + return md, nil +} + +// partialName is the partial name. A leading dot means that the name is full, +// otherwise the name is relative to some current scope. +// See google.protobuf.FieldDescriptorProto.type_name. +type partialName string + +func (s partialName) IsFull() bool { + return len(s) > 0 && s[0] == '.' +} + +func (s partialName) IsValid() bool { + if s.IsFull() { + return protoreflect.FullName(s[1:]).IsValid() + } + return protoreflect.FullName(s).IsValid() +} + +const unknownPrefix = "*." + +// FullName converts the partial name to a full name on a best-effort basis. +// If relative, it creates an invalid full name, using a "*." prefix +// to indicate that the start of the full name is unknown. 
+func (s partialName) FullName() protoreflect.FullName { + if s.IsFull() { + return protoreflect.FullName(s[1:]) + } + return protoreflect.FullName(unknownPrefix + s) +} + +func unmarshalDefault(s string, fd protoreflect.FieldDescriptor, allowUnresolvable bool) (protoreflect.Value, protoreflect.EnumValueDescriptor, error) { + var evs protoreflect.EnumValueDescriptors + if fd.Enum() != nil { + evs = fd.Enum().Values() + } + v, ev, err := defval.Unmarshal(s, fd.Kind(), evs, defval.Descriptor) + if err != nil && allowUnresolvable && evs != nil && protoreflect.Name(s).IsValid() { + v = protoreflect.ValueOfEnum(0) + if evs.Len() > 0 { + v = protoreflect.ValueOfEnum(evs.Get(0).Number()) + } + ev = filedesc.PlaceholderEnumValue(fd.Enum().FullName().Parent().Append(protoreflect.Name(s))) + } else if err != nil { + return v, ev, err + } + if fd.Syntax() == protoreflect.Proto3 { + return v, ev, errors.New("cannot be specified under proto3 semantics") + } + if fd.Kind() == protoreflect.MessageKind || fd.Kind() == protoreflect.GroupKind || fd.Cardinality() == protoreflect.Repeated { + return v, ev, errors.New("cannot be specified on composite types") + } + return v, ev, nil +} diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go new file mode 100644 index 00000000..9af1d564 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc_validate.go @@ -0,0 +1,374 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protodesc + +import ( + "strings" + "unicode" + + "google.golang.org/protobuf/encoding/protowire" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/internal/genid" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/reflect/protoreflect" + + "google.golang.org/protobuf/types/descriptorpb" +) + +func validateEnumDeclarations(es []filedesc.Enum, eds []*descriptorpb.EnumDescriptorProto) error { + for i, ed := range eds { + e := &es[i] + if err := e.L2.ReservedNames.CheckValid(); err != nil { + return errors.New("enum %q reserved names has %v", e.FullName(), err) + } + if err := e.L2.ReservedRanges.CheckValid(); err != nil { + return errors.New("enum %q reserved ranges has %v", e.FullName(), err) + } + if len(ed.GetValue()) == 0 { + return errors.New("enum %q must contain at least one value declaration", e.FullName()) + } + allowAlias := ed.GetOptions().GetAllowAlias() + foundAlias := false + for i := 0; i < e.Values().Len(); i++ { + v1 := e.Values().Get(i) + if v2 := e.Values().ByNumber(v1.Number()); v1 != v2 { + foundAlias = true + if !allowAlias { + return errors.New("enum %q has conflicting non-aliased values on number %d: %q with %q", e.FullName(), v1.Number(), v1.Name(), v2.Name()) + } + } + } + if allowAlias && !foundAlias { + return errors.New("enum %q allows aliases, but none were found", e.FullName()) + } + if e.Syntax() == protoreflect.Proto3 { + if v := e.Values().Get(0); v.Number() != 0 { + return errors.New("enum %q using proto3 semantics must have zero number for the first value", v.FullName()) + } + // Verify that value names in proto3 do not conflict if the + // case-insensitive prefix is removed. 
+ // See protoc v3.8.0: src/google/protobuf/descriptor.cc:4991-5055 + names := map[string]protoreflect.EnumValueDescriptor{} + prefix := strings.Replace(strings.ToLower(string(e.Name())), "_", "", -1) + for i := 0; i < e.Values().Len(); i++ { + v1 := e.Values().Get(i) + s := strs.EnumValueName(strs.TrimEnumPrefix(string(v1.Name()), prefix)) + if v2, ok := names[s]; ok && v1.Number() != v2.Number() { + return errors.New("enum %q using proto3 semantics has conflict: %q with %q", e.FullName(), v1.Name(), v2.Name()) + } + names[s] = v1 + } + } + + for j, vd := range ed.GetValue() { + v := &e.L2.Values.List[j] + if vd.Number == nil { + return errors.New("enum value %q must have a specified number", v.FullName()) + } + if e.L2.ReservedNames.Has(v.Name()) { + return errors.New("enum value %q must not use reserved name", v.FullName()) + } + if e.L2.ReservedRanges.Has(v.Number()) { + return errors.New("enum value %q must not use reserved number %d", v.FullName(), v.Number()) + } + } + } + return nil +} + +func validateMessageDeclarations(ms []filedesc.Message, mds []*descriptorpb.DescriptorProto) error { + for i, md := range mds { + m := &ms[i] + + // Handle the message descriptor itself. + isMessageSet := md.GetOptions().GetMessageSetWireFormat() + if err := m.L2.ReservedNames.CheckValid(); err != nil { + return errors.New("message %q reserved names has %v", m.FullName(), err) + } + if err := m.L2.ReservedRanges.CheckValid(isMessageSet); err != nil { + return errors.New("message %q reserved ranges has %v", m.FullName(), err) + } + if err := m.L2.ExtensionRanges.CheckValid(isMessageSet); err != nil { + return errors.New("message %q extension ranges has %v", m.FullName(), err) + } + if err := (*filedesc.FieldRanges).CheckOverlap(&m.L2.ReservedRanges, &m.L2.ExtensionRanges); err != nil { + return errors.New("message %q reserved and extension ranges has %v", m.FullName(), err) + } + for i := 0; i < m.Fields().Len(); i++ { + f1 := m.Fields().Get(i) + if f2 := m.Fields().ByNumber(f1.Number()); f1 != f2 { + return errors.New("message %q has conflicting fields: %q with %q", m.FullName(), f1.Name(), f2.Name()) + } + } + if isMessageSet && !flags.ProtoLegacy { + return errors.New("message %q is a MessageSet, which is a legacy proto1 feature that is no longer supported", m.FullName()) + } + if isMessageSet && (m.Syntax() != protoreflect.Proto2 || m.Fields().Len() > 0 || m.ExtensionRanges().Len() == 0) { + return errors.New("message %q is an invalid proto1 MessageSet", m.FullName()) + } + if m.Syntax() == protoreflect.Proto3 { + if m.ExtensionRanges().Len() > 0 { + return errors.New("message %q using proto3 semantics cannot have extension ranges", m.FullName()) + } + // Verify that field names in proto3 do not conflict if lowercased + // with all underscores removed. 
+ // See protoc v3.8.0: src/google/protobuf/descriptor.cc:5830-5847 + names := map[string]protoreflect.FieldDescriptor{} + for i := 0; i < m.Fields().Len(); i++ { + f1 := m.Fields().Get(i) + s := strings.Replace(strings.ToLower(string(f1.Name())), "_", "", -1) + if f2, ok := names[s]; ok { + return errors.New("message %q using proto3 semantics has conflict: %q with %q", m.FullName(), f1.Name(), f2.Name()) + } + names[s] = f1 + } + } + + for j, fd := range md.GetField() { + f := &m.L2.Fields.List[j] + if m.L2.ReservedNames.Has(f.Name()) { + return errors.New("message field %q must not use reserved name", f.FullName()) + } + if !f.Number().IsValid() { + return errors.New("message field %q has an invalid number: %d", f.FullName(), f.Number()) + } + if !f.Cardinality().IsValid() { + return errors.New("message field %q has an invalid cardinality: %d", f.FullName(), f.Cardinality()) + } + if m.L2.ReservedRanges.Has(f.Number()) { + return errors.New("message field %q must not use reserved number %d", f.FullName(), f.Number()) + } + if m.L2.ExtensionRanges.Has(f.Number()) { + return errors.New("message field %q with number %d in extension range", f.FullName(), f.Number()) + } + if fd.Extendee != nil { + return errors.New("message field %q may not have extendee: %q", f.FullName(), fd.GetExtendee()) + } + if f.L1.IsProto3Optional { + if f.Syntax() != protoreflect.Proto3 { + return errors.New("message field %q under proto3 optional semantics must be specified in the proto3 syntax", f.FullName()) + } + if f.Cardinality() != protoreflect.Optional { + return errors.New("message field %q under proto3 optional semantics must have optional cardinality", f.FullName()) + } + if f.ContainingOneof() != nil && f.ContainingOneof().Fields().Len() != 1 { + return errors.New("message field %q under proto3 optional semantics must be within a single element oneof", f.FullName()) + } + } + if f.IsWeak() && !flags.ProtoLegacy { + return errors.New("message field %q is a weak field, which is a legacy proto1 feature that is no longer supported", f.FullName()) + } + if f.IsWeak() && (f.Syntax() != protoreflect.Proto2 || !isOptionalMessage(f) || f.ContainingOneof() != nil) { + return errors.New("message field %q may only be weak for an optional message", f.FullName()) + } + if f.IsPacked() && !isPackable(f) { + return errors.New("message field %q is not packable", f.FullName()) + } + if err := checkValidGroup(f); err != nil { + return errors.New("message field %q is an invalid group: %v", f.FullName(), err) + } + if err := checkValidMap(f); err != nil { + return errors.New("message field %q is an invalid map: %v", f.FullName(), err) + } + if f.Syntax() == protoreflect.Proto3 { + if f.Cardinality() == protoreflect.Required { + return errors.New("message field %q using proto3 semantics cannot be required", f.FullName()) + } + if f.Enum() != nil && !f.Enum().IsPlaceholder() && f.Enum().Syntax() != protoreflect.Proto3 { + return errors.New("message field %q using proto3 semantics may only depend on a proto3 enum", f.FullName()) + } + } + } + seenSynthetic := false // synthetic oneofs for proto3 optional must come after real oneofs + for j := range md.GetOneofDecl() { + o := &m.L2.Oneofs.List[j] + if o.Fields().Len() == 0 { + return errors.New("message oneof %q must contain at least one field declaration", o.FullName()) + } + if n := o.Fields().Len(); n-1 != (o.Fields().Get(n-1).Index() - o.Fields().Get(0).Index()) { + return errors.New("message oneof %q must have consecutively declared fields", o.FullName()) + } + + if 
o.IsSynthetic() { + seenSynthetic = true + continue + } + if !o.IsSynthetic() && seenSynthetic { + return errors.New("message oneof %q must be declared before synthetic oneofs", o.FullName()) + } + + for i := 0; i < o.Fields().Len(); i++ { + f := o.Fields().Get(i) + if f.Cardinality() != protoreflect.Optional { + return errors.New("message field %q belongs in a oneof and must be optional", f.FullName()) + } + if f.IsWeak() { + return errors.New("message field %q belongs in a oneof and must not be a weak reference", f.FullName()) + } + } + } + + if err := validateEnumDeclarations(m.L1.Enums.List, md.GetEnumType()); err != nil { + return err + } + if err := validateMessageDeclarations(m.L1.Messages.List, md.GetNestedType()); err != nil { + return err + } + if err := validateExtensionDeclarations(m.L1.Extensions.List, md.GetExtension()); err != nil { + return err + } + } + return nil +} + +func validateExtensionDeclarations(xs []filedesc.Extension, xds []*descriptorpb.FieldDescriptorProto) error { + for i, xd := range xds { + x := &xs[i] + // NOTE: Avoid using the IsValid method since extensions to MessageSet + // may have a field number higher than normal. This check only verifies + // that the number is not negative or reserved. We check again later + // if we know that the extendee is definitely not a MessageSet. + if n := x.Number(); n < 0 || (protowire.FirstReservedNumber <= n && n <= protowire.LastReservedNumber) { + return errors.New("extension field %q has an invalid number: %d", x.FullName(), x.Number()) + } + if !x.Cardinality().IsValid() || x.Cardinality() == protoreflect.Required { + return errors.New("extension field %q has an invalid cardinality: %d", x.FullName(), x.Cardinality()) + } + if xd.JsonName != nil { + // A bug in older versions of protoc would always populate the + // "json_name" option for extensions when it is meaningless. + // When it did so, it would always use the camel-cased field name. 
+ if xd.GetJsonName() != strs.JSONCamelCase(string(x.Name())) { + return errors.New("extension field %q may not have an explicitly set JSON name: %q", x.FullName(), xd.GetJsonName()) + } + } + if xd.OneofIndex != nil { + return errors.New("extension field %q may not be part of a oneof", x.FullName()) + } + if md := x.ContainingMessage(); !md.IsPlaceholder() { + if !md.ExtensionRanges().Has(x.Number()) { + return errors.New("extension field %q extends %q with non-extension field number: %d", x.FullName(), md.FullName(), x.Number()) + } + isMessageSet := md.Options().(*descriptorpb.MessageOptions).GetMessageSetWireFormat() + if isMessageSet && !isOptionalMessage(x) { + return errors.New("extension field %q extends MessageSet and must be an optional message", x.FullName()) + } + if !isMessageSet && !x.Number().IsValid() { + return errors.New("extension field %q has an invalid number: %d", x.FullName(), x.Number()) + } + } + if xd.GetOptions().GetWeak() { + return errors.New("extension field %q cannot be a weak reference", x.FullName()) + } + if x.IsPacked() && !isPackable(x) { + return errors.New("extension field %q is not packable", x.FullName()) + } + if err := checkValidGroup(x); err != nil { + return errors.New("extension field %q is an invalid group: %v", x.FullName(), err) + } + if md := x.Message(); md != nil && md.IsMapEntry() { + return errors.New("extension field %q cannot be a map entry", x.FullName()) + } + if x.Syntax() == protoreflect.Proto3 { + switch x.ContainingMessage().FullName() { + case (*descriptorpb.FileOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.EnumOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.EnumValueOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.MessageOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.FieldOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.OneofOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.ExtensionRangeOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.ServiceOptions)(nil).ProtoReflect().Descriptor().FullName(): + case (*descriptorpb.MethodOptions)(nil).ProtoReflect().Descriptor().FullName(): + default: + return errors.New("extension field %q cannot be declared in proto3 unless extended descriptor options", x.FullName()) + } + } + } + return nil +} + +// isOptionalMessage reports whether this is an optional message. +// If the kind is unknown, it is assumed to be a message. +func isOptionalMessage(fd protoreflect.FieldDescriptor) bool { + return (fd.Kind() == 0 || fd.Kind() == protoreflect.MessageKind) && fd.Cardinality() == protoreflect.Optional +} + +// isPackable checks whether the pack option can be specified. +func isPackable(fd protoreflect.FieldDescriptor) bool { + switch fd.Kind() { + case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind: + return false + } + return fd.IsList() +} + +// checkValidGroup reports whether fd is a valid group according to the same +// rules that protoc imposes. 
+func checkValidGroup(fd protoreflect.FieldDescriptor) error { + md := fd.Message() + switch { + case fd.Kind() != protoreflect.GroupKind: + return nil + case fd.Syntax() != protoreflect.Proto2: + return errors.New("invalid under proto2 semantics") + case md == nil || md.IsPlaceholder(): + return errors.New("message must be resolvable") + case fd.FullName().Parent() != md.FullName().Parent(): + return errors.New("message and field must be declared in the same scope") + case !unicode.IsUpper(rune(md.Name()[0])): + return errors.New("message name must start with an uppercase") + case fd.Name() != protoreflect.Name(strings.ToLower(string(md.Name()))): + return errors.New("field name must be lowercased form of the message name") + } + return nil +} + +// checkValidMap checks whether the field is a valid map according to the same +// rules that protoc imposes. +// See protoc v3.8.0: src/google/protobuf/descriptor.cc:6045-6115 +func checkValidMap(fd protoreflect.FieldDescriptor) error { + md := fd.Message() + switch { + case md == nil || !md.IsMapEntry(): + return nil + case fd.FullName().Parent() != md.FullName().Parent(): + return errors.New("message and field must be declared in the same scope") + case md.Name() != protoreflect.Name(strs.MapEntryName(string(fd.Name()))): + return errors.New("incorrect implicit map entry name") + case fd.Cardinality() != protoreflect.Repeated: + return errors.New("field must be repeated") + case md.Fields().Len() != 2: + return errors.New("message must have exactly two fields") + case md.ExtensionRanges().Len() > 0: + return errors.New("message must not have any extension ranges") + case md.Enums().Len()+md.Messages().Len()+md.Extensions().Len() > 0: + return errors.New("message must not have any nested declarations") + } + kf := md.Fields().Get(0) + vf := md.Fields().Get(1) + switch { + case kf.Name() != genid.MapEntry_Key_field_name || kf.Number() != genid.MapEntry_Key_field_number || kf.Cardinality() != protoreflect.Optional || kf.ContainingOneof() != nil || kf.HasDefault(): + return errors.New("invalid key field") + case vf.Name() != genid.MapEntry_Value_field_name || vf.Number() != genid.MapEntry_Value_field_number || vf.Cardinality() != protoreflect.Optional || vf.ContainingOneof() != nil || vf.HasDefault(): + return errors.New("invalid value field") + } + switch kf.Kind() { + case protoreflect.BoolKind: // bool + case protoreflect.Int32Kind, protoreflect.Sint32Kind, protoreflect.Sfixed32Kind: // int32 + case protoreflect.Int64Kind, protoreflect.Sint64Kind, protoreflect.Sfixed64Kind: // int64 + case protoreflect.Uint32Kind, protoreflect.Fixed32Kind: // uint32 + case protoreflect.Uint64Kind, protoreflect.Fixed64Kind: // uint64 + case protoreflect.StringKind: // string + default: + return errors.New("invalid key kind: %v", kf.Kind()) + } + if e := vf.Enum(); e != nil && e.Values().Len() > 0 && e.Values().Get(0).Number() != 0 { + return errors.New("map enum value must have zero number for the first value") + } + return nil +} diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go b/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go new file mode 100644 index 00000000..a7c5ceff --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/proto.go @@ -0,0 +1,252 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
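Editor's note: desc_validate.go above enforces protoc's structural rules; for example, validateEnumDeclarations rejects a proto3 enum whose first value is nonzero. A quick illustrative check (names are arbitrary; the snippet is not part of the vendored diff):

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fdp := &descriptorpb.FileDescriptorProto{
		Name:   proto.String("bad.proto"), // arbitrary path
		Syntax: proto.String("proto3"),
		EnumType: []*descriptorpb.EnumDescriptorProto{{
			Name: proto.String("Color"),
			Value: []*descriptorpb.EnumValueDescriptorProto{{
				Name:   proto.String("COLOR_RED"),
				Number: proto.Int32(1), // proto3 requires the first value to be 0
			}},
		}},
	}

	_, err := protodesc.NewFile(fdp, nil)
	fmt.Println(err != nil) // true: the proto3 zero-value rule is enforced
}
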
+ +package protodesc + +import ( + "fmt" + "strings" + + "google.golang.org/protobuf/internal/encoding/defval" + "google.golang.org/protobuf/internal/strs" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + + "google.golang.org/protobuf/types/descriptorpb" +) + +// ToFileDescriptorProto copies a protoreflect.FileDescriptor into a +// google.protobuf.FileDescriptorProto message. +func ToFileDescriptorProto(file protoreflect.FileDescriptor) *descriptorpb.FileDescriptorProto { + p := &descriptorpb.FileDescriptorProto{ + Name: proto.String(file.Path()), + Options: proto.Clone(file.Options()).(*descriptorpb.FileOptions), + } + if file.Package() != "" { + p.Package = proto.String(string(file.Package())) + } + for i, imports := 0, file.Imports(); i < imports.Len(); i++ { + imp := imports.Get(i) + p.Dependency = append(p.Dependency, imp.Path()) + if imp.IsPublic { + p.PublicDependency = append(p.PublicDependency, int32(i)) + } + if imp.IsWeak { + p.WeakDependency = append(p.WeakDependency, int32(i)) + } + } + for i, locs := 0, file.SourceLocations(); i < locs.Len(); i++ { + loc := locs.Get(i) + l := &descriptorpb.SourceCodeInfo_Location{} + l.Path = append(l.Path, loc.Path...) + if loc.StartLine == loc.EndLine { + l.Span = []int32{int32(loc.StartLine), int32(loc.StartColumn), int32(loc.EndColumn)} + } else { + l.Span = []int32{int32(loc.StartLine), int32(loc.StartColumn), int32(loc.EndLine), int32(loc.EndColumn)} + } + l.LeadingDetachedComments = append([]string(nil), loc.LeadingDetachedComments...) + if loc.LeadingComments != "" { + l.LeadingComments = proto.String(loc.LeadingComments) + } + if loc.TrailingComments != "" { + l.TrailingComments = proto.String(loc.TrailingComments) + } + if p.SourceCodeInfo == nil { + p.SourceCodeInfo = &descriptorpb.SourceCodeInfo{} + } + p.SourceCodeInfo.Location = append(p.SourceCodeInfo.Location, l) + + } + for i, messages := 0, file.Messages(); i < messages.Len(); i++ { + p.MessageType = append(p.MessageType, ToDescriptorProto(messages.Get(i))) + } + for i, enums := 0, file.Enums(); i < enums.Len(); i++ { + p.EnumType = append(p.EnumType, ToEnumDescriptorProto(enums.Get(i))) + } + for i, services := 0, file.Services(); i < services.Len(); i++ { + p.Service = append(p.Service, ToServiceDescriptorProto(services.Get(i))) + } + for i, exts := 0, file.Extensions(); i < exts.Len(); i++ { + p.Extension = append(p.Extension, ToFieldDescriptorProto(exts.Get(i))) + } + if syntax := file.Syntax(); syntax != protoreflect.Proto2 { + p.Syntax = proto.String(file.Syntax().String()) + } + return p +} + +// ToDescriptorProto copies a protoreflect.MessageDescriptor into a +// google.protobuf.DescriptorProto message. 
+func ToDescriptorProto(message protoreflect.MessageDescriptor) *descriptorpb.DescriptorProto { + p := &descriptorpb.DescriptorProto{ + Name: proto.String(string(message.Name())), + Options: proto.Clone(message.Options()).(*descriptorpb.MessageOptions), + } + for i, fields := 0, message.Fields(); i < fields.Len(); i++ { + p.Field = append(p.Field, ToFieldDescriptorProto(fields.Get(i))) + } + for i, exts := 0, message.Extensions(); i < exts.Len(); i++ { + p.Extension = append(p.Extension, ToFieldDescriptorProto(exts.Get(i))) + } + for i, messages := 0, message.Messages(); i < messages.Len(); i++ { + p.NestedType = append(p.NestedType, ToDescriptorProto(messages.Get(i))) + } + for i, enums := 0, message.Enums(); i < enums.Len(); i++ { + p.EnumType = append(p.EnumType, ToEnumDescriptorProto(enums.Get(i))) + } + for i, xranges := 0, message.ExtensionRanges(); i < xranges.Len(); i++ { + xrange := xranges.Get(i) + p.ExtensionRange = append(p.ExtensionRange, &descriptorpb.DescriptorProto_ExtensionRange{ + Start: proto.Int32(int32(xrange[0])), + End: proto.Int32(int32(xrange[1])), + Options: proto.Clone(message.ExtensionRangeOptions(i)).(*descriptorpb.ExtensionRangeOptions), + }) + } + for i, oneofs := 0, message.Oneofs(); i < oneofs.Len(); i++ { + p.OneofDecl = append(p.OneofDecl, ToOneofDescriptorProto(oneofs.Get(i))) + } + for i, ranges := 0, message.ReservedRanges(); i < ranges.Len(); i++ { + rrange := ranges.Get(i) + p.ReservedRange = append(p.ReservedRange, &descriptorpb.DescriptorProto_ReservedRange{ + Start: proto.Int32(int32(rrange[0])), + End: proto.Int32(int32(rrange[1])), + }) + } + for i, names := 0, message.ReservedNames(); i < names.Len(); i++ { + p.ReservedName = append(p.ReservedName, string(names.Get(i))) + } + return p +} + +// ToFieldDescriptorProto copies a protoreflect.FieldDescriptor into a +// google.protobuf.FieldDescriptorProto message. +func ToFieldDescriptorProto(field protoreflect.FieldDescriptor) *descriptorpb.FieldDescriptorProto { + p := &descriptorpb.FieldDescriptorProto{ + Name: proto.String(string(field.Name())), + Number: proto.Int32(int32(field.Number())), + Label: descriptorpb.FieldDescriptorProto_Label(field.Cardinality()).Enum(), + Options: proto.Clone(field.Options()).(*descriptorpb.FieldOptions), + } + if field.IsExtension() { + p.Extendee = fullNameOf(field.ContainingMessage()) + } + if field.Kind().IsValid() { + p.Type = descriptorpb.FieldDescriptorProto_Type(field.Kind()).Enum() + } + if field.Enum() != nil { + p.TypeName = fullNameOf(field.Enum()) + } + if field.Message() != nil { + p.TypeName = fullNameOf(field.Message()) + } + if field.HasJSONName() { + // A bug in older versions of protoc would always populate the + // "json_name" option for extensions when it is meaningless. + // When it did so, it would always use the camel-cased field name. 
+ if field.IsExtension() { + p.JsonName = proto.String(strs.JSONCamelCase(string(field.Name()))) + } else { + p.JsonName = proto.String(field.JSONName()) + } + } + if field.Syntax() == protoreflect.Proto3 && field.HasOptionalKeyword() { + p.Proto3Optional = proto.Bool(true) + } + if field.HasDefault() { + def, err := defval.Marshal(field.Default(), field.DefaultEnumValue(), field.Kind(), defval.Descriptor) + if err != nil && field.DefaultEnumValue() != nil { + def = string(field.DefaultEnumValue().Name()) // occurs for unresolved enum values + } else if err != nil { + panic(fmt.Sprintf("%v: %v", field.FullName(), err)) + } + p.DefaultValue = proto.String(def) + } + if oneof := field.ContainingOneof(); oneof != nil { + p.OneofIndex = proto.Int32(int32(oneof.Index())) + } + return p +} + +// ToOneofDescriptorProto copies a protoreflect.OneofDescriptor into a +// google.protobuf.OneofDescriptorProto message. +func ToOneofDescriptorProto(oneof protoreflect.OneofDescriptor) *descriptorpb.OneofDescriptorProto { + return &descriptorpb.OneofDescriptorProto{ + Name: proto.String(string(oneof.Name())), + Options: proto.Clone(oneof.Options()).(*descriptorpb.OneofOptions), + } +} + +// ToEnumDescriptorProto copies a protoreflect.EnumDescriptor into a +// google.protobuf.EnumDescriptorProto message. +func ToEnumDescriptorProto(enum protoreflect.EnumDescriptor) *descriptorpb.EnumDescriptorProto { + p := &descriptorpb.EnumDescriptorProto{ + Name: proto.String(string(enum.Name())), + Options: proto.Clone(enum.Options()).(*descriptorpb.EnumOptions), + } + for i, values := 0, enum.Values(); i < values.Len(); i++ { + p.Value = append(p.Value, ToEnumValueDescriptorProto(values.Get(i))) + } + for i, ranges := 0, enum.ReservedRanges(); i < ranges.Len(); i++ { + rrange := ranges.Get(i) + p.ReservedRange = append(p.ReservedRange, &descriptorpb.EnumDescriptorProto_EnumReservedRange{ + Start: proto.Int32(int32(rrange[0])), + End: proto.Int32(int32(rrange[1])), + }) + } + for i, names := 0, enum.ReservedNames(); i < names.Len(); i++ { + p.ReservedName = append(p.ReservedName, string(names.Get(i))) + } + return p +} + +// ToEnumValueDescriptorProto copies a protoreflect.EnumValueDescriptor into a +// google.protobuf.EnumValueDescriptorProto message. +func ToEnumValueDescriptorProto(value protoreflect.EnumValueDescriptor) *descriptorpb.EnumValueDescriptorProto { + return &descriptorpb.EnumValueDescriptorProto{ + Name: proto.String(string(value.Name())), + Number: proto.Int32(int32(value.Number())), + Options: proto.Clone(value.Options()).(*descriptorpb.EnumValueOptions), + } +} + +// ToServiceDescriptorProto copies a protoreflect.ServiceDescriptor into a +// google.protobuf.ServiceDescriptorProto message. +func ToServiceDescriptorProto(service protoreflect.ServiceDescriptor) *descriptorpb.ServiceDescriptorProto { + p := &descriptorpb.ServiceDescriptorProto{ + Name: proto.String(string(service.Name())), + Options: proto.Clone(service.Options()).(*descriptorpb.ServiceOptions), + } + for i, methods := 0, service.Methods(); i < methods.Len(); i++ { + p.Method = append(p.Method, ToMethodDescriptorProto(methods.Get(i))) + } + return p +} + +// ToMethodDescriptorProto copies a protoreflect.MethodDescriptor into a +// google.protobuf.MethodDescriptorProto message. 
+func ToMethodDescriptorProto(method protoreflect.MethodDescriptor) *descriptorpb.MethodDescriptorProto { + p := &descriptorpb.MethodDescriptorProto{ + Name: proto.String(string(method.Name())), + InputType: fullNameOf(method.Input()), + OutputType: fullNameOf(method.Output()), + Options: proto.Clone(method.Options()).(*descriptorpb.MethodOptions), + } + if method.IsStreamingClient() { + p.ClientStreaming = proto.Bool(true) + } + if method.IsStreamingServer() { + p.ServerStreaming = proto.Bool(true) + } + return p +} + +func fullNameOf(d protoreflect.Descriptor) *string { + if d == nil { + return nil + } + if strings.HasPrefix(string(d.FullName()), unknownPrefix) { + return proto.String(string(d.FullName()[len(unknownPrefix):])) + } + return proto.String("." + string(d.FullName())) +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/methods.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/methods.go new file mode 100644 index 00000000..6be5d16e --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/methods.go @@ -0,0 +1,77 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoreflect + +import ( + "google.golang.org/protobuf/internal/pragma" +) + +// The following types are used by the fast-path Message.ProtoMethods method. +// +// To avoid polluting the public protoreflect API with types used only by +// low-level implementations, the canonical definitions of these types are +// in the runtime/protoiface package. The definitions here and in protoiface +// must be kept in sync. +type ( + methods = struct { + pragma.NoUnkeyedLiterals + Flags supportFlags + Size func(sizeInput) sizeOutput + Marshal func(marshalInput) (marshalOutput, error) + Unmarshal func(unmarshalInput) (unmarshalOutput, error) + Merge func(mergeInput) mergeOutput + CheckInitialized func(checkInitializedInput) (checkInitializedOutput, error) + } + supportFlags = uint64 + sizeInput = struct { + pragma.NoUnkeyedLiterals + Message Message + Flags uint8 + } + sizeOutput = struct { + pragma.NoUnkeyedLiterals + Size int + } + marshalInput = struct { + pragma.NoUnkeyedLiterals + Message Message + Buf []byte + Flags uint8 + } + marshalOutput = struct { + pragma.NoUnkeyedLiterals + Buf []byte + } + unmarshalInput = struct { + pragma.NoUnkeyedLiterals + Message Message + Buf []byte + Flags uint8 + Resolver interface { + FindExtensionByName(field FullName) (ExtensionType, error) + FindExtensionByNumber(message FullName, field FieldNumber) (ExtensionType, error) + } + } + unmarshalOutput = struct { + pragma.NoUnkeyedLiterals + Flags uint8 + } + mergeInput = struct { + pragma.NoUnkeyedLiterals + Source Message + Destination Message + } + mergeOutput = struct { + pragma.NoUnkeyedLiterals + Flags uint8 + } + checkInitializedInput = struct { + pragma.NoUnkeyedLiterals + Message Message + } + checkInitializedOutput = struct { + pragma.NoUnkeyedLiterals + } +) diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go new file mode 100644 index 00000000..dd85915b --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/proto.go @@ -0,0 +1,504 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
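Editorial aside, not part of the vendored diff: a minimal sketch of how the protodesc conversion helpers above are typically used, assuming the durationpb well-known type is available as a stand-in for any generated message.

// Sketch only: round-trip a generated message's file descriptor back into
// descriptorpb form via protodesc.ToFileDescriptorProto.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// Obtain the protoreflect descriptors from a generated message...
	md := (&durationpb.Duration{}).ProtoReflect().Descriptor()
	// ...and convert its parent file into a google.protobuf.FileDescriptorProto.
	fdp := protodesc.ToFileDescriptorProto(md.ParentFile())
	fmt.Println(fdp.GetName())   // google/protobuf/duration.proto
	fmt.Println(fdp.GetSyntax()) // proto3
}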
+ +// Package protoreflect provides interfaces to dynamically manipulate messages. +// +// This package includes type descriptors which describe the structure of types +// defined in proto source files and value interfaces which provide the +// ability to examine and manipulate the contents of messages. +// +// +// Protocol Buffer Descriptors +// +// Protobuf descriptors (e.g., EnumDescriptor or MessageDescriptor) +// are immutable objects that represent protobuf type information. +// They are wrappers around the messages declared in descriptor.proto. +// Protobuf descriptors alone lack any information regarding Go types. +// +// Enums and messages generated by this module implement Enum and ProtoMessage, +// where the Descriptor and ProtoReflect.Descriptor accessors respectively +// return the protobuf descriptor for the values. +// +// The protobuf descriptor interfaces are not meant to be implemented by +// user code since they might need to be extended in the future to support +// additions to the protobuf language. +// The "google.golang.org/protobuf/reflect/protodesc" package converts between +// google.protobuf.DescriptorProto messages and protobuf descriptors. +// +// +// Go Type Descriptors +// +// A type descriptor (e.g., EnumType or MessageType) is a constructor for +// a concrete Go type that represents the associated protobuf descriptor. +// There is commonly a one-to-one relationship between protobuf descriptors and +// Go type descriptors, but it can potentially be a one-to-many relationship. +// +// Enums and messages generated by this module implement Enum and ProtoMessage, +// where the Type and ProtoReflect.Type accessors respectively +// return the protobuf descriptor for the values. +// +// The "google.golang.org/protobuf/types/dynamicpb" package can be used to +// create Go type descriptors from protobuf descriptors. +// +// +// Value Interfaces +// +// The Enum and Message interfaces provide a reflective view over an +// enum or message instance. For enums, it provides the ability to retrieve +// the enum value number for any concrete enum type. For messages, it provides +// the ability to access or manipulate fields of the message. +// +// To convert a proto.Message to a protoreflect.Message, use the +// former's ProtoReflect method. Since the ProtoReflect method is new to the +// v2 message interface, it may not be present on older message implementations. +// The "github.com/golang/protobuf/proto".MessageReflect function can be used +// to obtain a reflective view on older messages. +// +// +// Relationships +// +// The following diagrams demonstrate the relationships between +// various types declared in this package. +// +// +// ┌───────────────────────────────────┐ +// V │ +// ┌────────────── New(n) ─────────────┐ │ +// │ │ │ +// │ ┌──── Descriptor() ──┐ │ ┌── Number() ──┐ │ +// │ │ V V │ V │ +// ╔════════════╗ ╔════════════════╗ ╔════════╗ ╔════════════╗ +// ║ EnumType ║ ║ EnumDescriptor ║ ║ Enum ║ ║ EnumNumber ║ +// ╚════════════╝ ╚════════════════╝ ╚════════╝ ╚════════════╝ +// Λ Λ │ │ +// │ └─── Descriptor() ──┘ │ +// │ │ +// └────────────────── Type() ───────┘ +// +// • An EnumType describes a concrete Go enum type. +// It has an EnumDescriptor and can construct an Enum instance. +// +// • An EnumDescriptor describes an abstract protobuf enum type. +// +// • An Enum is a concrete enum instance. Generated enums implement Enum. 
+//
+//
+//   ┌──────────────── New() ─────────────────┐
+//   │                                        │
+//   │        ┌─── Descriptor() ─────┐        │   ┌── Interface() ───┐
+//   │        │                      V        V   │                  V
+// ╔═════════════╗  ╔═══════════════════╗  ╔═════════╗  ╔══════════════╗
+// ║ MessageType ║  ║ MessageDescriptor ║  ║ Message ║  ║ ProtoMessage ║
+// ╚═════════════╝  ╚═══════════════════╝  ╚═════════╝  ╚══════════════╝
+//        Λ           Λ                      │ │   Λ                  │
+//        │           └──── Descriptor() ────┘ │   └─ ProtoReflect() ─┘
+//        │                                    │
+//        └─────────────────── Type() ─────────┘
+//
+// • A MessageType describes a concrete Go message type.
+//   It has a MessageDescriptor and can construct a Message instance.
+//
+// • A MessageDescriptor describes an abstract protobuf message type.
+//
+// • A Message is a concrete message instance. Generated messages implement
+//   ProtoMessage, which can convert to/from a Message.
+//
+//
+//        ┌── TypeDescriptor() ──┐    ┌───── Descriptor() ─────┐
+//        │                      V    │                        V
+// ╔═══════════════╗  ╔═════════════════════════╗  ╔═════════════════════╗
+// ║ ExtensionType ║  ║ ExtensionTypeDescriptor ║  ║ ExtensionDescriptor ║
+// ╚═══════════════╝  ╚═════════════════════════╝  ╚═════════════════════╝
+//         Λ                      │ │ Λ                      │ Λ
+//         └─────── Type() ───────┘ │ └─── may implement ────┘ │
+//                                  │                          │
+//                                  └────── implements ────────┘
+//
+// • An ExtensionType describes a concrete Go implementation of an extension.
+//   It has an ExtensionTypeDescriptor and can convert to/from
+//   abstract Values and Go values.
+//
+// • An ExtensionTypeDescriptor is an ExtensionDescriptor
+//   which also has an ExtensionType.
+//
+// • An ExtensionDescriptor describes an abstract protobuf extension field and
+//   may not always be an ExtensionTypeDescriptor.
+package protoreflect
+
+import (
+	"fmt"
+	"strings"
+
+	"google.golang.org/protobuf/encoding/protowire"
+	"google.golang.org/protobuf/internal/pragma"
+)
+
+type doNotImplement pragma.DoNotImplement
+
+// ProtoMessage is the top-level interface that all proto messages implement.
+// This is declared in the protoreflect package to avoid a cyclic dependency;
+// use the proto.Message type instead, which aliases this type.
+type ProtoMessage interface{ ProtoReflect() Message }
+
+// Syntax is the language version of the proto file.
+type Syntax syntax
+
+type syntax int8 // keep exact type opaque as the int type may change
+
+const (
+	Proto2 Syntax = 2
+	Proto3 Syntax = 3
+)
+
+// IsValid reports whether the syntax is valid.
+func (s Syntax) IsValid() bool {
+	switch s {
+	case Proto2, Proto3:
+		return true
+	default:
+		return false
+	}
+}
+
+// String returns s as a proto source identifier (e.g., "proto2").
+func (s Syntax) String() string {
+	switch s {
+	case Proto2:
+		return "proto2"
+	case Proto3:
+		return "proto3"
+	default:
+		return fmt.Sprintf("<unknown:%d>", s)
+	}
+}
+
+// GoString returns s as a Go source identifier (e.g., "Proto2").
+func (s Syntax) GoString() string {
+	switch s {
+	case Proto2:
+		return "Proto2"
+	case Proto3:
+		return "Proto3"
+	default:
+		return fmt.Sprintf("Syntax(%d)", s)
+	}
+}
+
+// Cardinality determines whether a field is optional, required, or repeated.
+type Cardinality cardinality
+
+type cardinality int8 // keep exact type opaque as the int type may change
+
+// Constants as defined by the google.protobuf.Cardinality enumeration.
+const (
+	Optional Cardinality = 1 // appears zero or one times
+	Required Cardinality = 2 // appears exactly one time; invalid with Proto3
+	Repeated Cardinality = 3 // appears zero or more times
+)
+
+// IsValid reports whether the cardinality is valid.
+func (c Cardinality) IsValid() bool {
+	switch c {
+	case Optional, Required, Repeated:
+		return true
+	default:
+		return false
+	}
+}
+
+// String returns c as a proto source identifier (e.g., "optional").
+func (c Cardinality) String() string {
+	switch c {
+	case Optional:
+		return "optional"
+	case Required:
+		return "required"
+	case Repeated:
+		return "repeated"
+	default:
+		return fmt.Sprintf("<unknown:%d>", c)
+	}
+}
+
+// GoString returns c as a Go source identifier (e.g., "Optional").
+func (c Cardinality) GoString() string {
+	switch c {
+	case Optional:
+		return "Optional"
+	case Required:
+		return "Required"
+	case Repeated:
+		return "Repeated"
+	default:
+		return fmt.Sprintf("Cardinality(%d)", c)
+	}
+}
+
+// Kind indicates the basic proto kind of a field.
+type Kind kind
+
+type kind int8 // keep exact type opaque as the int type may change
+
+// Constants as defined by the google.protobuf.Field.Kind enumeration.
+const (
+	BoolKind     Kind = 8
+	EnumKind     Kind = 14
+	Int32Kind    Kind = 5
+	Sint32Kind   Kind = 17
+	Uint32Kind   Kind = 13
+	Int64Kind    Kind = 3
+	Sint64Kind   Kind = 18
+	Uint64Kind   Kind = 4
+	Sfixed32Kind Kind = 15
+	Fixed32Kind  Kind = 7
+	FloatKind    Kind = 2
+	Sfixed64Kind Kind = 16
+	Fixed64Kind  Kind = 6
+	DoubleKind   Kind = 1
+	StringKind   Kind = 9
+	BytesKind    Kind = 12
+	MessageKind  Kind = 11
+	GroupKind    Kind = 10
+)
+
+// IsValid reports whether the kind is valid.
+func (k Kind) IsValid() bool {
+	switch k {
+	case BoolKind, EnumKind,
+		Int32Kind, Sint32Kind, Uint32Kind,
+		Int64Kind, Sint64Kind, Uint64Kind,
+		Sfixed32Kind, Fixed32Kind, FloatKind,
+		Sfixed64Kind, Fixed64Kind, DoubleKind,
+		StringKind, BytesKind, MessageKind, GroupKind:
+		return true
+	default:
+		return false
+	}
+}
+
+// String returns k as a proto source identifier (e.g., "bool").
+func (k Kind) String() string {
+	switch k {
+	case BoolKind:
+		return "bool"
+	case EnumKind:
+		return "enum"
+	case Int32Kind:
+		return "int32"
+	case Sint32Kind:
+		return "sint32"
+	case Uint32Kind:
+		return "uint32"
+	case Int64Kind:
+		return "int64"
+	case Sint64Kind:
+		return "sint64"
+	case Uint64Kind:
+		return "uint64"
+	case Sfixed32Kind:
+		return "sfixed32"
+	case Fixed32Kind:
+		return "fixed32"
+	case FloatKind:
+		return "float"
+	case Sfixed64Kind:
+		return "sfixed64"
+	case Fixed64Kind:
+		return "fixed64"
+	case DoubleKind:
+		return "double"
+	case StringKind:
+		return "string"
+	case BytesKind:
+		return "bytes"
+	case MessageKind:
+		return "message"
+	case GroupKind:
+		return "group"
+	default:
+		return fmt.Sprintf("<unknown:%d>", k)
+	}
+}
+
+// GoString returns k as a Go source identifier (e.g., "BoolKind").
+func (k Kind) GoString() string {
+	switch k {
+	case BoolKind:
+		return "BoolKind"
+	case EnumKind:
+		return "EnumKind"
+	case Int32Kind:
+		return "Int32Kind"
+	case Sint32Kind:
+		return "Sint32Kind"
+	case Uint32Kind:
+		return "Uint32Kind"
+	case Int64Kind:
+		return "Int64Kind"
+	case Sint64Kind:
+		return "Sint64Kind"
+	case Uint64Kind:
+		return "Uint64Kind"
+	case Sfixed32Kind:
+		return "Sfixed32Kind"
+	case Fixed32Kind:
+		return "Fixed32Kind"
+	case FloatKind:
+		return "FloatKind"
+	case Sfixed64Kind:
+		return "Sfixed64Kind"
+	case Fixed64Kind:
+		return "Fixed64Kind"
+	case DoubleKind:
+		return "DoubleKind"
+	case StringKind:
+		return "StringKind"
+	case BytesKind:
+		return "BytesKind"
+	case MessageKind:
+		return "MessageKind"
+	case GroupKind:
+		return "GroupKind"
+	default:
+		return fmt.Sprintf("Kind(%d)", k)
+	}
+}
+
+// FieldNumber is the field number in a message.
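Editorial aside, not part of the vendored diff: a short sketch showing the Kind and Cardinality string forms defined above in action; durationpb is only an arbitrary generated type assumed to be available.

// Sketch only: dump a message's field layout using Kind.String and
// Cardinality.String.
package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	fields := (&durationpb.Duration{}).ProtoReflect().Descriptor().Fields()
	for i := 0; i < fields.Len(); i++ {
		fd := fields.Get(i)
		// Prints e.g. "seconds #1 optional int64" and "nanos #2 optional int32".
		fmt.Printf("%s #%d %s %s\n", fd.Name(), fd.Number(), fd.Cardinality(), fd.Kind())
	}
}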
+type FieldNumber = protowire.Number + +// FieldNumbers represent a list of field numbers. +type FieldNumbers interface { + // Len reports the number of fields in the list. + Len() int + // Get returns the ith field number. It panics if out of bounds. + Get(i int) FieldNumber + // Has reports whether n is within the list of fields. + Has(n FieldNumber) bool + + doNotImplement +} + +// FieldRanges represent a list of field number ranges. +type FieldRanges interface { + // Len reports the number of ranges in the list. + Len() int + // Get returns the ith range. It panics if out of bounds. + Get(i int) [2]FieldNumber // start inclusive; end exclusive + // Has reports whether n is within any of the ranges. + Has(n FieldNumber) bool + + doNotImplement +} + +// EnumNumber is the numeric value for an enum. +type EnumNumber int32 + +// EnumRanges represent a list of enum number ranges. +type EnumRanges interface { + // Len reports the number of ranges in the list. + Len() int + // Get returns the ith range. It panics if out of bounds. + Get(i int) [2]EnumNumber // start inclusive; end inclusive + // Has reports whether n is within any of the ranges. + Has(n EnumNumber) bool + + doNotImplement +} + +// Name is the short name for a proto declaration. This is not the name +// as used in Go source code, which might not be identical to the proto name. +type Name string // e.g., "Kind" + +// IsValid reports whether s is a syntactically valid name. +// An empty name is invalid. +func (s Name) IsValid() bool { + return consumeIdent(string(s)) == len(s) +} + +// Names represent a list of names. +type Names interface { + // Len reports the number of names in the list. + Len() int + // Get returns the ith name. It panics if out of bounds. + Get(i int) Name + // Has reports whether s matches any names in the list. + Has(s Name) bool + + doNotImplement +} + +// FullName is a qualified name that uniquely identifies a proto declaration. +// A qualified name is the concatenation of the proto package along with the +// fully-declared name (i.e., name of parent preceding the name of the child), +// with a '.' delimiter placed between each Name. +// +// This should not have any leading or trailing dots. +type FullName string // e.g., "google.protobuf.Field.Kind" + +// IsValid reports whether s is a syntactically valid full name. +// An empty full name is invalid. +func (s FullName) IsValid() bool { + i := consumeIdent(string(s)) + if i < 0 { + return false + } + for len(s) > i { + if s[i] != '.' { + return false + } + i++ + n := consumeIdent(string(s[i:])) + if n < 0 { + return false + } + i += n + } + return true +} + +func consumeIdent(s string) (i int) { + if len(s) == 0 || !isLetter(s[i]) { + return -1 + } + i++ + for len(s) > i && isLetterDigit(s[i]) { + i++ + } + return i +} +func isLetter(c byte) bool { + return c == '_' || ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') +} +func isLetterDigit(c byte) bool { + return isLetter(c) || ('0' <= c && c <= '9') +} + +// Name returns the short name, which is the last identifier segment. +// A single segment FullName is the Name itself. +func (n FullName) Name() Name { + if i := strings.LastIndexByte(string(n), '.'); i >= 0 { + return Name(n[i+1:]) + } + return Name(n) +} + +// Parent returns the full name with the trailing identifier removed. +// A single segment FullName has no parent. 
+func (n FullName) Parent() FullName { + if i := strings.LastIndexByte(string(n), '.'); i >= 0 { + return n[:i] + } + return "" +} + +// Append returns the qualified name appended with the provided short name. +// +// Invariant: n == n.Parent().Append(n.Name()) // assuming n is valid +func (n FullName) Append(s Name) FullName { + if n == "" { + return FullName(s) + } + return n + "." + FullName(s) +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go new file mode 100644 index 00000000..121ba3a0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/source.go @@ -0,0 +1,128 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoreflect + +import ( + "strconv" +) + +// SourceLocations is a list of source locations. +type SourceLocations interface { + // Len reports the number of source locations in the proto file. + Len() int + // Get returns the ith SourceLocation. It panics if out of bounds. + Get(int) SourceLocation + + // ByPath returns the SourceLocation for the given path, + // returning the first location if multiple exist for the same path. + // If multiple locations exist for the same path, + // then SourceLocation.Next index can be used to identify the + // index of the next SourceLocation. + // If no location exists for this path, it returns the zero value. + ByPath(path SourcePath) SourceLocation + + // ByDescriptor returns the SourceLocation for the given descriptor, + // returning the first location if multiple exist for the same path. + // If no location exists for this descriptor, it returns the zero value. + ByDescriptor(desc Descriptor) SourceLocation + + doNotImplement +} + +// SourceLocation describes a source location and +// corresponds with the google.protobuf.SourceCodeInfo.Location message. +type SourceLocation struct { + // Path is the path to the declaration from the root file descriptor. + // The contents of this slice must not be mutated. + Path SourcePath + + // StartLine and StartColumn are the zero-indexed starting location + // in the source file for the declaration. + StartLine, StartColumn int + // EndLine and EndColumn are the zero-indexed ending location + // in the source file for the declaration. + // In the descriptor.proto, the end line may be omitted if it is identical + // to the start line. Here, it is always populated. + EndLine, EndColumn int + + // LeadingDetachedComments are the leading detached comments + // for the declaration. The contents of this slice must not be mutated. + LeadingDetachedComments []string + // LeadingComments is the leading attached comment for the declaration. + LeadingComments string + // TrailingComments is the trailing attached comment for the declaration. + TrailingComments string + + // Next is an index into SourceLocations for the next source location that + // has the same Path. It is zero if there is no next location. + Next int +} + +// SourcePath identifies part of a file descriptor for a source location. +// The SourcePath is a sequence of either field numbers or indexes into +// a repeated field that form a path starting from the root file descriptor. +// +// See google.protobuf.SourceCodeInfo.Location.path. +type SourcePath []int32 + +// Equal reports whether p1 equals p2. 
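Editorial aside, not part of the vendored diff: a quick sketch of the FullName helpers defined above, including the documented invariant n == n.Parent().Append(n.Name()).

// Sketch only: splitting and rebuilding a qualified protobuf name.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	n := protoreflect.FullName("google.protobuf.Field.Kind")
	fmt.Println(n.IsValid())                      // true
	fmt.Println(n.Name(), n.Parent())             // Kind google.protobuf.Field
	fmt.Println(n == n.Parent().Append(n.Name())) // true
}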
+func (p1 SourcePath) Equal(p2 SourcePath) bool { + if len(p1) != len(p2) { + return false + } + for i := range p1 { + if p1[i] != p2[i] { + return false + } + } + return true +} + +// String formats the path in a humanly readable manner. +// The output is guaranteed to be deterministic, +// making it suitable for use as a key into a Go map. +// It is not guaranteed to be stable as the exact output could change +// in a future version of this module. +// +// Example output: +// .message_type[6].nested_type[15].field[3] +func (p SourcePath) String() string { + b := p.appendFileDescriptorProto(nil) + for _, i := range p { + b = append(b, '.') + b = strconv.AppendInt(b, int64(i), 10) + } + return string(b) +} + +type appendFunc func(*SourcePath, []byte) []byte + +func (p *SourcePath) appendSingularField(b []byte, name string, f appendFunc) []byte { + if len(*p) == 0 { + return b + } + b = append(b, '.') + b = append(b, name...) + *p = (*p)[1:] + if f != nil { + b = f(p, b) + } + return b +} + +func (p *SourcePath) appendRepeatedField(b []byte, name string, f appendFunc) []byte { + b = p.appendSingularField(b, name, nil) + if len(*p) == 0 || (*p)[0] < 0 { + return b + } + b = append(b, '[') + b = strconv.AppendUint(b, uint64((*p)[0]), 10) + b = append(b, ']') + *p = (*p)[1:] + if f != nil { + b = f(p, b) + } + return b +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go new file mode 100644 index 00000000..b03c1223 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/source_gen.go @@ -0,0 +1,461 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by generate-protos. DO NOT EDIT. 
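Editorial aside, not part of the vendored diff: the SourcePath.String example from the comment above, rendered by the generated append functions that follow.

// Sketch only: path {4,6,3,15,2,3} addresses message_type[6].nested_type[15].field[3].
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	p := protoreflect.SourcePath{4, 6, 3, 15, 2, 3}
	fmt.Println(p) // .message_type[6].nested_type[15].field[3]
}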
+ +package protoreflect + +func (p *SourcePath) appendFileDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "package", nil) + case 3: + b = p.appendRepeatedField(b, "dependency", nil) + case 10: + b = p.appendRepeatedField(b, "public_dependency", nil) + case 11: + b = p.appendRepeatedField(b, "weak_dependency", nil) + case 4: + b = p.appendRepeatedField(b, "message_type", (*SourcePath).appendDescriptorProto) + case 5: + b = p.appendRepeatedField(b, "enum_type", (*SourcePath).appendEnumDescriptorProto) + case 6: + b = p.appendRepeatedField(b, "service", (*SourcePath).appendServiceDescriptorProto) + case 7: + b = p.appendRepeatedField(b, "extension", (*SourcePath).appendFieldDescriptorProto) + case 8: + b = p.appendSingularField(b, "options", (*SourcePath).appendFileOptions) + case 9: + b = p.appendSingularField(b, "source_code_info", (*SourcePath).appendSourceCodeInfo) + case 12: + b = p.appendSingularField(b, "syntax", nil) + } + return b +} + +func (p *SourcePath) appendDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "field", (*SourcePath).appendFieldDescriptorProto) + case 6: + b = p.appendRepeatedField(b, "extension", (*SourcePath).appendFieldDescriptorProto) + case 3: + b = p.appendRepeatedField(b, "nested_type", (*SourcePath).appendDescriptorProto) + case 4: + b = p.appendRepeatedField(b, "enum_type", (*SourcePath).appendEnumDescriptorProto) + case 5: + b = p.appendRepeatedField(b, "extension_range", (*SourcePath).appendDescriptorProto_ExtensionRange) + case 8: + b = p.appendRepeatedField(b, "oneof_decl", (*SourcePath).appendOneofDescriptorProto) + case 7: + b = p.appendSingularField(b, "options", (*SourcePath).appendMessageOptions) + case 9: + b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendDescriptorProto_ReservedRange) + case 10: + b = p.appendRepeatedField(b, "reserved_name", nil) + } + return b +} + +func (p *SourcePath) appendEnumDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "value", (*SourcePath).appendEnumValueDescriptorProto) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendEnumOptions) + case 4: + b = p.appendRepeatedField(b, "reserved_range", (*SourcePath).appendEnumDescriptorProto_EnumReservedRange) + case 5: + b = p.appendRepeatedField(b, "reserved_name", nil) + } + return b +} + +func (p *SourcePath) appendServiceDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendRepeatedField(b, "method", (*SourcePath).appendMethodDescriptorProto) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendServiceOptions) + } + return b +} + +func (p *SourcePath) appendFieldDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 3: + b = p.appendSingularField(b, "number", nil) + case 4: + b = p.appendSingularField(b, "label", nil) + case 5: + b = p.appendSingularField(b, "type", nil) + case 6: + b = p.appendSingularField(b, "type_name", nil) + case 2: + b = p.appendSingularField(b, "extendee", nil) + case 7: + b = 
p.appendSingularField(b, "default_value", nil) + case 9: + b = p.appendSingularField(b, "oneof_index", nil) + case 10: + b = p.appendSingularField(b, "json_name", nil) + case 8: + b = p.appendSingularField(b, "options", (*SourcePath).appendFieldOptions) + case 17: + b = p.appendSingularField(b, "proto3_optional", nil) + } + return b +} + +func (p *SourcePath) appendFileOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "java_package", nil) + case 8: + b = p.appendSingularField(b, "java_outer_classname", nil) + case 10: + b = p.appendSingularField(b, "java_multiple_files", nil) + case 20: + b = p.appendSingularField(b, "java_generate_equals_and_hash", nil) + case 27: + b = p.appendSingularField(b, "java_string_check_utf8", nil) + case 9: + b = p.appendSingularField(b, "optimize_for", nil) + case 11: + b = p.appendSingularField(b, "go_package", nil) + case 16: + b = p.appendSingularField(b, "cc_generic_services", nil) + case 17: + b = p.appendSingularField(b, "java_generic_services", nil) + case 18: + b = p.appendSingularField(b, "py_generic_services", nil) + case 42: + b = p.appendSingularField(b, "php_generic_services", nil) + case 23: + b = p.appendSingularField(b, "deprecated", nil) + case 31: + b = p.appendSingularField(b, "cc_enable_arenas", nil) + case 36: + b = p.appendSingularField(b, "objc_class_prefix", nil) + case 37: + b = p.appendSingularField(b, "csharp_namespace", nil) + case 39: + b = p.appendSingularField(b, "swift_prefix", nil) + case 40: + b = p.appendSingularField(b, "php_class_prefix", nil) + case 41: + b = p.appendSingularField(b, "php_namespace", nil) + case 44: + b = p.appendSingularField(b, "php_metadata_namespace", nil) + case 45: + b = p.appendSingularField(b, "ruby_package", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendSourceCodeInfo(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendRepeatedField(b, "location", (*SourcePath).appendSourceCodeInfo_Location) + } + return b +} + +func (p *SourcePath) appendDescriptorProto_ExtensionRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "start", nil) + case 2: + b = p.appendSingularField(b, "end", nil) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendExtensionRangeOptions) + } + return b +} + +func (p *SourcePath) appendOneofDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "options", (*SourcePath).appendOneofOptions) + } + return b +} + +func (p *SourcePath) appendMessageOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "message_set_wire_format", nil) + case 2: + b = p.appendSingularField(b, "no_standard_descriptor_accessor", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 7: + b = p.appendSingularField(b, "map_entry", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendDescriptorProto_ReservedRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "start", nil) + case 2: + b = p.appendSingularField(b, 
"end", nil) + } + return b +} + +func (p *SourcePath) appendEnumValueDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "number", nil) + case 3: + b = p.appendSingularField(b, "options", (*SourcePath).appendEnumValueOptions) + } + return b +} + +func (p *SourcePath) appendEnumOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 2: + b = p.appendSingularField(b, "allow_alias", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendEnumDescriptorProto_EnumReservedRange(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "start", nil) + case 2: + b = p.appendSingularField(b, "end", nil) + } + return b +} + +func (p *SourcePath) appendMethodDescriptorProto(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name", nil) + case 2: + b = p.appendSingularField(b, "input_type", nil) + case 3: + b = p.appendSingularField(b, "output_type", nil) + case 4: + b = p.appendSingularField(b, "options", (*SourcePath).appendMethodOptions) + case 5: + b = p.appendSingularField(b, "client_streaming", nil) + case 6: + b = p.appendSingularField(b, "server_streaming", nil) + } + return b +} + +func (p *SourcePath) appendServiceOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 33: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendFieldOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "ctype", nil) + case 2: + b = p.appendSingularField(b, "packed", nil) + case 6: + b = p.appendSingularField(b, "jstype", nil) + case 5: + b = p.appendSingularField(b, "lazy", nil) + case 3: + b = p.appendSingularField(b, "deprecated", nil) + case 10: + b = p.appendSingularField(b, "weak", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendUninterpretedOption(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 2: + b = p.appendRepeatedField(b, "name", (*SourcePath).appendUninterpretedOption_NamePart) + case 3: + b = p.appendSingularField(b, "identifier_value", nil) + case 4: + b = p.appendSingularField(b, "positive_int_value", nil) + case 5: + b = p.appendSingularField(b, "negative_int_value", nil) + case 6: + b = p.appendSingularField(b, "double_value", nil) + case 7: + b = p.appendSingularField(b, "string_value", nil) + case 8: + b = p.appendSingularField(b, "aggregate_value", nil) + } + return b +} + +func (p *SourcePath) appendSourceCodeInfo_Location(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendRepeatedField(b, "path", nil) + case 2: + b = p.appendRepeatedField(b, "span", nil) + case 3: + b = p.appendSingularField(b, "leading_comments", nil) + case 4: + b = p.appendSingularField(b, "trailing_comments", nil) + case 6: + b = p.appendRepeatedField(b, "leading_detached_comments", nil) + } + return b +} + +func (p *SourcePath) 
appendExtensionRangeOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendOneofOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendEnumValueOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "deprecated", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendMethodOptions(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 33: + b = p.appendSingularField(b, "deprecated", nil) + case 34: + b = p.appendSingularField(b, "idempotency_level", nil) + case 999: + b = p.appendRepeatedField(b, "uninterpreted_option", (*SourcePath).appendUninterpretedOption) + } + return b +} + +func (p *SourcePath) appendUninterpretedOption_NamePart(b []byte) []byte { + if len(*p) == 0 { + return b + } + switch (*p)[0] { + case 1: + b = p.appendSingularField(b, "name_part", nil) + case 2: + b = p.appendSingularField(b, "is_extension", nil) + } + return b +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go new file mode 100644 index 00000000..8e53c44a --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/type.go @@ -0,0 +1,665 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoreflect + +// Descriptor provides a set of accessors that are common to every descriptor. +// Each descriptor type wraps the equivalent google.protobuf.XXXDescriptorProto, +// but provides efficient lookup and immutability. +// +// Each descriptor is comparable. Equality implies that the two types are +// exactly identical. However, it is possible for the same semantically +// identical proto type to be represented by multiple type descriptors. +// +// For example, suppose we have t1 and t2 which are both MessageDescriptors. +// If t1 == t2, then the types are definitely equal and all accessors return +// the same information. However, if t1 != t2, then it is still possible that +// they still represent the same proto type (e.g., t1.FullName == t2.FullName). +// This can occur if a descriptor type is created dynamically, or multiple +// versions of the same proto type are accidentally linked into the Go binary. +type Descriptor interface { + // ParentFile returns the parent file descriptor that this descriptor + // is declared within. The parent file for the file descriptor is itself. + // + // Support for this functionality is optional and may return nil. + ParentFile() FileDescriptor + + // Parent returns the parent containing this descriptor declaration. 
+ // The following shows the mapping from child type to possible parent types: + // + // ╔═════════════════════╤═══════════════════════════════════╗ + // ║ Child type │ Possible parent types ║ + // ╠═════════════════════╪═══════════════════════════════════╣ + // ║ FileDescriptor │ nil ║ + // ║ MessageDescriptor │ FileDescriptor, MessageDescriptor ║ + // ║ FieldDescriptor │ FileDescriptor, MessageDescriptor ║ + // ║ OneofDescriptor │ MessageDescriptor ║ + // ║ EnumDescriptor │ FileDescriptor, MessageDescriptor ║ + // ║ EnumValueDescriptor │ EnumDescriptor ║ + // ║ ServiceDescriptor │ FileDescriptor ║ + // ║ MethodDescriptor │ ServiceDescriptor ║ + // ╚═════════════════════╧═══════════════════════════════════╝ + // + // Support for this functionality is optional and may return nil. + Parent() Descriptor + + // Index returns the index of this descriptor within its parent. + // It returns 0 if the descriptor does not have a parent or if the parent + // is unknown. + Index() int + + // Syntax is the protobuf syntax. + Syntax() Syntax // e.g., Proto2 or Proto3 + + // Name is the short name of the declaration (i.e., FullName.Name). + Name() Name // e.g., "Any" + + // FullName is the fully-qualified name of the declaration. + // + // The FullName is a concatenation of the full name of the type that this + // type is declared within and the declaration name. For example, + // field "foo_field" in message "proto.package.MyMessage" is + // uniquely identified as "proto.package.MyMessage.foo_field". + // Enum values are an exception to the rule (see EnumValueDescriptor). + FullName() FullName // e.g., "google.protobuf.Any" + + // IsPlaceholder reports whether type information is missing since a + // dependency is not resolved, in which case only name information is known. + // + // Placeholder types may only be returned by the following accessors + // as a result of unresolved dependencies or weak imports: + // + // ╔═══════════════════════════════════╤═════════════════════╗ + // ║ Accessor │ Descriptor ║ + // ╠═══════════════════════════════════╪═════════════════════╣ + // ║ FileImports.FileDescriptor │ FileDescriptor ║ + // ║ FieldDescriptor.Enum │ EnumDescriptor ║ + // ║ FieldDescriptor.Message │ MessageDescriptor ║ + // ║ FieldDescriptor.DefaultEnumValue │ EnumValueDescriptor ║ + // ║ FieldDescriptor.ContainingMessage │ MessageDescriptor ║ + // ║ MethodDescriptor.Input │ MessageDescriptor ║ + // ║ MethodDescriptor.Output │ MessageDescriptor ║ + // ╚═══════════════════════════════════╧═════════════════════╝ + // + // If true, only Name and FullName are valid. + // For FileDescriptor, the Path is also valid. + IsPlaceholder() bool + + // Options returns the descriptor options. The caller must not modify + // the returned value. + // + // To avoid a dependency cycle, this function returns a proto.Message value. 
+ // The proto message type returned for each descriptor type is as follows: + // ╔═════════════════════╤══════════════════════════════════════════╗ + // ║ Go type │ Protobuf message type ║ + // ╠═════════════════════╪══════════════════════════════════════════╣ + // ║ FileDescriptor │ google.protobuf.FileOptions ║ + // ║ EnumDescriptor │ google.protobuf.EnumOptions ║ + // ║ EnumValueDescriptor │ google.protobuf.EnumValueOptions ║ + // ║ MessageDescriptor │ google.protobuf.MessageOptions ║ + // ║ FieldDescriptor │ google.protobuf.FieldOptions ║ + // ║ OneofDescriptor │ google.protobuf.OneofOptions ║ + // ║ ServiceDescriptor │ google.protobuf.ServiceOptions ║ + // ║ MethodDescriptor │ google.protobuf.MethodOptions ║ + // ╚═════════════════════╧══════════════════════════════════════════╝ + // + // This method returns a typed nil-pointer if no options are present. + // The caller must import the descriptorpb package to use this. + Options() ProtoMessage + + doNotImplement +} + +// FileDescriptor describes the types in a complete proto file and +// corresponds with the google.protobuf.FileDescriptorProto message. +// +// Top-level declarations: +// EnumDescriptor, MessageDescriptor, FieldDescriptor, and/or ServiceDescriptor. +type FileDescriptor interface { + Descriptor // Descriptor.FullName is identical to Package + + // Path returns the file name, relative to the source tree root. + Path() string // e.g., "path/to/file.proto" + // Package returns the protobuf package namespace. + Package() FullName // e.g., "google.protobuf" + + // Imports is a list of imported proto files. + Imports() FileImports + + // Enums is a list of the top-level enum declarations. + Enums() EnumDescriptors + // Messages is a list of the top-level message declarations. + Messages() MessageDescriptors + // Extensions is a list of the top-level extension declarations. + Extensions() ExtensionDescriptors + // Services is a list of the top-level service declarations. + Services() ServiceDescriptors + + // SourceLocations is a list of source locations. + SourceLocations() SourceLocations + + isFileDescriptor +} +type isFileDescriptor interface{ ProtoType(FileDescriptor) } + +// FileImports is a list of file imports. +type FileImports interface { + // Len reports the number of files imported by this proto file. + Len() int + // Get returns the ith FileImport. It panics if out of bounds. + Get(i int) FileImport + + doNotImplement +} + +// FileImport is the declaration for a proto file import. +type FileImport struct { + // FileDescriptor is the file type for the given import. + // It is a placeholder descriptor if IsWeak is set or if a dependency has + // not been regenerated to implement the new reflection APIs. + FileDescriptor + + // IsPublic reports whether this is a public import, which causes this file + // to alias declarations within the imported file. The intended use cases + // for this feature is the ability to move proto files without breaking + // existing dependencies. + // + // The current file and the imported file must be within proto package. + IsPublic bool + + // IsWeak reports whether this is a weak import, which does not impose + // a direct dependency on the target file. + // + // Weak imports are a legacy proto1 feature. Equivalent behavior is + // achieved using proto2 extension fields or proto3 Any messages. + IsWeak bool +} + +// MessageDescriptor describes a message and +// corresponds with the google.protobuf.DescriptorProto message. 
+// +// Nested declarations: +// FieldDescriptor, OneofDescriptor, FieldDescriptor, EnumDescriptor, +// and/or MessageDescriptor. +type MessageDescriptor interface { + Descriptor + + // IsMapEntry indicates that this is an auto-generated message type to + // represent the entry type for a map field. + // + // Map entry messages have only two fields: + // • a "key" field with a field number of 1 + // • a "value" field with a field number of 2 + // The key and value types are determined by these two fields. + // + // If IsMapEntry is true, it implies that FieldDescriptor.IsMap is true + // for some field with this message type. + IsMapEntry() bool + + // Fields is a list of nested field declarations. + Fields() FieldDescriptors + // Oneofs is a list of nested oneof declarations. + Oneofs() OneofDescriptors + + // ReservedNames is a list of reserved field names. + ReservedNames() Names + // ReservedRanges is a list of reserved ranges of field numbers. + ReservedRanges() FieldRanges + // RequiredNumbers is a list of required field numbers. + // In Proto3, it is always an empty list. + RequiredNumbers() FieldNumbers + // ExtensionRanges is the field ranges used for extension fields. + // In Proto3, it is always an empty ranges. + ExtensionRanges() FieldRanges + // ExtensionRangeOptions returns the ith extension range options. + // + // To avoid a dependency cycle, this method returns a proto.Message value, + // which always contains a google.protobuf.ExtensionRangeOptions message. + // This method returns a typed nil-pointer if no options are present. + // The caller must import the descriptorpb package to use this. + ExtensionRangeOptions(i int) ProtoMessage + + // Enums is a list of nested enum declarations. + Enums() EnumDescriptors + // Messages is a list of nested message declarations. + Messages() MessageDescriptors + // Extensions is a list of nested extension declarations. + Extensions() ExtensionDescriptors + + isMessageDescriptor +} +type isMessageDescriptor interface{ ProtoType(MessageDescriptor) } + +// MessageType encapsulates a MessageDescriptor with a concrete Go implementation. +// It is recommended that implementations of this interface also implement the +// MessageFieldTypes interface. +type MessageType interface { + // New returns a newly allocated empty message. + // It may return nil for synthetic messages representing a map entry. + New() Message + + // Zero returns an empty, read-only message. + // It may return nil for synthetic messages representing a map entry. + Zero() Message + + // Descriptor returns the message descriptor. + // + // Invariant: t.Descriptor() == t.New().Descriptor() + Descriptor() MessageDescriptor +} + +// MessageFieldTypes extends a MessageType by providing type information +// regarding enums and messages referenced by the message fields. +type MessageFieldTypes interface { + MessageType + + // Enum returns the EnumType for the ith field in Descriptor.Fields. + // It returns nil if the ith field is not an enum kind. + // It panics if out of bounds. + // + // Invariant: mt.Enum(i).Descriptor() == mt.Descriptor().Fields(i).Enum() + Enum(i int) EnumType + + // Message returns the MessageType for the ith field in Descriptor.Fields. + // It returns nil if the ith field is not a message or group kind. + // It panics if out of bounds. + // + // Invariant: mt.Message(i).Descriptor() == mt.Descriptor().Fields(i).Message() + Message(i int) MessageType +} + +// MessageDescriptors is a list of message declarations. 
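Editorial aside, not part of the vendored diff: a sketch of the MessageType/MessageDescriptor/Message relationship described above, assuming the protoregistry and durationpb packages are available; the blank import is what registers google.protobuf.Duration in the global registry.

// Sketch only: resolve a MessageType by full name and construct a new Message.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	mt, err := protoregistry.GlobalTypes.FindMessageByName("google.protobuf.Duration")
	if err != nil {
		panic(err)
	}
	m := mt.New() // a fresh, mutable protoreflect.Message
	// Documented invariant: t.Descriptor() == t.New().Descriptor()
	fmt.Println(m.Descriptor().FullName(), mt.Descriptor() == m.Descriptor())
}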
+type MessageDescriptors interface { + // Len reports the number of messages. + Len() int + // Get returns the ith MessageDescriptor. It panics if out of bounds. + Get(i int) MessageDescriptor + // ByName returns the MessageDescriptor for a message named s. + // It returns nil if not found. + ByName(s Name) MessageDescriptor + + doNotImplement +} + +// FieldDescriptor describes a field within a message and +// corresponds with the google.protobuf.FieldDescriptorProto message. +// +// It is used for both normal fields defined within the parent message +// (e.g., MessageDescriptor.Fields) and fields that extend some remote message +// (e.g., FileDescriptor.Extensions or MessageDescriptor.Extensions). +type FieldDescriptor interface { + Descriptor + + // Number reports the unique number for this field. + Number() FieldNumber + // Cardinality reports the cardinality for this field. + Cardinality() Cardinality + // Kind reports the basic kind for this field. + Kind() Kind + + // HasJSONName reports whether this field has an explicitly set JSON name. + HasJSONName() bool + + // JSONName reports the name used for JSON serialization. + // It is usually the camel-cased form of the field name. + // Extension fields are represented by the full name surrounded by brackets. + JSONName() string + + // TextName reports the name used for text serialization. + // It is usually the name of the field, except that groups use the name + // of the inlined message, and extension fields are represented by the + // full name surrounded by brackets. + TextName() string + + // HasPresence reports whether the field distinguishes between unpopulated + // and default values. + HasPresence() bool + + // IsExtension reports whether this is an extension field. If false, + // then Parent and ContainingMessage refer to the same message. + // Otherwise, ContainingMessage and Parent likely differ. + IsExtension() bool + + // HasOptionalKeyword reports whether the "optional" keyword was explicitly + // specified in the source .proto file. + HasOptionalKeyword() bool + + // IsWeak reports whether this is a weak field, which does not impose a + // direct dependency on the target type. + // If true, then Message returns a placeholder type. + IsWeak() bool + + // IsPacked reports whether repeated primitive numeric kinds should be + // serialized using a packed encoding. + // If true, then it implies Cardinality is Repeated. + IsPacked() bool + + // IsList reports whether this field represents a list, + // where the value type for the associated field is a List. + // It is equivalent to checking whether Cardinality is Repeated and + // that IsMap reports false. + IsList() bool + + // IsMap reports whether this field represents a map, + // where the value type for the associated field is a Map. + // It is equivalent to checking whether Cardinality is Repeated, + // that the Kind is MessageKind, and that Message.IsMapEntry reports true. + IsMap() bool + + // MapKey returns the field descriptor for the key in the map entry. + // It returns nil if IsMap reports false. + MapKey() FieldDescriptor + + // MapValue returns the field descriptor for the value in the map entry. + // It returns nil if IsMap reports false. + MapValue() FieldDescriptor + + // HasDefault reports whether this field has a default value. + HasDefault() bool + + // Default returns the default value for scalar fields. + // For proto2, it is the default value as specified in the proto file, + // or the zero value if unspecified. 
+ // For proto3, it is always the zero value of the scalar. + // The Value type is determined by the Kind. + Default() Value + + // DefaultEnumValue returns the enum value descriptor for the default value + // of an enum field, and is nil for any other kind of field. + DefaultEnumValue() EnumValueDescriptor + + // ContainingOneof is the containing oneof that this field belongs to, + // and is nil if this field is not part of a oneof. + ContainingOneof() OneofDescriptor + + // ContainingMessage is the containing message that this field belongs to. + // For extension fields, this may not necessarily be the parent message + // that the field is declared within. + ContainingMessage() MessageDescriptor + + // Enum is the enum descriptor if Kind is EnumKind. + // It returns nil for any other Kind. + Enum() EnumDescriptor + + // Message is the message descriptor if Kind is + // MessageKind or GroupKind. It returns nil for any other Kind. + Message() MessageDescriptor + + isFieldDescriptor +} +type isFieldDescriptor interface{ ProtoType(FieldDescriptor) } + +// FieldDescriptors is a list of field declarations. +type FieldDescriptors interface { + // Len reports the number of fields. + Len() int + // Get returns the ith FieldDescriptor. It panics if out of bounds. + Get(i int) FieldDescriptor + // ByName returns the FieldDescriptor for a field named s. + // It returns nil if not found. + ByName(s Name) FieldDescriptor + // ByJSONName returns the FieldDescriptor for a field with s as the JSON name. + // It returns nil if not found. + ByJSONName(s string) FieldDescriptor + // ByTextName returns the FieldDescriptor for a field with s as the text name. + // It returns nil if not found. + ByTextName(s string) FieldDescriptor + // ByNumber returns the FieldDescriptor for a field numbered n. + // It returns nil if not found. + ByNumber(n FieldNumber) FieldDescriptor + + doNotImplement +} + +// OneofDescriptor describes a oneof field set within a given message and +// corresponds with the google.protobuf.OneofDescriptorProto message. +type OneofDescriptor interface { + Descriptor + + // IsSynthetic reports whether this is a synthetic oneof created to support + // proto3 optional semantics. If true, Fields contains exactly one field + // with HasOptionalKeyword specified. + IsSynthetic() bool + + // Fields is a list of fields belonging to this oneof. + Fields() FieldDescriptors + + isOneofDescriptor +} +type isOneofDescriptor interface{ ProtoType(OneofDescriptor) } + +// OneofDescriptors is a list of oneof declarations. +type OneofDescriptors interface { + // Len reports the number of oneof fields. + Len() int + // Get returns the ith OneofDescriptor. It panics if out of bounds. + Get(i int) OneofDescriptor + // ByName returns the OneofDescriptor for a oneof named s. + // It returns nil if not found. + ByName(s Name) OneofDescriptor + + doNotImplement +} + +// ExtensionDescriptor is an alias of FieldDescriptor for documentation. +type ExtensionDescriptor = FieldDescriptor + +// ExtensionTypeDescriptor is an ExtensionDescriptor with an associated ExtensionType. +type ExtensionTypeDescriptor interface { + ExtensionDescriptor + + // Type returns the associated ExtensionType. + Type() ExtensionType + + // Descriptor returns the plain ExtensionDescriptor without the + // associated ExtensionType. + Descriptor() ExtensionDescriptor +} + +// ExtensionDescriptors is a list of field declarations. +type ExtensionDescriptors interface { + // Len reports the number of fields. 
+ Len() int + // Get returns the ith ExtensionDescriptor. It panics if out of bounds. + Get(i int) ExtensionDescriptor + // ByName returns the ExtensionDescriptor for a field named s. + // It returns nil if not found. + ByName(s Name) ExtensionDescriptor + + doNotImplement +} + +// ExtensionType encapsulates an ExtensionDescriptor with a concrete +// Go implementation. The nested field descriptor must be for a extension field. +// +// While a normal field is a member of the parent message that it is declared +// within (see Descriptor.Parent), an extension field is a member of some other +// target message (see ExtensionDescriptor.Extendee) and may have no +// relationship with the parent. However, the full name of an extension field is +// relative to the parent that it is declared within. +// +// For example: +// syntax = "proto2"; +// package example; +// message FooMessage { +// extensions 100 to max; +// } +// message BarMessage { +// extends FooMessage { optional BarMessage bar_field = 100; } +// } +// +// Field "bar_field" is an extension of FooMessage, but its full name is +// "example.BarMessage.bar_field" instead of "example.FooMessage.bar_field". +type ExtensionType interface { + // New returns a new value for the field. + // For scalars, this returns the default value in native Go form. + New() Value + + // Zero returns a new value for the field. + // For scalars, this returns the default value in native Go form. + // For composite types, this returns an empty, read-only message, list, or map. + Zero() Value + + // TypeDescriptor returns the extension type descriptor. + TypeDescriptor() ExtensionTypeDescriptor + + // ValueOf wraps the input and returns it as a Value. + // ValueOf panics if the input value is invalid or not the appropriate type. + // + // ValueOf is more extensive than protoreflect.ValueOf for a given field's + // value as it has more type information available. + ValueOf(interface{}) Value + + // InterfaceOf completely unwraps the Value to the underlying Go type. + // InterfaceOf panics if the input is nil or does not represent the + // appropriate underlying Go type. For composite types, it panics if the + // value is not mutable. + // + // InterfaceOf is able to unwrap the Value further than Value.Interface + // as it has more type information available. + InterfaceOf(Value) interface{} + + // IsValidValue reports whether the Value is valid to assign to the field. + IsValidValue(Value) bool + + // IsValidInterface reports whether the input is valid to assign to the field. + IsValidInterface(interface{}) bool +} + +// EnumDescriptor describes an enum and +// corresponds with the google.protobuf.EnumDescriptorProto message. +// +// Nested declarations: +// EnumValueDescriptor. +type EnumDescriptor interface { + Descriptor + + // Values is a list of nested enum value declarations. + Values() EnumValueDescriptors + + // ReservedNames is a list of reserved enum names. + ReservedNames() Names + // ReservedRanges is a list of reserved ranges of enum numbers. + ReservedRanges() EnumRanges + + isEnumDescriptor +} +type isEnumDescriptor interface{ ProtoType(EnumDescriptor) } + +// EnumType encapsulates an EnumDescriptor with a concrete Go implementation. +type EnumType interface { + // New returns an instance of this enum type with its value set to n. + New(n EnumNumber) Enum + + // Descriptor returns the enum descriptor. + // + // Invariant: t.Descriptor() == t.New(0).Descriptor() + Descriptor() EnumDescriptor +} + +// EnumDescriptors is a list of enum declarations. 
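Editorial aside, not part of the vendored diff: a sketch of the Enum/EnumType/EnumDescriptor relationship described above, using the generated well-known enum google.protobuf.NullValue (structpb) as an assumed stand-in.

// Sketch only: a generated enum value exposes its descriptor, type, and number.
package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/structpb"
)

func main() {
	v := structpb.NullValue_NULL_VALUE
	fmt.Println(v.Descriptor().FullName(), v.Number()) // google.protobuf.NullValue 0
	// The EnumType wraps the same descriptor that the value reports.
	fmt.Println(v.Type().Descriptor() == v.Descriptor()) // true
}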
+type EnumDescriptors interface { + // Len reports the number of enum types. + Len() int + // Get returns the ith EnumDescriptor. It panics if out of bounds. + Get(i int) EnumDescriptor + // ByName returns the EnumDescriptor for an enum named s. + // It returns nil if not found. + ByName(s Name) EnumDescriptor + + doNotImplement +} + +// EnumValueDescriptor describes an enum value and +// corresponds with the google.protobuf.EnumValueDescriptorProto message. +// +// All other proto declarations are in the namespace of the parent. +// However, enum values do not follow this rule and are within the namespace +// of the parent's parent (i.e., they are a sibling of the containing enum). +// Thus, a value named "FOO_VALUE" declared within an enum uniquely identified +// as "proto.package.MyEnum" has a full name of "proto.package.FOO_VALUE". +type EnumValueDescriptor interface { + Descriptor + + // Number returns the enum value as an integer. + Number() EnumNumber + + isEnumValueDescriptor +} +type isEnumValueDescriptor interface{ ProtoType(EnumValueDescriptor) } + +// EnumValueDescriptors is a list of enum value declarations. +type EnumValueDescriptors interface { + // Len reports the number of enum values. + Len() int + // Get returns the ith EnumValueDescriptor. It panics if out of bounds. + Get(i int) EnumValueDescriptor + // ByName returns the EnumValueDescriptor for the enum value named s. + // It returns nil if not found. + ByName(s Name) EnumValueDescriptor + // ByNumber returns the EnumValueDescriptor for the enum value numbered n. + // If multiple have the same number, the first one defined is returned + // It returns nil if not found. + ByNumber(n EnumNumber) EnumValueDescriptor + + doNotImplement +} + +// ServiceDescriptor describes a service and +// corresponds with the google.protobuf.ServiceDescriptorProto message. +// +// Nested declarations: MethodDescriptor. +type ServiceDescriptor interface { + Descriptor + + // Methods is a list of nested message declarations. + Methods() MethodDescriptors + + isServiceDescriptor +} +type isServiceDescriptor interface{ ProtoType(ServiceDescriptor) } + +// ServiceDescriptors is a list of service declarations. +type ServiceDescriptors interface { + // Len reports the number of services. + Len() int + // Get returns the ith ServiceDescriptor. It panics if out of bounds. + Get(i int) ServiceDescriptor + // ByName returns the ServiceDescriptor for a service named s. + // It returns nil if not found. + ByName(s Name) ServiceDescriptor + + doNotImplement +} + +// MethodDescriptor describes a method and +// corresponds with the google.protobuf.MethodDescriptorProto message. +type MethodDescriptor interface { + Descriptor + + // Input is the input message descriptor. + Input() MessageDescriptor + // Output is the output message descriptor. + Output() MessageDescriptor + // IsStreamingClient reports whether the client streams multiple messages. + IsStreamingClient() bool + // IsStreamingServer reports whether the server streams multiple messages. + IsStreamingServer() bool + + isMethodDescriptor +} +type isMethodDescriptor interface{ ProtoType(MethodDescriptor) } + +// MethodDescriptors is a list of method declarations. +type MethodDescriptors interface { + // Len reports the number of methods. + Len() int + // Get returns the ith MethodDescriptor. It panics if out of bounds. + Get(i int) MethodDescriptor + // ByName returns the MethodDescriptor for a service method named s. + // It returns nil if not found. 
+ ByName(s Name) MethodDescriptor + + doNotImplement +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/value.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/value.go new file mode 100644 index 00000000..f3198107 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/value.go @@ -0,0 +1,285 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoreflect + +import "google.golang.org/protobuf/encoding/protowire" + +// Enum is a reflection interface for a concrete enum value, +// which provides type information and a getter for the enum number. +// Enum does not provide a mutable API since enums are commonly backed by +// Go constants, which are not addressable. +type Enum interface { + // Descriptor returns enum descriptor, which contains only the protobuf + // type information for the enum. + Descriptor() EnumDescriptor + + // Type returns the enum type, which encapsulates both Go and protobuf + // type information. If the Go type information is not needed, + // it is recommended that the enum descriptor be used instead. + Type() EnumType + + // Number returns the enum value as an integer. + Number() EnumNumber +} + +// Message is a reflective interface for a concrete message value, +// encapsulating both type and value information for the message. +// +// Accessor/mutators for individual fields are keyed by FieldDescriptor. +// For non-extension fields, the descriptor must exactly match the +// field known by the parent message. +// For extension fields, the descriptor must implement ExtensionTypeDescriptor, +// extend the parent message (i.e., have the same message FullName), and +// be within the parent's extension range. +// +// Each field Value can be a scalar or a composite type (Message, List, or Map). +// See Value for the Go types associated with a FieldDescriptor. +// Providing a Value that is invalid or of an incorrect type panics. +type Message interface { + // Descriptor returns message descriptor, which contains only the protobuf + // type information for the message. + Descriptor() MessageDescriptor + + // Type returns the message type, which encapsulates both Go and protobuf + // type information. If the Go type information is not needed, + // it is recommended that the message descriptor be used instead. + Type() MessageType + + // New returns a newly allocated and mutable empty message. + New() Message + + // Interface unwraps the message reflection interface and + // returns the underlying ProtoMessage interface. + Interface() ProtoMessage + + // Range iterates over every populated field in an undefined order, + // calling f for each field descriptor and value encountered. + // Range returns immediately if f returns false. + // While iterating, mutating operations may only be performed + // on the current field descriptor. + Range(f func(FieldDescriptor, Value) bool) + + // Has reports whether a field is populated. + // + // Some fields have the property of nullability where it is possible to + // distinguish between the default value of a field and whether the field + // was explicitly populated with the default value. Singular message fields, + // member fields of a oneof, and proto2 scalar fields are nullable. Such + // fields are populated only if explicitly set. 
+ // + // In other cases (aside from the nullable cases above), + // a proto3 scalar field is populated if it contains a non-zero value, and + // a repeated field is populated if it is non-empty. + Has(FieldDescriptor) bool + + // Clear clears the field such that a subsequent Has call reports false. + // + // Clearing an extension field clears both the extension type and value + // associated with the given field number. + // + // Clear is a mutating operation and unsafe for concurrent use. + Clear(FieldDescriptor) + + // Get retrieves the value for a field. + // + // For unpopulated scalars, it returns the default value, where + // the default value of a bytes scalar is guaranteed to be a copy. + // For unpopulated composite types, it returns an empty, read-only view + // of the value; to obtain a mutable reference, use Mutable. + Get(FieldDescriptor) Value + + // Set stores the value for a field. + // + // For a field belonging to a oneof, it implicitly clears any other field + // that may be currently set within the same oneof. + // For extension fields, it implicitly stores the provided ExtensionType. + // When setting a composite type, it is unspecified whether the stored value + // aliases the source's memory in any way. If the composite value is an + // empty, read-only value, then it panics. + // + // Set is a mutating operation and unsafe for concurrent use. + Set(FieldDescriptor, Value) + + // Mutable returns a mutable reference to a composite type. + // + // If the field is unpopulated, it may allocate a composite value. + // For a field belonging to a oneof, it implicitly clears any other field + // that may be currently set within the same oneof. + // For extension fields, it implicitly stores the provided ExtensionType + // if not already stored. + // It panics if the field does not contain a composite type. + // + // Mutable is a mutating operation and unsafe for concurrent use. + Mutable(FieldDescriptor) Value + + // NewField returns a new value that is assignable to the field + // for the given descriptor. For scalars, this returns the default value. + // For lists, maps, and messages, this returns a new, empty, mutable value. + NewField(FieldDescriptor) Value + + // WhichOneof reports which field within the oneof is populated, + // returning nil if none are populated. + // It panics if the oneof descriptor does not belong to this message. + WhichOneof(OneofDescriptor) FieldDescriptor + + // GetUnknown retrieves the entire list of unknown fields. + // The caller may only mutate the contents of the RawFields + // if the mutated bytes are stored back into the message with SetUnknown. + GetUnknown() RawFields + + // SetUnknown stores an entire list of unknown fields. + // The raw fields must be syntactically valid according to the wire format. + // An implementation may panic if this is not the case. + // Once stored, the caller must not mutate the content of the RawFields. + // An empty RawFields may be passed to clear the fields. + // + // SetUnknown is a mutating operation and unsafe for concurrent use. + SetUnknown(RawFields) + + // IsValid reports whether the message is valid. + // + // An invalid message is an empty, read-only value. + // + // An invalid message often corresponds to a nil pointer of the concrete + // message type, but the details are implementation dependent. + // Validity is not part of the protobuf data model, and may not + // be preserved in marshaling or other operations. 
+ IsValid() bool + + // ProtoMethods returns optional fast-path implementions of various operations. + // This method may return nil. + // + // The returned methods type is identical to + // "google.golang.org/protobuf/runtime/protoiface".Methods. + // Consult the protoiface package documentation for details. + ProtoMethods() *methods +} + +// RawFields is the raw bytes for an ordered sequence of fields. +// Each field contains both the tag (representing field number and wire type), +// and also the wire data itself. +type RawFields []byte + +// IsValid reports whether b is syntactically correct wire format. +func (b RawFields) IsValid() bool { + for len(b) > 0 { + _, _, n := protowire.ConsumeField(b) + if n < 0 { + return false + } + b = b[n:] + } + return true +} + +// List is a zero-indexed, ordered list. +// The element Value type is determined by FieldDescriptor.Kind. +// Providing a Value that is invalid or of an incorrect type panics. +type List interface { + // Len reports the number of entries in the List. + // Get, Set, and Truncate panic with out of bound indexes. + Len() int + + // Get retrieves the value at the given index. + // It never returns an invalid value. + Get(int) Value + + // Set stores a value for the given index. + // When setting a composite type, it is unspecified whether the set + // value aliases the source's memory in any way. + // + // Set is a mutating operation and unsafe for concurrent use. + Set(int, Value) + + // Append appends the provided value to the end of the list. + // When appending a composite type, it is unspecified whether the appended + // value aliases the source's memory in any way. + // + // Append is a mutating operation and unsafe for concurrent use. + Append(Value) + + // AppendMutable appends a new, empty, mutable message value to the end + // of the list and returns it. + // It panics if the list does not contain a message type. + AppendMutable() Value + + // Truncate truncates the list to a smaller length. + // + // Truncate is a mutating operation and unsafe for concurrent use. + Truncate(int) + + // NewElement returns a new value for a list element. + // For enums, this returns the first enum value. + // For other scalars, this returns the zero value. + // For messages, this returns a new, empty, mutable value. + NewElement() Value + + // IsValid reports whether the list is valid. + // + // An invalid list is an empty, read-only value. + // + // Validity is not part of the protobuf data model, and may not + // be preserved in marshaling or other operations. + IsValid() bool +} + +// Map is an unordered, associative map. +// The entry MapKey type is determined by FieldDescriptor.MapKey.Kind. +// The entry Value type is determined by FieldDescriptor.MapValue.Kind. +// Providing a MapKey or Value that is invalid or of an incorrect type panics. +type Map interface { + // Len reports the number of elements in the map. + Len() int + + // Range iterates over every map entry in an undefined order, + // calling f for each key and value encountered. + // Range calls f Len times unless f returns false, which stops iteration. + // While iterating, mutating operations may only be performed + // on the current map key. + Range(f func(MapKey, Value) bool) + + // Has reports whether an entry with the given key is in the map. + Has(MapKey) bool + + // Clear clears the entry associated with they given key. + // The operation does nothing if there is no entry associated with the key. 
+ // + // Clear is a mutating operation and unsafe for concurrent use. + Clear(MapKey) + + // Get retrieves the value for an entry with the given key. + // It returns an invalid value for non-existent entries. + Get(MapKey) Value + + // Set stores the value for an entry with the given key. + // It panics when given a key or value that is invalid or the wrong type. + // When setting a composite type, it is unspecified whether the set + // value aliases the source's memory in any way. + // + // Set is a mutating operation and unsafe for concurrent use. + Set(MapKey, Value) + + // Mutable retrieves a mutable reference to the entry for the given key. + // If no entry exists for the key, it creates a new, empty, mutable value + // and stores it as the entry for the key. + // It panics if the map value is not a message. + Mutable(MapKey) Value + + // NewValue returns a new value assignable as a map value. + // For enums, this returns the first enum value. + // For other scalars, this returns the zero value. + // For messages, this returns a new, empty, mutable value. + NewValue() Value + + // IsValid reports whether the map is valid. + // + // An invalid map is an empty, read-only value. + // + // An invalid message often corresponds to a nil Go map value, + // but the details are implementation dependent. + // Validity is not part of the protobuf data model, and may not + // be preserved in marshaling or other operations. + IsValid() bool +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/value_pure.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_pure.go new file mode 100644 index 00000000..918e685e --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_pure.go @@ -0,0 +1,59 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego appengine + +package protoreflect + +import "google.golang.org/protobuf/internal/pragma" + +type valueType int + +const ( + nilType valueType = iota + boolType + int32Type + int64Type + uint32Type + uint64Type + float32Type + float64Type + stringType + bytesType + enumType + ifaceType +) + +// value is a union where only one type can be represented at a time. +// This uses a distinct field for each type. This is type safe in Go, but +// occupies more memory than necessary (72B). +type value struct { + pragma.DoNotCompare // 0B + + typ valueType // 8B + num uint64 // 8B + str string // 16B + bin []byte // 24B + iface interface{} // 16B +} + +func valueOfString(v string) Value { + return Value{typ: stringType, str: v} +} +func valueOfBytes(v []byte) Value { + return Value{typ: bytesType, bin: v} +} +func valueOfIface(v interface{}) Value { + return Value{typ: ifaceType, iface: v} +} + +func (v Value) getString() string { + return v.str +} +func (v Value) getBytes() []byte { + return v.bin +} +func (v Value) getIface() interface{} { + return v.iface +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/value_union.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_union.go new file mode 100644 index 00000000..5a341472 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_union.go @@ -0,0 +1,411 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
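// Editorial sketch, not part of the vendored file: a minimal program showing how the
// Message, List, and Map interfaces above are consumed from calling code. It assumes
// google.golang.org/protobuf/types/known/durationpb is available; any generated
// message behaves the same way through ProtoReflect().
package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	m := durationpb.New(3 * time.Second).ProtoReflect()

	// Range visits only populated fields, in an undefined order.
	m.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool {
		fmt.Printf("%s (%v) = %v\n", fd.Name(), fd.Kind(), v.Interface())
		return true // keep iterating
	})

	// For an unpopulated field, Get returns the default value and Has reports false.
	// (nanos is zero here because the duration is a whole number of seconds.)
	nanos := m.Descriptor().Fields().ByName("nanos")
	fmt.Println("nanos populated:", m.Has(nanos), "value:", m.Get(nanos).Int())
}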
+ +package protoreflect + +import ( + "fmt" + "math" +) + +// Value is a union where only one Go type may be set at a time. +// The Value is used to represent all possible values a field may take. +// The following shows which Go type is used to represent each proto Kind: +// +// ╔════════════╤═════════════════════════════════════╗ +// ║ Go type │ Protobuf kind ║ +// ╠════════════╪═════════════════════════════════════╣ +// ║ bool │ BoolKind ║ +// ║ int32 │ Int32Kind, Sint32Kind, Sfixed32Kind ║ +// ║ int64 │ Int64Kind, Sint64Kind, Sfixed64Kind ║ +// ║ uint32 │ Uint32Kind, Fixed32Kind ║ +// ║ uint64 │ Uint64Kind, Fixed64Kind ║ +// ║ float32 │ FloatKind ║ +// ║ float64 │ DoubleKind ║ +// ║ string │ StringKind ║ +// ║ []byte │ BytesKind ║ +// ║ EnumNumber │ EnumKind ║ +// ║ Message │ MessageKind, GroupKind ║ +// ╚════════════╧═════════════════════════════════════╝ +// +// Multiple protobuf Kinds may be represented by a single Go type if the type +// can losslessly represent the information for the proto kind. For example, +// Int64Kind, Sint64Kind, and Sfixed64Kind are all represented by int64, +// but use different integer encoding methods. +// +// The List or Map types are used if the field cardinality is repeated. +// A field is a List if FieldDescriptor.IsList reports true. +// A field is a Map if FieldDescriptor.IsMap reports true. +// +// Converting to/from a Value and a concrete Go value panics on type mismatch. +// For example, ValueOf("hello").Int() panics because this attempts to +// retrieve an int64 from a string. +type Value value + +// The protoreflect API uses a custom Value union type instead of interface{} +// to keep the future open for performance optimizations. Using an interface{} +// always incurs an allocation for primitives (e.g., int64) since it needs to +// be boxed on the heap (as interfaces can only contain pointers natively). +// Instead, we represent the Value union as a flat struct that internally keeps +// track of which type is set. Using unsafe, the Value union can be reduced +// down to 24B, which is identical in size to a slice. +// +// The latest compiler (Go1.11) currently suffers from some limitations: +// • With inlining, the compiler should be able to statically prove that +// only one of these switch cases are taken and inline one specific case. +// See https://golang.org/issue/22310. + +// ValueOf returns a Value initialized with the concrete value stored in v. +// This panics if the type does not match one of the allowed types in the +// Value union. +func ValueOf(v interface{}) Value { + switch v := v.(type) { + case nil: + return Value{} + case bool: + return ValueOfBool(v) + case int32: + return ValueOfInt32(v) + case int64: + return ValueOfInt64(v) + case uint32: + return ValueOfUint32(v) + case uint64: + return ValueOfUint64(v) + case float32: + return ValueOfFloat32(v) + case float64: + return ValueOfFloat64(v) + case string: + return ValueOfString(v) + case []byte: + return ValueOfBytes(v) + case EnumNumber: + return ValueOfEnum(v) + case Message, List, Map: + return valueOfIface(v) + case ProtoMessage: + panic(fmt.Sprintf("invalid proto.Message(%T) type, expected a protoreflect.Message type", v)) + default: + panic(fmt.Sprintf("invalid type: %T", v)) + } +} + +// ValueOfBool returns a new boolean value. +func ValueOfBool(v bool) Value { + if v { + return Value{typ: boolType, num: 1} + } else { + return Value{typ: boolType, num: 0} + } +} + +// ValueOfInt32 returns a new int32 value. 
+func ValueOfInt32(v int32) Value {
+	return Value{typ: int32Type, num: uint64(v)}
+}
+
+// ValueOfInt64 returns a new int64 value.
+func ValueOfInt64(v int64) Value {
+	return Value{typ: int64Type, num: uint64(v)}
+}
+
+// ValueOfUint32 returns a new uint32 value.
+func ValueOfUint32(v uint32) Value {
+	return Value{typ: uint32Type, num: uint64(v)}
+}
+
+// ValueOfUint64 returns a new uint64 value.
+func ValueOfUint64(v uint64) Value {
+	return Value{typ: uint64Type, num: v}
+}
+
+// ValueOfFloat32 returns a new float32 value.
+func ValueOfFloat32(v float32) Value {
+	return Value{typ: float32Type, num: uint64(math.Float64bits(float64(v)))}
+}
+
+// ValueOfFloat64 returns a new float64 value.
+func ValueOfFloat64(v float64) Value {
+	return Value{typ: float64Type, num: uint64(math.Float64bits(float64(v)))}
+}
+
+// ValueOfString returns a new string value.
+func ValueOfString(v string) Value {
+	return valueOfString(v)
+}
+
+// ValueOfBytes returns a new bytes value.
+func ValueOfBytes(v []byte) Value {
+	return valueOfBytes(v[:len(v):len(v)])
+}
+
+// ValueOfEnum returns a new enum value.
+func ValueOfEnum(v EnumNumber) Value {
+	return Value{typ: enumType, num: uint64(v)}
+}
+
+// ValueOfMessage returns a new Message value.
+func ValueOfMessage(v Message) Value {
+	return valueOfIface(v)
+}
+
+// ValueOfList returns a new List value.
+func ValueOfList(v List) Value {
+	return valueOfIface(v)
+}
+
+// ValueOfMap returns a new Map value.
+func ValueOfMap(v Map) Value {
+	return valueOfIface(v)
+}
+
+// IsValid reports whether v is populated with a value.
+func (v Value) IsValid() bool {
+	return v.typ != nilType
+}
+
+// Interface returns v as an interface{}.
+//
+// Invariant: v == ValueOf(v).Interface()
+func (v Value) Interface() interface{} {
+	switch v.typ {
+	case nilType:
+		return nil
+	case boolType:
+		return v.Bool()
+	case int32Type:
+		return int32(v.Int())
+	case int64Type:
+		return int64(v.Int())
+	case uint32Type:
+		return uint32(v.Uint())
+	case uint64Type:
+		return uint64(v.Uint())
+	case float32Type:
+		return float32(v.Float())
+	case float64Type:
+		return float64(v.Float())
+	case stringType:
+		return v.String()
+	case bytesType:
+		return v.Bytes()
+	case enumType:
+		return v.Enum()
+	default:
+		return v.getIface()
+	}
+}
+
+func (v Value) typeName() string {
+	switch v.typ {
+	case nilType:
+		return "nil"
+	case boolType:
+		return "bool"
+	case int32Type:
+		return "int32"
+	case int64Type:
+		return "int64"
+	case uint32Type:
+		return "uint32"
+	case uint64Type:
+		return "uint64"
+	case float32Type:
+		return "float32"
+	case float64Type:
+		return "float64"
+	case stringType:
+		return "string"
+	case bytesType:
+		return "bytes"
+	case enumType:
+		return "enum"
+	default:
+		switch v := v.getIface().(type) {
+		case Message:
+			return "message"
+		case List:
+			return "list"
+		case Map:
+			return "map"
+		default:
+			return fmt.Sprintf("<unknown: %T>", v)
+		}
+	}
+}
+
+func (v Value) panicMessage(what string) string {
+	return fmt.Sprintf("type mismatch: cannot convert %v to %s", v.typeName(), what)
+}
+
+// Bool returns v as a bool and panics if the type is not a bool.
+func (v Value) Bool() bool {
+	switch v.typ {
+	case boolType:
+		return v.num > 0
+	default:
+		panic(v.panicMessage("bool"))
+	}
+}
+
+// Int returns v as a int64 and panics if the type is not a int32 or int64.
+func (v Value) Int() int64 { + switch v.typ { + case int32Type, int64Type: + return int64(v.num) + default: + panic(v.panicMessage("int")) + } +} + +// Uint returns v as a uint64 and panics if the type is not a uint32 or uint64. +func (v Value) Uint() uint64 { + switch v.typ { + case uint32Type, uint64Type: + return uint64(v.num) + default: + panic(v.panicMessage("uint")) + } +} + +// Float returns v as a float64 and panics if the type is not a float32 or float64. +func (v Value) Float() float64 { + switch v.typ { + case float32Type, float64Type: + return math.Float64frombits(uint64(v.num)) + default: + panic(v.panicMessage("float")) + } +} + +// String returns v as a string. Since this method implements fmt.Stringer, +// this returns the formatted string value for any non-string type. +func (v Value) String() string { + switch v.typ { + case stringType: + return v.getString() + default: + return fmt.Sprint(v.Interface()) + } +} + +// Bytes returns v as a []byte and panics if the type is not a []byte. +func (v Value) Bytes() []byte { + switch v.typ { + case bytesType: + return v.getBytes() + default: + panic(v.panicMessage("bytes")) + } +} + +// Enum returns v as a EnumNumber and panics if the type is not a EnumNumber. +func (v Value) Enum() EnumNumber { + switch v.typ { + case enumType: + return EnumNumber(v.num) + default: + panic(v.panicMessage("enum")) + } +} + +// Message returns v as a Message and panics if the type is not a Message. +func (v Value) Message() Message { + switch vi := v.getIface().(type) { + case Message: + return vi + default: + panic(v.panicMessage("message")) + } +} + +// List returns v as a List and panics if the type is not a List. +func (v Value) List() List { + switch vi := v.getIface().(type) { + case List: + return vi + default: + panic(v.panicMessage("list")) + } +} + +// Map returns v as a Map and panics if the type is not a Map. +func (v Value) Map() Map { + switch vi := v.getIface().(type) { + case Map: + return vi + default: + panic(v.panicMessage("map")) + } +} + +// MapKey returns v as a MapKey and panics for invalid MapKey types. +func (v Value) MapKey() MapKey { + switch v.typ { + case boolType, int32Type, int64Type, uint32Type, uint64Type, stringType: + return MapKey(v) + default: + panic(v.panicMessage("map key")) + } +} + +// MapKey is used to index maps, where the Go type of the MapKey must match +// the specified key Kind (see MessageDescriptor.IsMapEntry). +// The following shows what Go type is used to represent each proto Kind: +// +// ╔═════════╤═════════════════════════════════════╗ +// ║ Go type │ Protobuf kind ║ +// ╠═════════╪═════════════════════════════════════╣ +// ║ bool │ BoolKind ║ +// ║ int32 │ Int32Kind, Sint32Kind, Sfixed32Kind ║ +// ║ int64 │ Int64Kind, Sint64Kind, Sfixed64Kind ║ +// ║ uint32 │ Uint32Kind, Fixed32Kind ║ +// ║ uint64 │ Uint64Kind, Fixed64Kind ║ +// ║ string │ StringKind ║ +// ╚═════════╧═════════════════════════════════════╝ +// +// A MapKey is constructed and accessed through a Value: +// k := ValueOf("hash").MapKey() // convert string to MapKey +// s := k.String() // convert MapKey to string +// +// The MapKey is a strict subset of valid types used in Value; +// converting a Value to a MapKey with an invalid type panics. +type MapKey value + +// IsValid reports whether k is populated with a value. +func (k MapKey) IsValid() bool { + return Value(k).IsValid() +} + +// Interface returns k as an interface{}. 
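// Editorial sketch, not part of the vendored file: round-tripping scalars through the
// Value union and MapKey with the constructors and accessors defined above. Accessors
// panic on a kind mismatch, so callers are expected to know the field's Go type.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
)

func main() {
	v := protoreflect.ValueOfInt64(42)
	fmt.Println(v.Int(), v.IsValid()) // 42 true

	// ValueOf accepts any of the union's Go types; String formats any kind.
	fmt.Println(protoreflect.ValueOf("hello").String()) // hello

	// Only bool, integer, and string kinds may become map keys.
	k := protoreflect.ValueOfString("hash").MapKey()
	fmt.Println(k.String(), k.Value().IsValid()) // hash true

	// v.Bytes() would panic here: v holds an int64, not a bytes value.
}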
+func (k MapKey) Interface() interface{} { + return Value(k).Interface() +} + +// Bool returns k as a bool and panics if the type is not a bool. +func (k MapKey) Bool() bool { + return Value(k).Bool() +} + +// Int returns k as a int64 and panics if the type is not a int32 or int64. +func (k MapKey) Int() int64 { + return Value(k).Int() +} + +// Uint returns k as a uint64 and panics if the type is not a uint32 or uint64. +func (k MapKey) Uint() uint64 { + return Value(k).Uint() +} + +// String returns k as a string. Since this method implements fmt.Stringer, +// this returns the formatted string value for any non-string type. +func (k MapKey) String() string { + return Value(k).String() +} + +// Value returns k as a Value. +func (k MapKey) Value() Value { + return Value(k) +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoreflect/value_unsafe.go b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_unsafe.go new file mode 100644 index 00000000..c45debdc --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoreflect/value_unsafe.go @@ -0,0 +1,98 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego,!appengine + +package protoreflect + +import ( + "unsafe" + + "google.golang.org/protobuf/internal/pragma" +) + +type ( + stringHeader struct { + Data unsafe.Pointer + Len int + } + sliceHeader struct { + Data unsafe.Pointer + Len int + Cap int + } + ifaceHeader struct { + Type unsafe.Pointer + Data unsafe.Pointer + } +) + +var ( + nilType = typeOf(nil) + boolType = typeOf(*new(bool)) + int32Type = typeOf(*new(int32)) + int64Type = typeOf(*new(int64)) + uint32Type = typeOf(*new(uint32)) + uint64Type = typeOf(*new(uint64)) + float32Type = typeOf(*new(float32)) + float64Type = typeOf(*new(float64)) + stringType = typeOf(*new(string)) + bytesType = typeOf(*new([]byte)) + enumType = typeOf(*new(EnumNumber)) +) + +// typeOf returns a pointer to the Go type information. +// The pointer is comparable and equal if and only if the types are identical. +func typeOf(t interface{}) unsafe.Pointer { + return (*ifaceHeader)(unsafe.Pointer(&t)).Type +} + +// value is a union where only one type can be represented at a time. +// The struct is 24B large on 64-bit systems and requires the minimum storage +// necessary to represent each possible type. +// +// The Go GC needs to be able to scan variables containing pointers. +// As such, pointers and non-pointers cannot be intermixed. +type value struct { + pragma.DoNotCompare // 0B + + // typ stores the type of the value as a pointer to the Go type. + typ unsafe.Pointer // 8B + + // ptr stores the data pointer for a String, Bytes, or interface value. + ptr unsafe.Pointer // 8B + + // num stores a Bool, Int32, Int64, Uint32, Uint64, Float32, Float64, or + // Enum value as a raw uint64. + // + // It is also used to store the length of a String or Bytes value; + // the capacity is ignored. 
+ num uint64 // 8B +} + +func valueOfString(v string) Value { + p := (*stringHeader)(unsafe.Pointer(&v)) + return Value{typ: stringType, ptr: p.Data, num: uint64(len(v))} +} +func valueOfBytes(v []byte) Value { + p := (*sliceHeader)(unsafe.Pointer(&v)) + return Value{typ: bytesType, ptr: p.Data, num: uint64(len(v))} +} +func valueOfIface(v interface{}) Value { + p := (*ifaceHeader)(unsafe.Pointer(&v)) + return Value{typ: p.Type, ptr: p.Data} +} + +func (v Value) getString() (x string) { + *(*stringHeader)(unsafe.Pointer(&x)) = stringHeader{Data: v.ptr, Len: int(v.num)} + return x +} +func (v Value) getBytes() (x []byte) { + *(*sliceHeader)(unsafe.Pointer(&x)) = sliceHeader{Data: v.ptr, Len: int(v.num), Cap: int(v.num)} + return x +} +func (v Value) getIface() (x interface{}) { + *(*ifaceHeader)(unsafe.Pointer(&x)) = ifaceHeader{Type: v.typ, Data: v.ptr} + return x +} diff --git a/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go b/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go new file mode 100644 index 00000000..66dcbcd0 --- /dev/null +++ b/vendor/google.golang.org/protobuf/reflect/protoregistry/registry.go @@ -0,0 +1,869 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package protoregistry provides data structures to register and lookup +// protobuf descriptor types. +// +// The Files registry contains file descriptors and provides the ability +// to iterate over the files or lookup a specific descriptor within the files. +// Files only contains protobuf descriptors and has no understanding of Go +// type information that may be associated with each descriptor. +// +// The Types registry contains descriptor types for which there is a known +// Go type associated with that descriptor. It provides the ability to iterate +// over the registered types or lookup a type by name. +package protoregistry + +import ( + "fmt" + "os" + "strings" + "sync" + + "google.golang.org/protobuf/internal/encoding/messageset" + "google.golang.org/protobuf/internal/errors" + "google.golang.org/protobuf/internal/flags" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// conflictPolicy configures the policy for handling registration conflicts. +// +// It can be over-written at compile time with a linker-initialized variable: +// go build -ldflags "-X google.golang.org/protobuf/reflect/protoregistry.conflictPolicy=warn" +// +// It can be over-written at program execution with an environment variable: +// GOLANG_PROTOBUF_REGISTRATION_CONFLICT=warn ./main +// +// Neither of the above are covered by the compatibility promise and +// may be removed in a future release of this module. +var conflictPolicy = "panic" // "panic" | "warn" | "ignore" + +// ignoreConflict reports whether to ignore a registration conflict +// given the descriptor being registered and the error. +// It is a variable so that the behavior is easily overridden in another file. 
+var ignoreConflict = func(d protoreflect.Descriptor, err error) bool { + const env = "GOLANG_PROTOBUF_REGISTRATION_CONFLICT" + const faq = "https://developers.google.com/protocol-buffers/docs/reference/go/faq#namespace-conflict" + policy := conflictPolicy + if v := os.Getenv(env); v != "" { + policy = v + } + switch policy { + case "panic": + panic(fmt.Sprintf("%v\nSee %v\n", err, faq)) + case "warn": + fmt.Fprintf(os.Stderr, "WARNING: %v\nSee %v\n\n", err, faq) + return true + case "ignore": + return true + default: + panic("invalid " + env + " value: " + os.Getenv(env)) + } +} + +var globalMutex sync.RWMutex + +// GlobalFiles is a global registry of file descriptors. +var GlobalFiles *Files = new(Files) + +// GlobalTypes is the registry used by default for type lookups +// unless a local registry is provided by the user. +var GlobalTypes *Types = new(Types) + +// NotFound is a sentinel error value to indicate that the type was not found. +// +// Since registry lookup can happen in the critical performance path, resolvers +// must return this exact error value, not an error wrapping it. +var NotFound = errors.New("not found") + +// Files is a registry for looking up or iterating over files and the +// descriptors contained within them. +// The Find and Range methods are safe for concurrent use. +type Files struct { + // The map of descsByName contains: + // EnumDescriptor + // EnumValueDescriptor + // MessageDescriptor + // ExtensionDescriptor + // ServiceDescriptor + // *packageDescriptor + // + // Note that files are stored as a slice, since a package may contain + // multiple files. Only top-level declarations are registered. + // Note that enum values are in the top-level since that are in the same + // scope as the parent enum. + descsByName map[protoreflect.FullName]interface{} + filesByPath map[string]protoreflect.FileDescriptor +} + +type packageDescriptor struct { + files []protoreflect.FileDescriptor +} + +// RegisterFile registers the provided file descriptor. +// +// If any descriptor within the file conflicts with the descriptor of any +// previously registered file (e.g., two enums with the same full name), +// then the file is not registered and an error is returned. +// +// It is permitted for multiple files to have the same file path. 
+func (r *Files) RegisterFile(file protoreflect.FileDescriptor) error { + if r == GlobalFiles { + globalMutex.Lock() + defer globalMutex.Unlock() + } + if r.descsByName == nil { + r.descsByName = map[protoreflect.FullName]interface{}{ + "": &packageDescriptor{}, + } + r.filesByPath = make(map[string]protoreflect.FileDescriptor) + } + path := file.Path() + if prev := r.filesByPath[path]; prev != nil { + r.checkGenProtoConflict(path) + err := errors.New("file %q is already registered", file.Path()) + err = amendErrorWithCaller(err, prev, file) + if r == GlobalFiles && ignoreConflict(file, err) { + err = nil + } + return err + } + + for name := file.Package(); name != ""; name = name.Parent() { + switch prev := r.descsByName[name]; prev.(type) { + case nil, *packageDescriptor: + default: + err := errors.New("file %q has a package name conflict over %v", file.Path(), name) + err = amendErrorWithCaller(err, prev, file) + if r == GlobalFiles && ignoreConflict(file, err) { + err = nil + } + return err + } + } + var err error + var hasConflict bool + rangeTopLevelDescriptors(file, func(d protoreflect.Descriptor) { + if prev := r.descsByName[d.FullName()]; prev != nil { + hasConflict = true + err = errors.New("file %q has a name conflict over %v", file.Path(), d.FullName()) + err = amendErrorWithCaller(err, prev, file) + if r == GlobalFiles && ignoreConflict(d, err) { + err = nil + } + } + }) + if hasConflict { + return err + } + + for name := file.Package(); name != ""; name = name.Parent() { + if r.descsByName[name] == nil { + r.descsByName[name] = &packageDescriptor{} + } + } + p := r.descsByName[file.Package()].(*packageDescriptor) + p.files = append(p.files, file) + rangeTopLevelDescriptors(file, func(d protoreflect.Descriptor) { + r.descsByName[d.FullName()] = d + }) + r.filesByPath[path] = file + return nil +} + +// Several well-known types were hosted in the google.golang.org/genproto module +// but were later moved to this module. To avoid a weak dependency on the +// genproto module (and its relatively large set of transitive dependencies), +// we rely on a registration conflict to determine whether the genproto version +// is too old (i.e., does not contain aliases to the new type declarations). +func (r *Files) checkGenProtoConflict(path string) { + if r != GlobalFiles { + return + } + var prevPath string + const prevModule = "google.golang.org/genproto" + const prevVersion = "cb27e3aa (May 26th, 2020)" + switch path { + case "google/protobuf/field_mask.proto": + prevPath = prevModule + "/protobuf/field_mask" + case "google/protobuf/api.proto": + prevPath = prevModule + "/protobuf/api" + case "google/protobuf/type.proto": + prevPath = prevModule + "/protobuf/ptype" + case "google/protobuf/source_context.proto": + prevPath = prevModule + "/protobuf/source_context" + default: + return + } + pkgName := strings.TrimSuffix(strings.TrimPrefix(path, "google/protobuf/"), ".proto") + pkgName = strings.Replace(pkgName, "_", "", -1) + "pb" // e.g., "field_mask" => "fieldmaskpb" + currPath := "google.golang.org/protobuf/types/known/" + pkgName + panic(fmt.Sprintf(""+ + "duplicate registration of %q\n"+ + "\n"+ + "The generated definition for this file has moved:\n"+ + "\tfrom: %q\n"+ + "\tto: %q\n"+ + "A dependency on the %q module must\n"+ + "be at version %v or higher.\n"+ + "\n"+ + "Upgrade the dependency by running:\n"+ + "\tgo get -u %v\n", + path, prevPath, currPath, prevModule, prevVersion, prevPath)) +} + +// FindDescriptorByName looks up a descriptor by the full name. 
+// +// This returns (nil, NotFound) if not found. +func (r *Files) FindDescriptorByName(name protoreflect.FullName) (protoreflect.Descriptor, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + prefix := name + suffix := nameSuffix("") + for prefix != "" { + if d, ok := r.descsByName[prefix]; ok { + switch d := d.(type) { + case protoreflect.EnumDescriptor: + if d.FullName() == name { + return d, nil + } + case protoreflect.EnumValueDescriptor: + if d.FullName() == name { + return d, nil + } + case protoreflect.MessageDescriptor: + if d.FullName() == name { + return d, nil + } + if d := findDescriptorInMessage(d, suffix); d != nil && d.FullName() == name { + return d, nil + } + case protoreflect.ExtensionDescriptor: + if d.FullName() == name { + return d, nil + } + case protoreflect.ServiceDescriptor: + if d.FullName() == name { + return d, nil + } + if d := d.Methods().ByName(suffix.Pop()); d != nil && d.FullName() == name { + return d, nil + } + } + return nil, NotFound + } + prefix = prefix.Parent() + suffix = nameSuffix(name[len(prefix)+len("."):]) + } + return nil, NotFound +} + +func findDescriptorInMessage(md protoreflect.MessageDescriptor, suffix nameSuffix) protoreflect.Descriptor { + name := suffix.Pop() + if suffix == "" { + if ed := md.Enums().ByName(name); ed != nil { + return ed + } + for i := md.Enums().Len() - 1; i >= 0; i-- { + if vd := md.Enums().Get(i).Values().ByName(name); vd != nil { + return vd + } + } + if xd := md.Extensions().ByName(name); xd != nil { + return xd + } + if fd := md.Fields().ByName(name); fd != nil { + return fd + } + if od := md.Oneofs().ByName(name); od != nil { + return od + } + } + if md := md.Messages().ByName(name); md != nil { + if suffix == "" { + return md + } + return findDescriptorInMessage(md, suffix) + } + return nil +} + +type nameSuffix string + +func (s *nameSuffix) Pop() (name protoreflect.Name) { + if i := strings.IndexByte(string(*s), '.'); i >= 0 { + name, *s = protoreflect.Name((*s)[:i]), (*s)[i+1:] + } else { + name, *s = protoreflect.Name((*s)), "" + } + return name +} + +// FindFileByPath looks up a file by the path. +// +// This returns (nil, NotFound) if not found. +func (r *Files) FindFileByPath(path string) (protoreflect.FileDescriptor, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if fd, ok := r.filesByPath[path]; ok { + return fd, nil + } + return nil, NotFound +} + +// NumFiles reports the number of registered files. +func (r *Files) NumFiles() int { + if r == nil { + return 0 + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + return len(r.filesByPath) +} + +// RangeFiles iterates over all registered files while f returns true. +// The iteration order is undefined. +func (r *Files) RangeFiles(f func(protoreflect.FileDescriptor) bool) { + if r == nil { + return + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + for _, file := range r.filesByPath { + if !f(file) { + return + } + } +} + +// NumFilesByPackage reports the number of registered files in a proto package. 
+func (r *Files) NumFilesByPackage(name protoreflect.FullName) int { + if r == nil { + return 0 + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + p, ok := r.descsByName[name].(*packageDescriptor) + if !ok { + return 0 + } + return len(p.files) +} + +// RangeFilesByPackage iterates over all registered files in a given proto package +// while f returns true. The iteration order is undefined. +func (r *Files) RangeFilesByPackage(name protoreflect.FullName, f func(protoreflect.FileDescriptor) bool) { + if r == nil { + return + } + if r == GlobalFiles { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + p, ok := r.descsByName[name].(*packageDescriptor) + if !ok { + return + } + for _, file := range p.files { + if !f(file) { + return + } + } +} + +// rangeTopLevelDescriptors iterates over all top-level descriptors in a file +// which will be directly entered into the registry. +func rangeTopLevelDescriptors(fd protoreflect.FileDescriptor, f func(protoreflect.Descriptor)) { + eds := fd.Enums() + for i := eds.Len() - 1; i >= 0; i-- { + f(eds.Get(i)) + vds := eds.Get(i).Values() + for i := vds.Len() - 1; i >= 0; i-- { + f(vds.Get(i)) + } + } + mds := fd.Messages() + for i := mds.Len() - 1; i >= 0; i-- { + f(mds.Get(i)) + } + xds := fd.Extensions() + for i := xds.Len() - 1; i >= 0; i-- { + f(xds.Get(i)) + } + sds := fd.Services() + for i := sds.Len() - 1; i >= 0; i-- { + f(sds.Get(i)) + } +} + +// MessageTypeResolver is an interface for looking up messages. +// +// A compliant implementation must deterministically return the same type +// if no error is encountered. +// +// The Types type implements this interface. +type MessageTypeResolver interface { + // FindMessageByName looks up a message by its full name. + // E.g., "google.protobuf.Any" + // + // This return (nil, NotFound) if not found. + FindMessageByName(message protoreflect.FullName) (protoreflect.MessageType, error) + + // FindMessageByURL looks up a message by a URL identifier. + // See documentation on google.protobuf.Any.type_url for the URL format. + // + // This returns (nil, NotFound) if not found. + FindMessageByURL(url string) (protoreflect.MessageType, error) +} + +// ExtensionTypeResolver is an interface for looking up extensions. +// +// A compliant implementation must deterministically return the same type +// if no error is encountered. +// +// The Types type implements this interface. +type ExtensionTypeResolver interface { + // FindExtensionByName looks up a extension field by the field's full name. + // Note that this is the full name of the field as determined by + // where the extension is declared and is unrelated to the full name of the + // message being extended. + // + // This returns (nil, NotFound) if not found. + FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) + + // FindExtensionByNumber looks up a extension field by the field number + // within some parent message, identified by full name. + // + // This returns (nil, NotFound) if not found. + FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) +} + +var ( + _ MessageTypeResolver = (*Types)(nil) + _ ExtensionTypeResolver = (*Types)(nil) +) + +// Types is a registry for looking up or iterating over descriptor types. +// The Find and Range methods are safe for concurrent use. 
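// Editorial sketch, not part of the vendored file: read-side use of the Files registry
// defined above. Lookups that miss return the NotFound sentinel, which is compared
// directly rather than unwrapped. The blank import of descriptorpb (assumed available)
// is what places descriptor.proto into GlobalFiles at init time.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoreflect"
	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/descriptorpb" // registers descriptor.proto
)

func main() {
	fd, err := protoregistry.GlobalFiles.FindFileByPath("google/protobuf/descriptor.proto")
	if err == protoregistry.NotFound {
		fmt.Println("descriptor.proto is not registered in this binary")
		return
	}
	fmt.Println("package:", fd.Package(), "messages:", fd.Messages().Len())

	// Top-level and nested declarations can also be resolved by full name.
	if d, err := protoregistry.GlobalFiles.FindDescriptorByName("google.protobuf.FieldDescriptorProto"); err == nil {
		fmt.Println("found:", d.FullName())
	}

	protoregistry.GlobalFiles.RangeFilesByPackage("google.protobuf", func(f protoreflect.FileDescriptor) bool {
		fmt.Println("file:", f.Path())
		return true
	})
}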
+type Types struct { + typesByName typesByName + extensionsByMessage extensionsByMessage + + numEnums int + numMessages int + numExtensions int +} + +type ( + typesByName map[protoreflect.FullName]interface{} + extensionsByMessage map[protoreflect.FullName]extensionsByNumber + extensionsByNumber map[protoreflect.FieldNumber]protoreflect.ExtensionType +) + +// RegisterMessage registers the provided message type. +// +// If a naming conflict occurs, the type is not registered and an error is returned. +func (r *Types) RegisterMessage(mt protoreflect.MessageType) error { + // Under rare circumstances getting the descriptor might recursively + // examine the registry, so fetch it before locking. + md := mt.Descriptor() + + if r == GlobalTypes { + globalMutex.Lock() + defer globalMutex.Unlock() + } + + if err := r.register("message", md, mt); err != nil { + return err + } + r.numMessages++ + return nil +} + +// RegisterEnum registers the provided enum type. +// +// If a naming conflict occurs, the type is not registered and an error is returned. +func (r *Types) RegisterEnum(et protoreflect.EnumType) error { + // Under rare circumstances getting the descriptor might recursively + // examine the registry, so fetch it before locking. + ed := et.Descriptor() + + if r == GlobalTypes { + globalMutex.Lock() + defer globalMutex.Unlock() + } + + if err := r.register("enum", ed, et); err != nil { + return err + } + r.numEnums++ + return nil +} + +// RegisterExtension registers the provided extension type. +// +// If a naming conflict occurs, the type is not registered and an error is returned. +func (r *Types) RegisterExtension(xt protoreflect.ExtensionType) error { + // Under rare circumstances getting the descriptor might recursively + // examine the registry, so fetch it before locking. + // + // A known case where this can happen: Fetching the TypeDescriptor for a + // legacy ExtensionDesc can consult the global registry. + xd := xt.TypeDescriptor() + + if r == GlobalTypes { + globalMutex.Lock() + defer globalMutex.Unlock() + } + + field := xd.Number() + message := xd.ContainingMessage().FullName() + if prev := r.extensionsByMessage[message][field]; prev != nil { + err := errors.New("extension number %d is already registered on message %v", field, message) + err = amendErrorWithCaller(err, prev, xt) + if !(r == GlobalTypes && ignoreConflict(xd, err)) { + return err + } + } + + if err := r.register("extension", xd, xt); err != nil { + return err + } + if r.extensionsByMessage == nil { + r.extensionsByMessage = make(extensionsByMessage) + } + if r.extensionsByMessage[message] == nil { + r.extensionsByMessage[message] = make(extensionsByNumber) + } + r.extensionsByMessage[message][field] = xt + r.numExtensions++ + return nil +} + +func (r *Types) register(kind string, desc protoreflect.Descriptor, typ interface{}) error { + name := desc.FullName() + prev := r.typesByName[name] + if prev != nil { + err := errors.New("%v %v is already registered", kind, name) + err = amendErrorWithCaller(err, prev, typ) + if !(r == GlobalTypes && ignoreConflict(desc, err)) { + return err + } + } + if r.typesByName == nil { + r.typesByName = make(typesByName) + } + r.typesByName[name] = typ + return nil +} + +// FindEnumByName looks up an enum by its full name. +// E.g., "google.protobuf.Field.Kind". +// +// This returns (nil, NotFound) if not found. 
+func (r *Types) FindEnumByName(enum protoreflect.FullName) (protoreflect.EnumType, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if v := r.typesByName[enum]; v != nil { + if et, _ := v.(protoreflect.EnumType); et != nil { + return et, nil + } + return nil, errors.New("found wrong type: got %v, want enum", typeName(v)) + } + return nil, NotFound +} + +// FindMessageByName looks up a message by its full name, +// e.g. "google.protobuf.Any". +// +// This returns (nil, NotFound) if not found. +func (r *Types) FindMessageByName(message protoreflect.FullName) (protoreflect.MessageType, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if v := r.typesByName[message]; v != nil { + if mt, _ := v.(protoreflect.MessageType); mt != nil { + return mt, nil + } + return nil, errors.New("found wrong type: got %v, want message", typeName(v)) + } + return nil, NotFound +} + +// FindMessageByURL looks up a message by a URL identifier. +// See documentation on google.protobuf.Any.type_url for the URL format. +// +// This returns (nil, NotFound) if not found. +func (r *Types) FindMessageByURL(url string) (protoreflect.MessageType, error) { + // This function is similar to FindMessageByName but + // truncates anything before and including '/' in the URL. + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + message := protoreflect.FullName(url) + if i := strings.LastIndexByte(url, '/'); i >= 0 { + message = message[i+len("/"):] + } + + if v := r.typesByName[message]; v != nil { + if mt, _ := v.(protoreflect.MessageType); mt != nil { + return mt, nil + } + return nil, errors.New("found wrong type: got %v, want message", typeName(v)) + } + return nil, NotFound +} + +// FindExtensionByName looks up a extension field by the field's full name. +// Note that this is the full name of the field as determined by +// where the extension is declared and is unrelated to the full name of the +// message being extended. +// +// This returns (nil, NotFound) if not found. +func (r *Types) FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if v := r.typesByName[field]; v != nil { + if xt, _ := v.(protoreflect.ExtensionType); xt != nil { + return xt, nil + } + + // MessageSet extensions are special in that the name of the extension + // is the name of the message type used to extend the MessageSet. + // This naming scheme is used by text and JSON serialization. + // + // This feature is protected by the ProtoLegacy flag since MessageSets + // are a proto1 feature that is long deprecated. + if flags.ProtoLegacy { + if _, ok := v.(protoreflect.MessageType); ok { + field := field.Append(messageset.ExtensionName) + if v := r.typesByName[field]; v != nil { + if xt, _ := v.(protoreflect.ExtensionType); xt != nil { + if messageset.IsMessageSetExtension(xt.TypeDescriptor()) { + return xt, nil + } + } + } + } + } + + return nil, errors.New("found wrong type: got %v, want extension", typeName(v)) + } + return nil, NotFound +} + +// FindExtensionByNumber looks up a extension field by the field number +// within some parent message, identified by full name. +// +// This returns (nil, NotFound) if not found. 
+func (r *Types) FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) { + if r == nil { + return nil, NotFound + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + if xt, ok := r.extensionsByMessage[message][field]; ok { + return xt, nil + } + return nil, NotFound +} + +// NumEnums reports the number of registered enums. +func (r *Types) NumEnums() int { + if r == nil { + return 0 + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + return r.numEnums +} + +// RangeEnums iterates over all registered enums while f returns true. +// Iteration order is undefined. +func (r *Types) RangeEnums(f func(protoreflect.EnumType) bool) { + if r == nil { + return + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + for _, typ := range r.typesByName { + if et, ok := typ.(protoreflect.EnumType); ok { + if !f(et) { + return + } + } + } +} + +// NumMessages reports the number of registered messages. +func (r *Types) NumMessages() int { + if r == nil { + return 0 + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + return r.numMessages +} + +// RangeMessages iterates over all registered messages while f returns true. +// Iteration order is undefined. +func (r *Types) RangeMessages(f func(protoreflect.MessageType) bool) { + if r == nil { + return + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + for _, typ := range r.typesByName { + if mt, ok := typ.(protoreflect.MessageType); ok { + if !f(mt) { + return + } + } + } +} + +// NumExtensions reports the number of registered extensions. +func (r *Types) NumExtensions() int { + if r == nil { + return 0 + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + return r.numExtensions +} + +// RangeExtensions iterates over all registered extensions while f returns true. +// Iteration order is undefined. +func (r *Types) RangeExtensions(f func(protoreflect.ExtensionType) bool) { + if r == nil { + return + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + for _, typ := range r.typesByName { + if xt, ok := typ.(protoreflect.ExtensionType); ok { + if !f(xt) { + return + } + } + } +} + +// NumExtensionsByMessage reports the number of registered extensions for +// a given message type. +func (r *Types) NumExtensionsByMessage(message protoreflect.FullName) int { + if r == nil { + return 0 + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + return len(r.extensionsByMessage[message]) +} + +// RangeExtensionsByMessage iterates over all registered extensions filtered +// by a given message type while f returns true. Iteration order is undefined. 
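// Editorial sketch, not part of the vendored file: resolving Go types through the Types
// registry above, as a consumer of google.protobuf.Any type URLs would. It assumes the
// anypb package (or any generated code registering the type) is linked into the binary.
package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	_ "google.golang.org/protobuf/types/known/anypb" // registers google.protobuf.Any
)

func main() {
	mt, err := protoregistry.GlobalTypes.FindMessageByURL("type.googleapis.com/google.protobuf.Any")
	if err == protoregistry.NotFound {
		fmt.Println("google.protobuf.Any is not linked into this binary")
		return
	}
	fmt.Printf("resolved %T\n", mt.New().Interface()) // a fresh, empty message of that type

	// Extensions are looked up by the extended message's full name plus field number.
	if _, err := protoregistry.GlobalTypes.FindExtensionByNumber("google.protobuf.FileOptions", 1000); err == protoregistry.NotFound {
		fmt.Println("no extension numbered 1000 is registered on FileOptions")
	}
}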
+func (r *Types) RangeExtensionsByMessage(message protoreflect.FullName, f func(protoreflect.ExtensionType) bool) { + if r == nil { + return + } + if r == GlobalTypes { + globalMutex.RLock() + defer globalMutex.RUnlock() + } + for _, xt := range r.extensionsByMessage[message] { + if !f(xt) { + return + } + } +} + +func typeName(t interface{}) string { + switch t.(type) { + case protoreflect.EnumType: + return "enum" + case protoreflect.MessageType: + return "message" + case protoreflect.ExtensionType: + return "extension" + default: + return fmt.Sprintf("%T", t) + } +} + +func amendErrorWithCaller(err error, prev, curr interface{}) error { + prevPkg := goPackage(prev) + currPkg := goPackage(curr) + if prevPkg == "" || currPkg == "" || prevPkg == currPkg { + return err + } + return errors.New("%s\n\tpreviously from: %q\n\tcurrently from: %q", err, prevPkg, currPkg) +} + +func goPackage(v interface{}) string { + switch d := v.(type) { + case protoreflect.EnumType: + v = d.Descriptor() + case protoreflect.MessageType: + v = d.Descriptor() + case protoreflect.ExtensionType: + v = d.TypeDescriptor() + } + if d, ok := v.(protoreflect.Descriptor); ok { + v = d.ParentFile() + } + if d, ok := v.(interface{ GoPackagePath() string }); ok { + return d.GoPackagePath() + } + return "" +} diff --git a/vendor/google.golang.org/protobuf/runtime/protoiface/legacy.go b/vendor/google.golang.org/protobuf/runtime/protoiface/legacy.go new file mode 100644 index 00000000..c5872767 --- /dev/null +++ b/vendor/google.golang.org/protobuf/runtime/protoiface/legacy.go @@ -0,0 +1,15 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoiface + +type MessageV1 interface { + Reset() + String() string + ProtoMessage() +} + +type ExtensionRangeV1 struct { + Start, End int32 // both inclusive +} diff --git a/vendor/google.golang.org/protobuf/runtime/protoiface/methods.go b/vendor/google.golang.org/protobuf/runtime/protoiface/methods.go new file mode 100644 index 00000000..32c04f67 --- /dev/null +++ b/vendor/google.golang.org/protobuf/runtime/protoiface/methods.go @@ -0,0 +1,167 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package protoiface contains types referenced or implemented by messages. +// +// WARNING: This package should only be imported by message implementations. +// The functionality found in this package should be accessed through +// higher-level abstractions provided by the proto package. +package protoiface + +import ( + "google.golang.org/protobuf/internal/pragma" + "google.golang.org/protobuf/reflect/protoreflect" +) + +// Methods is a set of optional fast-path implementations of various operations. +type Methods = struct { + pragma.NoUnkeyedLiterals + + // Flags indicate support for optional features. + Flags SupportFlags + + // Size returns the size in bytes of the wire-format encoding of a message. + // Marshal must be provided if a custom Size is provided. + Size func(SizeInput) SizeOutput + + // Marshal formats a message in the wire-format encoding to the provided buffer. + // Size should be provided if a custom Marshal is provided. + // It must not return an error for a partial message. + Marshal func(MarshalInput) (MarshalOutput, error) + + // Unmarshal parses the wire-format encoding and merges the result into a message. 
+ // It must not reset the target message or return an error for a partial message. + Unmarshal func(UnmarshalInput) (UnmarshalOutput, error) + + // Merge merges the contents of a source message into a destination message. + Merge func(MergeInput) MergeOutput + + // CheckInitialized returns an error if any required fields in the message are not set. + CheckInitialized func(CheckInitializedInput) (CheckInitializedOutput, error) +} + +// SupportFlags indicate support for optional features. +type SupportFlags = uint64 + +const ( + // SupportMarshalDeterministic reports whether MarshalOptions.Deterministic is supported. + SupportMarshalDeterministic SupportFlags = 1 << iota + + // SupportUnmarshalDiscardUnknown reports whether UnmarshalOptions.DiscardUnknown is supported. + SupportUnmarshalDiscardUnknown +) + +// SizeInput is input to the Size method. +type SizeInput = struct { + pragma.NoUnkeyedLiterals + + Message protoreflect.Message + Flags MarshalInputFlags +} + +// SizeOutput is output from the Size method. +type SizeOutput = struct { + pragma.NoUnkeyedLiterals + + Size int +} + +// MarshalInput is input to the Marshal method. +type MarshalInput = struct { + pragma.NoUnkeyedLiterals + + Message protoreflect.Message + Buf []byte // output is appended to this buffer + Flags MarshalInputFlags +} + +// MarshalOutput is output from the Marshal method. +type MarshalOutput = struct { + pragma.NoUnkeyedLiterals + + Buf []byte // contains marshaled message +} + +// MarshalInputFlags configure the marshaler. +// Most flags correspond to fields in proto.MarshalOptions. +type MarshalInputFlags = uint8 + +const ( + MarshalDeterministic MarshalInputFlags = 1 << iota + MarshalUseCachedSize +) + +// UnmarshalInput is input to the Unmarshal method. +type UnmarshalInput = struct { + pragma.NoUnkeyedLiterals + + Message protoreflect.Message + Buf []byte // input buffer + Flags UnmarshalInputFlags + Resolver interface { + FindExtensionByName(field protoreflect.FullName) (protoreflect.ExtensionType, error) + FindExtensionByNumber(message protoreflect.FullName, field protoreflect.FieldNumber) (protoreflect.ExtensionType, error) + } +} + +// UnmarshalOutput is output from the Unmarshal method. +type UnmarshalOutput = struct { + pragma.NoUnkeyedLiterals + + Flags UnmarshalOutputFlags +} + +// UnmarshalInputFlags configure the unmarshaler. +// Most flags correspond to fields in proto.UnmarshalOptions. +type UnmarshalInputFlags = uint8 + +const ( + UnmarshalDiscardUnknown UnmarshalInputFlags = 1 << iota +) + +// UnmarshalOutputFlags are output from the Unmarshal method. +type UnmarshalOutputFlags = uint8 + +const ( + // UnmarshalInitialized may be set on return if all required fields are known to be set. + // If unset, then it does not necessarily indicate that the message is uninitialized, + // only that its status could not be confirmed. + UnmarshalInitialized UnmarshalOutputFlags = 1 << iota +) + +// MergeInput is input to the Merge method. +type MergeInput = struct { + pragma.NoUnkeyedLiterals + + Source protoreflect.Message + Destination protoreflect.Message +} + +// MergeOutput is output from the Merge method. +type MergeOutput = struct { + pragma.NoUnkeyedLiterals + + Flags MergeOutputFlags +} + +// MergeOutputFlags are output from the Merge method. +type MergeOutputFlags = uint8 + +const ( + // MergeComplete reports whether the merge was performed. + // If unset, the merger must have made no changes to the destination. 
+ MergeComplete MergeOutputFlags = 1 << iota +) + +// CheckInitializedInput is input to the CheckInitialized method. +type CheckInitializedInput = struct { + pragma.NoUnkeyedLiterals + + Message protoreflect.Message +} + +// CheckInitializedOutput is output from the CheckInitialized method. +type CheckInitializedOutput = struct { + pragma.NoUnkeyedLiterals +} diff --git a/vendor/google.golang.org/protobuf/runtime/protoimpl/impl.go b/vendor/google.golang.org/protobuf/runtime/protoimpl/impl.go new file mode 100644 index 00000000..4a1ab7fb --- /dev/null +++ b/vendor/google.golang.org/protobuf/runtime/protoimpl/impl.go @@ -0,0 +1,44 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package protoimpl contains the default implementation for messages +// generated by protoc-gen-go. +// +// WARNING: This package should only ever be imported by generated messages. +// The compatibility agreement covers nothing except for functionality needed +// to keep existing generated messages operational. Breakages that occur due +// to unauthorized usages of this package are not the author's responsibility. +package protoimpl + +import ( + "google.golang.org/protobuf/internal/filedesc" + "google.golang.org/protobuf/internal/filetype" + "google.golang.org/protobuf/internal/impl" +) + +// UnsafeEnabled specifies whether package unsafe can be used. +const UnsafeEnabled = impl.UnsafeEnabled + +type ( + // Types used by generated code in init functions. + DescBuilder = filedesc.Builder + TypeBuilder = filetype.Builder + + // Types used by generated code to implement EnumType, MessageType, and ExtensionType. + EnumInfo = impl.EnumInfo + MessageInfo = impl.MessageInfo + ExtensionInfo = impl.ExtensionInfo + + // Types embedded in generated messages. + MessageState = impl.MessageState + SizeCache = impl.SizeCache + WeakFields = impl.WeakFields + UnknownFields = impl.UnknownFields + ExtensionFields = impl.ExtensionFields + ExtensionFieldV1 = impl.ExtensionField + + Pointer = impl.Pointer +) + +var X impl.Export diff --git a/vendor/google.golang.org/protobuf/runtime/protoimpl/version.go b/vendor/google.golang.org/protobuf/runtime/protoimpl/version.go new file mode 100644 index 00000000..ff094e1b --- /dev/null +++ b/vendor/google.golang.org/protobuf/runtime/protoimpl/version.go @@ -0,0 +1,56 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package protoimpl + +import ( + "google.golang.org/protobuf/internal/version" +) + +const ( + // MaxVersion is the maximum supported version for generated .pb.go files. + // It is always the current version of the module. + MaxVersion = version.Minor + + // GenVersion is the runtime version required by generated .pb.go files. + // This is incremented when generated code relies on new functionality + // in the runtime. + GenVersion = 20 + + // MinVersion is the minimum supported version for generated .pb.go files. + // This is incremented when the runtime drops support for old code. + MinVersion = 0 +) + +// EnforceVersion is used by code generated by protoc-gen-go +// to statically enforce minimum and maximum versions of this package. +// A compilation failure implies either that: +// * the runtime package is too old and needs to be updated OR +// * the generated code is too old and needs to be regenerated. 
+// +// The runtime package can be upgraded by running: +// go get google.golang.org/protobuf +// +// The generated code can be regenerated by running: +// protoc --go_out=${PROTOC_GEN_GO_ARGS} ${PROTO_FILES} +// +// Example usage by generated code: +// const ( +// // Verify that this generated code is sufficiently up-to-date. +// _ = protoimpl.EnforceVersion(genVersion - protoimpl.MinVersion) +// // Verify that runtime/protoimpl is sufficiently up-to-date. +// _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - genVersion) +// ) +// +// The genVersion is the current minor version used to generated the code. +// This compile-time check relies on negative integer overflow of a uint +// being a compilation failure (guaranteed by the Go specification). +type EnforceVersion uint + +// This enforces the following invariant: +// MinVersion ≤ GenVersion ≤ MaxVersion +const ( + _ = EnforceVersion(GenVersion - MinVersion) + _ = EnforceVersion(MaxVersion - GenVersion) +) diff --git a/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go b/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go new file mode 100644 index 00000000..f77239fc --- /dev/null +++ b/vendor/google.golang.org/protobuf/types/descriptorpb/descriptor.pb.go @@ -0,0 +1,4039 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/protobuf/descriptor.proto + +package descriptorpb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoiface "google.golang.org/protobuf/runtime/protoiface" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +type FieldDescriptorProto_Type int32 + +const ( + // 0 is reserved for errors. + // Order is weird for historical reasons. + FieldDescriptorProto_TYPE_DOUBLE FieldDescriptorProto_Type = 1 + FieldDescriptorProto_TYPE_FLOAT FieldDescriptorProto_Type = 2 + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + FieldDescriptorProto_TYPE_INT64 FieldDescriptorProto_Type = 3 + FieldDescriptorProto_TYPE_UINT64 FieldDescriptorProto_Type = 4 + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + FieldDescriptorProto_TYPE_INT32 FieldDescriptorProto_Type = 5 + FieldDescriptorProto_TYPE_FIXED64 FieldDescriptorProto_Type = 6 + FieldDescriptorProto_TYPE_FIXED32 FieldDescriptorProto_Type = 7 + FieldDescriptorProto_TYPE_BOOL FieldDescriptorProto_Type = 8 + FieldDescriptorProto_TYPE_STRING FieldDescriptorProto_Type = 9 + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + FieldDescriptorProto_TYPE_GROUP FieldDescriptorProto_Type = 10 + FieldDescriptorProto_TYPE_MESSAGE FieldDescriptorProto_Type = 11 // Length-delimited aggregate. + // New in version 2. + FieldDescriptorProto_TYPE_BYTES FieldDescriptorProto_Type = 12 + FieldDescriptorProto_TYPE_UINT32 FieldDescriptorProto_Type = 13 + FieldDescriptorProto_TYPE_ENUM FieldDescriptorProto_Type = 14 + FieldDescriptorProto_TYPE_SFIXED32 FieldDescriptorProto_Type = 15 + FieldDescriptorProto_TYPE_SFIXED64 FieldDescriptorProto_Type = 16 + FieldDescriptorProto_TYPE_SINT32 FieldDescriptorProto_Type = 17 // Uses ZigZag encoding. + FieldDescriptorProto_TYPE_SINT64 FieldDescriptorProto_Type = 18 // Uses ZigZag encoding. +) + +// Enum value maps for FieldDescriptorProto_Type. 
+var ( + FieldDescriptorProto_Type_name = map[int32]string{ + 1: "TYPE_DOUBLE", + 2: "TYPE_FLOAT", + 3: "TYPE_INT64", + 4: "TYPE_UINT64", + 5: "TYPE_INT32", + 6: "TYPE_FIXED64", + 7: "TYPE_FIXED32", + 8: "TYPE_BOOL", + 9: "TYPE_STRING", + 10: "TYPE_GROUP", + 11: "TYPE_MESSAGE", + 12: "TYPE_BYTES", + 13: "TYPE_UINT32", + 14: "TYPE_ENUM", + 15: "TYPE_SFIXED32", + 16: "TYPE_SFIXED64", + 17: "TYPE_SINT32", + 18: "TYPE_SINT64", + } + FieldDescriptorProto_Type_value = map[string]int32{ + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18, + } +) + +func (x FieldDescriptorProto_Type) Enum() *FieldDescriptorProto_Type { + p := new(FieldDescriptorProto_Type) + *p = x + return p +} + +func (x FieldDescriptorProto_Type) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FieldDescriptorProto_Type) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[0].Descriptor() +} + +func (FieldDescriptorProto_Type) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[0] +} + +func (x FieldDescriptorProto_Type) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *FieldDescriptorProto_Type) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FieldDescriptorProto_Type(num) + return nil +} + +// Deprecated: Use FieldDescriptorProto_Type.Descriptor instead. +func (FieldDescriptorProto_Type) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{4, 0} +} + +type FieldDescriptorProto_Label int32 + +const ( + // 0 is reserved for errors + FieldDescriptorProto_LABEL_OPTIONAL FieldDescriptorProto_Label = 1 + FieldDescriptorProto_LABEL_REQUIRED FieldDescriptorProto_Label = 2 + FieldDescriptorProto_LABEL_REPEATED FieldDescriptorProto_Label = 3 +) + +// Enum value maps for FieldDescriptorProto_Label. +var ( + FieldDescriptorProto_Label_name = map[int32]string{ + 1: "LABEL_OPTIONAL", + 2: "LABEL_REQUIRED", + 3: "LABEL_REPEATED", + } + FieldDescriptorProto_Label_value = map[string]int32{ + "LABEL_OPTIONAL": 1, + "LABEL_REQUIRED": 2, + "LABEL_REPEATED": 3, + } +) + +func (x FieldDescriptorProto_Label) Enum() *FieldDescriptorProto_Label { + p := new(FieldDescriptorProto_Label) + *p = x + return p +} + +func (x FieldDescriptorProto_Label) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FieldDescriptorProto_Label) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[1].Descriptor() +} + +func (FieldDescriptorProto_Label) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[1] +} + +func (x FieldDescriptorProto_Label) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. 
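The enum plumbing above (the exported name/value maps plus the String, Number, and Descriptor methods) is what callers use for pretty-printing and table lookups. A small sketch, assuming the file is imported as the descriptorpb package named in the diff header:

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	t := descriptorpb.FieldDescriptorProto_TYPE_SINT64

	// String() resolves through the enum descriptor registered by this file.
	fmt.Println(t, t.Number()) // TYPE_SINT64 18

	// The generated maps allow direct name/number lookups in either direction.
	fmt.Println(descriptorpb.FieldDescriptorProto_Type_value["TYPE_BOOL"]) // 8
	fmt.Println(descriptorpb.FieldDescriptorProto_Type_name[12])           // TYPE_BYTES
}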
+func (x *FieldDescriptorProto_Label) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FieldDescriptorProto_Label(num) + return nil +} + +// Deprecated: Use FieldDescriptorProto_Label.Descriptor instead. +func (FieldDescriptorProto_Label) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{4, 1} +} + +// Generated classes can be optimized for speed or code size. +type FileOptions_OptimizeMode int32 + +const ( + FileOptions_SPEED FileOptions_OptimizeMode = 1 // Generate complete code for parsing, serialization, + // etc. + FileOptions_CODE_SIZE FileOptions_OptimizeMode = 2 // Use ReflectionOps to implement these methods. + FileOptions_LITE_RUNTIME FileOptions_OptimizeMode = 3 // Generate code using MessageLite and the lite runtime. +) + +// Enum value maps for FileOptions_OptimizeMode. +var ( + FileOptions_OptimizeMode_name = map[int32]string{ + 1: "SPEED", + 2: "CODE_SIZE", + 3: "LITE_RUNTIME", + } + FileOptions_OptimizeMode_value = map[string]int32{ + "SPEED": 1, + "CODE_SIZE": 2, + "LITE_RUNTIME": 3, + } +) + +func (x FileOptions_OptimizeMode) Enum() *FileOptions_OptimizeMode { + p := new(FileOptions_OptimizeMode) + *p = x + return p +} + +func (x FileOptions_OptimizeMode) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FileOptions_OptimizeMode) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[2].Descriptor() +} + +func (FileOptions_OptimizeMode) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[2] +} + +func (x FileOptions_OptimizeMode) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *FileOptions_OptimizeMode) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FileOptions_OptimizeMode(num) + return nil +} + +// Deprecated: Use FileOptions_OptimizeMode.Descriptor instead. +func (FileOptions_OptimizeMode) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{10, 0} +} + +type FieldOptions_CType int32 + +const ( + // Default mode. + FieldOptions_STRING FieldOptions_CType = 0 + FieldOptions_CORD FieldOptions_CType = 1 + FieldOptions_STRING_PIECE FieldOptions_CType = 2 +) + +// Enum value maps for FieldOptions_CType. +var ( + FieldOptions_CType_name = map[int32]string{ + 0: "STRING", + 1: "CORD", + 2: "STRING_PIECE", + } + FieldOptions_CType_value = map[string]int32{ + "STRING": 0, + "CORD": 1, + "STRING_PIECE": 2, + } +) + +func (x FieldOptions_CType) Enum() *FieldOptions_CType { + p := new(FieldOptions_CType) + *p = x + return p +} + +func (x FieldOptions_CType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FieldOptions_CType) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[3].Descriptor() +} + +func (FieldOptions_CType) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[3] +} + +func (x FieldOptions_CType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. 
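optimize_for above is a proto2 optional enum field on FileOptions, so the generated Go API passes pointers and its getter falls back to the declared default. A short sketch under that assumption:

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	opts := &descriptorpb.FileOptions{
		// Enum() returns the pointer form a proto2 optional enum field expects.
		OptimizeFor: descriptorpb.FileOptions_LITE_RUNTIME.Enum(),
	}
	fmt.Println(opts.GetOptimizeFor()) // LITE_RUNTIME

	// Left unset, the getter reports the descriptor default for optimize_for.
	fmt.Println((&descriptorpb.FileOptions{}).GetOptimizeFor()) // SPEED
}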
+func (x *FieldOptions_CType) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FieldOptions_CType(num) + return nil +} + +// Deprecated: Use FieldOptions_CType.Descriptor instead. +func (FieldOptions_CType) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{12, 0} +} + +type FieldOptions_JSType int32 + +const ( + // Use the default type. + FieldOptions_JS_NORMAL FieldOptions_JSType = 0 + // Use JavaScript strings. + FieldOptions_JS_STRING FieldOptions_JSType = 1 + // Use JavaScript numbers. + FieldOptions_JS_NUMBER FieldOptions_JSType = 2 +) + +// Enum value maps for FieldOptions_JSType. +var ( + FieldOptions_JSType_name = map[int32]string{ + 0: "JS_NORMAL", + 1: "JS_STRING", + 2: "JS_NUMBER", + } + FieldOptions_JSType_value = map[string]int32{ + "JS_NORMAL": 0, + "JS_STRING": 1, + "JS_NUMBER": 2, + } +) + +func (x FieldOptions_JSType) Enum() *FieldOptions_JSType { + p := new(FieldOptions_JSType) + *p = x + return p +} + +func (x FieldOptions_JSType) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (FieldOptions_JSType) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[4].Descriptor() +} + +func (FieldOptions_JSType) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[4] +} + +func (x FieldOptions_JSType) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *FieldOptions_JSType) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = FieldOptions_JSType(num) + return nil +} + +// Deprecated: Use FieldOptions_JSType.Descriptor instead. +func (FieldOptions_JSType) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{12, 1} +} + +// Is this method side-effect-free (or safe in HTTP parlance), or idempotent, +// or neither? HTTP based RPC implementation may choose GET verb for safe +// methods, and PUT verb for idempotent methods instead of the default POST. +type MethodOptions_IdempotencyLevel int32 + +const ( + MethodOptions_IDEMPOTENCY_UNKNOWN MethodOptions_IdempotencyLevel = 0 + MethodOptions_NO_SIDE_EFFECTS MethodOptions_IdempotencyLevel = 1 // implies idempotent + MethodOptions_IDEMPOTENT MethodOptions_IdempotencyLevel = 2 // idempotent, but may have side effects +) + +// Enum value maps for MethodOptions_IdempotencyLevel. 
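idempotency_level is how a method advertises retry and HTTP-verb safety. A hedged sketch of branching on it; the mapping to GET or a retry policy is an application decision, not something this runtime performs:

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	mo := &descriptorpb.MethodOptions{
		IdempotencyLevel: descriptorpb.MethodOptions_NO_SIDE_EFFECTS.Enum(),
	}
	switch mo.GetIdempotencyLevel() {
	case descriptorpb.MethodOptions_NO_SIDE_EFFECTS:
		fmt.Println("safe: may be mapped to GET and retried freely")
	case descriptorpb.MethodOptions_IDEMPOTENT:
		fmt.Println("idempotent: retryable, but may have side effects")
	default:
		fmt.Println("unknown: treat as a plain POST")
	}
}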
+var ( + MethodOptions_IdempotencyLevel_name = map[int32]string{ + 0: "IDEMPOTENCY_UNKNOWN", + 1: "NO_SIDE_EFFECTS", + 2: "IDEMPOTENT", + } + MethodOptions_IdempotencyLevel_value = map[string]int32{ + "IDEMPOTENCY_UNKNOWN": 0, + "NO_SIDE_EFFECTS": 1, + "IDEMPOTENT": 2, + } +) + +func (x MethodOptions_IdempotencyLevel) Enum() *MethodOptions_IdempotencyLevel { + p := new(MethodOptions_IdempotencyLevel) + *p = x + return p +} + +func (x MethodOptions_IdempotencyLevel) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (MethodOptions_IdempotencyLevel) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_descriptor_proto_enumTypes[5].Descriptor() +} + +func (MethodOptions_IdempotencyLevel) Type() protoreflect.EnumType { + return &file_google_protobuf_descriptor_proto_enumTypes[5] +} + +func (x MethodOptions_IdempotencyLevel) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *MethodOptions_IdempotencyLevel) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = MethodOptions_IdempotencyLevel(num) + return nil +} + +// Deprecated: Use MethodOptions_IdempotencyLevel.Descriptor instead. +func (MethodOptions_IdempotencyLevel) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{17, 0} +} + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +type FileDescriptorSet struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + File []*FileDescriptorProto `protobuf:"bytes,1,rep,name=file" json:"file,omitempty"` +} + +func (x *FileDescriptorSet) Reset() { + *x = FileDescriptorSet{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileDescriptorSet) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileDescriptorSet) ProtoMessage() {} + +func (x *FileDescriptorSet) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileDescriptorSet.ProtoReflect.Descriptor instead. +func (*FileDescriptorSet) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{0} +} + +func (x *FileDescriptorSet) GetFile() []*FileDescriptorProto { + if x != nil { + return x.File + } + return nil +} + +// Describes a complete .proto file. +type FileDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` // file name, relative to root of source tree + Package *string `protobuf:"bytes,2,opt,name=package" json:"package,omitempty"` // e.g. "foo", "foo.bar", etc. + // Names of files imported by this file. + Dependency []string `protobuf:"bytes,3,rep,name=dependency" json:"dependency,omitempty"` + // Indexes of the public imported files in the dependency list above. 
+ PublicDependency []int32 `protobuf:"varint,10,rep,name=public_dependency,json=publicDependency" json:"public_dependency,omitempty"` + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + WeakDependency []int32 `protobuf:"varint,11,rep,name=weak_dependency,json=weakDependency" json:"weak_dependency,omitempty"` + // All top-level definitions in this file. + MessageType []*DescriptorProto `protobuf:"bytes,4,rep,name=message_type,json=messageType" json:"message_type,omitempty"` + EnumType []*EnumDescriptorProto `protobuf:"bytes,5,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` + Service []*ServiceDescriptorProto `protobuf:"bytes,6,rep,name=service" json:"service,omitempty"` + Extension []*FieldDescriptorProto `protobuf:"bytes,7,rep,name=extension" json:"extension,omitempty"` + Options *FileOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + SourceCodeInfo *SourceCodeInfo `protobuf:"bytes,9,opt,name=source_code_info,json=sourceCodeInfo" json:"source_code_info,omitempty"` + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + Syntax *string `protobuf:"bytes,12,opt,name=syntax" json:"syntax,omitempty"` +} + +func (x *FileDescriptorProto) Reset() { + *x = FileDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileDescriptorProto) ProtoMessage() {} + +func (x *FileDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileDescriptorProto.ProtoReflect.Descriptor instead. 
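FileDescriptorProto values are usually recovered from a compiled-in descriptor rather than assembled by hand. A sketch assuming the sibling protodesc and protoregistry packages from the same module:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/reflect/protoregistry"
)

func main() {
	// descriptor.proto registers itself when this generated package is linked
	// in, so it can be looked up by path and converted back to proto form.
	fd, err := protoregistry.GlobalFiles.FindFileByPath("google/protobuf/descriptor.proto")
	if err != nil {
		panic(err)
	}
	fdp := protodesc.ToFileDescriptorProto(fd)
	fmt.Println(fdp.GetName(), fdp.GetPackage()) // google/protobuf/descriptor.proto google.protobuf
	fmt.Println("top-level messages:", len(fdp.GetMessageType()))
}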
+func (*FileDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{1} +} + +func (x *FileDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *FileDescriptorProto) GetPackage() string { + if x != nil && x.Package != nil { + return *x.Package + } + return "" +} + +func (x *FileDescriptorProto) GetDependency() []string { + if x != nil { + return x.Dependency + } + return nil +} + +func (x *FileDescriptorProto) GetPublicDependency() []int32 { + if x != nil { + return x.PublicDependency + } + return nil +} + +func (x *FileDescriptorProto) GetWeakDependency() []int32 { + if x != nil { + return x.WeakDependency + } + return nil +} + +func (x *FileDescriptorProto) GetMessageType() []*DescriptorProto { + if x != nil { + return x.MessageType + } + return nil +} + +func (x *FileDescriptorProto) GetEnumType() []*EnumDescriptorProto { + if x != nil { + return x.EnumType + } + return nil +} + +func (x *FileDescriptorProto) GetService() []*ServiceDescriptorProto { + if x != nil { + return x.Service + } + return nil +} + +func (x *FileDescriptorProto) GetExtension() []*FieldDescriptorProto { + if x != nil { + return x.Extension + } + return nil +} + +func (x *FileDescriptorProto) GetOptions() *FileOptions { + if x != nil { + return x.Options + } + return nil +} + +func (x *FileDescriptorProto) GetSourceCodeInfo() *SourceCodeInfo { + if x != nil { + return x.SourceCodeInfo + } + return nil +} + +func (x *FileDescriptorProto) GetSyntax() string { + if x != nil && x.Syntax != nil { + return *x.Syntax + } + return "" +} + +// Describes a message type. +type DescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Field []*FieldDescriptorProto `protobuf:"bytes,2,rep,name=field" json:"field,omitempty"` + Extension []*FieldDescriptorProto `protobuf:"bytes,6,rep,name=extension" json:"extension,omitempty"` + NestedType []*DescriptorProto `protobuf:"bytes,3,rep,name=nested_type,json=nestedType" json:"nested_type,omitempty"` + EnumType []*EnumDescriptorProto `protobuf:"bytes,4,rep,name=enum_type,json=enumType" json:"enum_type,omitempty"` + ExtensionRange []*DescriptorProto_ExtensionRange `protobuf:"bytes,5,rep,name=extension_range,json=extensionRange" json:"extension_range,omitempty"` + OneofDecl []*OneofDescriptorProto `protobuf:"bytes,8,rep,name=oneof_decl,json=oneofDecl" json:"oneof_decl,omitempty"` + Options *MessageOptions `protobuf:"bytes,7,opt,name=options" json:"options,omitempty"` + ReservedRange []*DescriptorProto_ReservedRange `protobuf:"bytes,9,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ ReservedName []string `protobuf:"bytes,10,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` +} + +func (x *DescriptorProto) Reset() { + *x = DescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DescriptorProto) ProtoMessage() {} + +func (x *DescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DescriptorProto.ProtoReflect.Descriptor instead. +func (*DescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{2} +} + +func (x *DescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *DescriptorProto) GetField() []*FieldDescriptorProto { + if x != nil { + return x.Field + } + return nil +} + +func (x *DescriptorProto) GetExtension() []*FieldDescriptorProto { + if x != nil { + return x.Extension + } + return nil +} + +func (x *DescriptorProto) GetNestedType() []*DescriptorProto { + if x != nil { + return x.NestedType + } + return nil +} + +func (x *DescriptorProto) GetEnumType() []*EnumDescriptorProto { + if x != nil { + return x.EnumType + } + return nil +} + +func (x *DescriptorProto) GetExtensionRange() []*DescriptorProto_ExtensionRange { + if x != nil { + return x.ExtensionRange + } + return nil +} + +func (x *DescriptorProto) GetOneofDecl() []*OneofDescriptorProto { + if x != nil { + return x.OneofDecl + } + return nil +} + +func (x *DescriptorProto) GetOptions() *MessageOptions { + if x != nil { + return x.Options + } + return nil +} + +func (x *DescriptorProto) GetReservedRange() []*DescriptorProto_ReservedRange { + if x != nil { + return x.ReservedRange + } + return nil +} + +func (x *DescriptorProto) GetReservedName() []string { + if x != nil { + return x.ReservedName + } + return nil +} + +type ExtensionRangeOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +func (x *ExtensionRangeOptions) Reset() { + *x = ExtensionRangeOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ExtensionRangeOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExtensionRangeOptions) ProtoMessage() {} + +func (x *ExtensionRangeOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExtensionRangeOptions.ProtoReflect.Descriptor instead. 
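A short sketch of walking the DescriptorProto getters above for a concrete message, converting its rich descriptor back to proto form via protodesc (illustrative only; FileOptions is picked simply because it is defined in this file):

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protodesc"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	md := (&descriptorpb.FileOptions{}).ProtoReflect().Descriptor()
	dp := protodesc.ToDescriptorProto(md)

	fmt.Println("message:", dp.GetName())
	for _, f := range dp.GetField() {
		fmt.Printf("  field %d %s (%s)\n", f.GetNumber(), f.GetName(), f.GetType())
	}
	fmt.Println("extension ranges:", len(dp.GetExtensionRange()))
	fmt.Println("reserved names:", dp.GetReservedName())
}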
+func (*ExtensionRangeOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{3} +} + +var extRange_ExtensionRangeOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use ExtensionRangeOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*ExtensionRangeOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_ExtensionRangeOptions +} + +func (x *ExtensionRangeOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +// Describes a field within a message. +type FieldDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,3,opt,name=number" json:"number,omitempty"` + Label *FieldDescriptorProto_Label `protobuf:"varint,4,opt,name=label,enum=google.protobuf.FieldDescriptorProto_Label" json:"label,omitempty"` + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + Type *FieldDescriptorProto_Type `protobuf:"varint,5,opt,name=type,enum=google.protobuf.FieldDescriptorProto_Type" json:"type,omitempty"` + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + TypeName *string `protobuf:"bytes,6,opt,name=type_name,json=typeName" json:"type_name,omitempty"` + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + Extendee *string `protobuf:"bytes,2,opt,name=extendee" json:"extendee,omitempty"` + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + DefaultValue *string `protobuf:"bytes,7,opt,name=default_value,json=defaultValue" json:"default_value,omitempty"` + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + OneofIndex *int32 `protobuf:"varint,9,opt,name=oneof_index,json=oneofIndex" json:"oneof_index,omitempty"` + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + JsonName *string `protobuf:"bytes,10,opt,name=json_name,json=jsonName" json:"json_name,omitempty"` + Options *FieldOptions `protobuf:"bytes,8,opt,name=options" json:"options,omitempty"` + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). 
Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. + // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + Proto3Optional *bool `protobuf:"varint,17,opt,name=proto3_optional,json=proto3Optional" json:"proto3_optional,omitempty"` +} + +func (x *FieldDescriptorProto) Reset() { + *x = FieldDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FieldDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FieldDescriptorProto) ProtoMessage() {} + +func (x *FieldDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FieldDescriptorProto.ProtoReflect.Descriptor instead. +func (*FieldDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{4} +} + +func (x *FieldDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *FieldDescriptorProto) GetNumber() int32 { + if x != nil && x.Number != nil { + return *x.Number + } + return 0 +} + +func (x *FieldDescriptorProto) GetLabel() FieldDescriptorProto_Label { + if x != nil && x.Label != nil { + return *x.Label + } + return FieldDescriptorProto_LABEL_OPTIONAL +} + +func (x *FieldDescriptorProto) GetType() FieldDescriptorProto_Type { + if x != nil && x.Type != nil { + return *x.Type + } + return FieldDescriptorProto_TYPE_DOUBLE +} + +func (x *FieldDescriptorProto) GetTypeName() string { + if x != nil && x.TypeName != nil { + return *x.TypeName + } + return "" +} + +func (x *FieldDescriptorProto) GetExtendee() string { + if x != nil && x.Extendee != nil { + return *x.Extendee + } + return "" +} + +func (x *FieldDescriptorProto) GetDefaultValue() string { + if x != nil && x.DefaultValue != nil { + return *x.DefaultValue + } + return "" +} + +func (x *FieldDescriptorProto) GetOneofIndex() int32 { + if x != nil && x.OneofIndex != nil { + return *x.OneofIndex + } + return 0 +} + +func (x *FieldDescriptorProto) GetJsonName() string { + if x != nil && x.JsonName != nil { + return *x.JsonName + } + return "" +} + +func (x *FieldDescriptorProto) GetOptions() *FieldOptions { + if x != nil { + return x.Options + } + return nil +} + +func (x *FieldDescriptorProto) GetProto3Optional() bool { + if x != nil && x.Proto3Optional != nil { + return *x.Proto3Optional + } + return false +} + +// Describes a oneof. 
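The proto3_optional comment above is dense; the sketch below shows, with invented message and field names, how a proto3 "optional string nickname = 1;" field is modeled together with the synthetic one-member oneof it must join:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	field := &descriptorpb.FieldDescriptorProto{
		Name:           proto.String("nickname"),
		Number:         proto.Int32(1),
		Label:          descriptorpb.FieldDescriptorProto_LABEL_OPTIONAL.Enum(),
		Type:           descriptorpb.FieldDescriptorProto_TYPE_STRING.Enum(),
		JsonName:       proto.String("nickname"),
		OneofIndex:     proto.Int32(0), // member of the synthetic oneof declared below
		Proto3Optional: proto.Bool(true),
	}
	msg := &descriptorpb.DescriptorProto{
		Name:      proto.String("Profile"),
		Field:     []*descriptorpb.FieldDescriptorProto{field},
		OneofDecl: []*descriptorpb.OneofDescriptorProto{{Name: proto.String("_nickname")}},
	}
	fmt.Println(msg.GetField()[0].GetProto3Optional(), msg.GetOneofDecl()[0].GetName())
}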
+type OneofDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Options *OneofOptions `protobuf:"bytes,2,opt,name=options" json:"options,omitempty"` +} + +func (x *OneofDescriptorProto) Reset() { + *x = OneofDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OneofDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OneofDescriptorProto) ProtoMessage() {} + +func (x *OneofDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OneofDescriptorProto.ProtoReflect.Descriptor instead. +func (*OneofDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{5} +} + +func (x *OneofDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *OneofDescriptorProto) GetOptions() *OneofOptions { + if x != nil { + return x.Options + } + return nil +} + +// Describes an enum type. +type EnumDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Value []*EnumValueDescriptorProto `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"` + Options *EnumOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + ReservedRange []*EnumDescriptorProto_EnumReservedRange `protobuf:"bytes,4,rep,name=reserved_range,json=reservedRange" json:"reserved_range,omitempty"` + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + ReservedName []string `protobuf:"bytes,5,rep,name=reserved_name,json=reservedName" json:"reserved_name,omitempty"` +} + +func (x *EnumDescriptorProto) Reset() { + *x = EnumDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnumDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnumDescriptorProto) ProtoMessage() {} + +func (x *EnumDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnumDescriptorProto.ProtoReflect.Descriptor instead. 
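A minimal sketch of the enum descriptor shape above, including the reserved ranges and names the comments describe (the enum and value names are invented; note that enum reserved ranges are inclusive on both ends):

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	enum := &descriptorpb.EnumDescriptorProto{
		Name: proto.String("Color"),
		Value: []*descriptorpb.EnumValueDescriptorProto{
			{Name: proto.String("COLOR_UNSPECIFIED"), Number: proto.Int32(0)},
			{Name: proto.String("COLOR_RED"), Number: proto.Int32(1)},
		},
		// Numbers 5 through 9 and the retired name may not be reused.
		ReservedRange: []*descriptorpb.EnumDescriptorProto_EnumReservedRange{
			{Start: proto.Int32(5), End: proto.Int32(9)},
		},
		ReservedName: []string{"COLOR_BLUE"},
	}
	fmt.Println(enum.GetName(), len(enum.GetValue()), enum.GetReservedName())
}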
+func (*EnumDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{6} +} + +func (x *EnumDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *EnumDescriptorProto) GetValue() []*EnumValueDescriptorProto { + if x != nil { + return x.Value + } + return nil +} + +func (x *EnumDescriptorProto) GetOptions() *EnumOptions { + if x != nil { + return x.Options + } + return nil +} + +func (x *EnumDescriptorProto) GetReservedRange() []*EnumDescriptorProto_EnumReservedRange { + if x != nil { + return x.ReservedRange + } + return nil +} + +func (x *EnumDescriptorProto) GetReservedName() []string { + if x != nil { + return x.ReservedName + } + return nil +} + +// Describes a value within an enum. +type EnumValueDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Number *int32 `protobuf:"varint,2,opt,name=number" json:"number,omitempty"` + Options *EnumValueOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` +} + +func (x *EnumValueDescriptorProto) Reset() { + *x = EnumValueDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnumValueDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnumValueDescriptorProto) ProtoMessage() {} + +func (x *EnumValueDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnumValueDescriptorProto.ProtoReflect.Descriptor instead. +func (*EnumValueDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{7} +} + +func (x *EnumValueDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *EnumValueDescriptorProto) GetNumber() int32 { + if x != nil && x.Number != nil { + return *x.Number + } + return 0 +} + +func (x *EnumValueDescriptorProto) GetOptions() *EnumValueOptions { + if x != nil { + return x.Options + } + return nil +} + +// Describes a service. 
+type ServiceDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Method []*MethodDescriptorProto `protobuf:"bytes,2,rep,name=method" json:"method,omitempty"` + Options *ServiceOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` +} + +func (x *ServiceDescriptorProto) Reset() { + *x = ServiceDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServiceDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServiceDescriptorProto) ProtoMessage() {} + +func (x *ServiceDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServiceDescriptorProto.ProtoReflect.Descriptor instead. +func (*ServiceDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{8} +} + +func (x *ServiceDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *ServiceDescriptorProto) GetMethod() []*MethodDescriptorProto { + if x != nil { + return x.Method + } + return nil +} + +func (x *ServiceDescriptorProto) GetOptions() *ServiceOptions { + if x != nil { + return x.Options + } + return nil +} + +// Describes a method of a service. +type MethodDescriptorProto struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name *string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + InputType *string `protobuf:"bytes,2,opt,name=input_type,json=inputType" json:"input_type,omitempty"` + OutputType *string `protobuf:"bytes,3,opt,name=output_type,json=outputType" json:"output_type,omitempty"` + Options *MethodOptions `protobuf:"bytes,4,opt,name=options" json:"options,omitempty"` + // Identifies if client streams multiple client messages + ClientStreaming *bool `protobuf:"varint,5,opt,name=client_streaming,json=clientStreaming,def=0" json:"client_streaming,omitempty"` + // Identifies if server streams multiple server messages + ServerStreaming *bool `protobuf:"varint,6,opt,name=server_streaming,json=serverStreaming,def=0" json:"server_streaming,omitempty"` +} + +// Default values for MethodDescriptorProto fields. 
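The client_streaming and server_streaming flags above default to false, which the const block that follows records. A sketch with hypothetical service, method, and type names:

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	svc := &descriptorpb.ServiceDescriptorProto{
		Name: proto.String("SecretService"),
		Method: []*descriptorpb.MethodDescriptorProto{{
			Name:            proto.String("WatchSecrets"),
			InputType:       proto.String(".example.WatchRequest"),
			OutputType:      proto.String(".example.Secret"),
			ServerStreaming: proto.Bool(true),
			// ClientStreaming is left unset, so its getter reports the default.
		}},
	}
	m := svc.GetMethod()[0]
	fmt.Println(m.GetName(), m.GetClientStreaming(), m.GetServerStreaming()) // WatchSecrets false true
}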
+const ( + Default_MethodDescriptorProto_ClientStreaming = bool(false) + Default_MethodDescriptorProto_ServerStreaming = bool(false) +) + +func (x *MethodDescriptorProto) Reset() { + *x = MethodDescriptorProto{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MethodDescriptorProto) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MethodDescriptorProto) ProtoMessage() {} + +func (x *MethodDescriptorProto) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MethodDescriptorProto.ProtoReflect.Descriptor instead. +func (*MethodDescriptorProto) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{9} +} + +func (x *MethodDescriptorProto) GetName() string { + if x != nil && x.Name != nil { + return *x.Name + } + return "" +} + +func (x *MethodDescriptorProto) GetInputType() string { + if x != nil && x.InputType != nil { + return *x.InputType + } + return "" +} + +func (x *MethodDescriptorProto) GetOutputType() string { + if x != nil && x.OutputType != nil { + return *x.OutputType + } + return "" +} + +func (x *MethodDescriptorProto) GetOptions() *MethodOptions { + if x != nil { + return x.Options + } + return nil +} + +func (x *MethodDescriptorProto) GetClientStreaming() bool { + if x != nil && x.ClientStreaming != nil { + return *x.ClientStreaming + } + return Default_MethodDescriptorProto_ClientStreaming +} + +func (x *MethodDescriptorProto) GetServerStreaming() bool { + if x != nil && x.ServerStreaming != nil { + return *x.ServerStreaming + } + return Default_MethodDescriptorProto_ServerStreaming +} + +type FileOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + JavaPackage *string `protobuf:"bytes,1,opt,name=java_package,json=javaPackage" json:"java_package,omitempty"` + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + JavaOuterClassname *string `protobuf:"bytes,8,opt,name=java_outer_classname,json=javaOuterClassname" json:"java_outer_classname,omitempty"` + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. 
+ JavaMultipleFiles *bool `protobuf:"varint,10,opt,name=java_multiple_files,json=javaMultipleFiles,def=0" json:"java_multiple_files,omitempty"` + // This option does nothing. + // + // Deprecated: Do not use. + JavaGenerateEqualsAndHash *bool `protobuf:"varint,20,opt,name=java_generate_equals_and_hash,json=javaGenerateEqualsAndHash" json:"java_generate_equals_and_hash,omitempty"` + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. + // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + JavaStringCheckUtf8 *bool `protobuf:"varint,27,opt,name=java_string_check_utf8,json=javaStringCheckUtf8,def=0" json:"java_string_check_utf8,omitempty"` + OptimizeFor *FileOptions_OptimizeMode `protobuf:"varint,9,opt,name=optimize_for,json=optimizeFor,enum=google.protobuf.FileOptions_OptimizeMode,def=1" json:"optimize_for,omitempty"` + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + GoPackage *string `protobuf:"bytes,11,opt,name=go_package,json=goPackage" json:"go_package,omitempty"` + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + CcGenericServices *bool `protobuf:"varint,16,opt,name=cc_generic_services,json=ccGenericServices,def=0" json:"cc_generic_services,omitempty"` + JavaGenericServices *bool `protobuf:"varint,17,opt,name=java_generic_services,json=javaGenericServices,def=0" json:"java_generic_services,omitempty"` + PyGenericServices *bool `protobuf:"varint,18,opt,name=py_generic_services,json=pyGenericServices,def=0" json:"py_generic_services,omitempty"` + PhpGenericServices *bool `protobuf:"varint,42,opt,name=php_generic_services,json=phpGenericServices,def=0" json:"php_generic_services,omitempty"` + // Is this file deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + Deprecated *bool `protobuf:"varint,23,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + CcEnableArenas *bool `protobuf:"varint,31,opt,name=cc_enable_arenas,json=ccEnableArenas,def=1" json:"cc_enable_arenas,omitempty"` + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. 
+ ObjcClassPrefix *string `protobuf:"bytes,36,opt,name=objc_class_prefix,json=objcClassPrefix" json:"objc_class_prefix,omitempty"` + // Namespace for generated classes; defaults to the package. + CsharpNamespace *string `protobuf:"bytes,37,opt,name=csharp_namespace,json=csharpNamespace" json:"csharp_namespace,omitempty"` + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + SwiftPrefix *string `protobuf:"bytes,39,opt,name=swift_prefix,json=swiftPrefix" json:"swift_prefix,omitempty"` + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + PhpClassPrefix *string `protobuf:"bytes,40,opt,name=php_class_prefix,json=phpClassPrefix" json:"php_class_prefix,omitempty"` + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + PhpNamespace *string `protobuf:"bytes,41,opt,name=php_namespace,json=phpNamespace" json:"php_namespace,omitempty"` + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + PhpMetadataNamespace *string `protobuf:"bytes,44,opt,name=php_metadata_namespace,json=phpMetadataNamespace" json:"php_metadata_namespace,omitempty"` + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + RubyPackage *string `protobuf:"bytes,45,opt,name=ruby_package,json=rubyPackage" json:"ruby_package,omitempty"` + // The parser stores options it doesn't recognize here. + // See the documentation for the "Options" section above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for FileOptions fields. +const ( + Default_FileOptions_JavaMultipleFiles = bool(false) + Default_FileOptions_JavaStringCheckUtf8 = bool(false) + Default_FileOptions_OptimizeFor = FileOptions_SPEED + Default_FileOptions_CcGenericServices = bool(false) + Default_FileOptions_JavaGenericServices = bool(false) + Default_FileOptions_PyGenericServices = bool(false) + Default_FileOptions_PhpGenericServices = bool(false) + Default_FileOptions_Deprecated = bool(false) + Default_FileOptions_CcEnableArenas = bool(true) +) + +func (x *FileOptions) Reset() { + *x = FileOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FileOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FileOptions) ProtoMessage() {} + +func (x *FileOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FileOptions.ProtoReflect.Descriptor instead. 
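A sketch of how file-level options such as go_package surface at runtime for any registered file; descriptor.proto itself is used only because this generated file registers it:

package main

import (
	"fmt"

	"google.golang.org/protobuf/reflect/protoregistry"
	"google.golang.org/protobuf/types/descriptorpb"
)

func main() {
	fd, err := protoregistry.GlobalFiles.FindFileByPath("google/protobuf/descriptor.proto")
	if err != nil {
		panic(err)
	}
	// Options() returns a proto.Message; for a file it is *FileOptions.
	opts, ok := fd.Options().(*descriptorpb.FileOptions)
	if !ok || opts == nil {
		fmt.Println("no file options recorded")
		return
	}
	fmt.Println(opts.GetGoPackage())      // e.g. "google.golang.org/protobuf/types/descriptorpb"
	fmt.Println(opts.GetCcEnableArenas()) // unset fields fall back to their defaults (true here)
	fmt.Println(opts.GetOptimizeFor())
}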
+func (*FileOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{10} +} + +var extRange_FileOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use FileOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*FileOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_FileOptions +} + +func (x *FileOptions) GetJavaPackage() string { + if x != nil && x.JavaPackage != nil { + return *x.JavaPackage + } + return "" +} + +func (x *FileOptions) GetJavaOuterClassname() string { + if x != nil && x.JavaOuterClassname != nil { + return *x.JavaOuterClassname + } + return "" +} + +func (x *FileOptions) GetJavaMultipleFiles() bool { + if x != nil && x.JavaMultipleFiles != nil { + return *x.JavaMultipleFiles + } + return Default_FileOptions_JavaMultipleFiles +} + +// Deprecated: Do not use. +func (x *FileOptions) GetJavaGenerateEqualsAndHash() bool { + if x != nil && x.JavaGenerateEqualsAndHash != nil { + return *x.JavaGenerateEqualsAndHash + } + return false +} + +func (x *FileOptions) GetJavaStringCheckUtf8() bool { + if x != nil && x.JavaStringCheckUtf8 != nil { + return *x.JavaStringCheckUtf8 + } + return Default_FileOptions_JavaStringCheckUtf8 +} + +func (x *FileOptions) GetOptimizeFor() FileOptions_OptimizeMode { + if x != nil && x.OptimizeFor != nil { + return *x.OptimizeFor + } + return Default_FileOptions_OptimizeFor +} + +func (x *FileOptions) GetGoPackage() string { + if x != nil && x.GoPackage != nil { + return *x.GoPackage + } + return "" +} + +func (x *FileOptions) GetCcGenericServices() bool { + if x != nil && x.CcGenericServices != nil { + return *x.CcGenericServices + } + return Default_FileOptions_CcGenericServices +} + +func (x *FileOptions) GetJavaGenericServices() bool { + if x != nil && x.JavaGenericServices != nil { + return *x.JavaGenericServices + } + return Default_FileOptions_JavaGenericServices +} + +func (x *FileOptions) GetPyGenericServices() bool { + if x != nil && x.PyGenericServices != nil { + return *x.PyGenericServices + } + return Default_FileOptions_PyGenericServices +} + +func (x *FileOptions) GetPhpGenericServices() bool { + if x != nil && x.PhpGenericServices != nil { + return *x.PhpGenericServices + } + return Default_FileOptions_PhpGenericServices +} + +func (x *FileOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_FileOptions_Deprecated +} + +func (x *FileOptions) GetCcEnableArenas() bool { + if x != nil && x.CcEnableArenas != nil { + return *x.CcEnableArenas + } + return Default_FileOptions_CcEnableArenas +} + +func (x *FileOptions) GetObjcClassPrefix() string { + if x != nil && x.ObjcClassPrefix != nil { + return *x.ObjcClassPrefix + } + return "" +} + +func (x *FileOptions) GetCsharpNamespace() string { + if x != nil && x.CsharpNamespace != nil { + return *x.CsharpNamespace + } + return "" +} + +func (x *FileOptions) GetSwiftPrefix() string { + if x != nil && x.SwiftPrefix != nil { + return *x.SwiftPrefix + } + return "" +} + +func (x *FileOptions) GetPhpClassPrefix() string { + if x != nil && x.PhpClassPrefix != nil { + return *x.PhpClassPrefix + } + return "" +} + +func (x *FileOptions) GetPhpNamespace() string { + if x != nil && x.PhpNamespace != nil { + return *x.PhpNamespace + } + return "" +} + +func (x *FileOptions) GetPhpMetadataNamespace() string { + if x != nil && x.PhpMetadataNamespace != nil { + return *x.PhpMetadataNamespace + } + return "" +} + +func 
(x *FileOptions) GetRubyPackage() string { + if x != nil && x.RubyPackage != nil { + return *x.RubyPackage + } + return "" +} + +func (x *FileOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type MessageOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + MessageSetWireFormat *bool `protobuf:"varint,1,opt,name=message_set_wire_format,json=messageSetWireFormat,def=0" json:"message_set_wire_format,omitempty"` + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + NoStandardDescriptorAccessor *bool `protobuf:"varint,2,opt,name=no_standard_descriptor_accessor,json=noStandardDescriptorAccessor,def=0" json:"no_standard_descriptor_accessor,omitempty"` + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // Whether the message is an automatically generated map entry type for the + // maps field. + // + // For maps fields: + // map map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + MapEntry *bool `protobuf:"varint,7,opt,name=map_entry,json=mapEntry" json:"map_entry,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for MessageOptions fields. 
+const ( + Default_MessageOptions_MessageSetWireFormat = bool(false) + Default_MessageOptions_NoStandardDescriptorAccessor = bool(false) + Default_MessageOptions_Deprecated = bool(false) +) + +func (x *MessageOptions) Reset() { + *x = MessageOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MessageOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MessageOptions) ProtoMessage() {} + +func (x *MessageOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MessageOptions.ProtoReflect.Descriptor instead. +func (*MessageOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{11} +} + +var extRange_MessageOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use MessageOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*MessageOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_MessageOptions +} + +func (x *MessageOptions) GetMessageSetWireFormat() bool { + if x != nil && x.MessageSetWireFormat != nil { + return *x.MessageSetWireFormat + } + return Default_MessageOptions_MessageSetWireFormat +} + +func (x *MessageOptions) GetNoStandardDescriptorAccessor() bool { + if x != nil && x.NoStandardDescriptorAccessor != nil { + return *x.NoStandardDescriptorAccessor + } + return Default_MessageOptions_NoStandardDescriptorAccessor +} + +func (x *MessageOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_MessageOptions_Deprecated +} + +func (x *MessageOptions) GetMapEntry() bool { + if x != nil && x.MapEntry != nil { + return *x.MapEntry + } + return false +} + +func (x *MessageOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type FieldOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + Ctype *FieldOptions_CType `protobuf:"varint,1,opt,name=ctype,enum=google.protobuf.FieldOptions_CType,def=0" json:"ctype,omitempty"` + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + Packed *bool `protobuf:"varint,2,opt,name=packed" json:"packed,omitempty"` + // The jstype option determines the JavaScript type used for values of the + // field. The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). 
A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + Jstype *FieldOptions_JSType `protobuf:"varint,6,opt,name=jstype,enum=google.protobuf.FieldOptions_JSType,def=0" json:"jstype,omitempty"` + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + Lazy *bool `protobuf:"varint,5,opt,name=lazy,def=0" json:"lazy,omitempty"` + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // For Google-internal migration only. Do not use. + Weak *bool `protobuf:"varint,10,opt,name=weak,def=0" json:"weak,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for FieldOptions fields. 
+const ( + Default_FieldOptions_Ctype = FieldOptions_STRING + Default_FieldOptions_Jstype = FieldOptions_JS_NORMAL + Default_FieldOptions_Lazy = bool(false) + Default_FieldOptions_Deprecated = bool(false) + Default_FieldOptions_Weak = bool(false) +) + +func (x *FieldOptions) Reset() { + *x = FieldOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *FieldOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*FieldOptions) ProtoMessage() {} + +func (x *FieldOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use FieldOptions.ProtoReflect.Descriptor instead. +func (*FieldOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{12} +} + +var extRange_FieldOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use FieldOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*FieldOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_FieldOptions +} + +func (x *FieldOptions) GetCtype() FieldOptions_CType { + if x != nil && x.Ctype != nil { + return *x.Ctype + } + return Default_FieldOptions_Ctype +} + +func (x *FieldOptions) GetPacked() bool { + if x != nil && x.Packed != nil { + return *x.Packed + } + return false +} + +func (x *FieldOptions) GetJstype() FieldOptions_JSType { + if x != nil && x.Jstype != nil { + return *x.Jstype + } + return Default_FieldOptions_Jstype +} + +func (x *FieldOptions) GetLazy() bool { + if x != nil && x.Lazy != nil { + return *x.Lazy + } + return Default_FieldOptions_Lazy +} + +func (x *FieldOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_FieldOptions_Deprecated +} + +func (x *FieldOptions) GetWeak() bool { + if x != nil && x.Weak != nil { + return *x.Weak + } + return Default_FieldOptions_Weak +} + +func (x *FieldOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type OneofOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // The parser stores options it doesn't recognize here. See above. 
+ UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +func (x *OneofOptions) Reset() { + *x = OneofOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *OneofOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*OneofOptions) ProtoMessage() {} + +func (x *OneofOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use OneofOptions.ProtoReflect.Descriptor instead. +func (*OneofOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{13} +} + +var extRange_OneofOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use OneofOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*OneofOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_OneofOptions +} + +func (x *OneofOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type EnumOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Set this option to true to allow mapping different tag names to the same + // value. + AllowAlias *bool `protobuf:"varint,2,opt,name=allow_alias,json=allowAlias" json:"allow_alias,omitempty"` + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. + Deprecated *bool `protobuf:"varint,3,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for EnumOptions fields. +const ( + Default_EnumOptions_Deprecated = bool(false) +) + +func (x *EnumOptions) Reset() { + *x = EnumOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnumOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnumOptions) ProtoMessage() {} + +func (x *EnumOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnumOptions.ProtoReflect.Descriptor instead. 
+func (*EnumOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{14} +} + +var extRange_EnumOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use EnumOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*EnumOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_EnumOptions +} + +func (x *EnumOptions) GetAllowAlias() bool { + if x != nil && x.AllowAlias != nil { + return *x.AllowAlias + } + return false +} + +func (x *EnumOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_EnumOptions_Deprecated +} + +func (x *EnumOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type EnumValueOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + Deprecated *bool `protobuf:"varint,1,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for EnumValueOptions fields. +const ( + Default_EnumValueOptions_Deprecated = bool(false) +) + +func (x *EnumValueOptions) Reset() { + *x = EnumValueOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[15] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnumValueOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnumValueOptions) ProtoMessage() {} + +func (x *EnumValueOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[15] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnumValueOptions.ProtoReflect.Descriptor instead. +func (*EnumValueOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{15} +} + +var extRange_EnumValueOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use EnumValueOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*EnumValueOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_EnumValueOptions +} + +func (x *EnumValueOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_EnumValueOptions_Deprecated +} + +func (x *EnumValueOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type ServiceOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Is this service deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for ServiceOptions fields. +const ( + Default_ServiceOptions_Deprecated = bool(false) +) + +func (x *ServiceOptions) Reset() { + *x = ServiceOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[16] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ServiceOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ServiceOptions) ProtoMessage() {} + +func (x *ServiceOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[16] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ServiceOptions.ProtoReflect.Descriptor instead. +func (*ServiceOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{16} +} + +var extRange_ServiceOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use ServiceOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*ServiceOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_ServiceOptions +} + +func (x *ServiceOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_ServiceOptions_Deprecated +} + +func (x *ServiceOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +type MethodOptions struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + extensionFields protoimpl.ExtensionFields + + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + Deprecated *bool `protobuf:"varint,33,opt,name=deprecated,def=0" json:"deprecated,omitempty"` + IdempotencyLevel *MethodOptions_IdempotencyLevel `protobuf:"varint,34,opt,name=idempotency_level,json=idempotencyLevel,enum=google.protobuf.MethodOptions_IdempotencyLevel,def=0" json:"idempotency_level,omitempty"` + // The parser stores options it doesn't recognize here. See above. + UninterpretedOption []*UninterpretedOption `protobuf:"bytes,999,rep,name=uninterpreted_option,json=uninterpretedOption" json:"uninterpreted_option,omitempty"` +} + +// Default values for MethodOptions fields. 
+const ( + Default_MethodOptions_Deprecated = bool(false) + Default_MethodOptions_IdempotencyLevel = MethodOptions_IDEMPOTENCY_UNKNOWN +) + +func (x *MethodOptions) Reset() { + *x = MethodOptions{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[17] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *MethodOptions) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*MethodOptions) ProtoMessage() {} + +func (x *MethodOptions) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[17] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use MethodOptions.ProtoReflect.Descriptor instead. +func (*MethodOptions) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{17} +} + +var extRange_MethodOptions = []protoiface.ExtensionRangeV1{ + {Start: 1000, End: 536870911}, +} + +// Deprecated: Use MethodOptions.ProtoReflect.Descriptor.ExtensionRanges instead. +func (*MethodOptions) ExtensionRangeArray() []protoiface.ExtensionRangeV1 { + return extRange_MethodOptions +} + +func (x *MethodOptions) GetDeprecated() bool { + if x != nil && x.Deprecated != nil { + return *x.Deprecated + } + return Default_MethodOptions_Deprecated +} + +func (x *MethodOptions) GetIdempotencyLevel() MethodOptions_IdempotencyLevel { + if x != nil && x.IdempotencyLevel != nil { + return *x.IdempotencyLevel + } + return Default_MethodOptions_IdempotencyLevel +} + +func (x *MethodOptions) GetUninterpretedOption() []*UninterpretedOption { + if x != nil { + return x.UninterpretedOption + } + return nil +} + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +type UninterpretedOption struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name []*UninterpretedOption_NamePart `protobuf:"bytes,2,rep,name=name" json:"name,omitempty"` + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. 
+ IdentifierValue *string `protobuf:"bytes,3,opt,name=identifier_value,json=identifierValue" json:"identifier_value,omitempty"` + PositiveIntValue *uint64 `protobuf:"varint,4,opt,name=positive_int_value,json=positiveIntValue" json:"positive_int_value,omitempty"` + NegativeIntValue *int64 `protobuf:"varint,5,opt,name=negative_int_value,json=negativeIntValue" json:"negative_int_value,omitempty"` + DoubleValue *float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue" json:"double_value,omitempty"` + StringValue []byte `protobuf:"bytes,7,opt,name=string_value,json=stringValue" json:"string_value,omitempty"` + AggregateValue *string `protobuf:"bytes,8,opt,name=aggregate_value,json=aggregateValue" json:"aggregate_value,omitempty"` +} + +func (x *UninterpretedOption) Reset() { + *x = UninterpretedOption{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[18] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UninterpretedOption) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UninterpretedOption) ProtoMessage() {} + +func (x *UninterpretedOption) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[18] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UninterpretedOption.ProtoReflect.Descriptor instead. +func (*UninterpretedOption) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{18} +} + +func (x *UninterpretedOption) GetName() []*UninterpretedOption_NamePart { + if x != nil { + return x.Name + } + return nil +} + +func (x *UninterpretedOption) GetIdentifierValue() string { + if x != nil && x.IdentifierValue != nil { + return *x.IdentifierValue + } + return "" +} + +func (x *UninterpretedOption) GetPositiveIntValue() uint64 { + if x != nil && x.PositiveIntValue != nil { + return *x.PositiveIntValue + } + return 0 +} + +func (x *UninterpretedOption) GetNegativeIntValue() int64 { + if x != nil && x.NegativeIntValue != nil { + return *x.NegativeIntValue + } + return 0 +} + +func (x *UninterpretedOption) GetDoubleValue() float64 { + if x != nil && x.DoubleValue != nil { + return *x.DoubleValue + } + return 0 +} + +func (x *UninterpretedOption) GetStringValue() []byte { + if x != nil { + return x.StringValue + } + return nil +} + +func (x *UninterpretedOption) GetAggregateValue() string { + if x != nil && x.AggregateValue != nil { + return *x.AggregateValue + } + return "" +} + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +type SourceCodeInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. 
+ // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + Location []*SourceCodeInfo_Location `protobuf:"bytes,1,rep,name=location" json:"location,omitempty"` +} + +func (x *SourceCodeInfo) Reset() { + *x = SourceCodeInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[19] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceCodeInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceCodeInfo) ProtoMessage() {} + +func (x *SourceCodeInfo) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[19] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceCodeInfo.ProtoReflect.Descriptor instead. +func (*SourceCodeInfo) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{19} +} + +func (x *SourceCodeInfo) GetLocation() []*SourceCodeInfo_Location { + if x != nil { + return x.Location + } + return nil +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +type GeneratedCodeInfo struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. 
+ Annotation []*GeneratedCodeInfo_Annotation `protobuf:"bytes,1,rep,name=annotation" json:"annotation,omitempty"` +} + +func (x *GeneratedCodeInfo) Reset() { + *x = GeneratedCodeInfo{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[20] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GeneratedCodeInfo) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeneratedCodeInfo) ProtoMessage() {} + +func (x *GeneratedCodeInfo) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[20] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GeneratedCodeInfo.ProtoReflect.Descriptor instead. +func (*GeneratedCodeInfo) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{20} +} + +func (x *GeneratedCodeInfo) GetAnnotation() []*GeneratedCodeInfo_Annotation { + if x != nil { + return x.Annotation + } + return nil +} + +type DescriptorProto_ExtensionRange struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` // Inclusive. + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` // Exclusive. + Options *ExtensionRangeOptions `protobuf:"bytes,3,opt,name=options" json:"options,omitempty"` +} + +func (x *DescriptorProto_ExtensionRange) Reset() { + *x = DescriptorProto_ExtensionRange{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[21] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DescriptorProto_ExtensionRange) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DescriptorProto_ExtensionRange) ProtoMessage() {} + +func (x *DescriptorProto_ExtensionRange) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[21] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DescriptorProto_ExtensionRange.ProtoReflect.Descriptor instead. +func (*DescriptorProto_ExtensionRange) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{2, 0} +} + +func (x *DescriptorProto_ExtensionRange) GetStart() int32 { + if x != nil && x.Start != nil { + return *x.Start + } + return 0 +} + +func (x *DescriptorProto_ExtensionRange) GetEnd() int32 { + if x != nil && x.End != nil { + return *x.End + } + return 0 +} + +func (x *DescriptorProto_ExtensionRange) GetOptions() *ExtensionRangeOptions { + if x != nil { + return x.Options + } + return nil +} + +// Range of reserved tag numbers. Reserved tag numbers may not be used by +// fields or extension ranges in the same message. Reserved ranges may +// not overlap. +type DescriptorProto_ReservedRange struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` // Inclusive. + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` // Exclusive. 
+} + +func (x *DescriptorProto_ReservedRange) Reset() { + *x = DescriptorProto_ReservedRange{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DescriptorProto_ReservedRange) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DescriptorProto_ReservedRange) ProtoMessage() {} + +func (x *DescriptorProto_ReservedRange) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DescriptorProto_ReservedRange.ProtoReflect.Descriptor instead. +func (*DescriptorProto_ReservedRange) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{2, 1} +} + +func (x *DescriptorProto_ReservedRange) GetStart() int32 { + if x != nil && x.Start != nil { + return *x.Start + } + return 0 +} + +func (x *DescriptorProto_ReservedRange) GetEnd() int32 { + if x != nil && x.End != nil { + return *x.End + } + return 0 +} + +// Range of reserved numeric values. Reserved values may not be used by +// entries in the same enum. Reserved ranges may not overlap. +// +// Note that this is distinct from DescriptorProto.ReservedRange in that it +// is inclusive such that it can appropriately represent the entire int32 +// domain. +type EnumDescriptorProto_EnumReservedRange struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Start *int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` // Inclusive. + End *int32 `protobuf:"varint,2,opt,name=end" json:"end,omitempty"` // Inclusive. +} + +func (x *EnumDescriptorProto_EnumReservedRange) Reset() { + *x = EnumDescriptorProto_EnumReservedRange{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *EnumDescriptorProto_EnumReservedRange) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*EnumDescriptorProto_EnumReservedRange) ProtoMessage() {} + +func (x *EnumDescriptorProto_EnumReservedRange) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use EnumDescriptorProto_EnumReservedRange.ProtoReflect.Descriptor instead. +func (*EnumDescriptorProto_EnumReservedRange) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{6, 0} +} + +func (x *EnumDescriptorProto_EnumReservedRange) GetStart() int32 { + if x != nil && x.Start != nil { + return *x.Start + } + return 0 +} + +func (x *EnumDescriptorProto_EnumReservedRange) GetEnd() int32 { + if x != nil && x.End != nil { + return *x.End + } + return 0 +} + +// The name of the uninterpreted option. Each string represents a segment in +// a dot-separated name. is_extension is true iff a segment represents an +// extension (denoted with parentheses in options specs in .proto files). 
+// E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents +// "foo.(bar.baz).qux". +type UninterpretedOption_NamePart struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + NamePart *string `protobuf:"bytes,1,req,name=name_part,json=namePart" json:"name_part,omitempty"` + IsExtension *bool `protobuf:"varint,2,req,name=is_extension,json=isExtension" json:"is_extension,omitempty"` +} + +func (x *UninterpretedOption_NamePart) Reset() { + *x = UninterpretedOption_NamePart{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *UninterpretedOption_NamePart) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*UninterpretedOption_NamePart) ProtoMessage() {} + +func (x *UninterpretedOption_NamePart) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use UninterpretedOption_NamePart.ProtoReflect.Descriptor instead. +func (*UninterpretedOption_NamePart) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{18, 0} +} + +func (x *UninterpretedOption_NamePart) GetNamePart() string { + if x != nil && x.NamePart != nil { + return *x.NamePart + } + return "" +} + +func (x *UninterpretedOption_NamePart) GetIsExtension() bool { + if x != nil && x.IsExtension != nil { + return *x.IsExtension + } + return false +} + +type SourceCodeInfo_Location struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + Span []int32 `protobuf:"varint,2,rep,packed,name=span" json:"span,omitempty"` + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. 
+ // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. */ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + LeadingComments *string `protobuf:"bytes,3,opt,name=leading_comments,json=leadingComments" json:"leading_comments,omitempty"` + TrailingComments *string `protobuf:"bytes,4,opt,name=trailing_comments,json=trailingComments" json:"trailing_comments,omitempty"` + LeadingDetachedComments []string `protobuf:"bytes,6,rep,name=leading_detached_comments,json=leadingDetachedComments" json:"leading_detached_comments,omitempty"` +} + +func (x *SourceCodeInfo_Location) Reset() { + *x = SourceCodeInfo_Location{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[25] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SourceCodeInfo_Location) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SourceCodeInfo_Location) ProtoMessage() {} + +func (x *SourceCodeInfo_Location) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[25] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SourceCodeInfo_Location.ProtoReflect.Descriptor instead. 
+func (*SourceCodeInfo_Location) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{19, 0} +} + +func (x *SourceCodeInfo_Location) GetPath() []int32 { + if x != nil { + return x.Path + } + return nil +} + +func (x *SourceCodeInfo_Location) GetSpan() []int32 { + if x != nil { + return x.Span + } + return nil +} + +func (x *SourceCodeInfo_Location) GetLeadingComments() string { + if x != nil && x.LeadingComments != nil { + return *x.LeadingComments + } + return "" +} + +func (x *SourceCodeInfo_Location) GetTrailingComments() string { + if x != nil && x.TrailingComments != nil { + return *x.TrailingComments + } + return "" +} + +func (x *SourceCodeInfo_Location) GetLeadingDetachedComments() []string { + if x != nil { + return x.LeadingDetachedComments + } + return nil +} + +type GeneratedCodeInfo_Annotation struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + Path []int32 `protobuf:"varint,1,rep,packed,name=path" json:"path,omitempty"` + // Identifies the filesystem path to the original source .proto. + SourceFile *string `protobuf:"bytes,2,opt,name=source_file,json=sourceFile" json:"source_file,omitempty"` + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + Begin *int32 `protobuf:"varint,3,opt,name=begin" json:"begin,omitempty"` + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + End *int32 `protobuf:"varint,4,opt,name=end" json:"end,omitempty"` +} + +func (x *GeneratedCodeInfo_Annotation) Reset() { + *x = GeneratedCodeInfo_Annotation{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_descriptor_proto_msgTypes[26] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GeneratedCodeInfo_Annotation) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GeneratedCodeInfo_Annotation) ProtoMessage() {} + +func (x *GeneratedCodeInfo_Annotation) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_descriptor_proto_msgTypes[26] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GeneratedCodeInfo_Annotation.ProtoReflect.Descriptor instead. 
+func (*GeneratedCodeInfo_Annotation) Descriptor() ([]byte, []int) { + return file_google_protobuf_descriptor_proto_rawDescGZIP(), []int{20, 0} +} + +func (x *GeneratedCodeInfo_Annotation) GetPath() []int32 { + if x != nil { + return x.Path + } + return nil +} + +func (x *GeneratedCodeInfo_Annotation) GetSourceFile() string { + if x != nil && x.SourceFile != nil { + return *x.SourceFile + } + return "" +} + +func (x *GeneratedCodeInfo_Annotation) GetBegin() int32 { + if x != nil && x.Begin != nil { + return *x.Begin + } + return 0 +} + +func (x *GeneratedCodeInfo_Annotation) GetEnd() int32 { + if x != nil && x.End != nil { + return *x.End + } + return 0 +} + +var File_google_protobuf_descriptor_proto protoreflect.FileDescriptor + +var file_google_protobuf_descriptor_proto_rawDesc = []byte{ + 0x0a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x12, 0x0f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x22, 0x4d, 0x0a, 0x11, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x74, 0x12, 0x38, 0x0a, 0x04, 0x66, 0x69, 0x6c, 0x65, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x04, 0x66, 0x69, + 0x6c, 0x65, 0x22, 0xe4, 0x04, 0x0a, 0x13, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, + 0x0a, 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x07, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x65, + 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0a, 0x64, 0x65, + 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x2b, 0x0a, 0x11, 0x70, 0x75, 0x62, 0x6c, + 0x69, 0x63, 0x5f, 0x64, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x0a, 0x20, + 0x03, 0x28, 0x05, 0x52, 0x10, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x44, 0x65, 0x70, 0x65, 0x6e, + 0x64, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x27, 0x0a, 0x0f, 0x77, 0x65, 0x61, 0x6b, 0x5f, 0x64, 0x65, + 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x05, 0x52, 0x0e, + 0x77, 0x65, 0x61, 0x6b, 0x44, 0x65, 0x70, 0x65, 0x6e, 0x64, 0x65, 0x6e, 0x63, 0x79, 0x12, 0x43, + 0x0a, 0x0c, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, + 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0b, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, + 0x79, 0x70, 0x65, 0x12, 0x41, 0x0a, 0x09, 0x65, 0x6e, 0x75, 0x6d, 0x5f, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x44, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x08, 0x65, 0x6e, + 0x75, 
0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x41, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x43, 0x0a, 0x09, 0x65, 0x78, 0x74, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, + 0x69, 0x65, 0x6c, 0x64, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x36, + 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x49, 0x0a, 0x10, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x0e, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, + 0x6f, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x18, 0x0c, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x73, 0x79, 0x6e, 0x74, 0x61, 0x78, 0x22, 0xb9, 0x06, 0x0a, 0x0f, 0x44, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x3b, 0x0a, 0x05, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x25, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x05, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x12, 0x43, + 0x0a, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x25, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x09, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0b, 0x6e, 0x65, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x79, + 0x70, 0x65, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0a, 0x6e, 0x65, 0x73, 0x74, + 0x65, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x41, 0x0a, 0x09, 0x65, 0x6e, 0x75, 0x6d, 0x5f, 0x74, + 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x44, 0x65, 0x73, 0x63, 0x72, 
0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, + 0x08, 0x65, 0x6e, 0x75, 0x6d, 0x54, 0x79, 0x70, 0x65, 0x12, 0x58, 0x0a, 0x0f, 0x65, 0x78, 0x74, + 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x05, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x61, + 0x6e, 0x67, 0x65, 0x52, 0x0e, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x61, + 0x6e, 0x67, 0x65, 0x12, 0x44, 0x0a, 0x0a, 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x5f, 0x64, 0x65, 0x63, + 0x6c, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4f, 0x6e, 0x65, 0x6f, 0x66, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x09, + 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x44, 0x65, 0x63, 0x6c, 0x12, 0x39, 0x0a, 0x07, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x55, 0x0a, 0x0e, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, + 0x5f, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x09, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, + 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x0d, 0x72, 0x65, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x72, + 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, + 0x1a, 0x7a, 0x0a, 0x0e, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, + 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x05, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x40, 0x0a, 0x07, 0x6f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x78, + 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x37, 0x0a, 0x0d, + 0x52, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x14, 0x0a, + 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x03, 0x65, 0x6e, 0x64, 0x22, 0x7c, 0x0a, 0x15, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, + 0x6f, 0x6e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x58, + 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 
0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, + 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, + 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, + 0x80, 0x80, 0x02, 0x22, 0xc1, 0x06, 0x0a, 0x14, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x44, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, + 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x41, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, + 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4c, + 0x61, 0x62, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x3e, 0x0a, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, + 0x64, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x74, + 0x79, 0x70, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, + 0x74, 0x79, 0x70, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x65, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x65, 0x78, 0x74, 0x65, + 0x6e, 0x64, 0x65, 0x65, 0x12, 0x23, 0x0a, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x6f, 0x6e, 0x65, + 0x6f, 0x66, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x09, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, + 0x6f, 0x6e, 0x65, 0x6f, 0x66, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1b, 0x0a, 0x09, 0x6a, 0x73, + 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, + 0x73, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x12, 0x27, 0x0a, 0x0f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x61, 0x6c, 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x33, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x22, 0xb6, 0x02, 0x0a, 0x04, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x4f, 0x55, 0x42, 0x4c, + 0x45, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 
0x4c, 0x4f, 0x41, + 0x54, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x36, + 0x34, 0x10, 0x03, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, + 0x36, 0x34, 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, + 0x33, 0x32, 0x10, 0x05, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x58, + 0x45, 0x44, 0x36, 0x34, 0x10, 0x06, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, + 0x49, 0x58, 0x45, 0x44, 0x33, 0x32, 0x10, 0x07, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x08, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x09, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x10, 0x0a, 0x12, 0x10, 0x0a, 0x0c, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x4d, 0x45, 0x53, 0x53, 0x41, 0x47, 0x45, 0x10, 0x0b, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x42, 0x59, 0x54, 0x45, 0x53, 0x10, 0x0c, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x55, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, 0x0d, 0x12, 0x0d, 0x0a, 0x09, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x10, 0x0e, 0x12, 0x11, 0x0a, 0x0d, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x53, 0x46, 0x49, 0x58, 0x45, 0x44, 0x33, 0x32, 0x10, 0x0f, 0x12, 0x11, 0x0a, + 0x0d, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x46, 0x49, 0x58, 0x45, 0x44, 0x36, 0x34, 0x10, 0x10, + 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x49, 0x4e, 0x54, 0x33, 0x32, 0x10, + 0x11, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x49, 0x4e, 0x54, 0x36, 0x34, + 0x10, 0x12, 0x22, 0x43, 0x0a, 0x05, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x12, 0x0a, 0x0e, 0x4c, + 0x41, 0x42, 0x45, 0x4c, 0x5f, 0x4f, 0x50, 0x54, 0x49, 0x4f, 0x4e, 0x41, 0x4c, 0x10, 0x01, 0x12, + 0x12, 0x0a, 0x0e, 0x4c, 0x41, 0x42, 0x45, 0x4c, 0x5f, 0x52, 0x45, 0x51, 0x55, 0x49, 0x52, 0x45, + 0x44, 0x10, 0x02, 0x12, 0x12, 0x0a, 0x0e, 0x4c, 0x41, 0x42, 0x45, 0x4c, 0x5f, 0x52, 0x45, 0x50, + 0x45, 0x41, 0x54, 0x45, 0x44, 0x10, 0x03, 0x22, 0x63, 0x0a, 0x14, 0x4f, 0x6e, 0x65, 0x6f, 0x66, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4f, 0x6e, 0x65, 0x6f, 0x66, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xe3, 0x02, 0x0a, + 0x13, 0x45, 0x6e, 0x75, 0x6d, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, + 0x74, 0x6f, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x36, 0x0a, 0x07, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 
0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6e, 0x75, + 0x6d, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x12, 0x5d, 0x0a, 0x0e, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x72, 0x61, + 0x6e, 0x67, 0x65, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x36, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6e, 0x75, 0x6d, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, + 0x65, 0x52, 0x0d, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, + 0x12, 0x23, 0x0a, 0x0d, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x5f, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x73, 0x65, 0x72, 0x76, 0x65, + 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x1a, 0x3b, 0x0a, 0x11, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x65, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x65, + 0x6e, 0x64, 0x22, 0x83, 0x01, 0x0a, 0x18, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x05, 0x52, 0x06, 0x6e, 0x75, 0x6d, 0x62, 0x65, 0x72, 0x12, 0x3b, 0x0a, 0x07, 0x6f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, + 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, + 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xa7, 0x01, 0x0a, 0x16, 0x53, 0x65, 0x72, + 0x76, 0x69, 0x63, 0x65, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x3e, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, + 0x64, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, + 0x06, 0x6d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x12, 0x39, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x22, 0x89, 0x02, 0x0a, 0x15, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x44, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x12, 0x0a, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x12, 0x1d, 0x0a, 0x0a, 0x69, 
0x6e, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, + 0x1f, 0x0a, 0x0b, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x54, 0x79, 0x70, 0x65, + 0x12, 0x38, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x30, 0x0a, 0x10, 0x63, 0x6c, + 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0f, 0x63, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x12, 0x30, 0x0a, 0x10, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, + 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0f, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x69, 0x6e, 0x67, 0x22, 0x91, + 0x09, 0x0a, 0x0b, 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x21, + 0x0a, 0x0c, 0x6a, 0x61, 0x76, 0x61, 0x5f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6a, 0x61, 0x76, 0x61, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, + 0x65, 0x12, 0x30, 0x0a, 0x14, 0x6a, 0x61, 0x76, 0x61, 0x5f, 0x6f, 0x75, 0x74, 0x65, 0x72, 0x5f, + 0x63, 0x6c, 0x61, 0x73, 0x73, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x12, 0x6a, 0x61, 0x76, 0x61, 0x4f, 0x75, 0x74, 0x65, 0x72, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x6e, + 0x61, 0x6d, 0x65, 0x12, 0x35, 0x0a, 0x13, 0x6a, 0x61, 0x76, 0x61, 0x5f, 0x6d, 0x75, 0x6c, 0x74, + 0x69, 0x70, 0x6c, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, + 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x11, 0x6a, 0x61, 0x76, 0x61, 0x4d, 0x75, 0x6c, + 0x74, 0x69, 0x70, 0x6c, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x44, 0x0a, 0x1d, 0x6a, 0x61, + 0x76, 0x61, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x5f, 0x65, 0x71, 0x75, 0x61, + 0x6c, 0x73, 0x5f, 0x61, 0x6e, 0x64, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x14, 0x20, 0x01, 0x28, + 0x08, 0x42, 0x02, 0x18, 0x01, 0x52, 0x19, 0x6a, 0x61, 0x76, 0x61, 0x47, 0x65, 0x6e, 0x65, 0x72, + 0x61, 0x74, 0x65, 0x45, 0x71, 0x75, 0x61, 0x6c, 0x73, 0x41, 0x6e, 0x64, 0x48, 0x61, 0x73, 0x68, + 0x12, 0x3a, 0x0a, 0x16, 0x6a, 0x61, 0x76, 0x61, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, + 0x63, 0x68, 0x65, 0x63, 0x6b, 0x5f, 0x75, 0x74, 0x66, 0x38, 0x18, 0x1b, 0x20, 0x01, 0x28, 0x08, + 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x13, 0x6a, 0x61, 0x76, 0x61, 0x53, 0x74, 0x72, + 0x69, 0x6e, 0x67, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x55, 0x74, 0x66, 0x38, 0x12, 0x53, 0x0a, 0x0c, + 0x6f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x09, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x29, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x3a, 0x05, 0x53, + 0x50, 0x45, 0x45, 0x44, 0x52, 0x0b, 0x6f, 0x70, 0x74, 
0x69, 0x6d, 0x69, 0x7a, 0x65, 0x46, 0x6f, + 0x72, 0x12, 0x1d, 0x0a, 0x0a, 0x67, 0x6f, 0x5f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, + 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x67, 0x6f, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, + 0x12, 0x35, 0x0a, 0x13, 0x63, 0x63, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x5f, 0x73, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x10, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, + 0x61, 0x6c, 0x73, 0x65, 0x52, 0x11, 0x63, 0x63, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x53, + 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x39, 0x0a, 0x15, 0x6a, 0x61, 0x76, 0x61, 0x5f, + 0x67, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, + 0x18, 0x11, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x13, 0x6a, + 0x61, 0x76, 0x61, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x73, 0x12, 0x35, 0x0a, 0x13, 0x70, 0x79, 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, + 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x12, 0x20, 0x01, 0x28, 0x08, 0x3a, + 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x11, 0x70, 0x79, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, + 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x37, 0x0a, 0x14, 0x70, 0x68, 0x70, + 0x5f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x73, 0x18, 0x2a, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x12, + 0x70, 0x68, 0x70, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x69, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, + 0x18, 0x17, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0a, 0x64, + 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x2e, 0x0a, 0x10, 0x63, 0x63, 0x5f, + 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x61, 0x72, 0x65, 0x6e, 0x61, 0x73, 0x18, 0x1f, 0x20, + 0x01, 0x28, 0x08, 0x3a, 0x04, 0x74, 0x72, 0x75, 0x65, 0x52, 0x0e, 0x63, 0x63, 0x45, 0x6e, 0x61, + 0x62, 0x6c, 0x65, 0x41, 0x72, 0x65, 0x6e, 0x61, 0x73, 0x12, 0x2a, 0x0a, 0x11, 0x6f, 0x62, 0x6a, + 0x63, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x24, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6f, 0x62, 0x6a, 0x63, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x50, + 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x73, 0x68, 0x61, 0x72, 0x70, 0x5f, + 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x25, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0f, 0x63, 0x73, 0x68, 0x61, 0x72, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x77, 0x69, 0x66, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, + 0x18, 0x27, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x77, 0x69, 0x66, 0x74, 0x50, 0x72, 0x65, + 0x66, 0x69, 0x78, 0x12, 0x28, 0x0a, 0x10, 0x70, 0x68, 0x70, 0x5f, 0x63, 0x6c, 0x61, 0x73, 0x73, + 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x28, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x70, + 0x68, 0x70, 0x43, 0x6c, 0x61, 0x73, 0x73, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x23, 0x0a, + 0x0d, 0x70, 0x68, 0x70, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 0x29, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x70, 0x68, 0x70, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x70, 0x68, 0x70, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x18, 
0x2c, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x14, 0x70, 0x68, 0x70, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x4e, + 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x72, 0x75, 0x62, 0x79, + 0x5f, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x18, 0x2d, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x72, 0x75, 0x62, 0x79, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x58, 0x0a, 0x14, 0x75, + 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, 0x6e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x3a, 0x0a, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6d, 0x69, 0x7a, + 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x50, 0x45, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x44, 0x45, 0x5f, 0x53, 0x49, 0x5a, 0x45, 0x10, 0x02, 0x12, + 0x10, 0x0a, 0x0c, 0x4c, 0x49, 0x54, 0x45, 0x5f, 0x52, 0x55, 0x4e, 0x54, 0x49, 0x4d, 0x45, 0x10, + 0x03, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x4a, 0x04, 0x08, 0x26, + 0x10, 0x27, 0x22, 0xd1, 0x02, 0x0a, 0x0e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3c, 0x0a, 0x17, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, + 0x5f, 0x73, 0x65, 0x74, 0x5f, 0x77, 0x69, 0x72, 0x65, 0x5f, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x14, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x65, 0x74, 0x57, 0x69, 0x72, 0x65, 0x46, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x12, 0x4c, 0x0a, 0x1f, 0x6e, 0x6f, 0x5f, 0x73, 0x74, 0x61, 0x6e, 0x64, 0x61, + 0x72, 0x64, 0x5f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x5f, 0x61, 0x63, + 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x52, 0x1c, 0x6e, 0x6f, 0x53, 0x74, 0x61, 0x6e, 0x64, 0x61, 0x72, 0x64, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73, 0x6f, + 0x72, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0a, 0x64, 0x65, + 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x1b, 0x0a, 0x09, 0x6d, 0x61, 0x70, 0x5f, + 0x65, 0x6e, 0x74, 0x72, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x6d, 0x61, 0x70, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x58, 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, + 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, + 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2a, + 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x4a, 0x04, 0x08, 0x08, 0x10, 0x09, + 0x4a, 0x04, 0x08, 0x09, 0x10, 0x0a, 0x22, 0xe2, 0x03, 0x0a, 0x0c, 0x46, 0x69, 0x65, 0x6c, 0x64, + 0x4f, 
0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x41, 0x0a, 0x05, 0x63, 0x74, 0x79, 0x70, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x43, 0x54, 0x79, 0x70, 0x65, 0x3a, 0x06, 0x53, 0x54, 0x52, + 0x49, 0x4e, 0x47, 0x52, 0x05, 0x63, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, + 0x63, 0x6b, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x70, 0x61, 0x63, 0x6b, + 0x65, 0x64, 0x12, 0x47, 0x0a, 0x06, 0x6a, 0x73, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x69, 0x65, 0x6c, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x2e, 0x4a, 0x53, 0x54, 0x79, 0x70, 0x65, 0x3a, 0x09, 0x4a, 0x53, 0x5f, 0x4e, 0x4f, 0x52, + 0x4d, 0x41, 0x4c, 0x52, 0x06, 0x6a, 0x73, 0x74, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x04, 0x6c, + 0x61, 0x7a, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, + 0x52, 0x04, 0x6c, 0x61, 0x7a, 0x79, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, + 0x61, 0x74, 0x65, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, + 0x65, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x19, 0x0a, + 0x04, 0x77, 0x65, 0x61, 0x6b, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x52, 0x04, 0x77, 0x65, 0x61, 0x6b, 0x12, 0x58, 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, + 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x22, 0x2f, 0x0a, 0x05, 0x43, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x53, + 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x43, 0x4f, 0x52, 0x44, 0x10, + 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x5f, 0x50, 0x49, 0x45, 0x43, + 0x45, 0x10, 0x02, 0x22, 0x35, 0x0a, 0x06, 0x4a, 0x53, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0d, 0x0a, + 0x09, 0x4a, 0x53, 0x5f, 0x4e, 0x4f, 0x52, 0x4d, 0x41, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, + 0x4a, 0x53, 0x5f, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0d, 0x0a, 0x09, 0x4a, + 0x53, 0x5f, 0x4e, 0x55, 0x4d, 0x42, 0x45, 0x52, 0x10, 0x02, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, + 0x80, 0x80, 0x80, 0x80, 0x02, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x22, 0x73, 0x0a, 0x0c, 0x4f, + 0x6e, 0x65, 0x6f, 0x66, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x58, 0x0a, 0x14, 0x75, + 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x6f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, 0x6e, 0x69, + 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 
0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, + 0x22, 0xc0, 0x01, 0x0a, 0x0b, 0x45, 0x6e, 0x75, 0x6d, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x12, 0x1f, 0x0a, 0x0b, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x61, 0x6c, 0x69, 0x61, 0x73, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x41, 0x6c, 0x69, 0x61, + 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x52, 0x0a, 0x64, 0x65, + 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x58, 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, + 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, + 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, + 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x4a, 0x04, 0x08, + 0x05, 0x10, 0x06, 0x22, 0x9e, 0x01, 0x0a, 0x10, 0x45, 0x6e, 0x75, 0x6d, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, + 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, + 0x6c, 0x73, 0x65, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, + 0x58, 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, + 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, + 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, + 0x80, 0x80, 0x80, 0x02, 0x22, 0x9c, 0x01, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, + 0x63, 0x61, 0x74, 0x65, 0x64, 0x18, 0x21, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, + 0x73, 0x65, 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 0x74, 0x65, 0x64, 0x12, 0x58, + 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x5f, + 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, + 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x2a, 0x09, 0x08, 0xe8, 0x07, 0x10, 0x80, 0x80, + 0x80, 0x80, 0x02, 0x22, 0xe0, 0x02, 0x0a, 0x0d, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x25, 0x0a, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, + 0x74, 0x65, 0x64, 0x18, 0x21, 0x20, 0x01, 0x28, 0x08, 0x3a, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, + 0x52, 0x0a, 0x64, 0x65, 0x70, 0x72, 0x65, 0x63, 0x61, 
0x74, 0x65, 0x64, 0x12, 0x71, 0x0a, 0x11, + 0x69, 0x64, 0x65, 0x6d, 0x70, 0x6f, 0x74, 0x65, 0x6e, 0x63, 0x79, 0x5f, 0x6c, 0x65, 0x76, 0x65, + 0x6c, 0x18, 0x22, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, + 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x49, 0x64, 0x65, 0x6d, 0x70, 0x6f, 0x74, 0x65, + 0x6e, 0x63, 0x79, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x3a, 0x13, 0x49, 0x44, 0x45, 0x4d, 0x50, 0x4f, + 0x54, 0x45, 0x4e, 0x43, 0x59, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x52, 0x10, 0x69, + 0x64, 0x65, 0x6d, 0x70, 0x6f, 0x74, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, + 0x58, 0x0a, 0x14, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, + 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0xe7, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x55, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x13, 0x75, 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, + 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x50, 0x0a, 0x10, 0x49, 0x64, 0x65, + 0x6d, 0x70, 0x6f, 0x74, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x17, 0x0a, + 0x13, 0x49, 0x44, 0x45, 0x4d, 0x50, 0x4f, 0x54, 0x45, 0x4e, 0x43, 0x59, 0x5f, 0x55, 0x4e, 0x4b, + 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x4e, 0x4f, 0x5f, 0x53, 0x49, 0x44, + 0x45, 0x5f, 0x45, 0x46, 0x46, 0x45, 0x43, 0x54, 0x53, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x49, + 0x44, 0x45, 0x4d, 0x50, 0x4f, 0x54, 0x45, 0x4e, 0x54, 0x10, 0x02, 0x2a, 0x09, 0x08, 0xe8, 0x07, + 0x10, 0x80, 0x80, 0x80, 0x80, 0x02, 0x22, 0x9a, 0x03, 0x0a, 0x13, 0x55, 0x6e, 0x69, 0x6e, 0x74, + 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x41, + 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x55, + 0x6e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x70, 0x72, 0x65, 0x74, 0x65, 0x64, 0x4f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x2e, 0x4e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x74, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x29, 0x0a, 0x10, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x69, 0x64, 0x65, + 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2c, 0x0a, 0x12, + 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x10, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, + 0x76, 0x65, 0x49, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x6e, 0x65, + 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x03, 0x52, 0x10, 0x6e, 0x65, 0x67, 0x61, 0x74, 0x69, 0x76, 0x65, + 0x49, 0x6e, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x6f, 0x75, 0x62, + 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, + 0x64, 0x6f, 0x75, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x73, + 0x74, 0x72, 0x69, 0x6e, 0x67, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 
0x20, 0x01, 0x28, + 0x0c, 0x52, 0x0b, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x27, + 0x0a, 0x0f, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x61, 0x67, 0x67, 0x72, 0x65, 0x67, 0x61, + 0x74, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x1a, 0x4a, 0x0a, 0x08, 0x4e, 0x61, 0x6d, 0x65, 0x50, + 0x61, 0x72, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x6e, 0x61, 0x6d, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x74, + 0x18, 0x01, 0x20, 0x02, 0x28, 0x09, 0x52, 0x08, 0x6e, 0x61, 0x6d, 0x65, 0x50, 0x61, 0x72, 0x74, + 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x73, 0x5f, 0x65, 0x78, 0x74, 0x65, 0x6e, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x02, 0x20, 0x02, 0x28, 0x08, 0x52, 0x0b, 0x69, 0x73, 0x45, 0x78, 0x74, 0x65, 0x6e, 0x73, + 0x69, 0x6f, 0x6e, 0x22, 0xa7, 0x02, 0x0a, 0x0e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x43, 0x6f, + 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x44, 0x0a, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x43, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x08, 0x6c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xce, 0x01, 0x0a, + 0x08, 0x4c, 0x6f, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x05, 0x42, 0x02, 0x10, 0x01, 0x52, 0x04, 0x70, 0x61, 0x74, + 0x68, 0x12, 0x16, 0x0a, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x03, 0x28, 0x05, 0x42, + 0x02, 0x10, 0x01, 0x52, 0x04, 0x73, 0x70, 0x61, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x6c, 0x65, 0x61, + 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6d, 0x6d, + 0x65, 0x6e, 0x74, 0x73, 0x12, 0x2b, 0x0a, 0x11, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, + 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x10, 0x74, 0x72, 0x61, 0x69, 0x6c, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, + 0x73, 0x12, 0x3a, 0x0a, 0x19, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x65, 0x74, + 0x61, 0x63, 0x68, 0x65, 0x64, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x06, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x17, 0x6c, 0x65, 0x61, 0x64, 0x69, 0x6e, 0x67, 0x44, 0x65, 0x74, + 0x61, 0x63, 0x68, 0x65, 0x64, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xd1, 0x01, + 0x0a, 0x11, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x64, 0x65, 0x49, + 0x6e, 0x66, 0x6f, 0x12, 0x4d, 0x0a, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, + 0x74, 0x65, 0x64, 0x43, 0x6f, 0x64, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x41, 0x6e, 0x6e, 0x6f, + 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0a, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x1a, 0x6d, 0x0a, 0x0a, 0x41, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x12, 0x16, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x03, 0x28, 0x05, 0x42, 0x02, + 0x10, 0x01, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x6f, 0x75, 0x72, + 0x63, 
0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x65, 0x67, + 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x62, 0x65, 0x67, 0x69, 0x6e, 0x12, + 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x03, 0x65, 0x6e, + 0x64, 0x42, 0x7e, 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x42, 0x10, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, + 0x70, 0x74, 0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x73, 0x48, 0x01, 0x5a, 0x2d, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x70, 0x62, 0xf8, 0x01, 0x01, 0xa2, 0x02, + 0x03, 0x47, 0x50, 0x42, 0xaa, 0x02, 0x1a, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x52, 0x65, 0x66, 0x6c, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, +} + +var ( + file_google_protobuf_descriptor_proto_rawDescOnce sync.Once + file_google_protobuf_descriptor_proto_rawDescData = file_google_protobuf_descriptor_proto_rawDesc +) + +func file_google_protobuf_descriptor_proto_rawDescGZIP() []byte { + file_google_protobuf_descriptor_proto_rawDescOnce.Do(func() { + file_google_protobuf_descriptor_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_protobuf_descriptor_proto_rawDescData) + }) + return file_google_protobuf_descriptor_proto_rawDescData +} + +var file_google_protobuf_descriptor_proto_enumTypes = make([]protoimpl.EnumInfo, 6) +var file_google_protobuf_descriptor_proto_msgTypes = make([]protoimpl.MessageInfo, 27) +var file_google_protobuf_descriptor_proto_goTypes = []interface{}{ + (FieldDescriptorProto_Type)(0), // 0: google.protobuf.FieldDescriptorProto.Type + (FieldDescriptorProto_Label)(0), // 1: google.protobuf.FieldDescriptorProto.Label + (FileOptions_OptimizeMode)(0), // 2: google.protobuf.FileOptions.OptimizeMode + (FieldOptions_CType)(0), // 3: google.protobuf.FieldOptions.CType + (FieldOptions_JSType)(0), // 4: google.protobuf.FieldOptions.JSType + (MethodOptions_IdempotencyLevel)(0), // 5: google.protobuf.MethodOptions.IdempotencyLevel + (*FileDescriptorSet)(nil), // 6: google.protobuf.FileDescriptorSet + (*FileDescriptorProto)(nil), // 7: google.protobuf.FileDescriptorProto + (*DescriptorProto)(nil), // 8: google.protobuf.DescriptorProto + (*ExtensionRangeOptions)(nil), // 9: google.protobuf.ExtensionRangeOptions + (*FieldDescriptorProto)(nil), // 10: google.protobuf.FieldDescriptorProto + (*OneofDescriptorProto)(nil), // 11: google.protobuf.OneofDescriptorProto + (*EnumDescriptorProto)(nil), // 12: google.protobuf.EnumDescriptorProto + (*EnumValueDescriptorProto)(nil), // 13: google.protobuf.EnumValueDescriptorProto + (*ServiceDescriptorProto)(nil), // 14: google.protobuf.ServiceDescriptorProto + (*MethodDescriptorProto)(nil), // 15: google.protobuf.MethodDescriptorProto + (*FileOptions)(nil), // 16: google.protobuf.FileOptions + (*MessageOptions)(nil), // 17: google.protobuf.MessageOptions + (*FieldOptions)(nil), // 18: google.protobuf.FieldOptions + (*OneofOptions)(nil), // 19: google.protobuf.OneofOptions + (*EnumOptions)(nil), // 20: google.protobuf.EnumOptions + (*EnumValueOptions)(nil), // 21: google.protobuf.EnumValueOptions + (*ServiceOptions)(nil), // 22: 
google.protobuf.ServiceOptions + (*MethodOptions)(nil), // 23: google.protobuf.MethodOptions + (*UninterpretedOption)(nil), // 24: google.protobuf.UninterpretedOption + (*SourceCodeInfo)(nil), // 25: google.protobuf.SourceCodeInfo + (*GeneratedCodeInfo)(nil), // 26: google.protobuf.GeneratedCodeInfo + (*DescriptorProto_ExtensionRange)(nil), // 27: google.protobuf.DescriptorProto.ExtensionRange + (*DescriptorProto_ReservedRange)(nil), // 28: google.protobuf.DescriptorProto.ReservedRange + (*EnumDescriptorProto_EnumReservedRange)(nil), // 29: google.protobuf.EnumDescriptorProto.EnumReservedRange + (*UninterpretedOption_NamePart)(nil), // 30: google.protobuf.UninterpretedOption.NamePart + (*SourceCodeInfo_Location)(nil), // 31: google.protobuf.SourceCodeInfo.Location + (*GeneratedCodeInfo_Annotation)(nil), // 32: google.protobuf.GeneratedCodeInfo.Annotation +} +var file_google_protobuf_descriptor_proto_depIdxs = []int32{ + 7, // 0: google.protobuf.FileDescriptorSet.file:type_name -> google.protobuf.FileDescriptorProto + 8, // 1: google.protobuf.FileDescriptorProto.message_type:type_name -> google.protobuf.DescriptorProto + 12, // 2: google.protobuf.FileDescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto + 14, // 3: google.protobuf.FileDescriptorProto.service:type_name -> google.protobuf.ServiceDescriptorProto + 10, // 4: google.protobuf.FileDescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto + 16, // 5: google.protobuf.FileDescriptorProto.options:type_name -> google.protobuf.FileOptions + 25, // 6: google.protobuf.FileDescriptorProto.source_code_info:type_name -> google.protobuf.SourceCodeInfo + 10, // 7: google.protobuf.DescriptorProto.field:type_name -> google.protobuf.FieldDescriptorProto + 10, // 8: google.protobuf.DescriptorProto.extension:type_name -> google.protobuf.FieldDescriptorProto + 8, // 9: google.protobuf.DescriptorProto.nested_type:type_name -> google.protobuf.DescriptorProto + 12, // 10: google.protobuf.DescriptorProto.enum_type:type_name -> google.protobuf.EnumDescriptorProto + 27, // 11: google.protobuf.DescriptorProto.extension_range:type_name -> google.protobuf.DescriptorProto.ExtensionRange + 11, // 12: google.protobuf.DescriptorProto.oneof_decl:type_name -> google.protobuf.OneofDescriptorProto + 17, // 13: google.protobuf.DescriptorProto.options:type_name -> google.protobuf.MessageOptions + 28, // 14: google.protobuf.DescriptorProto.reserved_range:type_name -> google.protobuf.DescriptorProto.ReservedRange + 24, // 15: google.protobuf.ExtensionRangeOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 1, // 16: google.protobuf.FieldDescriptorProto.label:type_name -> google.protobuf.FieldDescriptorProto.Label + 0, // 17: google.protobuf.FieldDescriptorProto.type:type_name -> google.protobuf.FieldDescriptorProto.Type + 18, // 18: google.protobuf.FieldDescriptorProto.options:type_name -> google.protobuf.FieldOptions + 19, // 19: google.protobuf.OneofDescriptorProto.options:type_name -> google.protobuf.OneofOptions + 13, // 20: google.protobuf.EnumDescriptorProto.value:type_name -> google.protobuf.EnumValueDescriptorProto + 20, // 21: google.protobuf.EnumDescriptorProto.options:type_name -> google.protobuf.EnumOptions + 29, // 22: google.protobuf.EnumDescriptorProto.reserved_range:type_name -> google.protobuf.EnumDescriptorProto.EnumReservedRange + 21, // 23: google.protobuf.EnumValueDescriptorProto.options:type_name -> google.protobuf.EnumValueOptions + 15, // 24: 
google.protobuf.ServiceDescriptorProto.method:type_name -> google.protobuf.MethodDescriptorProto + 22, // 25: google.protobuf.ServiceDescriptorProto.options:type_name -> google.protobuf.ServiceOptions + 23, // 26: google.protobuf.MethodDescriptorProto.options:type_name -> google.protobuf.MethodOptions + 2, // 27: google.protobuf.FileOptions.optimize_for:type_name -> google.protobuf.FileOptions.OptimizeMode + 24, // 28: google.protobuf.FileOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 24, // 29: google.protobuf.MessageOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 3, // 30: google.protobuf.FieldOptions.ctype:type_name -> google.protobuf.FieldOptions.CType + 4, // 31: google.protobuf.FieldOptions.jstype:type_name -> google.protobuf.FieldOptions.JSType + 24, // 32: google.protobuf.FieldOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 24, // 33: google.protobuf.OneofOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 24, // 34: google.protobuf.EnumOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 24, // 35: google.protobuf.EnumValueOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 24, // 36: google.protobuf.ServiceOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 5, // 37: google.protobuf.MethodOptions.idempotency_level:type_name -> google.protobuf.MethodOptions.IdempotencyLevel + 24, // 38: google.protobuf.MethodOptions.uninterpreted_option:type_name -> google.protobuf.UninterpretedOption + 30, // 39: google.protobuf.UninterpretedOption.name:type_name -> google.protobuf.UninterpretedOption.NamePart + 31, // 40: google.protobuf.SourceCodeInfo.location:type_name -> google.protobuf.SourceCodeInfo.Location + 32, // 41: google.protobuf.GeneratedCodeInfo.annotation:type_name -> google.protobuf.GeneratedCodeInfo.Annotation + 9, // 42: google.protobuf.DescriptorProto.ExtensionRange.options:type_name -> google.protobuf.ExtensionRangeOptions + 43, // [43:43] is the sub-list for method output_type + 43, // [43:43] is the sub-list for method input_type + 43, // [43:43] is the sub-list for extension type_name + 43, // [43:43] is the sub-list for extension extendee + 0, // [0:43] is the sub-list for field type_name +} + +func init() { file_google_protobuf_descriptor_proto_init() } +func file_google_protobuf_descriptor_proto_init() { + if File_google_protobuf_descriptor_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_google_protobuf_descriptor_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileDescriptorSet); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ExtensionRangeOptions); i { + case 0: + 
return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FieldDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OneofDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnumDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnumValueDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServiceDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MethodDescriptorProto); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FileOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MessageOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FieldOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*OneofOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnumOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnumValueOptions); i { + case 0: + return &v.state + case 1: + return 
&v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ServiceOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*MethodOptions); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + case 3: + return &v.extensionFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UninterpretedOption); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceCodeInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GeneratedCodeInfo); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DescriptorProto_ExtensionRange); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*DescriptorProto_ReservedRange); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*EnumDescriptorProto_EnumReservedRange); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*UninterpretedOption_NamePart); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SourceCodeInfo_Location); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_google_protobuf_descriptor_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GeneratedCodeInfo_Annotation); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_protobuf_descriptor_proto_rawDesc, + NumEnums: 6, + NumMessages: 27, + NumExtensions: 0, + NumServices: 0, 
+ }, + GoTypes: file_google_protobuf_descriptor_proto_goTypes, + DependencyIndexes: file_google_protobuf_descriptor_proto_depIdxs, + EnumInfos: file_google_protobuf_descriptor_proto_enumTypes, + MessageInfos: file_google_protobuf_descriptor_proto_msgTypes, + }.Build() + File_google_protobuf_descriptor_proto = out.File + file_google_protobuf_descriptor_proto_rawDesc = nil + file_google_protobuf_descriptor_proto_goTypes = nil + file_google_protobuf_descriptor_proto_depIdxs = nil +} diff --git a/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go b/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go new file mode 100644 index 00000000..8c10797b --- /dev/null +++ b/vendor/google.golang.org/protobuf/types/known/anypb/any.pb.go @@ -0,0 +1,498 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/any.proto + +// Package anypb contains generated types for google/protobuf/any.proto. +// +// The Any message is a dynamic representation of any other message value. +// It is functionally a tuple of the full name of the remote message type and +// the serialized bytes of the remote message value. +// +// +// Constructing an Any +// +// An Any message containing another message value is constructed using New: +// +// any, err := anypb.New(m) +// if err != nil { +// ... // handle error +// } +// ... // make use of any +// +// +// Unmarshaling an Any +// +// With a populated Any message, the underlying message can be serialized into +// a remote concrete message value in a few ways. +// +// If the exact concrete type is known, then a new (or pre-existing) instance +// of that message can be passed to the UnmarshalTo method: +// +// m := new(foopb.MyMessage) +// if err := any.UnmarshalTo(m); err != nil { +// ... // handle error +// } +// ... 
// make use of m +// +// If the exact concrete type is not known, then the UnmarshalNew method can be +// used to unmarshal the contents into a new instance of the remote message type: +// +// m, err := any.UnmarshalNew() +// if err != nil { +// ... // handle error +// } +// ... // make use of m +// +// UnmarshalNew uses the global type registry to resolve the message type and +// construct a new instance of that message to unmarshal into. In order for a +// message type to appear in the global registry, the Go type representing that +// protobuf message type must be linked into the Go binary. For messages +// generated by protoc-gen-go, this is achieved through an import of the +// generated Go package representing a .proto file. +// +// A common pattern with UnmarshalNew is to use a type switch with the resulting +// proto.Message value: +// +// switch m := m.(type) { +// case *foopb.MyMessage: +// ... // make use of m as a *foopb.MyMessage +// case *barpb.OtherMessage: +// ... // make use of m as a *barpb.OtherMessage +// case *bazpb.SomeMessage: +// ... // make use of m as a *bazpb.SomeMessage +// } +// +// This pattern ensures that the generated packages containing the message types +// listed in the case clauses are linked into the Go binary and therefore also +// registered in the global registry. +// +// +// Type checking an Any +// +// In order to type check whether an Any message represents some other message, +// then use the MessageIs method: +// +// if any.MessageIs((*foopb.MyMessage)(nil)) { +// ... // make use of any, knowing that it contains a foopb.MyMessage +// } +// +// The MessageIs method can also be used with an allocated instance of the target +// message type if the intention is to unmarshal into it if the type matches: +// +// m := new(foopb.MyMessage) +// if any.MessageIs(m) { +// if err := any.UnmarshalTo(m); err != nil { +// ... // handle error +// } +// ... // make use of m +// } +// +package anypb + +import ( + proto "google.golang.org/protobuf/proto" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoregistry "google.golang.org/protobuf/reflect/protoregistry" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + strings "strings" + sync "sync" +) + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... 
+// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +type Any struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. + // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + TypeUrl string `protobuf:"bytes,1,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + // Must be a valid serialized protocol buffer of the above specified type. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +// New marshals src into a new Any instance. +func New(src proto.Message) (*Any, error) { + dst := new(Any) + if err := dst.MarshalFrom(src); err != nil { + return nil, err + } + return dst, nil +} + +// MarshalFrom marshals src into dst as the underlying message +// using the provided marshal options. +// +// If no options are specified, call dst.MarshalFrom instead. 
+func MarshalFrom(dst *Any, src proto.Message, opts proto.MarshalOptions) error { + const urlPrefix = "type.googleapis.com/" + if src == nil { + return protoimpl.X.NewError("invalid nil source message") + } + b, err := opts.Marshal(src) + if err != nil { + return err + } + dst.TypeUrl = urlPrefix + string(src.ProtoReflect().Descriptor().FullName()) + dst.Value = b + return nil +} + +// UnmarshalTo unmarshals the underlying message from src into dst +// using the provided unmarshal options. +// It reports an error if dst is not of the right message type. +// +// If no options are specified, call src.UnmarshalTo instead. +func UnmarshalTo(src *Any, dst proto.Message, opts proto.UnmarshalOptions) error { + if src == nil { + return protoimpl.X.NewError("invalid nil source message") + } + if !src.MessageIs(dst) { + got := dst.ProtoReflect().Descriptor().FullName() + want := src.MessageName() + return protoimpl.X.NewError("mismatched message type: got %q, want %q", got, want) + } + return opts.Unmarshal(src.GetValue(), dst) +} + +// UnmarshalNew unmarshals the underlying message from src into dst, +// which is newly created message using a type resolved from the type URL. +// The message type is resolved according to opt.Resolver, +// which should implement protoregistry.MessageTypeResolver. +// It reports an error if the underlying message type could not be resolved. +// +// If no options are specified, call src.UnmarshalNew instead. +func UnmarshalNew(src *Any, opts proto.UnmarshalOptions) (dst proto.Message, err error) { + if src.GetTypeUrl() == "" { + return nil, protoimpl.X.NewError("invalid empty type URL") + } + if opts.Resolver == nil { + opts.Resolver = protoregistry.GlobalTypes + } + r, ok := opts.Resolver.(protoregistry.MessageTypeResolver) + if !ok { + return nil, protoregistry.NotFound + } + mt, err := r.FindMessageByURL(src.GetTypeUrl()) + if err != nil { + if err == protoregistry.NotFound { + return nil, err + } + return nil, protoimpl.X.NewError("could not resolve %q: %v", src.GetTypeUrl(), err) + } + dst = mt.New().Interface() + return dst, opts.Unmarshal(src.GetValue(), dst) +} + +// MessageIs reports whether the underlying message is of the same type as m. +func (x *Any) MessageIs(m proto.Message) bool { + if m == nil { + return false + } + url := x.GetTypeUrl() + name := string(m.ProtoReflect().Descriptor().FullName()) + if !strings.HasSuffix(url, name) { + return false + } + return len(url) == len(name) || url[len(url)-len(name)-1] == '/' +} + +// MessageName reports the full name of the underlying message, +// returning an empty string if invalid. +func (x *Any) MessageName() protoreflect.FullName { + url := x.GetTypeUrl() + name := protoreflect.FullName(url) + if i := strings.LastIndexByte(url, '/'); i >= 0 { + name = name[i+len("/"):] + } + if !name.IsValid() { + return "" + } + return name +} + +// MarshalFrom marshals m into x as the underlying message. +func (x *Any) MarshalFrom(m proto.Message) error { + return MarshalFrom(x, m, proto.MarshalOptions{}) +} + +// UnmarshalTo unmarshals the contents of the underlying message of x into m. +// It resets m before performing the unmarshal operation. +// It reports an error if m is not of the right message type. +func (x *Any) UnmarshalTo(m proto.Message) error { + return UnmarshalTo(x, m, proto.UnmarshalOptions{}) +} + +// UnmarshalNew unmarshals the contents of the underlying message of x into +// a newly allocated message of the specified type. +// It reports an error if the underlying message type could not be resolved. 
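A minimal usage sketch (not part of the vendored file) of the helpers above, packing the durationpb message that is also vendored in this change and abbreviating error handling:

package main

import (
	"fmt"
	"log"
	"time"

	"google.golang.org/protobuf/types/known/anypb"
	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// Pack a concrete message; New derives the type URL from the message's full name.
	a, err := anypb.New(durationpb.New(90 * time.Second))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(a.GetTypeUrl())  // type.googleapis.com/google.protobuf.Duration
	fmt.Println(a.MessageName()) // google.protobuf.Duration

	// Known concrete type: check with MessageIs, then UnmarshalTo into it.
	if a.MessageIs((*durationpb.Duration)(nil)) {
		dst := new(durationpb.Duration)
		if err := a.UnmarshalTo(dst); err != nil {
			log.Fatal(err)
		}
		fmt.Println(dst.AsDuration()) // 1m30s
	}

	// Unknown concrete type: UnmarshalNew resolves it via the global registry,
	// which works because the durationpb package is linked into this binary.
	m, err := a.UnmarshalNew()
	if err != nil {
		log.Fatal(err)
	}
	switch m := m.(type) {
	case *durationpb.Duration:
		fmt.Println("duration payload:", m.AsDuration())
	default:
		fmt.Println("other payload:", m.ProtoReflect().Descriptor().FullName())
	}
}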
+func (x *Any) UnmarshalNew() (proto.Message, error) { + return UnmarshalNew(x, proto.UnmarshalOptions{}) +} + +func (x *Any) Reset() { + *x = Any{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_any_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Any) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Any) ProtoMessage() {} + +func (x *Any) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_any_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Any.ProtoReflect.Descriptor instead. +func (*Any) Descriptor() ([]byte, []int) { + return file_google_protobuf_any_proto_rawDescGZIP(), []int{0} +} + +func (x *Any) GetTypeUrl() string { + if x != nil { + return x.TypeUrl + } + return "" +} + +func (x *Any) GetValue() []byte { + if x != nil { + return x.Value + } + return nil +} + +var File_google_protobuf_any_proto protoreflect.FileDescriptor + +var file_google_protobuf_any_proto_rawDesc = []byte{ + 0x0a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0f, 0x67, 0x6f, 0x6f, + 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x22, 0x36, 0x0a, 0x03, + 0x41, 0x6e, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x74, 0x79, 0x70, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x42, 0x76, 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x42, 0x08, 0x41, 0x6e, 0x79, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x2f, + 0x61, 0x6e, 0x79, 0x70, 0x62, 0xa2, 0x02, 0x03, 0x47, 0x50, 0x42, 0xaa, 0x02, 0x1e, 0x47, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x57, 0x65, + 0x6c, 0x6c, 0x4b, 0x6e, 0x6f, 0x77, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_google_protobuf_any_proto_rawDescOnce sync.Once + file_google_protobuf_any_proto_rawDescData = file_google_protobuf_any_proto_rawDesc +) + +func file_google_protobuf_any_proto_rawDescGZIP() []byte { + file_google_protobuf_any_proto_rawDescOnce.Do(func() { + file_google_protobuf_any_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_protobuf_any_proto_rawDescData) + }) + return file_google_protobuf_any_proto_rawDescData +} + +var file_google_protobuf_any_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_google_protobuf_any_proto_goTypes = []interface{}{ + (*Any)(nil), // 0: google.protobuf.Any +} +var file_google_protobuf_any_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the 
sub-list for field type_name +} + +func init() { file_google_protobuf_any_proto_init() } +func file_google_protobuf_any_proto_init() { + if File_google_protobuf_any_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_google_protobuf_any_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Any); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_protobuf_any_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_google_protobuf_any_proto_goTypes, + DependencyIndexes: file_google_protobuf_any_proto_depIdxs, + MessageInfos: file_google_protobuf_any_proto_msgTypes, + }.Build() + File_google_protobuf_any_proto = out.File + file_google_protobuf_any_proto_rawDesc = nil + file_google_protobuf_any_proto_goTypes = nil + file_google_protobuf_any_proto_depIdxs = nil +} diff --git a/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go b/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go new file mode 100644 index 00000000..a583ca2f --- /dev/null +++ b/vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go @@ -0,0 +1,379 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/duration.proto + +// Package durationpb contains generated types for google/protobuf/duration.proto. +// +// The Duration message represents a signed span of time. +// +// +// Conversion to a Go Duration +// +// The AsDuration method can be used to convert a Duration message to a +// standard Go time.Duration value: +// +// d := dur.AsDuration() +// ... 
// make use of d as a time.Duration +// +// Converting to a time.Duration is a common operation so that the extensive +// set of time-based operations provided by the time package can be leveraged. +// See https://golang.org/pkg/time for more information. +// +// The AsDuration method performs the conversion on a best-effort basis. +// Durations with denormal values (e.g., nanoseconds beyond -99999999 and +// +99999999, inclusive; or seconds and nanoseconds with opposite signs) +// are normalized during the conversion to a time.Duration. To manually check for +// invalid Duration per the documented limitations in duration.proto, +// additionally call the CheckValid method: +// +// if err := dur.CheckValid(); err != nil { +// ... // handle error +// } +// +// Note that the documented limitations in duration.proto does not protect a +// Duration from overflowing the representable range of a time.Duration in Go. +// The AsDuration method uses saturation arithmetic such that an overflow clamps +// the resulting value to the closest representable value (e.g., math.MaxInt64 +// for positive overflow and math.MinInt64 for negative overflow). +// +// +// Conversion from a Go Duration +// +// The durationpb.New function can be used to construct a Duration message +// from a standard Go time.Duration value: +// +// dur := durationpb.New(d) +// ... // make use of d as a *durationpb.Duration +// +package durationpb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + math "math" + reflect "reflect" + sync "sync" + time "time" +) + +// A Duration represents a signed, fixed-length span of time represented +// as a count of seconds and fractions of seconds at nanosecond +// resolution. It is independent of any calendar and concepts like "day" +// or "month". It is related to Timestamp in that the difference between +// two Timestamp values is a Duration and it can be added or subtracted +// from a Timestamp. Range is approximately +-10,000 years. +// +// # Examples +// +// Example 1: Compute Duration from two Timestamps in pseudo code. +// +// Timestamp start = ...; +// Timestamp end = ...; +// Duration duration = ...; +// +// duration.seconds = end.seconds - start.seconds; +// duration.nanos = end.nanos - start.nanos; +// +// if (duration.seconds < 0 && duration.nanos > 0) { +// duration.seconds += 1; +// duration.nanos -= 1000000000; +// } else if (duration.seconds > 0 && duration.nanos < 0) { +// duration.seconds -= 1; +// duration.nanos += 1000000000; +// } +// +// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. +// +// Timestamp start = ...; +// Duration duration = ...; +// Timestamp end = ...; +// +// end.seconds = start.seconds + duration.seconds; +// end.nanos = start.nanos + duration.nanos; +// +// if (end.nanos < 0) { +// end.seconds -= 1; +// end.nanos += 1000000000; +// } else if (end.nanos >= 1000000000) { +// end.seconds += 1; +// end.nanos -= 1000000000; +// } +// +// Example 3: Compute Duration from datetime.timedelta in Python. +// +// td = datetime.timedelta(days=3, minutes=10) +// duration = Duration() +// duration.FromTimedelta(td) +// +// # JSON Mapping +// +// In JSON format, the Duration type is encoded as a string rather than an +// object, where the string ends in the suffix "s" (indicating seconds) and +// is preceded by the number of seconds, with nanoseconds expressed as +// fractional seconds. 
For example, 3 seconds with 0 nanoseconds should be +// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should +// be expressed in JSON format as "3.000000001s", and 3 seconds and 1 +// microsecond should be expressed in JSON format as "3.000001s". +// +// +type Duration struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Signed seconds of the span of time. Must be from -315,576,000,000 + // to +315,576,000,000 inclusive. Note: these bounds are computed from: + // 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` + // Signed fractions of a second at nanosecond resolution of the span + // of time. Durations less than one second are represented with a 0 + // `seconds` field and a positive or negative `nanos` field. For durations + // of one second or more, a non-zero value for the `nanos` field must be + // of the same sign as the `seconds` field. Must be from -999,999,999 + // to +999,999,999 inclusive. + Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` +} + +// New constructs a new Duration from the provided time.Duration. +func New(d time.Duration) *Duration { + nanos := d.Nanoseconds() + secs := nanos / 1e9 + nanos -= secs * 1e9 + return &Duration{Seconds: int64(secs), Nanos: int32(nanos)} +} + +// AsDuration converts x to a time.Duration, +// returning the closest duration value in the event of overflow. +func (x *Duration) AsDuration() time.Duration { + secs := x.GetSeconds() + nanos := x.GetNanos() + d := time.Duration(secs) * time.Second + overflow := d/time.Second != time.Duration(secs) + d += time.Duration(nanos) * time.Nanosecond + overflow = overflow || (secs < 0 && nanos < 0 && d > 0) + overflow = overflow || (secs > 0 && nanos > 0 && d < 0) + if overflow { + switch { + case secs < 0: + return time.Duration(math.MinInt64) + case secs > 0: + return time.Duration(math.MaxInt64) + } + } + return d +} + +// IsValid reports whether the duration is valid. +// It is equivalent to CheckValid == nil. +func (x *Duration) IsValid() bool { + return x.check() == 0 +} + +// CheckValid returns an error if the duration is invalid. +// In particular, it checks whether the value is within the range of +// -10000 years to +10000 years inclusive. +// An error is reported for a nil Duration. 
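A minimal sketch (not part of the generated file) of the conversions described above: New and AsDuration round-trip a time.Duration, while CheckValid rejects a hand-built value outside the documented +-10,000 year range:

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// time.Duration -> Duration message -> time.Duration.
	d := 90 * time.Second
	dur := durationpb.New(d)
	fmt.Println(dur.GetSeconds(), dur.GetNanos()) // 90 0
	fmt.Println(dur.AsDuration() == d)            // true

	// Outside the documented range: IsValid is false and CheckValid reports why.
	// AsDuration on such a value saturates to math.MaxInt64 rather than overflowing.
	bad := &durationpb.Duration{Seconds: 400000000000}
	fmt.Println(bad.IsValid()) // false
	if err := bad.CheckValid(); err != nil {
		fmt.Println(err) // duration (...) exceeds +10000 years
	}
}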
+func (x *Duration) CheckValid() error { + switch x.check() { + case invalidNil: + return protoimpl.X.NewError("invalid nil Duration") + case invalidUnderflow: + return protoimpl.X.NewError("duration (%v) exceeds -10000 years", x) + case invalidOverflow: + return protoimpl.X.NewError("duration (%v) exceeds +10000 years", x) + case invalidNanosRange: + return protoimpl.X.NewError("duration (%v) has out-of-range nanos", x) + case invalidNanosSign: + return protoimpl.X.NewError("duration (%v) has seconds and nanos with different signs", x) + default: + return nil + } +} + +const ( + _ = iota + invalidNil + invalidUnderflow + invalidOverflow + invalidNanosRange + invalidNanosSign +) + +func (x *Duration) check() uint { + const absDuration = 315576000000 // 10000yr * 365.25day/yr * 24hr/day * 60min/hr * 60sec/min + secs := x.GetSeconds() + nanos := x.GetNanos() + switch { + case x == nil: + return invalidNil + case secs < -absDuration: + return invalidUnderflow + case secs > +absDuration: + return invalidOverflow + case nanos <= -1e9 || nanos >= +1e9: + return invalidNanosRange + case (secs > 0 && nanos < 0) || (secs < 0 && nanos > 0): + return invalidNanosSign + default: + return 0 + } +} + +func (x *Duration) Reset() { + *x = Duration{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_duration_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Duration) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Duration) ProtoMessage() {} + +func (x *Duration) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_duration_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Duration.ProtoReflect.Descriptor instead. 
+func (*Duration) Descriptor() ([]byte, []int) { + return file_google_protobuf_duration_proto_rawDescGZIP(), []int{0} +} + +func (x *Duration) GetSeconds() int64 { + if x != nil { + return x.Seconds + } + return 0 +} + +func (x *Duration) GetNanos() int32 { + if x != nil { + return x.Nanos + } + return 0 +} + +var File_google_protobuf_duration_proto protoreflect.FileDescriptor + +var file_google_protobuf_duration_proto_rawDesc = []byte{ + 0x0a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x12, 0x0f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x22, 0x3a, 0x0a, 0x08, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, + 0x07, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x07, + 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x61, 0x6e, 0x6f, 0x73, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6e, 0x61, 0x6e, 0x6f, 0x73, 0x42, 0x83, 0x01, + 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x42, 0x0d, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x31, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, + 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x6b, 0x6e, 0x6f, 0x77, 0x6e, 0x2f, 0x64, + 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x70, 0x62, 0xf8, 0x01, 0x01, 0xa2, 0x02, 0x03, 0x47, + 0x50, 0x42, 0xaa, 0x02, 0x1e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x50, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x57, 0x65, 0x6c, 0x6c, 0x4b, 0x6e, 0x6f, 0x77, 0x6e, 0x54, 0x79, + 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_google_protobuf_duration_proto_rawDescOnce sync.Once + file_google_protobuf_duration_proto_rawDescData = file_google_protobuf_duration_proto_rawDesc +) + +func file_google_protobuf_duration_proto_rawDescGZIP() []byte { + file_google_protobuf_duration_proto_rawDescOnce.Do(func() { + file_google_protobuf_duration_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_protobuf_duration_proto_rawDescData) + }) + return file_google_protobuf_duration_proto_rawDescData +} + +var file_google_protobuf_duration_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_google_protobuf_duration_proto_goTypes = []interface{}{ + (*Duration)(nil), // 0: google.protobuf.Duration +} +var file_google_protobuf_duration_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_google_protobuf_duration_proto_init() } +func file_google_protobuf_duration_proto_init() { + if File_google_protobuf_duration_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_google_protobuf_duration_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Duration); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: 
protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_protobuf_duration_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_google_protobuf_duration_proto_goTypes, + DependencyIndexes: file_google_protobuf_duration_proto_depIdxs, + MessageInfos: file_google_protobuf_duration_proto_msgTypes, + }.Build() + File_google_protobuf_duration_proto = out.File + file_google_protobuf_duration_proto_rawDesc = nil + file_google_protobuf_duration_proto_goTypes = nil + file_google_protobuf_duration_proto_depIdxs = nil +} diff --git a/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go b/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go new file mode 100644 index 00000000..c9ae9213 --- /dev/null +++ b/vendor/google.golang.org/protobuf/types/known/timestamppb/timestamp.pb.go @@ -0,0 +1,390 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/timestamp.proto + +// Package timestamppb contains generated types for google/protobuf/timestamp.proto. +// +// The Timestamp message represents a timestamp, +// an instant in time since the Unix epoch (January 1st, 1970). +// +// +// Conversion to a Go Time +// +// The AsTime method can be used to convert a Timestamp message to a +// standard Go time.Time value in UTC: +// +// t := ts.AsTime() +// ... // make use of t as a time.Time +// +// Converting to a time.Time is a common operation so that the extensive +// set of time-based operations provided by the time package can be leveraged. +// See https://golang.org/pkg/time for more information. +// +// The AsTime method performs the conversion on a best-effort basis. 
Timestamps +// with denormal values (e.g., nanoseconds beyond 0 and 99999999, inclusive) +// are normalized during the conversion to a time.Time. To manually check for +// invalid Timestamps per the documented limitations in timestamp.proto, +// additionally call the CheckValid method: +// +// if err := ts.CheckValid(); err != nil { +// ... // handle error +// } +// +// +// Conversion from a Go Time +// +// The timestamppb.New function can be used to construct a Timestamp message +// from a standard Go time.Time value: +// +// ts := timestamppb.New(t) +// ... // make use of ts as a *timestamppb.Timestamp +// +// In order to construct a Timestamp representing the current time, use Now: +// +// ts := timestamppb.Now() +// ... // make use of ts as a *timestamppb.Timestamp +// +package timestamppb + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" + time "time" +) + +// A Timestamp represents a point in time independent of any time zone or local +// calendar, encoded as a count of seconds and fractions of seconds at +// nanosecond resolution. The count is relative to an epoch at UTC midnight on +// January 1, 1970, in the proleptic Gregorian calendar which extends the +// Gregorian calendar backwards to year one. +// +// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap +// second table is needed for interpretation, using a [24-hour linear +// smear](https://developers.google.com/time/smear). +// +// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By +// restricting to that range, we ensure that we can convert to and from [RFC +// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. +// +// # Examples +// +// Example 1: Compute Timestamp from POSIX `time()`. +// +// Timestamp timestamp; +// timestamp.set_seconds(time(NULL)); +// timestamp.set_nanos(0); +// +// Example 2: Compute Timestamp from POSIX `gettimeofday()`. +// +// struct timeval tv; +// gettimeofday(&tv, NULL); +// +// Timestamp timestamp; +// timestamp.set_seconds(tv.tv_sec); +// timestamp.set_nanos(tv.tv_usec * 1000); +// +// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. +// +// FILETIME ft; +// GetSystemTimeAsFileTime(&ft); +// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; +// +// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z +// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. +// Timestamp timestamp; +// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); +// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); +// +// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. +// +// long millis = System.currentTimeMillis(); +// +// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) +// .setNanos((int) ((millis % 1000) * 1000000)).build(); +// +// +// Example 5: Compute Timestamp from Java `Instant.now()`. +// +// Instant now = Instant.now(); +// +// Timestamp timestamp = +// Timestamp.newBuilder().setSeconds(now.getEpochSecond()) +// .setNanos(now.getNano()).build(); +// +// +// Example 6: Compute Timestamp from current time in Python. +// +// timestamp = Timestamp() +// timestamp.GetCurrentTime() +// +// # JSON Mapping +// +// In JSON format, the Timestamp type is encoded as a string in the +// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. 
That is, the +// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" +// where {year} is always expressed using four digits while {month}, {day}, +// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional +// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), +// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone +// is required. A proto3 JSON serializer should always use UTC (as indicated by +// "Z") when printing the Timestamp type and a proto3 JSON parser should be +// able to accept both UTC and other timezones (as indicated by an offset). +// +// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past +// 01:30 UTC on January 15, 2017. +// +// In JavaScript, one can convert a Date object to this format using the +// standard +// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) +// method. In Python, a standard `datetime.datetime` object can be converted +// to this format using +// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with +// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use +// the Joda Time's [`ISODateTimeFormat.dateTime()`]( +// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D +// ) to obtain a formatter capable of generating timestamps in this format. +// +// +type Timestamp struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Represents seconds of UTC time since Unix epoch + // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + // 9999-12-31T23:59:59Z inclusive. + Seconds int64 `protobuf:"varint,1,opt,name=seconds,proto3" json:"seconds,omitempty"` + // Non-negative fractions of a second at nanosecond resolution. Negative + // second values with fractions must still have non-negative nanos values + // that count forward in time. Must be from 0 to 999,999,999 + // inclusive. + Nanos int32 `protobuf:"varint,2,opt,name=nanos,proto3" json:"nanos,omitempty"` +} + +// Now constructs a new Timestamp from the current time. +func Now() *Timestamp { + return New(time.Now()) +} + +// New constructs a new Timestamp from the provided time.Time. +func New(t time.Time) *Timestamp { + return &Timestamp{Seconds: int64(t.Unix()), Nanos: int32(t.Nanosecond())} +} + +// AsTime converts x to a time.Time. +func (x *Timestamp) AsTime() time.Time { + return time.Unix(int64(x.GetSeconds()), int64(x.GetNanos())).UTC() +} + +// IsValid reports whether the timestamp is valid. +// It is equivalent to CheckValid == nil. +func (x *Timestamp) IsValid() bool { + return x.check() == 0 +} + +// CheckValid returns an error if the timestamp is invalid. +// In particular, it checks whether the value represents a date that is +// in the range of 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. +// An error is reported for a nil Timestamp. 
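A small sketch (not part of the generated file) of the Timestamp conversions above: Now and New to build messages, AsTime to obtain a UTC time.Time, and CheckValid for range checking. The 2017-01-15T01:30:15.01Z instant mirrors the JSON example in the comment:

package main

import (
	"fmt"
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
)

func main() {
	// Current time as a Timestamp, printed back in RFC 3339 form.
	ts := timestamppb.Now()
	fmt.Println(ts.AsTime().Format(time.RFC3339Nano))

	// Round-trip an arbitrary instant; AsTime always yields UTC.
	t := time.Date(2017, time.January, 15, 1, 30, 15, 10_000_000, time.UTC)
	fmt.Println(timestamppb.New(t).AsTime().Equal(t)) // true

	// Seconds before 0001-01-01 fail validation.
	bad := &timestamppb.Timestamp{Seconds: -70000000000}
	if err := bad.CheckValid(); err != nil {
		fmt.Println(err) // timestamp (...) before 0001-01-01
	}
}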
+func (x *Timestamp) CheckValid() error { + switch x.check() { + case invalidNil: + return protoimpl.X.NewError("invalid nil Timestamp") + case invalidUnderflow: + return protoimpl.X.NewError("timestamp (%v) before 0001-01-01", x) + case invalidOverflow: + return protoimpl.X.NewError("timestamp (%v) after 9999-12-31", x) + case invalidNanos: + return protoimpl.X.NewError("timestamp (%v) has out-of-range nanos", x) + default: + return nil + } +} + +const ( + _ = iota + invalidNil + invalidUnderflow + invalidOverflow + invalidNanos +) + +func (x *Timestamp) check() uint { + const minTimestamp = -62135596800 // Seconds between 1970-01-01T00:00:00Z and 0001-01-01T00:00:00Z, inclusive + const maxTimestamp = +253402300799 // Seconds between 1970-01-01T00:00:00Z and 9999-12-31T23:59:59Z, inclusive + secs := x.GetSeconds() + nanos := x.GetNanos() + switch { + case x == nil: + return invalidNil + case secs < minTimestamp: + return invalidUnderflow + case secs > maxTimestamp: + return invalidOverflow + case nanos < 0 || nanos >= 1e9: + return invalidNanos + default: + return 0 + } +} + +func (x *Timestamp) Reset() { + *x = Timestamp{} + if protoimpl.UnsafeEnabled { + mi := &file_google_protobuf_timestamp_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Timestamp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Timestamp) ProtoMessage() {} + +func (x *Timestamp) ProtoReflect() protoreflect.Message { + mi := &file_google_protobuf_timestamp_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Timestamp.ProtoReflect.Descriptor instead. 
+func (*Timestamp) Descriptor() ([]byte, []int) { + return file_google_protobuf_timestamp_proto_rawDescGZIP(), []int{0} +} + +func (x *Timestamp) GetSeconds() int64 { + if x != nil { + return x.Seconds + } + return 0 +} + +func (x *Timestamp) GetNanos() int32 { + if x != nil { + return x.Nanos + } + return 0 +} + +var File_google_protobuf_timestamp_proto protoreflect.FileDescriptor + +var file_google_protobuf_timestamp_proto_rawDesc = []byte{ + 0x0a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x12, 0x0f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x22, 0x3b, 0x0a, 0x09, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, + 0x18, 0x0a, 0x07, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, + 0x52, 0x07, 0x73, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x61, 0x6e, + 0x6f, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x6e, 0x61, 0x6e, 0x6f, 0x73, 0x42, + 0x85, 0x01, 0x0a, 0x13, 0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x42, 0x0e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, + 0x6d, 0x70, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x6b, 0x6e, 0x6f, 0x77, + 0x6e, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x70, 0x62, 0xf8, 0x01, 0x01, + 0xa2, 0x02, 0x03, 0x47, 0x50, 0x42, 0xaa, 0x02, 0x1e, 0x47, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x57, 0x65, 0x6c, 0x6c, 0x4b, 0x6e, 0x6f, + 0x77, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_google_protobuf_timestamp_proto_rawDescOnce sync.Once + file_google_protobuf_timestamp_proto_rawDescData = file_google_protobuf_timestamp_proto_rawDesc +) + +func file_google_protobuf_timestamp_proto_rawDescGZIP() []byte { + file_google_protobuf_timestamp_proto_rawDescOnce.Do(func() { + file_google_protobuf_timestamp_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_protobuf_timestamp_proto_rawDescData) + }) + return file_google_protobuf_timestamp_proto_rawDescData +} + +var file_google_protobuf_timestamp_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_google_protobuf_timestamp_proto_goTypes = []interface{}{ + (*Timestamp)(nil), // 0: google.protobuf.Timestamp +} +var file_google_protobuf_timestamp_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_google_protobuf_timestamp_proto_init() } +func file_google_protobuf_timestamp_proto_init() { + if File_google_protobuf_timestamp_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_google_protobuf_timestamp_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Timestamp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := 
protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_google_protobuf_timestamp_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_google_protobuf_timestamp_proto_goTypes, + DependencyIndexes: file_google_protobuf_timestamp_proto_depIdxs, + MessageInfos: file_google_protobuf_timestamp_proto_msgTypes, + }.Build() + File_google_protobuf_timestamp_proto = out.File + file_google_protobuf_timestamp_proto_rawDesc = nil + file_google_protobuf_timestamp_proto_goTypes = nil + file_google_protobuf_timestamp_proto_depIdxs = nil +} diff --git a/vendor/modules.txt b/vendor/modules.txt index b72314fd..395c5bd8 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,4 +1,4 @@ -# atomicgo.dev/cursor v0.1.1 +# atomicgo.dev/cursor v0.2.0 ## explicit; go 1.15 atomicgo.dev/cursor # atomicgo.dev/keyboard v0.2.9 @@ -6,10 +6,10 @@ atomicgo.dev/cursor atomicgo.dev/keyboard atomicgo.dev/keyboard/internal atomicgo.dev/keyboard/keys -# bitbucket.org/creachadair/shell v0.0.7 -## explicit; go 1.16 -bitbucket.org/creachadair/shell -# github.com/bitfield/script v0.21.4 +# atomicgo.dev/schedule v0.1.0 +## explicit; go 1.18 +atomicgo.dev/schedule +# github.com/bitfield/script v0.22.0 ## explicit; go 1.12 github.com/bitfield/script # github.com/containerd/console v1.0.3 @@ -21,8 +21,8 @@ github.com/davecgh/go-spew/spew # github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1 ## explicit; go 1.12 github.com/dgrijalva/jwt-go/v4 -# github.com/dustin/go-humanize v1.0.0 -## explicit +# github.com/dustin/go-humanize v1.0.1 +## explicit; go 1.16 github.com/dustin/go-humanize # github.com/ericchiang/k8s v1.2.0 ## explicit @@ -38,54 +38,46 @@ github.com/ericchiang/k8s/watch/versioned # github.com/golang/mock v1.6.0 ## explicit; go 1.11 github.com/golang/mock/gomock -# github.com/golang/protobuf v1.2.0 -## explicit +# github.com/golang/protobuf v1.5.3 +## explicit; go 1.9 github.com/golang/protobuf/proto github.com/golang/protobuf/ptypes github.com/golang/protobuf/ptypes/any github.com/golang/protobuf/ptypes/duration github.com/golang/protobuf/ptypes/timestamp -# github.com/gookit/color v1.5.3 +# github.com/gookit/color v1.5.4 ## explicit; go 1.18 github.com/gookit/color -# github.com/gorilla/mux v1.8.0 -## explicit; go 1.12 +# github.com/gorilla/mux v1.8.1 +## explicit; go 1.20 github.com/gorilla/mux -# github.com/itchyny/gojq v0.12.7 -## explicit; go 1.16 +# github.com/itchyny/gojq v0.12.12 +## explicit; go 1.18 github.com/itchyny/gojq -# github.com/itchyny/timefmt-go v0.1.3 -## explicit; go 1.14 +# github.com/itchyny/timefmt-go v0.1.5 +## explicit; go 1.17 github.com/itchyny/timefmt-go -# github.com/konsorten/go-windows-terminal-sequences v1.0.1 -## explicit -github.com/konsorten/go-windows-terminal-sequences -# github.com/kr/pretty v0.2.1 -## explicit; go 1.12 -# github.com/lithammer/fuzzysearch v1.1.5 +# github.com/lithammer/fuzzysearch v1.1.8 ## explicit; go 1.15 github.com/lithammer/fuzzysearch/fuzzy -# github.com/logrusorgru/aurora v2.0.3+incompatible -## explicit -github.com/logrusorgru/aurora -# github.com/magefile/mage v1.14.0 +# github.com/magefile/mage v1.15.0 ## explicit; go 1.12 github.com/magefile/mage/mg github.com/magefile/mage/sh -# github.com/mattn/go-runewidth v0.0.14 +# github.com/mattn/go-runewidth v0.0.15 ## explicit; go 1.9 github.com/mattn/go-runewidth # github.com/pmezard/go-difflib v1.0.0 ## explicit github.com/pmezard/go-difflib/difflib -# github.com/pterm/pterm v0.12.58 -## 
explicit; go 1.18 +# github.com/pterm/pterm v0.12.74 +## explicit; go 1.21 github.com/pterm/pterm github.com/pterm/pterm/internal # github.com/rivo/uniseg v0.4.4 ## explicit; go 1.18 github.com/rivo/uniseg -# github.com/sheldonhull/magetools v1.0.0 +# github.com/sheldonhull/magetools v1.0.1 ## explicit; go 1.18 github.com/sheldonhull/magetools/ci github.com/sheldonhull/magetools/docgen @@ -94,24 +86,21 @@ github.com/sheldonhull/magetools/gotools github.com/sheldonhull/magetools/pkg/magetoolsutils github.com/sheldonhull/magetools/pkg/req github.com/sheldonhull/magetools/tooling -# github.com/sirupsen/logrus v1.2.0 -## explicit -github.com/sirupsen/logrus -# github.com/stretchr/testify v1.8.2 +# github.com/sirupsen/logrus v1.9.3 ## explicit; go 1.13 +github.com/sirupsen/logrus +# github.com/stretchr/testify v1.8.4 +## explicit; go 1.20 github.com/stretchr/testify/assert github.com/stretchr/testify/require github.com/stretchr/testify/suite # github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e ## explicit; go 1.19 github.com/xo/terminfo -# github.com/ztrue/tracerr v0.3.0 +# github.com/ztrue/tracerr v0.4.0 ## explicit github.com/ztrue/tracerr -# golang.org/x/crypto v0.17.0 -## explicit; go 1.18 -golang.org/x/crypto/ssh/terminal -# golang.org/x/mod v0.8.0 +# golang.org/x/mod v0.10.0 ## explicit; go 1.17 golang.org/x/mod/internal/lazyregexp golang.org/x/mod/modfile @@ -126,8 +115,6 @@ golang.org/x/net/http2/hpack golang.org/x/net/idna golang.org/x/net/internal/timeseries golang.org/x/net/trace -# golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 -## explicit # golang.org/x/sys v0.15.0 ## explicit; go 1.18 golang.org/x/sys/plan9 @@ -180,6 +167,45 @@ google.golang.org/grpc/resolver/passthrough google.golang.org/grpc/stats google.golang.org/grpc/status google.golang.org/grpc/tap +# google.golang.org/protobuf v1.26.0 +## explicit; go 1.9 +google.golang.org/protobuf/encoding/prototext +google.golang.org/protobuf/encoding/protowire +google.golang.org/protobuf/internal/descfmt +google.golang.org/protobuf/internal/descopts +google.golang.org/protobuf/internal/detrand +google.golang.org/protobuf/internal/encoding/defval +google.golang.org/protobuf/internal/encoding/messageset +google.golang.org/protobuf/internal/encoding/tag +google.golang.org/protobuf/internal/encoding/text +google.golang.org/protobuf/internal/errors +google.golang.org/protobuf/internal/filedesc +google.golang.org/protobuf/internal/filetype +google.golang.org/protobuf/internal/flags +google.golang.org/protobuf/internal/genid +google.golang.org/protobuf/internal/impl +google.golang.org/protobuf/internal/order +google.golang.org/protobuf/internal/pragma +google.golang.org/protobuf/internal/set +google.golang.org/protobuf/internal/strs +google.golang.org/protobuf/internal/version +google.golang.org/protobuf/proto +google.golang.org/protobuf/reflect/protodesc +google.golang.org/protobuf/reflect/protoreflect +google.golang.org/protobuf/reflect/protoregistry +google.golang.org/protobuf/runtime/protoiface +google.golang.org/protobuf/runtime/protoimpl +google.golang.org/protobuf/types/descriptorpb +google.golang.org/protobuf/types/known/anypb +google.golang.org/protobuf/types/known/durationpb +google.golang.org/protobuf/types/known/timestamppb # gopkg.in/yaml.v3 v3.0.1 ## explicit gopkg.in/yaml.v3 +# mvdan.cc/sh/v3 v3.6.0 +## explicit; go 1.18 +mvdan.cc/sh/v3/expand +mvdan.cc/sh/v3/fileutil +mvdan.cc/sh/v3/pattern +mvdan.cc/sh/v3/shell +mvdan.cc/sh/v3/syntax diff --git a/vendor/mvdan.cc/sh/v3/LICENSE b/vendor/mvdan.cc/sh/v3/LICENSE new 
file mode 100644 index 00000000..2a5268e5 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2016, Daniel Martí. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/mvdan.cc/sh/v3/expand/arith.go b/vendor/mvdan.cc/sh/v3/expand/arith.go new file mode 100644 index 00000000..3ce199ec --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/arith.go @@ -0,0 +1,221 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package expand + +import ( + "fmt" + "strconv" + "strings" + + "mvdan.cc/sh/v3/syntax" +) + +func Arithm(cfg *Config, expr syntax.ArithmExpr) (int, error) { + switch x := expr.(type) { + case *syntax.Word: + str, err := Literal(cfg, x) + if err != nil { + return 0, err + } + // recursively fetch vars + i := 0 + for syntax.ValidName(str) { + val := cfg.envGet(str) + if val == "" { + break + } + if i++; i >= maxNameRefDepth { + break + } + str = val + } + // default to 0 + return atoi(str), nil + case *syntax.ParenArithm: + return Arithm(cfg, x.X) + case *syntax.UnaryArithm: + switch x.Op { + case syntax.Inc, syntax.Dec: + name := x.X.(*syntax.Word).Lit() + old := atoi(cfg.envGet(name)) + val := old + if x.Op == syntax.Inc { + val++ + } else { + val-- + } + if err := cfg.envSet(name, strconv.Itoa(val)); err != nil { + return 0, err + } + if x.Post { + return old, nil + } + return val, nil + } + val, err := Arithm(cfg, x.X) + if err != nil { + return 0, err + } + switch x.Op { + case syntax.Not: + return oneIf(val == 0), nil + case syntax.BitNegation: + return ^val, nil + case syntax.Plus: + return val, nil + default: // syntax.Minus + return -val, nil + } + case *syntax.BinaryArithm: + switch x.Op { + case syntax.Assgn, syntax.AddAssgn, syntax.SubAssgn, + syntax.MulAssgn, syntax.QuoAssgn, syntax.RemAssgn, + syntax.AndAssgn, syntax.OrAssgn, syntax.XorAssgn, + syntax.ShlAssgn, syntax.ShrAssgn: + return cfg.assgnArit(x) + case syntax.TernQuest: // TernColon can't happen here + cond, err := Arithm(cfg, x.X) + if err != nil { + return 0, err + } + b2 := x.Y.(*syntax.BinaryArithm) // must have Op==TernColon + if cond == 1 
{ + return Arithm(cfg, b2.X) + } + return Arithm(cfg, b2.Y) + } + left, err := Arithm(cfg, x.X) + if err != nil { + return 0, err + } + right, err := Arithm(cfg, x.Y) + if err != nil { + return 0, err + } + return binArit(x.Op, left, right) + default: + panic(fmt.Sprintf("unexpected arithm expr: %T", x)) + } +} + +func oneIf(b bool) int { + if b { + return 1 + } + return 0 +} + +// atoi is like strconv.Atoi, but it ignores errors and trims whitespace. +func atoi(s string) int { + s = strings.TrimSpace(s) + n, _ := strconv.Atoi(s) + return n +} + +func (cfg *Config) assgnArit(b *syntax.BinaryArithm) (int, error) { + name := b.X.(*syntax.Word).Lit() + val := atoi(cfg.envGet(name)) + arg, err := Arithm(cfg, b.Y) + if err != nil { + return 0, err + } + switch b.Op { + case syntax.Assgn: + val = arg + case syntax.AddAssgn: + val += arg + case syntax.SubAssgn: + val -= arg + case syntax.MulAssgn: + val *= arg + case syntax.QuoAssgn: + if arg == 0 { + return 0, fmt.Errorf("division by zero") + } + val /= arg + case syntax.RemAssgn: + if arg == 0 { + return 0, fmt.Errorf("division by zero") + } + val %= arg + case syntax.AndAssgn: + val &= arg + case syntax.OrAssgn: + val |= arg + case syntax.XorAssgn: + val ^= arg + case syntax.ShlAssgn: + val <<= uint(arg) + case syntax.ShrAssgn: + val >>= uint(arg) + } + if err := cfg.envSet(name, strconv.Itoa(val)); err != nil { + return 0, err + } + return val, nil +} + +func intPow(a, b int) int { + p := 1 + for b > 0 { + if b&1 != 0 { + p *= a + } + b >>= 1 + a *= a + } + return p +} + +func binArit(op syntax.BinAritOperator, x, y int) (int, error) { + switch op { + case syntax.Add: + return x + y, nil + case syntax.Sub: + return x - y, nil + case syntax.Mul: + return x * y, nil + case syntax.Quo: + if y == 0 { + return 0, fmt.Errorf("division by zero") + } + return x / y, nil + case syntax.Rem: + if y == 0 { + return 0, fmt.Errorf("division by zero") + } + return x % y, nil + case syntax.Pow: + return intPow(x, y), nil + case syntax.Eql: + return oneIf(x == y), nil + case syntax.Gtr: + return oneIf(x > y), nil + case syntax.Lss: + return oneIf(x < y), nil + case syntax.Neq: + return oneIf(x != y), nil + case syntax.Leq: + return oneIf(x <= y), nil + case syntax.Geq: + return oneIf(x >= y), nil + case syntax.And: + return x & y, nil + case syntax.Or: + return x | y, nil + case syntax.Xor: + return x ^ y, nil + case syntax.Shr: + return x >> uint(y), nil + case syntax.Shl: + return x << uint(y), nil + case syntax.AndArit: + return oneIf(x != 0 && y != 0), nil + case syntax.OrArit: + return oneIf(x != 0 || y != 0), nil + default: // syntax.Comma + // x is executed but its result discarded + return y, nil + } +} diff --git a/vendor/mvdan.cc/sh/v3/expand/braces.go b/vendor/mvdan.cc/sh/v3/expand/braces.go new file mode 100644 index 00000000..e0363aa2 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/braces.go @@ -0,0 +1,85 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package expand + +import ( + "strconv" + + "mvdan.cc/sh/v3/syntax" +) + +// Braces performs brace expansion on a word, given that it contains any +// syntax.BraceExp parts. For example, the word with a brace expansion +// "foo{bar,baz}" will return two literal words, "foobar" and "foobaz". +// +// Note that the resulting words may share word parts. 
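As an illustrative aside (not part of the vendored package), the sketch below drives both expansions by constructing the syntax nodes by hand rather than parsing shell source. The expand.Config literal with an Env field follows the package's upstream API and is an assumption here, since Config itself does not appear in this diff:

package main

import (
	"fmt"

	"mvdan.cc/sh/v3/expand"
	"mvdan.cc/sh/v3/syntax"
)

func main() {
	// Brace expansion of "foo{bar,baz}", built as a literal part followed
	// by a BraceExp with two single-literal elements.
	word := &syntax.Word{Parts: []syntax.WordPart{
		&syntax.Lit{Value: "foo"},
		&syntax.BraceExp{Elems: []*syntax.Word{
			{Parts: []syntax.WordPart{&syntax.Lit{Value: "bar"}}},
			{Parts: []syntax.WordPart{&syntax.Lit{Value: "baz"}}},
		}},
	}}
	for _, w := range expand.Braces(word) {
		var s string
		for _, part := range w.Parts {
			if lit, ok := part.(*syntax.Lit); ok {
				s += lit.Value
			}
		}
		fmt.Println(s) // foobar, then foobaz
	}

	// Arithmetic expansion of x * 7, with x resolved through the configured
	// environment (assumed Config.Env field; ListEnviron is defined below).
	cfg := &expand.Config{Env: expand.ListEnviron("x=6")}
	expr := &syntax.BinaryArithm{
		Op: syntax.Mul,
		X:  &syntax.Word{Parts: []syntax.WordPart{&syntax.Lit{Value: "x"}}},
		Y:  &syntax.Word{Parts: []syntax.WordPart{&syntax.Lit{Value: "7"}}},
	}
	n, err := expand.Arithm(cfg, expr)
	if err != nil {
		panic(err)
	}
	fmt.Println(n) // 42
}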
+func Braces(word *syntax.Word) []*syntax.Word { + var all []*syntax.Word + var left []syntax.WordPart + for i, wp := range word.Parts { + br, ok := wp.(*syntax.BraceExp) + if !ok { + left = append(left, wp) + continue + } + if br.Sequence { + chars := false + from, err1 := strconv.Atoi(br.Elems[0].Lit()) + to, err2 := strconv.Atoi(br.Elems[1].Lit()) + if err1 != nil || err2 != nil { + chars = true + from = int(br.Elems[0].Lit()[0]) + to = int(br.Elems[1].Lit()[0]) + } + upward := from <= to + incr := 1 + if !upward { + incr = -1 + } + if len(br.Elems) > 2 { + n, _ := strconv.Atoi(br.Elems[2].Lit()) + if n != 0 && n > 0 == upward { + incr = n + } + } + n := from + for { + if upward && n > to { + break + } + if !upward && n < to { + break + } + next := *word + next.Parts = next.Parts[i+1:] + lit := &syntax.Lit{} + if chars { + lit.Value = string(rune(n)) + } else { + lit.Value = strconv.Itoa(n) + } + next.Parts = append([]syntax.WordPart{lit}, next.Parts...) + exp := Braces(&next) + for _, w := range exp { + w.Parts = append(left, w.Parts...) + } + all = append(all, exp...) + n += incr + } + return all + } + for _, elem := range br.Elems { + next := *word + next.Parts = next.Parts[i+1:] + next.Parts = append(elem.Parts, next.Parts...) + exp := Braces(&next) + for _, w := range exp { + w.Parts = append(left, w.Parts...) + } + all = append(all, exp...) + } + return all + } + return []*syntax.Word{{Parts: left}} +} diff --git a/vendor/mvdan.cc/sh/v3/expand/doc.go b/vendor/mvdan.cc/sh/v3/expand/doc.go new file mode 100644 index 00000000..19d95180 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/doc.go @@ -0,0 +1,5 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +// Package expand contains code to perform various shell expansions. +package expand diff --git a/vendor/mvdan.cc/sh/v3/expand/environ.go b/vendor/mvdan.cc/sh/v3/expand/environ.go new file mode 100644 index 00000000..68ba384e --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/environ.go @@ -0,0 +1,227 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package expand + +import ( + "runtime" + "sort" + "strings" +) + +// Environ is the base interface for a shell's environment, allowing it to fetch +// variables by name and to iterate over all the currently set variables. +type Environ interface { + // Get retrieves a variable by its name. To check if the variable is + // set, use Variable.IsSet. + Get(name string) Variable + + // Each iterates over all the currently set variables, calling the + // supplied function on each variable. Iteration is stopped if the + // function returns false. + // + // The names used in the calls aren't required to be unique or sorted. + // If a variable name appears twice, the latest occurrence takes + // priority. + // + // Each is required to forward exported variables when executing + // programs. + Each(func(name string, vr Variable) bool) +} + +// WriteEnviron is an extension on Environ that supports modifying and deleting +// variables. +type WriteEnviron interface { + Environ + // Set sets a variable by name. If !vr.IsSet(), the variable is being + // unset; otherwise, the variable is being replaced. + // + // It is the implementation's responsibility to handle variable + // attributes correctly. For example, changing an exported variable's + // value does not unexport it, and overwriting a name reference variable + // should modify its target. 
+ // + // An error may be returned if the operation is invalid, such as if the + // name is empty or if we're trying to overwrite a read-only variable. + Set(name string, vr Variable) error +} + +type ValueKind uint8 + +const ( + Unset ValueKind = iota + String + NameRef + Indexed + Associative +) + +// Variable describes a shell variable, which can have a number of attributes +// and a value. +// +// A Variable is unset if its Kind field is Unset, which can be checked via +// Variable.IsSet. The zero value of a Variable is thus a valid unset variable. +// +// If a variable is set, its Value field will be a []string if it is an indexed +// array, a map[string]string if it's an associative array, or a string +// otherwise. +type Variable struct { + Local bool + Exported bool + ReadOnly bool + + Kind ValueKind + + Str string // Used when Kind is String or NameRef. + List []string // Used when Kind is Indexed. + Map map[string]string // Used when Kind is Associative. +} + +// IsSet returns whether the variable is set. An empty variable is set, but an +// undeclared variable is not. +func (v Variable) IsSet() bool { + return v.Kind != Unset +} + +// String returns the variable's value as a string. In general, this only makes +// sense if the variable has a string value or no value at all. +func (v Variable) String() string { + switch v.Kind { + case String: + return v.Str + case Indexed: + if len(v.List) > 0 { + return v.List[0] + } + case Associative: + // nothing to do + } + return "" +} + +// maxNameRefDepth defines the maximum number of times to follow references when +// resolving a variable. Otherwise, simple name reference loops could crash a +// program quite easily. +const maxNameRefDepth = 100 + +// Resolve follows a number of nameref variables, returning the last reference +// name that was followed and the variable that it points to. +func (v Variable) Resolve(env Environ) (string, Variable) { + name := "" + for i := 0; i < maxNameRefDepth; i++ { + if v.Kind != NameRef { + return name, v + } + name = v.Str // keep name for the next iteration + v = env.Get(name) + } + return name, Variable{} +} + +// FuncEnviron wraps a function mapping variable names to their string values, +// and implements Environ. Empty strings returned by the function will be +// treated as unset variables. All variables will be exported. +// +// Note that the returned Environ's Each method will be a no-op. +func FuncEnviron(fn func(string) string) Environ { + return funcEnviron(fn) +} + +type funcEnviron func(string) string + +func (f funcEnviron) Get(name string) Variable { + value := f(name) + if value == "" { + return Variable{} + } + return Variable{Exported: true, Kind: String, Str: value} +} + +func (f funcEnviron) Each(func(name string, vr Variable) bool) {} + +// ListEnviron returns an Environ with the supplied variables, in the form +// "key=value". All variables will be exported. The last value in pairs is used +// if multiple values are present. +// +// On Windows, where environment variable names are case-insensitive, the +// resulting variable names will all be uppercase. +func ListEnviron(pairs ...string) Environ { + return listEnvironWithUpper(runtime.GOOS == "windows", pairs...) +} + +// listEnvironWithUpper implements ListEnviron, but letting the tests specify +// whether to uppercase all names or not. +func listEnvironWithUpper(upper bool, pairs ...string) Environ { + list := append([]string{}, pairs...) 
+ if upper { + // Uppercase before sorting, so that we can remove duplicates + // without the need for linear search nor a map. + for i, s := range list { + if sep := strings.IndexByte(s, '='); sep > 0 { + list[i] = strings.ToUpper(s[:sep]) + s[sep:] + } + } + } + + sort.SliceStable(list, func(i, j int) bool { + isep := strings.IndexByte(list[i], '=') + jsep := strings.IndexByte(list[j], '=') + if isep < 0 { + isep = 0 + } else { + isep += 1 + } + if jsep < 0 { + jsep = 0 + } else { + jsep += 1 + } + return list[i][:isep] < list[j][:jsep] + }) + + last := "" + for i := 0; i < len(list); { + s := list[i] + sep := strings.IndexByte(s, '=') + if sep <= 0 { + // invalid element; remove it + list = append(list[:i], list[i+1:]...) + continue + } + name := s[:sep] + if last == name { + // duplicate; the last one wins + list = append(list[:i-1], list[i:]...) + continue + } + last = name + i++ + } + return listEnviron(list) +} + +// listEnviron is a sorted list of "name=value" strings. +type listEnviron []string + +func (l listEnviron) Get(name string) Variable { + prefix := name + "=" + i := sort.SearchStrings(l, prefix) + if i < len(l) && strings.HasPrefix(l[i], prefix) { + return Variable{Exported: true, Kind: String, Str: strings.TrimPrefix(l[i], prefix)} + } + return Variable{} +} + +func (l listEnviron) Each(fn func(name string, vr Variable) bool) { + for _, pair := range l { + i := strings.IndexByte(pair, '=') + if i < 0 { + // should never happen; see listEnvironWithUpper + panic("expand.listEnviron: did not expect malformed name-value pair: " + pair) + } + name, value := pair[:i], pair[i+1:] + if !fn(name, Variable{Exported: true, Kind: String, Str: value}) { + return + } + } +} diff --git a/vendor/mvdan.cc/sh/v3/expand/expand.go b/vendor/mvdan.cc/sh/v3/expand/expand.go new file mode 100644 index 00000000..a3152dce --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/expand.go @@ -0,0 +1,1038 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package expand + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/fs" + "os" + "os/user" + "path/filepath" + "regexp" + "runtime" + "strconv" + "strings" + "syscall" + + "mvdan.cc/sh/v3/pattern" + "mvdan.cc/sh/v3/syntax" +) + +// A Config specifies details about how shell expansion should be performed. The +// zero value is a valid configuration. +type Config struct { + // Env is used to get and set environment variables when performing + // shell expansions. Some special parameters are also expanded via this + // interface, such as: + // + // * "#", "@", "*", "0"-"9" for the shell's parameters + // * "?", "$", "PPID" for the shell's status and process + // * "HOME foo" to retrieve user foo's home directory (if unset, + // os/user.Lookup will be used) + // + // If nil, there are no environment variables set. Use + // ListEnviron(os.Environ()...) to use the system's environment + // variables. + Env Environ + + // CmdSubst expands a command substitution node, writing its standard + // output to the provided io.Writer. + // + // If nil, encountering a command substitution will result in an + // UnexpectedCommandError. + CmdSubst func(io.Writer, *syntax.CmdSubst) error + + // ProcSubst expands a process substitution node. + // + // Note that this feature is a work in progress, and the signature of + // this field might change until #451 is completely fixed. + ProcSubst func(*syntax.ProcSubst) (string, error) + + // TODO(v4): update to os.Readdir with fs.DirEntry. 
+ // We could possibly expose that as a preferred ReadDir2 before then, + // to allow users to opt into better performance in v3. + + // ReadDir is used for file path globbing. If nil, globbing is disabled. + // Use ioutil.ReadDir to use the filesystem directly. + ReadDir func(string) ([]os.FileInfo, error) + + // GlobStar corresponds to the shell option that allows globbing with + // "**". + GlobStar bool + + // NullGlob corresponds to the shell option that allows globbing + // patterns which match nothing to result in zero fields. + NullGlob bool + + // NoUnset corresponds to the shell option that treats unset variables + // as errors. + NoUnset bool + + bufferAlloc bytes.Buffer // TODO: use strings.Builder + fieldAlloc [4]fieldPart + fieldsAlloc [4][]fieldPart + + ifs string + // A pointer to a parameter expansion node, if we're inside one. + // Necessary for ${LINENO}. + curParam *syntax.ParamExp +} + +// UnexpectedCommandError is returned if a command substitution is encountered +// when Config.CmdSubst is nil. +type UnexpectedCommandError struct { + Node *syntax.CmdSubst +} + +func (u UnexpectedCommandError) Error() string { + return fmt.Sprintf("unexpected command substitution at %s", u.Node.Pos()) +} + +var zeroConfig = &Config{} + +func prepareConfig(cfg *Config) *Config { + if cfg == nil { + cfg = zeroConfig + } + if cfg.Env == nil { + cfg.Env = FuncEnviron(func(string) string { return "" }) + } + + cfg.ifs = " \t\n" + if vr := cfg.Env.Get("IFS"); vr.IsSet() { + cfg.ifs = vr.String() + } + return cfg +} + +func (cfg *Config) ifsRune(r rune) bool { + for _, r2 := range cfg.ifs { + if r == r2 { + return true + } + } + return false +} + +func (cfg *Config) ifsJoin(strs []string) string { + sep := "" + if cfg.ifs != "" { + sep = cfg.ifs[:1] + } + return strings.Join(strs, sep) +} + +func (cfg *Config) strBuilder() *bytes.Buffer { + b := &cfg.bufferAlloc + b.Reset() + return b +} + +func (cfg *Config) envGet(name string) string { + return cfg.Env.Get(name).String() +} + +func (cfg *Config) envSet(name, value string) error { + wenv, ok := cfg.Env.(WriteEnviron) + if !ok { + return fmt.Errorf("environment is read-only") + } + return wenv.Set(name, Variable{Kind: String, Str: value}) +} + +// Literal expands a single shell word. It is similar to Fields, but the result +// is a single string. This is the behavior when a word is used as the value in +// a shell variable assignment, for example. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func Literal(cfg *Config, word *syntax.Word) (string, error) { + if word == nil { + return "", nil + } + cfg = prepareConfig(cfg) + field, err := cfg.wordField(word.Parts, quoteNone) + if err != nil { + return "", err + } + return cfg.fieldJoin(field), nil +} + +// Document expands a single shell word as if it were within double quotes. It +// is similar to Literal, but without brace expansion, tilde expansion, and +// globbing. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func Document(cfg *Config, word *syntax.Word) (string, error) { + if word == nil { + return "", nil + } + cfg = prepareConfig(cfg) + field, err := cfg.wordField(word.Parts, quoteDouble) + if err != nil { + return "", err + } + return cfg.fieldJoin(field), nil +} + +const patMode = pattern.Filenames | pattern.Braces + +// Pattern expands a single shell word as a pattern, using syntax.QuotePattern +// on any non-quoted parts of the input word. 
The result can be used on +// syntax.TranslatePattern directly. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func Pattern(cfg *Config, word *syntax.Word) (string, error) { + cfg = prepareConfig(cfg) + field, err := cfg.wordField(word.Parts, quoteNone) + if err != nil { + return "", err + } + buf := cfg.strBuilder() + for _, part := range field { + if part.quote > quoteNone { + buf.WriteString(pattern.QuoteMeta(part.val, patMode)) + } else { + buf.WriteString(part.val) + } + } + return buf.String(), nil +} + +// Format expands a format string with a number of arguments, following the +// shell's format specifications. These include printf(1), among others. +// +// The resulting string is returned, along with the number of arguments used. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func Format(cfg *Config, format string, args []string) (string, int, error) { + cfg = prepareConfig(cfg) + buf := cfg.strBuilder() + + consumed, err := formatIntoBuffer(buf, format, args) + if err != nil { + return "", 0, err + } + + return buf.String(), consumed, err +} + +// Format expands a format string with a number of arguments, following the +// shell's format specifications. These include printf(1), among others. +// +// The resulting string is written to the provided buffer, and the number +// of arguments used is returned. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func formatIntoBuffer(buf *bytes.Buffer, format string, args []string) (int, error) { + var fmts []byte + initialArgs := len(args) + +formatLoop: + for i := 0; i < len(format); i++ { + // readDigits reads from 0 to max digits, either octal or + // hexadecimal. + readDigits := func(max int, hex bool) string { + j := 0 + for ; j < max; j++ { + c := format[i+j] + if (c >= '0' && c <= '9') || + (hex && c >= 'a' && c <= 'f') || + (hex && c >= 'A' && c <= 'F') { + // valid octal or hex char + } else { + break + } + } + digits := format[i : i+j] + i += j - 1 // -1 since the outer loop does i++ + return digits + } + c := format[i] + switch { + case c == '\\': // escaped + i++ + switch c = format[i]; c { + case 'a': // bell + buf.WriteByte('\a') + case 'b': // backspace + buf.WriteByte('\b') + case 'e', 'E': // escape + buf.WriteByte('\x1b') + case 'f': // form feed + buf.WriteByte('\f') + case 'n': // new line + buf.WriteByte('\n') + case 'r': // carriage return + buf.WriteByte('\r') + case 't': // horizontal tab + buf.WriteByte('\t') + case 'v': // vertical tab + buf.WriteByte('\v') + case '\\', '\'', '"', '?': // just the character + buf.WriteByte(c) + case '0', '1', '2', '3', '4', '5', '6', '7': + digits := readDigits(3, false) + // if digits don't fit in 8 bits, 0xff via strconv + n, _ := strconv.ParseUint(digits, 8, 8) + buf.WriteByte(byte(n)) + case 'x', 'u', 'U': + i++ + max := 2 + if c == 'u' { + max = 4 + } else if c == 'U' { + max = 8 + } + digits := readDigits(max, true) + if len(digits) > 0 { + // can't error + n, _ := strconv.ParseUint(digits, 16, 32) + if n == 0 { + // If we're about to print \x00, + // stop the entire loop, like bash. 
+ break formatLoop + } + if c == 'x' { + // always as a single byte + buf.WriteByte(byte(n)) + } else { + buf.WriteRune(rune(n)) + } + break + } + fallthrough + default: // no escape sequence + buf.WriteByte('\\') + buf.WriteByte(c) + } + case len(fmts) > 0: + switch c { + case '%': + buf.WriteByte('%') + fmts = nil + case 'c': + var b byte + if len(args) > 0 { + arg := "" + arg, args = args[0], args[1:] + if len(arg) > 0 { + b = arg[0] + } + } + buf.WriteByte(b) + fmts = nil + case '+', '-', ' ': + if len(fmts) > 1 { + return 0, fmt.Errorf("invalid format char: %c", c) + } + fmts = append(fmts, c) + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + fmts = append(fmts, c) + case 's', 'b', 'd', 'i', 'u', 'o', 'x': + arg := "" + if len(args) > 0 { + arg, args = args[0], args[1:] + } + var farg interface{} + if c == 'b' { + // Passing in nil for args ensures that % format + // strings aren't processed; only escape sequences + // will be handled. + _, err := formatIntoBuffer(buf, arg, nil) + if err != nil { + return 0, err + } + } else if c != 's' { + n, _ := strconv.ParseInt(arg, 0, 0) + if c == 'i' || c == 'd' { + farg = int(n) + } else { + farg = uint(n) + } + if c == 'i' || c == 'u' { + c = 'd' + } + } else { + farg = arg + } + if farg != nil { + fmts = append(fmts, c) + fmt.Fprintf(buf, string(fmts), farg) + } + fmts = nil + default: + return 0, fmt.Errorf("invalid format char: %c", c) + } + case args != nil && c == '%': + // if args == nil, we are not doing format + // arguments + fmts = []byte{c} + default: + buf.WriteByte(c) + } + } + if len(fmts) > 0 { + return 0, fmt.Errorf("missing format char") + } + return initialArgs - len(args), nil +} + +func (cfg *Config) fieldJoin(parts []fieldPart) string { + switch len(parts) { + case 0: + return "" + case 1: // short-cut without a string copy + return parts[0].val + } + buf := cfg.strBuilder() + for _, part := range parts { + buf.WriteString(part.val) + } + return buf.String() +} + +func (cfg *Config) escapedGlobField(parts []fieldPart) (escaped string, glob bool) { + buf := cfg.strBuilder() + for _, part := range parts { + if part.quote > quoteNone { + buf.WriteString(pattern.QuoteMeta(part.val, patMode)) + continue + } + buf.WriteString(part.val) + if pattern.HasMeta(part.val, patMode) { + glob = true + } + } + if glob { // only copy the string if it will be used + escaped = buf.String() + } + return escaped, glob +} + +// Fields expands a number of words as if they were arguments in a shell +// command. This includes brace expansion, tilde expansion, parameter expansion, +// command substitution, arithmetic expansion, and quote removal. +func Fields(cfg *Config, words ...*syntax.Word) ([]string, error) { + cfg = prepareConfig(cfg) + fields := make([]string, 0, len(words)) + dir := cfg.envGet("PWD") + for _, word := range words { + word := *word // make a copy, since SplitBraces replaces the Parts slice + afterBraces := []*syntax.Word{&word} + if syntax.SplitBraces(&word) { + afterBraces = Braces(&word) + } + for _, word2 := range afterBraces { + wfields, err := cfg.wordFields(word2.Parts) + if err != nil { + return nil, err + } + for _, field := range wfields { + path, doGlob := cfg.escapedGlobField(field) + var matches []string + var syntaxError *pattern.SyntaxError + if doGlob && cfg.ReadDir != nil { + matches, err = cfg.glob(dir, path) + if !errors.As(err, &syntaxError) { + if err != nil { + return nil, err + } + if len(matches) > 0 || cfg.NullGlob { + fields = append(fields, matches...) 
+ continue + } + } + } + fields = append(fields, cfg.fieldJoin(field)) + } + } + } + return fields, nil +} + +type fieldPart struct { + val string + quote quoteLevel +} + +type quoteLevel uint + +const ( + quoteNone quoteLevel = iota + quoteDouble + quoteSingle +) + +func (cfg *Config) wordField(wps []syntax.WordPart, ql quoteLevel) ([]fieldPart, error) { + var field []fieldPart + for i, wp := range wps { + switch x := wp.(type) { + case *syntax.Lit: + s := x.Value + if i == 0 && ql == quoteNone { + if prefix, rest := cfg.expandUser(s); prefix != "" { + // TODO: return two separate fieldParts, + // like in wordFields? + s = prefix + rest + } + } + if ql == quoteDouble && strings.Contains(s, "\\") { + buf := cfg.strBuilder() + for i := 0; i < len(s); i++ { + b := s[i] + if b == '\\' && i+1 < len(s) { + switch s[i+1] { + case '"', '\\', '$', '`': // special chars + continue + } + } + buf.WriteByte(b) + } + s = buf.String() + } + if i := strings.IndexByte(s, '\x00'); i >= 0 { + s = s[:i] + } + field = append(field, fieldPart{val: s}) + case *syntax.SglQuoted: + fp := fieldPart{quote: quoteSingle, val: x.Value} + if x.Dollar { + fp.val, _, _ = Format(cfg, fp.val, nil) + } + field = append(field, fp) + case *syntax.DblQuoted: + wfield, err := cfg.wordField(x.Parts, quoteDouble) + if err != nil { + return nil, err + } + for _, part := range wfield { + part.quote = quoteDouble + field = append(field, part) + } + case *syntax.ParamExp: + val, err := cfg.paramExp(x) + if err != nil { + return nil, err + } + field = append(field, fieldPart{val: val}) + case *syntax.CmdSubst: + val, err := cfg.cmdSubst(x) + if err != nil { + return nil, err + } + field = append(field, fieldPart{val: val}) + case *syntax.ArithmExp: + n, err := Arithm(cfg, x.X) + if err != nil { + return nil, err + } + field = append(field, fieldPart{val: strconv.Itoa(n)}) + case *syntax.ProcSubst: + path, err := cfg.ProcSubst(x) + if err != nil { + return nil, err + } + field = append(field, fieldPart{val: path}) + default: + panic(fmt.Sprintf("unhandled word part: %T", x)) + } + } + return field, nil +} + +func (cfg *Config) cmdSubst(cs *syntax.CmdSubst) (string, error) { + if cfg.CmdSubst == nil { + return "", UnexpectedCommandError{Node: cs} + } + buf := cfg.strBuilder() + if err := cfg.CmdSubst(buf, cs); err != nil { + return "", err + } + out := buf.String() + if strings.IndexByte(out, '\x00') >= 0 { + out = strings.ReplaceAll(out, "\x00", "") + } + return strings.TrimRight(out, "\n"), nil +} + +func (cfg *Config) wordFields(wps []syntax.WordPart) ([][]fieldPart, error) { + fields := cfg.fieldsAlloc[:0] + curField := cfg.fieldAlloc[:0] + allowEmpty := false + flush := func() { + if len(curField) == 0 { + return + } + fields = append(fields, curField) + curField = nil + } + splitAdd := func(val string) { + fieldStart := -1 + for i, r := range val { + if cfg.ifsRune(r) { + if fieldStart >= 0 { // ending a field + curField = append(curField, fieldPart{val: val[fieldStart:i]}) + fieldStart = -1 + } + flush() + } else { + if fieldStart < 0 { // starting a new field + fieldStart = i + } + } + } + if fieldStart >= 0 { // ending a field without IFS + curField = append(curField, fieldPart{val: val[fieldStart:]}) + } + } + for i, wp := range wps { + switch x := wp.(type) { + case *syntax.Lit: + s := x.Value + if i == 0 { + prefix, rest := cfg.expandUser(s) + curField = append(curField, fieldPart{ + quote: quoteSingle, + val: prefix, + }) + s = rest + } + if strings.Contains(s, "\\") { + buf := cfg.strBuilder() + for i := 0; i < len(s); i++ 
{ + b := s[i] + if b == '\\' { + if i++; i >= len(s) { + break + } + b = s[i] + } + buf.WriteByte(b) + } + s = buf.String() + } + curField = append(curField, fieldPart{val: s}) + case *syntax.SglQuoted: + allowEmpty = true + fp := fieldPart{quote: quoteSingle, val: x.Value} + if x.Dollar { + fp.val, _, _ = Format(cfg, fp.val, nil) + } + curField = append(curField, fp) + case *syntax.DblQuoted: + if len(x.Parts) == 1 { + pe, _ := x.Parts[0].(*syntax.ParamExp) + if elems := cfg.quotedElemFields(pe); elems != nil { + for i, elem := range elems { + if i > 0 { + flush() + } + curField = append(curField, fieldPart{ + quote: quoteDouble, + val: elem, + }) + } + continue + } + } + allowEmpty = true + wfield, err := cfg.wordField(x.Parts, quoteDouble) + if err != nil { + return nil, err + } + for _, part := range wfield { + part.quote = quoteDouble + curField = append(curField, part) + } + case *syntax.ParamExp: + val, err := cfg.paramExp(x) + if err != nil { + return nil, err + } + splitAdd(val) + case *syntax.CmdSubst: + val, err := cfg.cmdSubst(x) + if err != nil { + return nil, err + } + splitAdd(val) + case *syntax.ArithmExp: + n, err := Arithm(cfg, x.X) + if err != nil { + return nil, err + } + curField = append(curField, fieldPart{val: strconv.Itoa(n)}) + case *syntax.ProcSubst: + path, err := cfg.ProcSubst(x) + if err != nil { + return nil, err + } + splitAdd(path) + case *syntax.ExtGlob: + return nil, fmt.Errorf("extended globbing is not supported") + default: + panic(fmt.Sprintf("unhandled word part: %T", x)) + } + } + flush() + if allowEmpty && len(fields) == 0 { + fields = append(fields, curField) + } + return fields, nil +} + +// quotedElemFields returns the list of elements resulting from a quoted +// parameter expansion that should be treated especially, like "${foo[@]}". +func (cfg *Config) quotedElemFields(pe *syntax.ParamExp) []string { + if pe == nil || pe.Length || pe.Width { + return nil + } + name := pe.Param.Value + if pe.Excl { + switch pe.Names { + case syntax.NamesPrefixWords: // "${!prefix@}" + return cfg.namesByPrefix(pe.Param.Value) + case syntax.NamesPrefix: // "${!prefix*}" + return nil + } + switch nodeLit(pe.Index) { + case "@": // "${!name[@]}" + switch vr := cfg.Env.Get(name); vr.Kind { + case Indexed: + keys := make([]string, 0, len(vr.Map)) + for key := range vr.List { + keys = append(keys, strconv.Itoa(key)) + } + return keys + case Associative: + keys := make([]string, 0, len(vr.Map)) + for key := range vr.Map { + keys = append(keys, key) + } + return keys + } + } + return nil + } + switch name { + case "*": // "${*}" + return []string{cfg.ifsJoin(cfg.Env.Get(name).List)} + case "@": // "${@}" + return cfg.Env.Get(name).List + } + switch nodeLit(pe.Index) { + case "@": // "${name[@]}" + switch vr := cfg.Env.Get(name); vr.Kind { + case Indexed: + return vr.List + case Associative: + elems := make([]string, 0, len(vr.Map)) + for _, elem := range vr.Map { + elems = append(elems, elem) + } + return elems + } + case "*": // "${name[*]}" + if vr := cfg.Env.Get(name); vr.Kind == Indexed { + return []string{cfg.ifsJoin(vr.List)} + } + } + return nil +} + +func (cfg *Config) expandUser(field string) (prefix, rest string) { + if len(field) == 0 || field[0] != '~' { + return "", field + } + name := field[1:] + if i := strings.Index(name, "/"); i >= 0 { + rest = name[i:] + name = name[:i] + } + if name == "" { + // Current user; try via "HOME", otherwise fall back to the + // system's appropriate home dir env var. Don't use os/user, as + // that's overkill. 
We can't use os.UserHomeDir, because we want + // to use cfg.Env, and we always want to check "HOME" first. + + if vr := cfg.Env.Get("HOME"); vr.IsSet() { + return vr.String(), rest + } + + if runtime.GOOS == "windows" { + if vr := cfg.Env.Get("USERPROFILE"); vr.IsSet() { + return vr.String(), rest + } + } + return "", field + } + + // Not the current user; try via "HOME ", otherwise fall back to + // os/user. There isn't a way to lookup user home dirs without cgo. + + if vr := cfg.Env.Get("HOME " + name); vr.IsSet() { + return vr.String(), rest + } + + u, err := user.Lookup(name) + if err != nil { + return "", field + } + return u.HomeDir, rest +} + +func findAllIndex(pat, name string, n int) [][]int { + expr, err := pattern.Regexp(pat, 0) + if err != nil { + return nil + } + rx := regexp.MustCompile(expr) + return rx.FindAllStringIndex(name, n) +} + +var rxGlobStar = regexp.MustCompile(".*") + +// pathJoin2 is a simpler version of filepath.Join without cleaning the result, +// since that's needed for globbing. +func pathJoin2(elem1, elem2 string) string { + if elem1 == "" { + return elem2 + } + if strings.HasSuffix(elem1, string(filepath.Separator)) { + return elem1 + elem2 + } + return elem1 + string(filepath.Separator) + elem2 +} + +// pathSplit splits a file path into its elements, retaining empty ones. Before +// splitting, slashes are replaced with filepath.Separator, so that splitting +// Unix paths on Windows works as well. +func pathSplit(path string) []string { + path = filepath.FromSlash(path) + return strings.Split(path, string(filepath.Separator)) +} + +func (cfg *Config) glob(base, pat string) ([]string, error) { + parts := pathSplit(pat) + matches := []string{""} + if filepath.IsAbs(pat) { + if parts[0] == "" { + // unix-like + matches[0] = string(filepath.Separator) + } else { + // windows (for some reason it won't work without the + // trailing separator) + matches[0] = parts[0] + string(filepath.Separator) + } + parts = parts[1:] + } + // TODO: as an optimization, we could do chunks of the path all at once, + // like doing a single stat for "/foo/bar" in "/foo/bar/*". + + // TODO: Another optimization would be to reduce the number of ReadDir calls. + // For example, /foo/* can end up doing one duplicate call: + // + // ReadDir("/foo") to ensure that "/foo/" exists and only matches a directory + // ReadDir("/foo") glob "*" + + for i, part := range parts { + // Keep around for debugging. + // log.Printf("matches %q part %d %q", matches, i, part) + + wantDir := i < len(parts)-1 + switch { + case part == "", part == ".", part == "..": + for i, dir := range matches { + matches[i] = pathJoin2(dir, part) + } + continue + case !pattern.HasMeta(part, patMode): + var newMatches []string + for _, dir := range matches { + match := dir + if !filepath.IsAbs(match) { + match = filepath.Join(base, match) + } + match = pathJoin2(match, part) + // We can't use ReadDir on the parent and match the directory + // entry by name, because short paths on Windows break that. + // Our only option is to ReadDir on the directory entry itself, + // which can be wasteful if we only want to see if it exists, + // but at least it's correct in all scenarios. 
+ if _, err := cfg.ReadDir(match); err != nil { + const errPathNotFound = syscall.Errno(3) // from syscall/types_windows.go, to avoid a build tag + var pathErr *os.PathError + if runtime.GOOS == "windows" && errors.As(err, &pathErr) && pathErr.Err == errPathNotFound { + // Unfortunately, os.File.Readdir on a regular file on + // Windows returns an error that satisfies ErrNotExist. + // Luckily, it returns a special "path not found" rather + // than the normal "file not found" for missing files, + // so we can use that knowledge to work around the bug. + // See https://github.com/golang/go/issues/46734. + // TODO: remove when the Go issue above is resolved. + } else if errors.Is(err, fs.ErrNotExist) { + continue // simply doesn't exist + } + if wantDir { + continue // exists but not a directory + } + } + newMatches = append(newMatches, pathJoin2(dir, part)) + } + matches = newMatches + continue + case part == "**" && cfg.GlobStar: + // Find all recursive matches for "**". + // Note that we need the results to be in depth-first order, + // and to avoid recursion, we use a slice as a stack. + // Since we pop from the back, we populate the stack backwards. + stack := make([]string, 0, len(matches)) + for i := len(matches) - 1; i >= 0; i-- { + // "a/**" should match "a/ a/b a/b/cfg ..."; + // note how the zero-match case has a trailing separator. + stack = append(stack, pathJoin2(matches[i], "")) + } + matches = matches[:0] + var newMatches []string // to reuse its capacity + for len(stack) > 0 { + dir := stack[len(stack)-1] + stack = stack[:len(stack)-1] + + // Don't include the original "" match as it's not a valid path. + if dir != "" { + matches = append(matches, dir) + } + + // If dir is not a directory, we keep the stack as-is and continue. + newMatches = newMatches[:0] + newMatches, _ = cfg.globDir(base, dir, rxGlobStar, false, wantDir, newMatches) + for i := len(newMatches) - 1; i >= 0; i-- { + stack = append(stack, newMatches[i]) + } + } + continue + } + expr, err := pattern.Regexp(part, pattern.Filenames|pattern.EntireString) + if err != nil { + return nil, err + } + rx := regexp.MustCompile(expr) + matchHidden := part[0] == byte('.') + var newMatches []string + for _, dir := range matches { + newMatches, err = cfg.globDir(base, dir, rx, matchHidden, wantDir, newMatches) + if err != nil { + return nil, err + } + } + matches = newMatches + } + return matches, nil +} + +func (cfg *Config) globDir(base, dir string, rx *regexp.Regexp, matchHidden bool, wantDir bool, matches []string) ([]string, error) { + fullDir := dir + if !filepath.IsAbs(dir) { + fullDir = filepath.Join(base, dir) + } + infos, err := cfg.ReadDir(fullDir) + if err != nil { + // We still want to return matches, for the sake of reusing slices. + return matches, err + } + for _, info := range infos { + name := info.Name() + if !wantDir { + // No filtering. + } else if mode := info.Mode(); mode&os.ModeSymlink != 0 { + // We need to know if the symlink points to a directory. + // This requires an extra syscall, as ReadDir on the parent directory + // does not follow symlinks for each of the directory entries. + // ReadDir is somewhat wasteful here, as we only want its error result, + // but we could try to reuse its result as per the TODO in Config.glob. + if _, err := cfg.ReadDir(filepath.Join(fullDir, info.Name())); err != nil { + continue + } + } else if !mode.IsDir() { + // Not a symlink nor a directory. + continue + } + if !matchHidden && name[0] == '.' 
{ + continue + } + if rx.MatchString(name) { + matches = append(matches, pathJoin2(dir, name)) + } + } + return matches, nil +} + +// ReadFields TODO write doc. +// +// The config specifies shell expansion options; nil behaves the same as an +// empty config. +func ReadFields(cfg *Config, s string, n int, raw bool) []string { + cfg = prepareConfig(cfg) + type pos struct { + start, end int + } + var fpos []pos + + runes := make([]rune, 0, len(s)) + infield := false + esc := false + for _, r := range s { + if infield { + if cfg.ifsRune(r) && (raw || !esc) { + fpos[len(fpos)-1].end = len(runes) + infield = false + } + } else { + if !cfg.ifsRune(r) && (raw || !esc) { + fpos = append(fpos, pos{start: len(runes), end: -1}) + infield = true + } + } + if r == '\\' { + if raw || esc { + runes = append(runes, r) + } + esc = !esc + continue + } + runes = append(runes, r) + esc = false + } + if len(fpos) == 0 { + return nil + } + if infield { + fpos[len(fpos)-1].end = len(runes) + } + + switch { + case n == 1: + // include heading/trailing IFSs + fpos[0].start, fpos[0].end = 0, len(runes) + fpos = fpos[:1] + case n != -1 && n < len(fpos): + // combine to max n fields + fpos[n-1].end = fpos[len(fpos)-1].end + fpos = fpos[:n] + } + + fields := make([]string, len(fpos)) + for i, p := range fpos { + fields[i] = string(runes[p.start:p.end]) + } + return fields +} diff --git a/vendor/mvdan.cc/sh/v3/expand/param.go b/vendor/mvdan.cc/sh/v3/expand/param.go new file mode 100644 index 00000000..bf0d2382 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/expand/param.go @@ -0,0 +1,428 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package expand + +import ( + "fmt" + "regexp" + "sort" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "mvdan.cc/sh/v3/pattern" + "mvdan.cc/sh/v3/syntax" +) + +func nodeLit(node syntax.Node) string { + if word, ok := node.(*syntax.Word); ok { + return word.Lit() + } + return "" +} + +type UnsetParameterError struct { + Node *syntax.ParamExp + Message string +} + +func (u UnsetParameterError) Error() string { + return fmt.Sprintf("%s: %s", u.Node.Param.Value, u.Message) +} + +func overridingUnset(pe *syntax.ParamExp) bool { + if pe.Exp == nil { + return false + } + switch pe.Exp.Op { + case syntax.AlternateUnset, syntax.AlternateUnsetOrNull, + syntax.DefaultUnset, syntax.DefaultUnsetOrNull, + syntax.ErrorUnset, syntax.ErrorUnsetOrNull, + syntax.AssignUnset, syntax.AssignUnsetOrNull: + return true + } + return false +} + +func (cfg *Config) paramExp(pe *syntax.ParamExp) (string, error) { + oldParam := cfg.curParam + cfg.curParam = pe + defer func() { cfg.curParam = oldParam }() + + name := pe.Param.Value + index := pe.Index + switch name { + case "@", "*": + index = &syntax.Word{Parts: []syntax.WordPart{ + &syntax.Lit{Value: name}, + }} + } + var vr Variable + switch name { + case "LINENO": + // This is the only parameter expansion that the environment + // interface cannot satisfy. 
+ line := uint64(cfg.curParam.Pos().Line()) + vr = Variable{Kind: String, Str: strconv.FormatUint(line, 10)} + default: + vr = cfg.Env.Get(name) + } + orig := vr + _, vr = vr.Resolve(cfg.Env) + if cfg.NoUnset && vr.Kind == Unset && !overridingUnset(pe) { + return "", UnsetParameterError{ + Node: pe, + Message: "unbound variable", + } + } + + var sliceOffset, sliceLen int + if pe.Slice != nil { + var err error + if pe.Slice.Offset != nil { + sliceOffset, err = Arithm(cfg, pe.Slice.Offset) + if err != nil { + return "", err + } + } + if pe.Slice.Length != nil { + sliceLen, err = Arithm(cfg, pe.Slice.Length) + if err != nil { + return "", err + } + } + } + + var ( + str string + elems []string + + indexAllElements bool // true if var has been accessed with * or @ index + callVarInd = true + ) + + switch nodeLit(index) { + case "@", "*": + switch vr.Kind { + case Unset: + elems = nil + indexAllElements = true + case Indexed: + indexAllElements = true + callVarInd = false + elems = vr.List + slicePos := func(n int) int { + if n < 0 { + n = len(elems) + n + if n < 0 { + n = len(elems) + } + } else if n > len(elems) { + n = len(elems) + } + return n + } + if pe.Slice != nil && pe.Slice.Offset != nil { + elems = elems[slicePos(sliceOffset):] + } + if pe.Slice != nil && pe.Slice.Length != nil { + elems = elems[:slicePos(sliceLen)] + } + str = strings.Join(elems, " ") + } + } + if callVarInd { + var err error + str, err = cfg.varInd(vr, index) + if err != nil { + return "", err + } + } + if !indexAllElements { + elems = []string{str} + } + + switch { + case pe.Length: + n := len(elems) + switch nodeLit(index) { + case "@", "*": + default: + n = utf8.RuneCountInString(str) + } + str = strconv.Itoa(n) + case pe.Excl: + var strs []string + switch { + case pe.Names != 0: + strs = cfg.namesByPrefix(pe.Param.Value) + case orig.Kind == NameRef: + strs = append(strs, orig.Str) + case pe.Index != nil && vr.Kind == Indexed: + for i, e := range vr.List { + if e != "" { + strs = append(strs, strconv.Itoa(i)) + } + } + case pe.Index != nil && vr.Kind == Associative: + for k := range vr.Map { + strs = append(strs, k) + } + case vr.Kind == Unset: + return "", fmt.Errorf("invalid indirect expansion") + case str == "": + return "", nil + default: + vr = cfg.Env.Get(str) + strs = append(strs, vr.String()) + } + sort.Strings(strs) + str = strings.Join(strs, " ") + case pe.Slice != nil: + if callVarInd { + slicePos := func(n int) int { + if n < 0 { + n = len(str) + n + if n < 0 { + n = len(str) + } + } else if n > len(str) { + n = len(str) + } + return n + } + if pe.Slice.Offset != nil { + str = str[slicePos(sliceOffset):] + } + if pe.Slice.Length != nil { + str = str[:slicePos(sliceLen)] + } + } else { // elems are already sliced + } + case pe.Repl != nil: + orig, err := Pattern(cfg, pe.Repl.Orig) + if err != nil { + return "", err + } + with, err := Literal(cfg, pe.Repl.With) + if err != nil { + return "", err + } + n := 1 + if pe.Repl.All { + n = -1 + } + locs := findAllIndex(orig, str, n) + buf := cfg.strBuilder() + last := 0 + for _, loc := range locs { + buf.WriteString(str[last:loc[0]]) + buf.WriteString(with) + last = loc[1] + } + buf.WriteString(str[last:]) + str = buf.String() + case pe.Exp != nil: + arg, err := Literal(cfg, pe.Exp.Word) + if err != nil { + return "", err + } + switch op := pe.Exp.Op; op { + case syntax.AlternateUnsetOrNull: + if str == "" { + break + } + fallthrough + case syntax.AlternateUnset: + if vr.IsSet() { + str = arg + } + case syntax.DefaultUnset: + if vr.IsSet() { + break + } + 
fallthrough + case syntax.DefaultUnsetOrNull: + if str == "" { + str = arg + } + case syntax.ErrorUnset: + if vr.IsSet() { + break + } + fallthrough + case syntax.ErrorUnsetOrNull: + if str == "" { + return "", UnsetParameterError{ + Node: pe, + Message: arg, + } + } + case syntax.AssignUnset: + if vr.IsSet() { + break + } + fallthrough + case syntax.AssignUnsetOrNull: + if str == "" { + if err := cfg.envSet(name, arg); err != nil { + return "", err + } + str = arg + } + case syntax.RemSmallPrefix, syntax.RemLargePrefix, + syntax.RemSmallSuffix, syntax.RemLargeSuffix: + suffix := op == syntax.RemSmallSuffix || op == syntax.RemLargeSuffix + small := op == syntax.RemSmallPrefix || op == syntax.RemSmallSuffix + for i, elem := range elems { + elems[i] = removePattern(elem, arg, suffix, small) + } + str = strings.Join(elems, " ") + case syntax.UpperFirst, syntax.UpperAll, + syntax.LowerFirst, syntax.LowerAll: + + caseFunc := unicode.ToLower + if op == syntax.UpperFirst || op == syntax.UpperAll { + caseFunc = unicode.ToUpper + } + all := op == syntax.UpperAll || op == syntax.LowerAll + + // empty string means '?'; nothing to do there + expr, err := pattern.Regexp(arg, 0) + if err != nil { + return str, nil + } + rx := regexp.MustCompile(expr) + + for i, elem := range elems { + rs := []rune(elem) + for ri, r := range rs { + if rx.MatchString(string(r)) { + rs[ri] = caseFunc(r) + if !all { + break + } + } + } + elems[i] = string(rs) + } + str = strings.Join(elems, " ") + case syntax.OtherParamOps: + switch arg { + case "Q": + str, err = syntax.Quote(str, syntax.LangBash) + if err != nil { + // Is this even possible? If a user runs into this panic, + // it's most likely a bug we need to fix. + panic(err) + } + case "E": + tail := str + var rns []rune + for tail != "" { + var rn rune + rn, _, tail, _ = strconv.UnquoteChar(tail, 0) + rns = append(rns, rn) + } + str = string(rns) + case "P", "A", "a": + panic(fmt.Sprintf("unhandled @%s param expansion", arg)) + default: + panic(fmt.Sprintf("unexpected @%s param expansion", arg)) + } + } + } + return str, nil +} + +func removePattern(str, pat string, fromEnd, shortest bool) string { + var mode pattern.Mode + if shortest { + mode |= pattern.Shortest + } + expr, err := pattern.Regexp(pat, mode) + if err != nil { + return str + } + switch { + case fromEnd && shortest: + // use .* to get the right-most shortest match + expr = ".*(" + expr + ")$" + case fromEnd: + // simple suffix + expr = "(" + expr + ")$" + default: + // simple prefix + expr = "^(" + expr + ")" + } + // no need to check error as Translate returns one + rx := regexp.MustCompile(expr) + if loc := rx.FindStringSubmatchIndex(str); loc != nil { + // remove the original pattern (the submatch) + str = str[:loc[2]] + str[loc[3]:] + } + return str +} + +func (cfg *Config) varInd(vr Variable, idx syntax.ArithmExpr) (string, error) { + if idx == nil { + return vr.String(), nil + } + switch vr.Kind { + case String: + n, err := Arithm(cfg, idx) + if err != nil { + return "", err + } + if n == 0 { + return vr.Str, nil + } + case Indexed: + switch nodeLit(idx) { + case "*", "@": + return strings.Join(vr.List, " "), nil + } + i, err := Arithm(cfg, idx) + if err != nil { + return "", err + } + if i < 0 { + return "", fmt.Errorf("negative array index") + } + if i < len(vr.List) { + return vr.List[i], nil + } + case Associative: + switch lit := nodeLit(idx); lit { + case "@", "*": + strs := make([]string, 0, len(vr.Map)) + for _, val := range vr.Map { + strs = append(strs, val) + } + sort.Strings(strs) + if 
lit == "*" { + return cfg.ifsJoin(strs), nil + } + return strings.Join(strs, " "), nil + } + val, err := Literal(cfg, idx.(*syntax.Word)) + if err != nil { + return "", err + } + return vr.Map[val], nil + } + return "", nil +} + +func (cfg *Config) namesByPrefix(prefix string) []string { + var names []string + cfg.Env.Each(func(name string, vr Variable) bool { + if strings.HasPrefix(name, prefix) { + names = append(names, name) + } + return true + }) + return names +} diff --git a/vendor/mvdan.cc/sh/v3/fileutil/file.go b/vendor/mvdan.cc/sh/v3/fileutil/file.go new file mode 100644 index 00000000..1ccf6fef --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/fileutil/file.go @@ -0,0 +1,85 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +// Package fileutil contains code to work with shell files, also known +// as shell scripts. +package fileutil + +import ( + "io/fs" + "os" + "regexp" + "strings" +) + +var ( + shebangRe = regexp.MustCompile(`^#!\s?/(usr/)?bin/(env\s+)?(sh|bash|mksh|bats|zsh)(\s|$)`) + extRe = regexp.MustCompile(`\.(sh|bash|mksh|bats|zsh)$`) +) + +// TODO: consider removing HasShebang in favor of Shebang in v4 + +// HasShebang reports whether bs begins with a valid shell shebang. +// It supports variations with /usr and env. +func HasShebang(bs []byte) bool { + return Shebang(bs) != "" +} + +// Shebang parses a "#!" sequence from the beginning of the input bytes, +// and returns the shell that it points to. +// +// For instance, it returns "sh" for "#!/bin/sh", +// and "bash" for "#!/usr/bin/env bash". +func Shebang(bs []byte) string { + m := shebangRe.FindSubmatch(bs) + if m == nil { + return "" + } + return string(m[3]) +} + +// ScriptConfidence defines how likely a file is to be a shell script, +// from complete certainty that it is not one to complete certainty that +// it is one. +type ScriptConfidence int + +const ( + // ConfNotScript describes files which are definitely not shell scripts, + // such as non-regular files or files with a non-shell extension. + ConfNotScript ScriptConfidence = iota + + // ConfIfShebang describes files which might be shell scripts, depending + // on the shebang line in the file's contents. Since CouldBeScript only + // works on os.FileInfo, the answer in this case can't be final. + ConfIfShebang + + // ConfIsScript describes files which are definitely shell scripts, + // which are regular files with a valid shell extension. + ConfIsScript +) + +// CouldBeScript is a shortcut for CouldBeScript2(fs.FileInfoToDirEntry(info)). +// +// Deprecated: prefer CouldBeScript2, which usually requires fewer syscalls. +func CouldBeScript(info os.FileInfo) ScriptConfidence { + return CouldBeScript2(fs.FileInfoToDirEntry(info)) +} + +// CouldBeScript2 reports how likely a directory entry is to be a shell script. +// It discards directories, symlinks, hidden files and files with non-shell +// extensions. 
+func CouldBeScript2(entry fs.DirEntry) ScriptConfidence { + name := entry.Name() + switch { + case entry.IsDir(), name[0] == '.': + return ConfNotScript + case entry.Type()&os.ModeSymlink != 0: + return ConfNotScript + case extRe.MatchString(name): + return ConfIsScript + case strings.IndexByte(name, '.') > 0: + return ConfNotScript // different extension + default: + return ConfIfShebang + } +} diff --git a/vendor/mvdan.cc/sh/v3/pattern/pattern.go b/vendor/mvdan.cc/sh/v3/pattern/pattern.go new file mode 100644 index 00000000..bde1ca71 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/pattern/pattern.go @@ -0,0 +1,335 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +// Package pattern allows working with shell pattern matching notation, also +// known as wildcards or globbing. +// +// For reference, see +// https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_13. +package pattern + +import ( + "bytes" + "fmt" + "regexp" + "strconv" + "strings" +) + +// Mode can be used to supply a number of options to the package's functions. +// Not all functions change their behavior with all of the options below. +type Mode uint + +type SyntaxError struct { + msg string + err error +} + +func (e SyntaxError) Error() string { return e.msg } + +func (e SyntaxError) Unwrap() error { return e.err } + +const ( + Shortest Mode = 1 << iota // prefer the shortest match. + Filenames // "*" and "?" don't match slashes; only "**" does + Braces // support "{a,b}" and "{1..4}" + EntireString // match the entire string using ^$ delimiters +) + +var numRange = regexp.MustCompile(`^([+-]?\d+)\.\.([+-]?\d+)}`) + +// Regexp turns a shell pattern into a regular expression that can be used with +// regexp.Compile. It will return an error if the input pattern was incorrect. +// Otherwise, the returned expression can be passed to regexp.MustCompile. +// +// For example, Regexp(`foo*bar?`, true) returns `foo.*bar.`. +// +// Note that this function (and QuoteMeta) should not be directly used with file +// paths if Windows is supported, as the path separator on that platform is the +// same character as the escaping character for shell patterns. 
+func Regexp(pat string, mode Mode) (string, error) { + any := false +noopLoop: + for _, r := range pat { + switch r { + // including those that need escaping since they are + // regular expression metacharacters + case '*', '?', '[', '\\', '.', '+', '(', ')', '|', + ']', '{', '}', '^', '$': + any = true + break noopLoop + } + } + if !any && mode&EntireString == 0 { // short-cut without a string copy + return pat, nil + } + closingBraces := []int{} + var buf bytes.Buffer + // Enable matching `\n` with the `.` metacharacter as globs match `\n` + buf.WriteString("(?s)") + dotMeta := false + if mode&EntireString != 0 { + buf.WriteString("^") + } +writeLoop: + for i := 0; i < len(pat); i++ { + switch c := pat[i]; c { + case '*': + if mode&Filenames != 0 { + if i++; i < len(pat) && pat[i] == '*' { + if i++; i < len(pat) && pat[i] == '/' { + buf.WriteString("(.*/|)") + dotMeta = true + } else { + buf.WriteString(".*") + dotMeta = true + i-- + } + } else { + buf.WriteString("[^/]*") + i-- + } + } else { + buf.WriteString(".*") + dotMeta = true + } + if mode&Shortest != 0 { + buf.WriteByte('?') + } + case '?': + if mode&Filenames != 0 { + buf.WriteString("[^/]") + } else { + buf.WriteByte('.') + dotMeta = true + } + case '\\': + if i++; i >= len(pat) { + return "", &SyntaxError{msg: `\ at end of pattern`} + } + buf.WriteString(regexp.QuoteMeta(string(pat[i]))) + case '[': + name, err := charClass(pat[i:]) + if err != nil { + return "", &SyntaxError{msg: "charClass invalid", err: err} + } + if name != "" { + buf.WriteString(name) + i += len(name) - 1 + break + } + if mode&Filenames != 0 { + for _, c := range pat[i:] { + if c == ']' { + break + } else if c == '/' { + buf.WriteString("\\[") + continue writeLoop + } + } + } + buf.WriteByte(c) + if i++; i >= len(pat) { + return "", &SyntaxError{msg: "[ was not matched with a closing ]"} + } + switch c = pat[i]; c { + case '!', '^': + buf.WriteByte('^') + if i++; i >= len(pat) { + return "", &SyntaxError{msg: "[ was not matched with a closing ]"} + } + } + if c = pat[i]; c == ']' { + buf.WriteByte(']') + if i++; i >= len(pat) { + return "", &SyntaxError{msg: "[ was not matched with a closing ]"} + } + } + rangeStart := byte(0) + loopBracket: + for ; i < len(pat); i++ { + c = pat[i] + buf.WriteByte(c) + switch c { + case '\\': + if i++; i < len(pat) { + buf.WriteByte(pat[i]) + } + continue + case ']': + break loopBracket + } + if rangeStart != 0 && rangeStart > c { + return "", &SyntaxError{msg: fmt.Sprintf("invalid range: %c-%c", rangeStart, c)} + } + if c == '-' { + rangeStart = pat[i-1] + } else { + rangeStart = 0 + } + } + if i >= len(pat) { + return "", &SyntaxError{msg: "[ was not matched with a closing ]"} + } + case '{': + if mode&Braces == 0 { + buf.WriteString(regexp.QuoteMeta(string(c))) + break + } + innerLevel := 1 + commas := false + peekBrace: + for j := i + 1; j < len(pat); j++ { + switch c := pat[j]; c { + case '{': + innerLevel++ + case ',': + commas = true + case '\\': + j++ + case '}': + if innerLevel--; innerLevel > 0 { + continue + } + if !commas { + break peekBrace + } + closingBraces = append(closingBraces, j) + buf.WriteString("(?:") + continue writeLoop + } + } + if match := numRange.FindStringSubmatch(pat[i+1:]); len(match) == 3 { + start, err1 := strconv.Atoi(match[1]) + end, err2 := strconv.Atoi(match[2]) + if err1 != nil || err2 != nil || start > end { + return "", &SyntaxError{msg: fmt.Sprintf("invalid range: %q", match[0])} + } + // TODO: can we do better here? 
+ buf.WriteString("(?:") + for n := start; n <= end; n++ { + if n > start { + buf.WriteByte('|') + } + fmt.Fprintf(&buf, "%d", n) + } + buf.WriteByte(')') + i += len(match[0]) + break + } + buf.WriteString(regexp.QuoteMeta(string(c))) + case ',': + if len(closingBraces) == 0 { + buf.WriteString(regexp.QuoteMeta(string(c))) + } else { + buf.WriteByte('|') + } + case '}': + if len(closingBraces) > 0 && closingBraces[len(closingBraces)-1] == i { + buf.WriteByte(')') + closingBraces = closingBraces[:len(closingBraces)-1] + } else { + buf.WriteString(regexp.QuoteMeta(string(c))) + } + default: + if c > 128 { + buf.WriteByte(c) + } else { + buf.WriteString(regexp.QuoteMeta(string(c))) + } + } + } + if mode&EntireString != 0 { + buf.WriteString("$") + } + // No `.` metacharacters were used, so don't return the flag. + if !dotMeta { + return string(buf.Bytes()[4:]), nil + } + return buf.String(), nil +} + +func charClass(s string) (string, error) { + if strings.HasPrefix(s, "[[.") || strings.HasPrefix(s, "[[=") { + return "", fmt.Errorf("collating features not available") + } + if !strings.HasPrefix(s, "[[:") { + return "", nil + } + name := s[3:] + end := strings.Index(name, ":]]") + if end < 0 { + return "", fmt.Errorf("[[: was not matched with a closing :]]") + } + name = name[:end] + switch name { + case "alnum", "alpha", "ascii", "blank", "cntrl", "digit", "graph", + "lower", "print", "punct", "space", "upper", "word", "xdigit": + default: + return "", fmt.Errorf("invalid character class: %q", name) + } + return s[:len(name)+6], nil +} + +// HasMeta returns whether a string contains any unescaped pattern +// metacharacters: '*', '?', or '['. When the function returns false, the given +// pattern can only match at most one string. +// +// For example, HasMeta(`foo\*bar`) returns false, but HasMeta(`foo*bar`) +// returns true. +// +// This can be useful to avoid extra work, like TranslatePattern. Note that this +// function cannot be used to avoid QuotePattern, as backslashes are quoted by +// that function but ignored here. +func HasMeta(pat string, mode Mode) bool { + for i := 0; i < len(pat); i++ { + switch pat[i] { + case '\\': + i++ + case '*', '?', '[': + return true + case '{': + if mode&Braces != 0 { + return true + } + } + } + return false +} + +// QuoteMeta returns a string that quotes all pattern metacharacters in the +// given text. The returned string is a pattern that matches the literal text. +// +// For example, QuoteMeta(`foo*bar?`) returns `foo\*bar\?`. +func QuoteMeta(pat string, mode Mode) string { + any := false +loop: + for _, r := range pat { + switch r { + case '{': + if mode&Braces == 0 { + continue + } + fallthrough + case '*', '?', '[', '\\': + any = true + break loop + } + } + if !any { // short-cut without a string copy + return pat + } + var buf bytes.Buffer + for _, r := range pat { + switch r { + case '*', '?', '[', '\\': + buf.WriteByte('\\') + case '{': + if mode&Braces != 0 { + buf.WriteByte('\\') + } + } + buf.WriteRune(r) + } + return buf.String() +} diff --git a/vendor/mvdan.cc/sh/v3/shell/doc.go b/vendor/mvdan.cc/sh/v3/shell/doc.go new file mode 100644 index 00000000..81e0c2f7 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/shell/doc.go @@ -0,0 +1,14 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +// Package shell contains high-level features that use the syntax, expand, and +// interp packages under the hood. +// +// Please note that this package uses POSIX Shell syntax. 
As such, path names on +// Windows need to use double backslashes or be within single quotes when given +// to functions like Fields. For example: +// +// shell.Fields("echo /foo/bar") // on Unix-like +// shell.Fields("echo C:\\foo\\bar") // on Windows +// shell.Fields("echo 'C:\foo\bar'") // on Windows, with quotes +package shell diff --git a/vendor/mvdan.cc/sh/v3/shell/expand.go b/vendor/mvdan.cc/sh/v3/shell/expand.go new file mode 100644 index 00000000..e286c0a9 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/shell/expand.go @@ -0,0 +1,63 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package shell + +import ( + "os" + "strings" + + "mvdan.cc/sh/v3/expand" + "mvdan.cc/sh/v3/syntax" +) + +// Expand performs shell expansion on s as if it were within double quotes, +// using env to resolve variables. This includes parameter expansion, arithmetic +// expansion, and quote removal. +// +// If env is nil, the current environment variables are used. Empty variables +// are treated as unset; to support variables which are set but empty, use the +// expand package directly. +// +// Command substitutions like $(echo foo) aren't supported to avoid running +// arbitrary code. To support those, use an interpreter with the expand package. +// +// An error will be reported if the input string had invalid syntax. +func Expand(s string, env func(string) string) (string, error) { + p := syntax.NewParser() + word, err := p.Document(strings.NewReader(s)) + if err != nil { + return "", err + } + if env == nil { + env = os.Getenv + } + cfg := &expand.Config{Env: expand.FuncEnviron(env)} + return expand.Document(cfg, word) +} + +// Fields performs shell expansion on s as if it were a command's arguments, +// using env to resolve variables. It is similar to Expand, but includes brace +// expansion, tilde expansion, and globbing. +// +// If env is nil, the current environment variables are used. Empty variables +// are treated as unset; to support variables which are set but empty, use the +// expand package directly. +// +// An error will be reported if the input string had invalid syntax. +func Fields(s string, env func(string) string) ([]string, error) { + p := syntax.NewParser() + var words []*syntax.Word + err := p.Words(strings.NewReader(s), func(w *syntax.Word) bool { + words = append(words, w) + return true + }) + if err != nil { + return nil, err + } + if env == nil { + env = os.Getenv + } + cfg := &expand.Config{Env: expand.FuncEnviron(env)} + return expand.Fields(cfg, words...) +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/braces.go b/vendor/mvdan.cc/sh/v3/syntax/braces.go new file mode 100644 index 00000000..dca854fd --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/braces.go @@ -0,0 +1,177 @@ +// Copyright (c) 2018, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import "strconv" + +var ( + litLeftBrace = &Lit{Value: "{"} + litComma = &Lit{Value: ","} + litDots = &Lit{Value: ".."} + litRightBrace = &Lit{Value: "}"} +) + +// SplitBraces parses brace expansions within a word's literal parts. If any +// valid brace expansions are found, they are replaced with BraceExp nodes, and +// the function returns true. Otherwise, the word is left untouched and the +// function returns false. +// +// For example, a literal word "foo{bar,baz}" will result in a word containing +// the literal "foo", and a brace expansion with the elements "bar" and "baz". +// +// It does not return an error; malformed brace expansions are simply skipped. 
+// For example, the literal word "a{b" is left unchanged. +func SplitBraces(word *Word) bool { + any := false + top := &Word{} + acc := top + var cur *BraceExp + open := []*BraceExp{} + + pop := func() *BraceExp { + old := cur + open = open[:len(open)-1] + if len(open) == 0 { + cur = nil + acc = top + } else { + cur = open[len(open)-1] + acc = cur.Elems[len(cur.Elems)-1] + } + return old + } + addLit := func(lit *Lit) { + acc.Parts = append(acc.Parts, lit) + } + + for _, wp := range word.Parts { + lit, ok := wp.(*Lit) + if !ok { + acc.Parts = append(acc.Parts, wp) + continue + } + last := 0 + for j := 0; j < len(lit.Value); j++ { + addlitidx := func() { + if last == j { + return // empty lit + } + l2 := *lit + l2.Value = l2.Value[last:j] + addLit(&l2) + } + switch lit.Value[j] { + case '{': + addlitidx() + acc = &Word{} + cur = &BraceExp{Elems: []*Word{acc}} + open = append(open, cur) + case ',': + if cur == nil { + continue + } + addlitidx() + acc = &Word{} + cur.Elems = append(cur.Elems, acc) + case '.': + if cur == nil { + continue + } + if j+1 >= len(lit.Value) || lit.Value[j+1] != '.' { + continue + } + addlitidx() + cur.Sequence = true + acc = &Word{} + cur.Elems = append(cur.Elems, acc) + j++ + case '}': + if cur == nil { + continue + } + any = true + addlitidx() + br := pop() + if len(br.Elems) == 1 { + // return {x} to a non-brace + addLit(litLeftBrace) + acc.Parts = append(acc.Parts, br.Elems[0].Parts...) + addLit(litRightBrace) + break + } + if !br.Sequence { + acc.Parts = append(acc.Parts, br) + break + } + var chars [2]bool + broken := false + for i, elem := range br.Elems[:2] { + val := elem.Lit() + if _, err := strconv.Atoi(val); err == nil { + } else if len(val) == 1 && + 'a' <= val[0] && val[0] <= 'z' { + chars[i] = true + } else { + broken = true + } + } + if len(br.Elems) == 3 { + // increment must be a number + val := br.Elems[2].Lit() + if _, err := strconv.Atoi(val); err != nil { + broken = true + } + } + // are start and end both chars or + // non-chars? + if chars[0] != chars[1] { + broken = true + } + if !broken { + acc.Parts = append(acc.Parts, br) + break + } + // return broken {x..y[..incr]} to a non-brace + addLit(litLeftBrace) + for i, elem := range br.Elems { + if i > 0 { + addLit(litDots) + } + acc.Parts = append(acc.Parts, elem.Parts...) + } + addLit(litRightBrace) + default: + continue + } + last = j + 1 + } + if last == 0 { + addLit(lit) + } else { + left := *lit + left.Value = left.Value[last:] + addLit(&left) + } + } + if !any { + return false + } + // open braces that were never closed fall back to non-braces + for acc != top { + br := pop() + addLit(litLeftBrace) + for i, elem := range br.Elems { + if i > 0 { + if br.Sequence { + addLit(litDots) + } else { + addLit(litComma) + } + } + acc.Parts = append(acc.Parts, elem.Parts...) + } + } + *word = *top + return true +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/canonical.sh b/vendor/mvdan.cc/sh/v3/syntax/canonical.sh new file mode 100644 index 00000000..012f48dd --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/canonical.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# separate comment + +! 
foo bar >a & + +foo() { bar; } + +{ + var1="some long value" # var1 comment + var2=short # var2 comment +} + +if foo; then bar; fi + +for foo in a b c; do + bar +done + +case $foo in +a) A ;; +b) + B + ;; +esac + +foo | bar +foo && + $(bar) && + (more) + +foo 2>&1 +foo <<-EOF + bar +EOF + +$((3 + 4)) diff --git a/vendor/mvdan.cc/sh/v3/syntax/doc.go b/vendor/mvdan.cc/sh/v3/syntax/doc.go new file mode 100644 index 00000000..5c6275e4 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/doc.go @@ -0,0 +1,6 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +// Package syntax implements parsing and formatting of shell programs. +// It supports POSIX Shell, Bash, and mksh. +package syntax diff --git a/vendor/mvdan.cc/sh/v3/syntax/lexer.go b/vendor/mvdan.cc/sh/v3/syntax/lexer.go new file mode 100644 index 00000000..583abb61 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/lexer.go @@ -0,0 +1,1203 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "bytes" + "io" + "unicode/utf8" +) + +// bytes that form or start a token +func regOps(r rune) bool { + switch r { + case ';', '"', '\'', '(', ')', '$', '|', '&', '>', '<', '`': + return true + } + return false +} + +// tokenize these inside parameter expansions +func paramOps(r rune) bool { + switch r { + case '}', '#', '!', ':', '-', '+', '=', '?', '%', '[', ']', '/', '^', + ',', '@', '*': + return true + } + return false +} + +// these start a parameter expansion name +func paramNameOp(r rune) bool { + switch r { + case '}', ':', '+', '=', '%', '[', ']', '/', '^', ',': + return false + } + return true +} + +// tokenize these inside arithmetic expansions +func arithmOps(r rune) bool { + switch r { + case '+', '-', '!', '~', '*', '/', '%', '(', ')', '^', '<', '>', ':', '=', + ',', '?', '|', '&', '[', ']', '#': + return true + } + return false +} + +func bquoteEscaped(b byte) bool { + switch b { + case '$', '`', '\\': + return true + } + return false +} + +const escNewl rune = utf8.RuneSelf + 1 + +func (p *Parser) rune() rune { + if p.r == '\n' || p.r == escNewl { + // p.r instead of b so that newline + // character positions don't have col 0. + if p.line++; p.line > lineMax { + p.lineOverflow = true + } + p.col = 0 + p.colOverflow = false + } + if p.col += p.w; p.col > colMax { + p.colOverflow = true + } + bquotes := 0 +retry: + if p.bsp < len(p.bs) { + if b := p.bs[p.bsp]; b < utf8.RuneSelf { + p.bsp++ + if b == '\x00' { + // Ignore null bytes while parsing, like bash. + goto retry + } + if b == '\\' { + if p.r == '\\' { + } else if p.peekByte('\n') { + p.bsp++ + p.w, p.r = 1, escNewl + return escNewl + } else if p.peekBytes("\r\n") { + p.bsp += 2 + p.w, p.r = 2, escNewl + return escNewl + } + if p.openBquotes > 0 && bquotes < p.openBquotes && + p.bsp < len(p.bs) && bquoteEscaped(p.bs[p.bsp]) { + bquotes++ + goto retry + } + } + if b == '`' { + p.lastBquoteEsc = bquotes + } + if p.litBs != nil { + p.litBs = append(p.litBs, b) + } + p.w, p.r = 1, rune(b) + return p.r + } + if !utf8.FullRune(p.bs[p.bsp:]) { + // we need more bytes to read a full non-ascii rune + p.fill() + } + var w int + p.r, w = utf8.DecodeRune(p.bs[p.bsp:]) + if p.litBs != nil { + p.litBs = append(p.litBs, p.bs[p.bsp:p.bsp+w]...) 
+ } + p.bsp += w + if p.r == utf8.RuneError && w == 1 { + p.posErr(p.nextPos(), "invalid UTF-8 encoding") + } + p.w = w + } else { + if p.r == utf8.RuneSelf { + } else if p.fill(); p.bs == nil { + p.bsp++ + p.r = utf8.RuneSelf + p.w = 1 + } else { + goto retry + } + } + return p.r +} + +// fill reads more bytes from the input src into readBuf. Any bytes that +// had not yet been used at the end of the buffer are slid into the +// beginning of the buffer. +func (p *Parser) fill() { + p.offs += p.bsp + left := len(p.bs) - p.bsp + copy(p.readBuf[:left], p.readBuf[p.bsp:]) +readAgain: + n, err := 0, p.readErr + if err == nil { + n, err = p.src.Read(p.readBuf[left:]) + p.readErr = err + } + if n == 0 { + if err == nil { + goto readAgain + } + // don't use p.errPass as we don't want to overwrite p.tok + if err != io.EOF { + p.err = err + } + if left > 0 { + p.bs = p.readBuf[:left] + } else { + p.bs = nil + } + } else { + p.bs = p.readBuf[:left+n] + } + p.bsp = 0 +} + +func (p *Parser) nextKeepSpaces() { + r := p.r + if p.quote != hdocBody && p.quote != hdocBodyTabs { + // Heredocs handle escaped newlines in a special way, but others + // do not. + for r == escNewl { + r = p.rune() + } + } + p.pos = p.nextPos() + switch p.quote { + case paramExpRepl: + switch r { + case '}', '/': + p.tok = p.paramToken(r) + case '`', '"', '$', '\'': + p.tok = p.regToken(r) + default: + p.advanceLitOther(r) + } + case dblQuotes: + switch r { + case '`', '"', '$': + p.tok = p.dqToken(r) + default: + p.advanceLitDquote(r) + } + case hdocBody, hdocBodyTabs: + switch r { + case '`', '$': + p.tok = p.dqToken(r) + default: + p.advanceLitHdoc(r) + } + default: // paramExpExp: + switch r { + case '}': + p.tok = p.paramToken(r) + case '`', '"', '$', '\'': + p.tok = p.regToken(r) + default: + p.advanceLitOther(r) + } + } + if p.err != nil && p.tok != _EOF { + p.tok = _EOF + } +} + +func (p *Parser) next() { + if p.r == utf8.RuneSelf { + p.tok = _EOF + return + } + p.spaced = false + if p.quote&allKeepSpaces != 0 { + p.nextKeepSpaces() + return + } + r := p.r + for r == escNewl { + r = p.rune() + } +skipSpace: + for { + switch r { + case utf8.RuneSelf: + p.tok = _EOF + return + case escNewl: + r = p.rune() + case ' ', '\t', '\r': + p.spaced = true + r = p.rune() + case '\n': + if p.tok == _Newl { + // merge consecutive newline tokens + r = p.rune() + continue + } + p.spaced = true + p.tok = _Newl + if p.quote != hdocWord && len(p.heredocs) > p.buriedHdocs { + p.doHeredocs() + } + return + default: + break skipSpace + } + } + if p.stopAt != nil && (p.spaced || p.tok == illegalTok || p.stopToken()) { + w := utf8.RuneLen(r) + if bytes.HasPrefix(p.bs[p.bsp-w:], p.stopAt) { + p.r = utf8.RuneSelf + p.w = 1 + p.tok = _EOF + return + } + } + p.pos = p.nextPos() + switch { + case p.quote&allRegTokens != 0: + switch r { + case ';', '"', '\'', '(', ')', '$', '|', '&', '>', '<', '`': + p.tok = p.regToken(r) + case '#': + r = p.rune() + p.newLit(r) + runeLoop: + for { + switch r { + case '\n', utf8.RuneSelf: + break runeLoop + case escNewl: + p.litBs = append(p.litBs, '\\', '\n') + break runeLoop + case '`': + if p.backquoteEnd() { + break runeLoop + } + } + r = p.rune() + } + if p.keepComments { + *p.curComs = append(*p.curComs, Comment{ + Hash: p.pos, + Text: p.endLit(), + }) + } else { + p.litBs = nil + } + p.next() + case '[', '=': + if p.quote == arrayElems { + p.tok = p.paramToken(r) + } else { + p.advanceLitNone(r) + } + case '?', '*', '+', '@', '!': + if p.extendedGlob() { + switch r { + case '?': + p.tok = globQuest + case '*': 
+ p.tok = globStar + case '+': + p.tok = globPlus + case '@': + p.tok = globAt + default: // '!' + p.tok = globExcl + } + p.rune() + p.rune() + } else { + p.advanceLitNone(r) + } + default: + p.advanceLitNone(r) + } + case p.quote&allArithmExpr != 0 && arithmOps(r): + p.tok = p.arithmToken(r) + case p.quote&allParamExp != 0 && paramOps(r): + p.tok = p.paramToken(r) + case p.quote == testExprRegexp: + if !p.rxFirstPart && p.spaced { + p.quote = noState + goto skipSpace + } + p.rxFirstPart = false + switch r { + case ';', '"', '\'', '$', '&', '>', '<', '`': + p.tok = p.regToken(r) + case ')': + if p.rxOpenParens > 0 { + // continuation of open paren + p.advanceLitRe(r) + } else { + p.tok = rightParen + p.quote = noState + p.rune() // we are tokenizing manually + } + default: // including '(', '|' + p.advanceLitRe(r) + } + case regOps(r): + p.tok = p.regToken(r) + default: + p.advanceLitOther(r) + } + if p.err != nil && p.tok != _EOF { + p.tok = _EOF + } +} + +// extendedGlob determines whether we're parsing a Bash extended globbing expression. +// For example, whether `*` or `@` are followed by `(` to form `@(foo)`. +func (p *Parser) extendedGlob() bool { + if p.val == "function" { + return false + } + if p.peekByte('(') { + // NOTE: empty pattern list is a valid globbing syntax like `@()`, + // but we'll operate on the "likelihood" that it is a function; + // only tokenize if its a non-empty pattern list. + // We do this after peeking for just one byte, so that the input `echo *` + // followed by a newline does not hang an interactive shell parser until + // another byte is input. + if p.peekBytes("()") { + return false + } + return true + } + return false +} + +func (p *Parser) peekBytes(s string) bool { + peekEnd := p.bsp + len(s) + // TODO: This should loop for slow readers, e.g. those providing one byte at + // a time. Use a loop and test it with testing/iotest.OneByteReader. + if peekEnd > len(p.bs) { + p.fill() + } + return peekEnd <= len(p.bs) && bytes.HasPrefix(p.bs[p.bsp:peekEnd], []byte(s)) +} + +func (p *Parser) peekByte(b byte) bool { + if p.bsp == len(p.bs) { + p.fill() + } + return p.bsp < len(p.bs) && p.bs[p.bsp] == b +} + +func (p *Parser) regToken(r rune) token { + switch r { + case '\'': + if p.openBquotes > 0 { + // bury openBquotes + p.buriedBquotes = p.openBquotes + p.openBquotes = 0 + } + p.rune() + return sglQuote + case '"': + p.rune() + return dblQuote + case '`': + // Don't call p.rune, as we need to work out p.openBquotes to + // properly handle backslashes in the lexer. 
+ return bckQuote + case '&': + switch p.rune() { + case '&': + p.rune() + return andAnd + case '>': + if p.lang == LangPOSIX { + break + } + if p.rune() == '>' { + p.rune() + return appAll + } + return rdrAll + } + return and + case '|': + switch p.rune() { + case '|': + p.rune() + return orOr + case '&': + if p.lang == LangPOSIX { + break + } + p.rune() + return orAnd + } + return or + case '$': + switch p.rune() { + case '\'': + if p.lang == LangPOSIX { + break + } + p.rune() + return dollSglQuote + case '"': + if p.lang == LangPOSIX { + break + } + p.rune() + return dollDblQuote + case '{': + p.rune() + return dollBrace + case '[': + if !p.lang.isBash() || p.quote == paramExpName { + // latter to not tokenise ${$[@]} as $[ + break + } + p.rune() + return dollBrack + case '(': + if p.rune() == '(' { + p.rune() + return dollDblParen + } + return dollParen + } + return dollar + case '(': + if p.rune() == '(' && p.lang != LangPOSIX && p.quote != testExpr { + p.rune() + return dblLeftParen + } + return leftParen + case ')': + p.rune() + return rightParen + case ';': + switch p.rune() { + case ';': + if p.rune() == '&' && p.lang.isBash() { + p.rune() + return dblSemiAnd + } + return dblSemicolon + case '&': + if p.lang == LangPOSIX { + break + } + p.rune() + return semiAnd + case '|': + if p.lang != LangMirBSDKorn { + break + } + p.rune() + return semiOr + } + return semicolon + case '<': + switch p.rune() { + case '<': + if r = p.rune(); r == '-' { + p.rune() + return dashHdoc + } else if r == '<' { + p.rune() + return wordHdoc + } + return hdoc + case '>': + p.rune() + return rdrInOut + case '&': + p.rune() + return dplIn + case '(': + if !p.lang.isBash() { + break + } + p.rune() + return cmdIn + } + return rdrIn + default: // '>' + switch p.rune() { + case '>': + p.rune() + return appOut + case '&': + p.rune() + return dplOut + case '|': + p.rune() + return clbOut + case '(': + if !p.lang.isBash() { + break + } + p.rune() + return cmdOut + } + return rdrOut + } +} + +func (p *Parser) dqToken(r rune) token { + switch r { + case '"': + p.rune() + return dblQuote + case '`': + // Don't call p.rune, as we need to work out p.openBquotes to + // properly handle backslashes in the lexer. 
+ return bckQuote + default: // '$' + switch p.rune() { + case '{': + p.rune() + return dollBrace + case '[': + if !p.lang.isBash() { + break + } + p.rune() + return dollBrack + case '(': + if p.rune() == '(' { + p.rune() + return dollDblParen + } + return dollParen + } + return dollar + } +} + +func (p *Parser) paramToken(r rune) token { + switch r { + case '}': + p.rune() + return rightBrace + case ':': + switch p.rune() { + case '+': + p.rune() + return colPlus + case '-': + p.rune() + return colMinus + case '?': + p.rune() + return colQuest + case '=': + p.rune() + return colAssgn + } + return colon + case '+': + p.rune() + return plus + case '-': + p.rune() + return minus + case '?': + p.rune() + return quest + case '=': + p.rune() + return assgn + case '%': + if p.rune() == '%' { + p.rune() + return dblPerc + } + return perc + case '#': + if p.rune() == '#' { + p.rune() + return dblHash + } + return hash + case '!': + p.rune() + return exclMark + case '[': + p.rune() + return leftBrack + case ']': + p.rune() + return rightBrack + case '/': + if p.rune() == '/' && p.quote != paramExpRepl { + p.rune() + return dblSlash + } + return slash + case '^': + if p.rune() == '^' { + p.rune() + return dblCaret + } + return caret + case ',': + if p.rune() == ',' { + p.rune() + return dblComma + } + return comma + case '@': + p.rune() + return at + default: // '*' + p.rune() + return star + } +} + +func (p *Parser) arithmToken(r rune) token { + switch r { + case '!': + if p.rune() == '=' { + p.rune() + return nequal + } + return exclMark + case '=': + if p.rune() == '=' { + p.rune() + return equal + } + return assgn + case '~': + p.rune() + return tilde + case '(': + p.rune() + return leftParen + case ')': + p.rune() + return rightParen + case '&': + switch p.rune() { + case '&': + p.rune() + return andAnd + case '=': + p.rune() + return andAssgn + } + return and + case '|': + switch p.rune() { + case '|': + p.rune() + return orOr + case '=': + p.rune() + return orAssgn + } + return or + case '<': + switch p.rune() { + case '<': + if p.rune() == '=' { + p.rune() + return shlAssgn + } + return hdoc + case '=': + p.rune() + return lequal + } + return rdrIn + case '>': + switch p.rune() { + case '>': + if p.rune() == '=' { + p.rune() + return shrAssgn + } + return appOut + case '=': + p.rune() + return gequal + } + return rdrOut + case '+': + switch p.rune() { + case '+': + p.rune() + return addAdd + case '=': + p.rune() + return addAssgn + } + return plus + case '-': + switch p.rune() { + case '-': + p.rune() + return subSub + case '=': + p.rune() + return subAssgn + } + return minus + case '%': + if p.rune() == '=' { + p.rune() + return remAssgn + } + return perc + case '*': + switch p.rune() { + case '*': + p.rune() + return power + case '=': + p.rune() + return mulAssgn + } + return star + case '/': + if p.rune() == '=' { + p.rune() + return quoAssgn + } + return slash + case '^': + if p.rune() == '=' { + p.rune() + return xorAssgn + } + return caret + case '[': + p.rune() + return leftBrack + case ']': + p.rune() + return rightBrack + case ',': + p.rune() + return comma + case '?': + p.rune() + return quest + case ':': + p.rune() + return colon + default: // '#' + p.rune() + return hash + } +} + +func (p *Parser) newLit(r rune) { + switch { + case r < utf8.RuneSelf: + p.litBs = p.litBuf[:1] + p.litBs[0] = byte(r) + case r > escNewl: + w := utf8.RuneLen(r) + p.litBs = append(p.litBuf[:0], p.bs[p.bsp-w:p.bsp]...) 
+ default: + // don't let r == utf8.RuneSelf go to the second case as RuneLen + // would return -1 + p.litBs = p.litBuf[:0] + } +} + +func (p *Parser) endLit() (s string) { + if p.r == utf8.RuneSelf || p.r == escNewl { + s = string(p.litBs) + } else { + s = string(p.litBs[:len(p.litBs)-p.w]) + } + p.litBs = nil + return +} + +func (p *Parser) isLitRedir() bool { + lit := p.litBs[:len(p.litBs)-1] + if lit[0] == '{' && lit[len(lit)-1] == '}' { + return ValidName(string(lit[1 : len(lit)-1])) + } + for _, b := range lit { + switch b { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + return false + } + } + return true +} + +func (p *Parser) advanceNameCont(r rune) { + // we know that r is a letter or underscore +loop: + for p.newLit(r); r != utf8.RuneSelf; r = p.rune() { + switch { + case 'a' <= r && r <= 'z': + case 'A' <= r && r <= 'Z': + case r == '_': + case '0' <= r && r <= '9': + case r == escNewl: + default: + break loop + } + } + p.tok, p.val = _LitWord, p.endLit() +} + +func (p *Parser) advanceLitOther(r rune) { + tok := _LitWord +loop: + for p.newLit(r); r != utf8.RuneSelf; r = p.rune() { + switch r { + case '\\': // escaped byte follows + p.rune() + case '\'', '"', '`', '$': + tok = _Lit + break loop + case '}': + if p.quote&allParamExp != 0 { + break loop + } + case '/': + if p.quote != paramExpExp { + break loop + } + case ':', '=', '%', '^', ',', '?', '!', '~', '*': + if p.quote&allArithmExpr != 0 || p.quote == paramExpName { + break loop + } + case '[', ']': + if p.lang != LangPOSIX && p.quote&allArithmExpr != 0 { + break loop + } + fallthrough + case '#', '@': + if p.quote&allParamReg != 0 { + break loop + } + case '+', '-', ' ', '\t', ';', '&', '>', '<', '|', '(', ')', '\n', '\r': + if p.quote&allKeepSpaces == 0 { + break loop + } + } + } + p.tok, p.val = tok, p.endLit() +} + +func (p *Parser) advanceLitNone(r rune) { + p.eqlOffs = -1 + tok := _LitWord +loop: + for p.newLit(r); r != utf8.RuneSelf; r = p.rune() { + switch r { + case ' ', '\t', '\n', '\r', '&', '|', ';', '(', ')': + break loop + case '\\': // escaped byte follows + p.rune() + case '>', '<': + if p.peekByte('(') { + tok = _Lit + } else if p.isLitRedir() { + tok = _LitRedir + } + break loop + case '`': + if p.quote != subCmdBckquo { + tok = _Lit + } + break loop + case '"', '\'', '$': + tok = _Lit + break loop + case '?', '*', '+', '@', '!': + if p.extendedGlob() { + tok = _Lit + break loop + } + case '=': + if p.eqlOffs < 0 { + p.eqlOffs = len(p.litBs) - 1 + } + case '[': + if p.lang != LangPOSIX && len(p.litBs) > 1 && p.litBs[0] != '[' { + tok = _Lit + break loop + } + } + } + p.tok, p.val = tok, p.endLit() +} + +func (p *Parser) advanceLitDquote(r rune) { + tok := _LitWord +loop: + for p.newLit(r); r != utf8.RuneSelf; r = p.rune() { + switch r { + case '"': + break loop + case '\\': // escaped byte follows + p.rune() + case escNewl, '`', '$': + tok = _Lit + break loop + } + } + p.tok, p.val = tok, p.endLit() +} + +func (p *Parser) advanceLitHdoc(r rune) { + // Unlike the rest of nextKeepSpaces quote states, we handle escaped + // newlines here. If lastTok==_Lit, then we know we're following an + // escaped newline, so the first line can't end the heredoc. 
+ lastTok := p.tok + for r == escNewl { + r = p.rune() + lastTok = _Lit + } + p.pos = p.nextPos() + + p.tok = _Lit + p.newLit(r) + if p.quote == hdocBodyTabs { + for r == '\t' { + r = p.rune() + } + } + lStart := len(p.litBs) - 1 + stop := p.hdocStops[len(p.hdocStops)-1] + for ; ; r = p.rune() { + switch r { + case escNewl, '$': + p.val = p.endLit() + return + case '\\': // escaped byte follows + p.rune() + case '`': + if !p.backquoteEnd() { + p.val = p.endLit() + return + } + fallthrough + case '\n', utf8.RuneSelf: + if p.parsingDoc { + if r == utf8.RuneSelf { + p.tok = _LitWord + p.val = p.endLit() + return + } + } else if lStart == 0 && lastTok == _Lit { + // This line starts right after an escaped + // newline, so it should never end the heredoc. + } else if lStart >= 0 { + // Compare the current line with the stop word. + line := p.litBs[lStart:] + if r != utf8.RuneSelf && len(line) > 0 { + line = line[:len(line)-1] // minus trailing character + } + if bytes.Equal(line, stop) { + p.tok = _LitWord + p.val = p.endLit()[:lStart] + if p.val == "" { + p.tok = _Newl + } + p.hdocStops[len(p.hdocStops)-1] = nil + return + } + } + if r != '\n' { + return // hit an unexpected EOF or closing backquote + } + if p.quote == hdocBodyTabs { + for p.peekByte('\t') { + p.rune() + } + } + lStart = len(p.litBs) + } + } +} + +func (p *Parser) quotedHdocWord() *Word { + r := p.r + p.newLit(r) + pos := p.nextPos() + stop := p.hdocStops[len(p.hdocStops)-1] + for ; ; r = p.rune() { + if r == utf8.RuneSelf { + return nil + } + if p.quote == hdocBodyTabs { + for r == '\t' { + r = p.rune() + } + } + lStart := len(p.litBs) - 1 + runeLoop: + for { + switch r { + case utf8.RuneSelf, '\n': + break runeLoop + case '`': + if p.backquoteEnd() { + break runeLoop + } + case escNewl: + p.litBs = append(p.litBs, '\\', '\n') + break runeLoop + } + r = p.rune() + } + if lStart < 0 { + continue + } + // Compare the current line with the stop word. 
+ line := p.litBs[lStart:] + if r != utf8.RuneSelf && len(line) > 0 { + line = line[:len(line)-1] // minus \n + } + if bytes.Equal(line, stop) { + p.hdocStops[len(p.hdocStops)-1] = nil + val := p.endLit()[:lStart] + if val == "" { + return nil + } + return p.wordOne(p.lit(pos, val)) + } + } +} + +func (p *Parser) advanceLitRe(r rune) { + for p.newLit(r); ; r = p.rune() { + switch r { + case '\\': + p.rune() + case '(': + p.rxOpenParens++ + case ')': + if p.rxOpenParens--; p.rxOpenParens < 0 { + p.tok, p.val = _LitWord, p.endLit() + p.quote = noState + return + } + case ' ', '\t', '\r', '\n', ';', '&', '>', '<': + if p.rxOpenParens <= 0 { + p.tok, p.val = _LitWord, p.endLit() + p.quote = noState + return + } + case '"', '\'', '$', '`': + p.tok, p.val = _Lit, p.endLit() + return + case utf8.RuneSelf: + p.tok, p.val = _LitWord, p.endLit() + p.quote = noState + return + } + } +} + +func testUnaryOp(val string) UnTestOperator { + switch val { + case "!": + return TsNot + case "-e", "-a": + return TsExists + case "-f": + return TsRegFile + case "-d": + return TsDirect + case "-c": + return TsCharSp + case "-b": + return TsBlckSp + case "-p": + return TsNmPipe + case "-S": + return TsSocket + case "-L", "-h": + return TsSmbLink + case "-k": + return TsSticky + case "-g": + return TsGIDSet + case "-u": + return TsUIDSet + case "-G": + return TsGrpOwn + case "-O": + return TsUsrOwn + case "-N": + return TsModif + case "-r": + return TsRead + case "-w": + return TsWrite + case "-x": + return TsExec + case "-s": + return TsNoEmpty + case "-t": + return TsFdTerm + case "-z": + return TsEmpStr + case "-n": + return TsNempStr + case "-o": + return TsOptSet + case "-v": + return TsVarSet + case "-R": + return TsRefVar + default: + return 0 + } +} + +func testBinaryOp(val string) BinTestOperator { + switch val { + case "=": + return TsMatchShort + case "==": + return TsMatch + case "!=": + return TsNoMatch + case "=~": + return TsReMatch + case "-nt": + return TsNewer + case "-ot": + return TsOlder + case "-ef": + return TsDevIno + case "-eq": + return TsEql + case "-ne": + return TsNeq + case "-le": + return TsLeq + case "-ge": + return TsGeq + case "-lt": + return TsLss + case "-gt": + return TsGtr + default: + return 0 + } +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/nodes.go b/vendor/mvdan.cc/sh/v3/syntax/nodes.go new file mode 100644 index 00000000..a43021f7 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/nodes.go @@ -0,0 +1,953 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "strconv" + "strings" +) + +// Node represents a syntax tree node. +type Node interface { + // Pos returns the position of the first character of the node. Comments + // are ignored, except if the node is a *File. + Pos() Pos + // End returns the position of the character immediately after the node. + // If the character is a newline, the line number won't cross into the + // next line. Comments are ignored, except if the node is a *File. + End() Pos +} + +// File represents a shell source file. 
+type File struct { + Name string + + Stmts []*Stmt + Last []Comment +} + +func (f *File) Pos() Pos { return stmtsPos(f.Stmts, f.Last) } +func (f *File) End() Pos { return stmtsEnd(f.Stmts, f.Last) } + +func stmtsPos(stmts []*Stmt, last []Comment) Pos { + if len(stmts) > 0 { + s := stmts[0] + sPos := s.Pos() + if len(s.Comments) > 0 { + if cPos := s.Comments[0].Pos(); sPos.After(cPos) { + return cPos + } + } + return sPos + } + if len(last) > 0 { + return last[0].Pos() + } + return Pos{} +} + +func stmtsEnd(stmts []*Stmt, last []Comment) Pos { + if len(last) > 0 { + return last[len(last)-1].End() + } + if len(stmts) > 0 { + s := stmts[len(stmts)-1] + sEnd := s.End() + if len(s.Comments) > 0 { + if cEnd := s.Comments[0].End(); cEnd.After(sEnd) { + return cEnd + } + } + return sEnd + } + return Pos{} +} + +// Pos is a position within a shell source file. +type Pos struct { + offs, lineCol uint32 +} + +// We used to split line and column numbers evenly in 16 bits, but line numbers +// are significantly more important in practice. Use more bits for them. +const ( + lineBitSize = 18 + lineMax = (1 << lineBitSize) - 1 + + colBitSize = 32 - lineBitSize + colMax = (1 << colBitSize) - 1 + colBitMask = colMax +) + +// TODO(v4): consider using uint32 for Offset/Line/Col to better represent bit sizes. +// Or go with int64, which more closely resembles portable "sizes" elsewhere. +// The latter is probably nicest, as then we can change the number of internal +// bits later, and we can also do overflow checks for the user in NewPos. + +// NewPos creates a position with the given offset, line, and column. +// +// Note that Pos uses a limited number of bits to store these numbers. +// If line or column overflow their allocated space, they are replaced with 0. +func NewPos(offset, line, column uint) Pos { + if line > lineMax { + line = 0 // protect against overflows; rendered as "?" + } + if column > colMax { + column = 0 // protect against overflows; rendered as "?" + } + return Pos{ + offs: uint32(offset), + lineCol: (uint32(line) << colBitSize) | uint32(column), + } +} + +// Offset returns the byte offset of the position in the original source file. +// Byte offsets start at 0. +// +// Note that Offset is not protected against overflows; +// if an input is larger than 4GiB, the offset will wrap around to 0. +func (p Pos) Offset() uint { return uint(p.offs) } + +// Line returns the line number of the position, starting at 1. +// +// Line is protected against overflows; if an input has too many lines, extra +// lines will have a line number of 0, rendered as "?" by Pos.String. +func (p Pos) Line() uint { return uint(p.lineCol >> colBitSize) } + +// Col returns the column number of the position, starting at 1. It counts in +// bytes. +// +// Col is protected against overflows; if an input line has too many columns, +// extra columns will have a column number of 0, rendered as "?" by Pos.String. +func (p Pos) Col() uint { return uint(p.lineCol & colBitMask) } + +func (p Pos) String() string { + var b strings.Builder + if line := p.Line(); line > 0 { + b.WriteString(strconv.FormatUint(uint64(line), 10)) + } else { + b.WriteByte('?') + } + b.WriteByte(':') + if col := p.Col(); col > 0 { + b.WriteString(strconv.FormatUint(uint64(col), 10)) + } else { + b.WriteByte('?') + } + return b.String() +} + +// IsValid reports whether the position contains useful position information. 
+// Some positions returned via Parse may be invalid: for example, Stmt.Semicolon +// will only be valid if a statement contained a closing token such as ';'. +func (p Pos) IsValid() bool { return p != Pos{} } + +// After reports whether the position p is after p2. It is a more expressive +// version of p.Offset() > p2.Offset(). +func (p Pos) After(p2 Pos) bool { return p.offs > p2.offs } + +func posAddCol(p Pos, n int) Pos { + // TODO: guard against overflows + p.lineCol += uint32(n) + p.offs += uint32(n) + return p +} + +func posMax(p1, p2 Pos) Pos { + if p2.After(p1) { + return p2 + } + return p1 +} + +// Comment represents a single comment on a single line. +type Comment struct { + Hash Pos + Text string +} + +func (c *Comment) Pos() Pos { return c.Hash } +func (c *Comment) End() Pos { return posAddCol(c.Hash, 1+len(c.Text)) } + +// Stmt represents a statement, also known as a "complete command". It is +// compromised of a command and other components that may come before or after +// it. +type Stmt struct { + Comments []Comment + Cmd Command + Position Pos + Semicolon Pos // position of ';', '&', or '|&', if any + Negated bool // ! stmt + Background bool // stmt & + Coprocess bool // mksh's |& + + Redirs []*Redirect // stmt >a 0 { + end = posMax(end, s.Redirs[len(s.Redirs)-1].End()) + } + return end +} + +// Command represents all nodes that are simple or compound commands, including +// function declarations. +// +// These are *CallExpr, *IfClause, *WhileClause, *ForClause, *CaseClause, +// *Block, *Subshell, *BinaryCmd, *FuncDecl, *ArithmCmd, *TestClause, +// *DeclClause, *LetClause, *TimeClause, and *CoprocClause. +type Command interface { + Node + commandNode() +} + +func (*CallExpr) commandNode() {} +func (*IfClause) commandNode() {} +func (*WhileClause) commandNode() {} +func (*ForClause) commandNode() {} +func (*CaseClause) commandNode() {} +func (*Block) commandNode() {} +func (*Subshell) commandNode() {} +func (*BinaryCmd) commandNode() {} +func (*FuncDecl) commandNode() {} +func (*ArithmCmd) commandNode() {} +func (*TestClause) commandNode() {} +func (*DeclClause) commandNode() {} +func (*LetClause) commandNode() {} +func (*TimeClause) commandNode() {} +func (*CoprocClause) commandNode() {} +func (*TestDecl) commandNode() {} + +// Assign represents an assignment to a variable. +// +// Here and elsewhere, Index can mean either an index expression into an indexed +// array, or a string key into an associative array. +// +// If Index is non-nil, the value will be a word and not an array as nested +// arrays are not allowed. +// +// If Naked is true and Name is nil, the assignment is part of a DeclClause and +// the argument (in the Value field) will be evaluated at run-time. This +// includes parameter expansions, which may expand to assignments or options. +type Assign struct { + Append bool // += + Naked bool // without '=' + Name *Lit // must be a valid name + Index ArithmExpr // [i], ["k"] + Value *Word // =val + Array *ArrayExpr // =(arr) +} + +func (a *Assign) Pos() Pos { + if a.Name == nil { + return a.Value.Pos() + } + return a.Name.Pos() +} + +func (a *Assign) End() Pos { + if a.Value != nil { + return a.Value.End() + } + if a.Array != nil { + return a.Array.End() + } + if a.Index != nil { + return posAddCol(a.Index.End(), 2) + } + if a.Naked { + return a.Name.End() + } + return posAddCol(a.Name.End(), 1) +} + +// Redirect represents an input/output redirection. 
+type Redirect struct { + OpPos Pos + Op RedirOperator + N *Lit // fd>, or {varname}> in Bash + Word *Word // >word + Hdoc *Word // here-document body +} + +func (r *Redirect) Pos() Pos { + if r.N != nil { + return r.N.Pos() + } + return r.OpPos +} + +func (r *Redirect) End() Pos { + if r.Hdoc != nil { + return r.Hdoc.End() + } + return r.Word.End() +} + +// CallExpr represents a command execution or function call, otherwise known as +// a "simple command". +// +// If Args is empty, Assigns apply to the shell environment. Otherwise, they are +// variables that cannot be arrays and which only apply to the call. +type CallExpr struct { + Assigns []*Assign // a=x b=y args + Args []*Word +} + +func (c *CallExpr) Pos() Pos { + if len(c.Assigns) > 0 { + return c.Assigns[0].Pos() + } + return c.Args[0].Pos() +} + +func (c *CallExpr) End() Pos { + if len(c.Args) == 0 { + return c.Assigns[len(c.Assigns)-1].End() + } + return c.Args[len(c.Args)-1].End() +} + +// Subshell represents a series of commands that should be executed in a nested +// shell environment. +type Subshell struct { + Lparen, Rparen Pos + + Stmts []*Stmt + Last []Comment +} + +func (s *Subshell) Pos() Pos { return s.Lparen } +func (s *Subshell) End() Pos { return posAddCol(s.Rparen, 1) } + +// Block represents a series of commands that should be executed in a nested +// scope. It is essentially a list of statements within curly braces. +type Block struct { + Lbrace, Rbrace Pos + + Stmts []*Stmt + Last []Comment +} + +func (b *Block) Pos() Pos { return b.Lbrace } +func (b *Block) End() Pos { return posAddCol(b.Rbrace, 1) } + +// IfClause represents an if statement. +type IfClause struct { + Position Pos // position of the starting "if", "elif", or "else" token + ThenPos Pos // position of "then", empty if this is an "else" + FiPos Pos // position of "fi", shared with .Else if non-nil + + Cond []*Stmt + CondLast []Comment + Then []*Stmt + ThenLast []Comment + + Else *IfClause // if non-nil, an "elif" or an "else" + + Last []Comment // comments on the first "elif", "else", or "fi" +} + +func (c *IfClause) Pos() Pos { return c.Position } +func (c *IfClause) End() Pos { return posAddCol(c.FiPos, 2) } + +// WhileClause represents a while or an until clause. +type WhileClause struct { + WhilePos, DoPos, DonePos Pos + Until bool + + Cond []*Stmt + CondLast []Comment + Do []*Stmt + DoLast []Comment +} + +func (w *WhileClause) Pos() Pos { return w.WhilePos } +func (w *WhileClause) End() Pos { return posAddCol(w.DonePos, 4) } + +// ForClause represents a for or a select clause. The latter is only present in +// Bash. +type ForClause struct { + ForPos, DoPos, DonePos Pos + Select bool + Braces bool // deprecated form with { } instead of do/done + Loop Loop + + Do []*Stmt + DoLast []Comment +} + +func (f *ForClause) Pos() Pos { return f.ForPos } +func (f *ForClause) End() Pos { return posAddCol(f.DonePos, 4) } + +// Loop holds either *WordIter or *CStyleLoop. +type Loop interface { + Node + loopNode() +} + +func (*WordIter) loopNode() {} +func (*CStyleLoop) loopNode() {} + +// WordIter represents the iteration of a variable over a series of words in a +// for clause. If InPos is an invalid position, the "in" token was missing, so +// the iteration is over the shell's positional parameters. 
+type WordIter struct { + Name *Lit + InPos Pos // position of "in" + Items []*Word +} + +func (w *WordIter) Pos() Pos { return w.Name.Pos() } +func (w *WordIter) End() Pos { + if len(w.Items) > 0 { + return wordLastEnd(w.Items) + } + return posMax(w.Name.End(), posAddCol(w.InPos, 2)) +} + +// CStyleLoop represents the behavior of a for clause similar to the C +// language. +// +// This node will only appear with LangBash. +type CStyleLoop struct { + Lparen, Rparen Pos + // Init, Cond, Post can each be nil, if the for loop construct omits it. + Init, Cond, Post ArithmExpr +} + +func (c *CStyleLoop) Pos() Pos { return c.Lparen } +func (c *CStyleLoop) End() Pos { return posAddCol(c.Rparen, 2) } + +// BinaryCmd represents a binary expression between two statements. +type BinaryCmd struct { + OpPos Pos + Op BinCmdOperator + X, Y *Stmt +} + +func (b *BinaryCmd) Pos() Pos { return b.X.Pos() } +func (b *BinaryCmd) End() Pos { return b.Y.End() } + +// FuncDecl represents the declaration of a function. +type FuncDecl struct { + Position Pos + RsrvWord bool // non-posix "function f" style + Parens bool // with () parentheses, only meaningful with RsrvWord=true + Name *Lit + Body *Stmt +} + +func (f *FuncDecl) Pos() Pos { return f.Position } +func (f *FuncDecl) End() Pos { return f.Body.End() } + +// Word represents a shell word, containing one or more word parts contiguous to +// each other. The word is delimited by word boundaries, such as spaces, +// newlines, semicolons, or parentheses. +type Word struct { + Parts []WordPart +} + +func (w *Word) Pos() Pos { return w.Parts[0].Pos() } +func (w *Word) End() Pos { return w.Parts[len(w.Parts)-1].End() } + +// Lit returns the word as a literal value, if the word consists of *Lit nodes +// only. An empty string is returned otherwise. Words with multiple literals, +// which can appear in some edge cases, are handled properly. +// +// For example, the word "foo" will return "foo", but the word "foo${bar}" will +// return "". +func (w *Word) Lit() string { + // In the usual case, we'll have either a single part that's a literal, + // or one of the parts being a non-literal. Using strings.Join instead + // of a strings.Builder avoids extra work in these cases, since a single + // part is a shortcut, and many parts don't incur string copies. + lits := make([]string, 0, 1) + for _, part := range w.Parts { + lit, ok := part.(*Lit) + if !ok { + return "" + } + lits = append(lits, lit.Value) + } + return strings.Join(lits, "") +} + +// WordPart represents all nodes that can form part of a word. +// +// These are *Lit, *SglQuoted, *DblQuoted, *ParamExp, *CmdSubst, *ArithmExp, +// *ProcSubst, and *ExtGlob. +type WordPart interface { + Node + wordPartNode() +} + +func (*Lit) wordPartNode() {} +func (*SglQuoted) wordPartNode() {} +func (*DblQuoted) wordPartNode() {} +func (*ParamExp) wordPartNode() {} +func (*CmdSubst) wordPartNode() {} +func (*ArithmExp) wordPartNode() {} +func (*ProcSubst) wordPartNode() {} +func (*ExtGlob) wordPartNode() {} +func (*BraceExp) wordPartNode() {} + +// Lit represents a string literal. +// +// Note that a parsed string literal may not appear as-is in the original source +// code, as it is possible to split literals by escaping newlines. The splitting +// is lost, but the end position is not. +type Lit struct { + ValuePos, ValueEnd Pos + Value string +} + +func (l *Lit) Pos() Pos { return l.ValuePos } +func (l *Lit) End() Pos { return l.ValueEnd } + +// SglQuoted represents a string within single quotes. 
+type SglQuoted struct { + Left, Right Pos + Dollar bool // $'' + Value string +} + +func (q *SglQuoted) Pos() Pos { return q.Left } +func (q *SglQuoted) End() Pos { return posAddCol(q.Right, 1) } + +// DblQuoted represents a list of nodes within double quotes. +type DblQuoted struct { + Left, Right Pos + Dollar bool // $"" + Parts []WordPart +} + +func (q *DblQuoted) Pos() Pos { return q.Left } +func (q *DblQuoted) End() Pos { return posAddCol(q.Right, 1) } + +// CmdSubst represents a command substitution. +type CmdSubst struct { + Left, Right Pos + + Stmts []*Stmt + Last []Comment + + Backquotes bool // deprecated `foo` + TempFile bool // mksh's ${ foo;} + ReplyVar bool // mksh's ${|foo;} +} + +func (c *CmdSubst) Pos() Pos { return c.Left } +func (c *CmdSubst) End() Pos { return posAddCol(c.Right, 1) } + +// ParamExp represents a parameter expansion. +type ParamExp struct { + Dollar, Rbrace Pos + + Short bool // $a instead of ${a} + Excl bool // ${!a} + Length bool // ${#a} + Width bool // ${%a} + Param *Lit + Index ArithmExpr // ${a[i]}, ${a["k"]} + Slice *Slice // ${a:x:y} + Repl *Replace // ${a/x/y} + Names ParNamesOperator // ${!prefix*} or ${!prefix@} + Exp *Expansion // ${a:-b}, ${a#b}, etc +} + +func (p *ParamExp) Pos() Pos { return p.Dollar } +func (p *ParamExp) End() Pos { + if !p.Short { + return posAddCol(p.Rbrace, 1) + } + if p.Index != nil { + return posAddCol(p.Index.End(), 1) + } + return p.Param.End() +} + +func (p *ParamExp) nakedIndex() bool { + return p.Short && p.Index != nil +} + +// Slice represents a character slicing expression inside a ParamExp. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type Slice struct { + Offset, Length ArithmExpr +} + +// Replace represents a search and replace expression inside a ParamExp. +type Replace struct { + All bool + Orig, With *Word +} + +// Expansion represents string manipulation in a ParamExp other than those +// covered by Replace. +type Expansion struct { + Op ParExpOperator + Word *Word +} + +// ArithmExp represents an arithmetic expansion. +type ArithmExp struct { + Left, Right Pos + Bracket bool // deprecated $[expr] form + Unsigned bool // mksh's $((# expr)) + + X ArithmExpr +} + +func (a *ArithmExp) Pos() Pos { return a.Left } +func (a *ArithmExp) End() Pos { + if a.Bracket { + return posAddCol(a.Right, 1) + } + return posAddCol(a.Right, 2) +} + +// ArithmCmd represents an arithmetic command. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type ArithmCmd struct { + Left, Right Pos + Unsigned bool // mksh's ((# expr)) + + X ArithmExpr +} + +func (a *ArithmCmd) Pos() Pos { return a.Left } +func (a *ArithmCmd) End() Pos { return posAddCol(a.Right, 2) } + +// ArithmExpr represents all nodes that form arithmetic expressions. +// +// These are *BinaryArithm, *UnaryArithm, *ParenArithm, and *Word. +type ArithmExpr interface { + Node + arithmExprNode() +} + +func (*BinaryArithm) arithmExprNode() {} +func (*UnaryArithm) arithmExprNode() {} +func (*ParenArithm) arithmExprNode() {} +func (*Word) arithmExprNode() {} + +// BinaryArithm represents a binary arithmetic expression. +// +// If Op is any assign operator, X will be a word with a single *Lit whose value +// is a valid name. +// +// Ternary operators like "a ? b : c" are fit into this structure. Thus, if +// Op==TernQuest, Y will be a *BinaryArithm with Op==TernColon. Op can only be +// TernColon in that scenario. 
+type BinaryArithm struct { + OpPos Pos + Op BinAritOperator + X, Y ArithmExpr +} + +func (b *BinaryArithm) Pos() Pos { return b.X.Pos() } +func (b *BinaryArithm) End() Pos { return b.Y.End() } + +// UnaryArithm represents an unary arithmetic expression. The unary operator +// may come before or after the sub-expression. +// +// If Op is Inc or Dec, X will be a word with a single *Lit whose value is a +// valid name. +type UnaryArithm struct { + OpPos Pos + Op UnAritOperator + Post bool + X ArithmExpr +} + +func (u *UnaryArithm) Pos() Pos { + if u.Post { + return u.X.Pos() + } + return u.OpPos +} + +func (u *UnaryArithm) End() Pos { + if u.Post { + return posAddCol(u.OpPos, 2) + } + return u.X.End() +} + +// ParenArithm represents an arithmetic expression within parentheses. +type ParenArithm struct { + Lparen, Rparen Pos + + X ArithmExpr +} + +func (p *ParenArithm) Pos() Pos { return p.Lparen } +func (p *ParenArithm) End() Pos { return posAddCol(p.Rparen, 1) } + +// CaseClause represents a case (switch) clause. +type CaseClause struct { + Case, In, Esac Pos + Braces bool // deprecated mksh form with braces instead of in/esac + + Word *Word + Items []*CaseItem + Last []Comment +} + +func (c *CaseClause) Pos() Pos { return c.Case } +func (c *CaseClause) End() Pos { return posAddCol(c.Esac, 4) } + +// CaseItem represents a pattern list (case) within a CaseClause. +type CaseItem struct { + Op CaseOperator + OpPos Pos // unset if it was finished by "esac" + Comments []Comment + Patterns []*Word + + Stmts []*Stmt + Last []Comment +} + +func (c *CaseItem) Pos() Pos { return c.Patterns[0].Pos() } +func (c *CaseItem) End() Pos { + if c.OpPos.IsValid() { + return posAddCol(c.OpPos, len(c.Op.String())) + } + return stmtsEnd(c.Stmts, c.Last) +} + +// TestClause represents a Bash extended test clause. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type TestClause struct { + Left, Right Pos + + X TestExpr +} + +func (t *TestClause) Pos() Pos { return t.Left } +func (t *TestClause) End() Pos { return posAddCol(t.Right, 2) } + +// TestExpr represents all nodes that form test expressions. +// +// These are *BinaryTest, *UnaryTest, *ParenTest, and *Word. +type TestExpr interface { + Node + testExprNode() +} + +func (*BinaryTest) testExprNode() {} +func (*UnaryTest) testExprNode() {} +func (*ParenTest) testExprNode() {} +func (*Word) testExprNode() {} + +// BinaryTest represents a binary test expression. +type BinaryTest struct { + OpPos Pos + Op BinTestOperator + X, Y TestExpr +} + +func (b *BinaryTest) Pos() Pos { return b.X.Pos() } +func (b *BinaryTest) End() Pos { return b.Y.End() } + +// UnaryTest represents a unary test expression. The unary operator may come +// before or after the sub-expression. +type UnaryTest struct { + OpPos Pos + Op UnTestOperator + X TestExpr +} + +func (u *UnaryTest) Pos() Pos { return u.OpPos } +func (u *UnaryTest) End() Pos { return u.X.End() } + +// ParenTest represents a test expression within parentheses. +type ParenTest struct { + Lparen, Rparen Pos + + X TestExpr +} + +func (p *ParenTest) Pos() Pos { return p.Lparen } +func (p *ParenTest) End() Pos { return posAddCol(p.Rparen, 1) } + +// DeclClause represents a Bash declare clause. +// +// Args can contain a mix of regular and naked assignments. The naked +// assignments can represent either options or variable names. +// +// This node will only appear with LangBash. +type DeclClause struct { + // Variant is one of "declare", "local", "export", "readonly", + // "typeset", or "nameref". 
+ Variant *Lit + Args []*Assign +} + +func (d *DeclClause) Pos() Pos { return d.Variant.Pos() } +func (d *DeclClause) End() Pos { + if len(d.Args) > 0 { + return d.Args[len(d.Args)-1].End() + } + return d.Variant.End() +} + +// ArrayExpr represents a Bash array expression. +// +// This node will only appear with LangBash. +type ArrayExpr struct { + Lparen, Rparen Pos + + Elems []*ArrayElem + Last []Comment +} + +func (a *ArrayExpr) Pos() Pos { return a.Lparen } +func (a *ArrayExpr) End() Pos { return posAddCol(a.Rparen, 1) } + +// ArrayElem represents a Bash array element. +// +// Index can be nil; for example, declare -a x=(value). +// Value can be nil; for example, declare -A x=([index]=). +// Finally, neither can be nil; for example, declare -A x=([index]=value) +type ArrayElem struct { + Index ArithmExpr + Value *Word + Comments []Comment +} + +func (a *ArrayElem) Pos() Pos { + if a.Index != nil { + return a.Index.Pos() + } + return a.Value.Pos() +} + +func (a *ArrayElem) End() Pos { + if a.Value != nil { + return a.Value.End() + } + return posAddCol(a.Index.Pos(), 1) +} + +// ExtGlob represents a Bash extended globbing expression. Note that these are +// parsed independently of whether shopt has been called or not. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type ExtGlob struct { + OpPos Pos + Op GlobOperator + Pattern *Lit +} + +func (e *ExtGlob) Pos() Pos { return e.OpPos } +func (e *ExtGlob) End() Pos { return posAddCol(e.Pattern.End(), 1) } + +// ProcSubst represents a Bash process substitution. +// +// This node will only appear with LangBash. +type ProcSubst struct { + OpPos, Rparen Pos + Op ProcOperator + + Stmts []*Stmt + Last []Comment +} + +func (s *ProcSubst) Pos() Pos { return s.OpPos } +func (s *ProcSubst) End() Pos { return posAddCol(s.Rparen, 1) } + +// TimeClause represents a Bash time clause. PosixFormat corresponds to the -p +// flag. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type TimeClause struct { + Time Pos + PosixFormat bool + Stmt *Stmt +} + +func (c *TimeClause) Pos() Pos { return c.Time } +func (c *TimeClause) End() Pos { + if c.Stmt == nil { + return posAddCol(c.Time, 4) + } + return c.Stmt.End() +} + +// CoprocClause represents a Bash coproc clause. +// +// This node will only appear with LangBash. +type CoprocClause struct { + Coproc Pos + Name *Word + Stmt *Stmt +} + +func (c *CoprocClause) Pos() Pos { return c.Coproc } +func (c *CoprocClause) End() Pos { return c.Stmt.End() } + +// LetClause represents a Bash let clause. +// +// This node will only appear in LangBash and LangMirBSDKorn. +type LetClause struct { + Let Pos + Exprs []ArithmExpr +} + +func (l *LetClause) Pos() Pos { return l.Let } +func (l *LetClause) End() Pos { return l.Exprs[len(l.Exprs)-1].End() } + +// BraceExp represents a Bash brace expression, such as "{a,f}" or "{1..10}". +// +// This node will only appear as a result of SplitBraces. +type BraceExp struct { + Sequence bool // {x..y[..incr]} instead of {x,y[,...]} + Elems []*Word +} + +func (b *BraceExp) Pos() Pos { + return posAddCol(b.Elems[0].Pos(), -1) +} + +func (b *BraceExp) End() Pos { + return posAddCol(wordLastEnd(b.Elems), 1) +} + +// TestDecl represents the declaration of a Bats test function. 
+type TestDecl struct { + Position Pos + Description *Word + Body *Stmt +} + +func (f *TestDecl) Pos() Pos { return f.Position } +func (f *TestDecl) End() Pos { return f.Body.End() } + +func wordLastEnd(ws []*Word) Pos { + if len(ws) == 0 { + return Pos{} + } + return ws[len(ws)-1].End() +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/parser.go b/vendor/mvdan.cc/sh/v3/syntax/parser.go new file mode 100644 index 00000000..86e7274c --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/parser.go @@ -0,0 +1,2487 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "bytes" + "fmt" + "io" + "strconv" + "strings" + "unicode/utf8" +) + +// ParserOption is a function which can be passed to NewParser +// to alter its behavior. To apply option to existing Parser +// call it directly, for example KeepComments(true)(parser). +type ParserOption func(*Parser) + +// KeepComments makes the parser parse comments and attach them to +// nodes, as opposed to discarding them. +func KeepComments(enabled bool) ParserOption { + return func(p *Parser) { p.keepComments = enabled } +} + +// LangVariant describes a shell language variant to use when tokenizing and +// parsing shell code. The zero value is LangBash. +type LangVariant int + +const ( + // LangBash corresponds to the GNU Bash language, as described in its + // manual at https://www.gnu.org/software/bash/manual/bash.html. + // + // We currently follow Bash version 5.1. + // + // Its string representation is "bash". + LangBash LangVariant = iota + + // LangPOSIX corresponds to the POSIX Shell language, as described at + // https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html. + // + // Its string representation is "posix" or "sh". + LangPOSIX + + // LangMirBSDKorn corresponds to the MirBSD Korn Shell, also known as + // mksh, as described at http://www.mirbsd.org/htman/i386/man1/mksh.htm. + // Note that it shares some features with Bash, due to the the shared + // ancestry that is ksh. + // + // We currently follow mksh version 59. + // + // Its string representation is "mksh". + LangMirBSDKorn + + // LangBats corresponds to the Bash Automated Testing System language, + // as described at https://github.com/bats-core/bats-core. Note that + // it's just a small extension of the Bash language. + // + // Its string representation is "bats". + LangBats + + // LangAuto corresponds to automatic language detection, + // commonly used by end-user applications like shfmt, + // which can guess a file's language variant given its filename or shebang. + // + // At this time, the Parser does not support LangAuto. + LangAuto +) + +// Variant changes the shell language variant that the parser will +// accept. +// +// The passed language variant must be one of the constant values defined in +// this package. 
+func Variant(l LangVariant) ParserOption { + switch l { + case LangBash, LangPOSIX, LangMirBSDKorn, LangBats: + case LangAuto: + panic("LangAuto is not supported by the parser at this time") + default: + panic(fmt.Sprintf("unknown shell language variant: %d", l)) + } + return func(p *Parser) { p.lang = l } +} + +func (l LangVariant) String() string { + switch l { + case LangBash: + return "bash" + case LangPOSIX: + return "posix" + case LangMirBSDKorn: + return "mksh" + case LangBats: + return "bats" + case LangAuto: + return "auto" + } + return "unknown shell language variant" +} + +func (l *LangVariant) Set(s string) error { + switch s { + case "bash": + *l = LangBash + case "posix", "sh": + *l = LangPOSIX + case "mksh": + *l = LangMirBSDKorn + case "bats": + *l = LangBats + case "auto": + *l = LangAuto + default: + return fmt.Errorf("unknown shell language variant: %q", s) + } + return nil +} + +func (l LangVariant) isBash() bool { + return l == LangBash || l == LangBats +} + +// StopAt configures the lexer to stop at an arbitrary word, treating it +// as if it were the end of the input. It can contain any characters +// except whitespace, and cannot be over four bytes in size. +// +// This can be useful to embed shell code within another language, as +// one can use a special word to mark the delimiters between the two. +// +// As a word, it will only apply when following whitespace or a +// separating token. For example, StopAt("$$") will act on the inputs +// "foo $$" and "foo;$$", but not on "foo '$$'". +// +// The match is done by prefix, so the example above will also act on +// "foo $$bar". +func StopAt(word string) ParserOption { + if len(word) > 4 { + panic("stop word can't be over four bytes in size") + } + if strings.ContainsAny(word, " \t\n\r") { + panic("stop word can't contain whitespace characters") + } + return func(p *Parser) { p.stopAt = []byte(word) } +} + +// NewParser allocates a new Parser and applies any number of options. +func NewParser(options ...ParserOption) *Parser { + p := &Parser{} + for _, opt := range options { + opt(p) + } + return p +} + +// Parse reads and parses a shell program with an optional name. It +// returns the parsed program if no issues were encountered. Otherwise, +// an error is returned. Reads from r are buffered. +// +// Parse can be called more than once, but not concurrently. That is, a +// Parser can be reused once it is done working. +func (p *Parser) Parse(r io.Reader, name string) (*File, error) { + p.reset() + p.f = &File{Name: name} + p.src = r + p.rune() + p.next() + p.f.Stmts, p.f.Last = p.stmtList() + if p.err == nil { + // EOF immediately after heredoc word so no newline to + // trigger it + p.doHeredocs() + } + return p.f, p.err +} + +// Stmts reads and parses statements one at a time, calling a function +// each time one is parsed. If the function returns false, parsing is +// stopped and the function is not called again. 
+func (p *Parser) Stmts(r io.Reader, fn func(*Stmt) bool) error { + p.reset() + p.f = &File{} + p.src = r + p.rune() + p.next() + p.stmts(fn) + if p.err == nil { + // EOF immediately after heredoc word so no newline to + // trigger it + p.doHeredocs() + } + return p.err +} + +type wrappedReader struct { + *Parser + io.Reader + + lastLine int + accumulated []*Stmt + fn func([]*Stmt) bool +} + +func (w *wrappedReader) Read(p []byte) (n int, err error) { + // If we lexed a newline for the first time, we just finished a line, so + // we may need to give a callback for the edge cases below not covered + // by Parser.Stmts. + if (w.r == '\n' || w.r == escNewl) && w.line > w.lastLine { + if w.Incomplete() { + // Incomplete statement; call back to print "> ". + if !w.fn(w.accumulated) { + return 0, io.EOF + } + } else if len(w.accumulated) == 0 { + // Nothing was parsed; call back to print another "$ ". + if !w.fn(nil) { + return 0, io.EOF + } + } + w.lastLine = w.line + } + return w.Reader.Read(p) +} + +// Interactive implements what is necessary to parse statements in an +// interactive shell. The parser will call the given function under two +// circumstances outlined below. +// +// If a line containing any number of statements is parsed, the function will be +// called with said statements. +// +// If a line ending in an incomplete statement is parsed, the function will be +// called with any fully parsed statements, and Parser.Incomplete will return +// true. +// +// One can imagine a simple interactive shell implementation as follows: +// +// fmt.Fprintf(os.Stdout, "$ ") +// parser.Interactive(os.Stdin, func(stmts []*syntax.Stmt) bool { +// if parser.Incomplete() { +// fmt.Fprintf(os.Stdout, "> ") +// return true +// } +// run(stmts) +// fmt.Fprintf(os.Stdout, "$ ") +// return true +// } +// +// If the callback function returns false, parsing is stopped and the function +// is not called again. +func (p *Parser) Interactive(r io.Reader, fn func([]*Stmt) bool) error { + w := wrappedReader{Parser: p, Reader: r, fn: fn} + return p.Stmts(&w, func(stmt *Stmt) bool { + w.accumulated = append(w.accumulated, stmt) + // We finished parsing a statement and we're at a newline token, + // so we finished fully parsing a number of statements. Call + // back to run the statements and print "$ ". + if p.tok == _Newl { + if !fn(w.accumulated) { + return false + } + w.accumulated = w.accumulated[:0] + // The callback above would already print "$ ", so we + // don't want the subsequent wrappedReader.Read to cause + // another "$ " print thinking that nothing was parsed. + w.lastLine = w.line + 1 + } + return true + }) +} + +// Words reads and parses words one at a time, calling a function each time one +// is parsed. If the function returns false, parsing is stopped and the function +// is not called again. +// +// Newlines are skipped, meaning that multi-line input will work fine. If the +// parser encounters a token that isn't a word, such as a semicolon, an error +// will be returned. +// +// Note that the lexer doesn't currently tokenize spaces, so it may need to read +// a non-space byte such as a newline or a letter before finishing the parsing +// of a word. This will be fixed in the future. 
+func (p *Parser) Words(r io.Reader, fn func(*Word) bool) error {
+	p.reset()
+	p.f = &File{}
+	p.src = r
+	p.rune()
+	p.next()
+	for {
+		p.got(_Newl)
+		w := p.getWord()
+		if w == nil {
+			if p.tok != _EOF {
+				p.curErr("%s is not a valid word", p.tok)
+			}
+			return p.err
+		}
+		if !fn(w) {
+			return nil
+		}
+	}
+}
+
+// Document parses a single here-document word. That is, it parses the input as
+// if they were lines following a <<EOF redirection.
+func (p *Parser) Document(r io.Reader) (*Word, error) {
+	p.reset()
+	p.f = &File{}
+	p.src = r
+	p.rune()
+	p.quote = hdocBody
+	p.parsingDoc = true
+	p.next()
+	w := p.getWord()
+	return w, p.err
+}
+
+// Incomplete reports whether the parser is waiting to read more bytes because
+// it needs to finish properly parsing a statement.
+//
+// It is only safe to call while the parser is blocked on a read. For an example
+// use case, see the documentation for Parser.Interactive.
+func (p *Parser) Incomplete() bool {
+	return p.quote != noState || p.openStmts > 0 || p.litBs != nil
+}
+
+const bufSize = 1 << 10
+
+func (p *Parser) reset() {
+	p.tok, p.val = illegalTok, ""
+	p.eqlOffs = 0
+	p.bs, p.bsp = nil, 0
+	p.offs, p.line, p.col = 0, 1, 1
+	p.r, p.w = 0, 0
+	p.err, p.readErr = nil, nil
+	p.quote, p.forbidNested = noState, false
+	p.openStmts = 0
+	p.heredocs, p.buriedHdocs = p.heredocs[:0], 0
+	p.parsingDoc = false
+	p.openBquotes, p.buriedBquotes = 0, 0
+	p.accComs, p.curComs = nil, &p.accComs
+	p.litBatch = nil
+	p.wordBatch = nil
+	p.stmtBatch = nil
+	p.callBatch = nil
+}
+
+func (p *Parser) nextPos() Pos {
+	// TODO: detect offset overflow while lexing as well.
+	var line, col uint
+	if !p.lineOverflow {
+		line = uint(p.line)
+	}
+	if !p.colOverflow {
+		col = uint(p.col)
+	}
+	return NewPos(uint(p.offs+p.bsp-p.w), line, col)
+}
+
+func (p *Parser) lit(pos Pos, val string) *Lit {
+	if len(p.litBatch) == 0 {
+		p.litBatch = make([]Lit, 64)
+	}
+	l := &p.litBatch[0]
+	p.litBatch = p.litBatch[1:]
+	l.ValuePos = pos
+	l.ValueEnd = p.nextPos()
+	l.Value = val
+	return l
+}
+
+type wordAlloc struct {
+	word  Word
+	parts [1]WordPart
+}
+
+func (p *Parser) wordAnyNumber() *Word {
+	if len(p.wordBatch) == 0 {
+		p.wordBatch = make([]wordAlloc, 32)
+	}
+	alloc := &p.wordBatch[0]
+	p.wordBatch = p.wordBatch[1:]
+	w := &alloc.word
+	w.Parts = p.wordParts(alloc.parts[:0])
+	return w
+}
+
+func (p *Parser) wordOne(part WordPart) *Word {
+	if len(p.wordBatch) == 0 {
+		p.wordBatch = make([]wordAlloc, 32)
+	}
+	alloc := &p.wordBatch[0]
+	p.wordBatch = p.wordBatch[1:]
+	w := &alloc.word
+	w.Parts = alloc.parts[:1]
+	w.Parts[0] = part
+	return w
+}
+
+func (p *Parser) stmt(pos Pos) *Stmt {
+	if len(p.stmtBatch) == 0 {
+		p.stmtBatch = make([]Stmt, 32)
+	}
+	s := &p.stmtBatch[0]
+	p.stmtBatch = p.stmtBatch[1:]
+	s.Position = pos
+	return s
+}
+
+type callAlloc struct {
+	ce CallExpr
+	ws [4]*Word
+}
+
+func (p *Parser) call(w *Word) *CallExpr {
+	if len(p.callBatch) == 0 {
+		p.callBatch = make([]callAlloc, 32)
+	}
+	alloc := &p.callBatch[0]
+	p.callBatch = p.callBatch[1:]
+	ce := &alloc.ce
+	ce.Args = alloc.ws[:1]
+	ce.Args[0] = w
+	return ce
+}
+
+//go:generate stringer -type=quoteState
+
+type quoteState uint32
+
+const (
+	noState quoteState = 1 << iota
+	subCmd
+	subCmdBckquo
+	dblQuotes
+	hdocWord
+	hdocBody
+	hdocBodyTabs
+	arithmExpr
+	arithmExprLet
+	arithmExprCmd
+	arithmExprBrack
+	testExpr
+	testExprRegexp
+	switchCase
+	paramExpName
+	paramExpSlice
+	paramExpRepl
+	paramExpExp
+	arrayElems
+
+	allKeepSpaces = paramExpRepl | dblQuotes | hdocBody |
+		hdocBodyTabs | paramExpExp
+	allRegTokens = noState | subCmd | subCmdBckquo | hdocWord |
+		switchCase | arrayElems | testExpr
+	allArithmExpr = arithmExpr | arithmExprLet | arithmExprCmd |
+		arithmExprBrack | paramExpSlice
+	allParamReg = paramExpName | paramExpSlice
+	allParamExp = allParamReg | paramExpRepl | paramExpExp | arithmExprBrack
+)
+
+type saveState struct {
+	quote       quoteState
+	buriedHdocs int
+}
+
+func (p *Parser) preNested(quote quoteState) (s saveState) {
+	s.quote, s.buriedHdocs = p.quote, p.buriedHdocs
+	p.buriedHdocs, p.quote = len(p.heredocs), quote
+	return
+}
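+
+// preNested and postNested are used as a save/restore pair around any nested
+// construct that changes the lexer's quote state. For example, Parser.subshell
+// further below wraps a subshell's statement list like this:
+//
+//	old := p.preNested(subCmd)
+//	p.next()
+//	sub.Stmts, sub.Last = p.stmtList()
+//	p.postNested(old)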
+ +func (p *Parser) postNested(s saveState) { + p.quote, p.buriedHdocs = s.quote, s.buriedHdocs +} + +func (p *Parser) unquotedWordBytes(w *Word) ([]byte, bool) { + buf := make([]byte, 0, 4) + didUnquote := false + for _, wp := range w.Parts { + buf, didUnquote = p.unquotedWordPart(buf, wp, false) + } + return buf, didUnquote +} + +func (p *Parser) unquotedWordPart(buf []byte, wp WordPart, quotes bool) (_ []byte, quoted bool) { + switch x := wp.(type) { + case *Lit: + for i := 0; i < len(x.Value); i++ { + if b := x.Value[i]; b == '\\' && !quotes { + if i++; i < len(x.Value) { + buf = append(buf, x.Value[i]) + } + quoted = true + } else { + buf = append(buf, b) + } + } + case *SglQuoted: + buf = append(buf, []byte(x.Value)...) + quoted = true + case *DblQuoted: + for _, wp2 := range x.Parts { + buf, _ = p.unquotedWordPart(buf, wp2, true) + } + quoted = true + } + return buf, quoted +} + +func (p *Parser) doHeredocs() { + hdocs := p.heredocs[p.buriedHdocs:] + if len(hdocs) == 0 { + // Nothing do do; don't even issue a read. + return + } + p.rune() // consume '\n', since we know p.tok == _Newl + old := p.quote + p.heredocs = p.heredocs[:p.buriedHdocs] + for i, r := range hdocs { + if p.err != nil { + break + } + p.quote = hdocBody + if r.Op == DashHdoc { + p.quote = hdocBodyTabs + } + stop, quoted := p.unquotedWordBytes(r.Word) + p.hdocStops = append(p.hdocStops, stop) + if i > 0 && p.r == '\n' { + p.rune() + } + lastLine := p.line + if quoted { + r.Hdoc = p.quotedHdocWord() + } else { + p.next() + r.Hdoc = p.getWord() + } + if r.Hdoc != nil { + lastLine = int(r.Hdoc.End().Line()) + } + if lastLine < p.line { + // TODO: It seems like this triggers more often than it + // should. Look into it. + l := p.lit(p.nextPos(), "") + if r.Hdoc == nil { + r.Hdoc = p.wordOne(l) + } else { + r.Hdoc.Parts = append(r.Hdoc.Parts, l) + } + } + if stop := p.hdocStops[len(p.hdocStops)-1]; stop != nil { + p.posErr(r.Pos(), "unclosed here-document '%s'", stop) + } + p.hdocStops = p.hdocStops[:len(p.hdocStops)-1] + } + p.quote = old +} + +func (p *Parser) got(tok token) bool { + if p.tok == tok { + p.next() + return true + } + return false +} + +func (p *Parser) gotRsrv(val string) (Pos, bool) { + pos := p.pos + if p.tok == _LitWord && p.val == val { + p.next() + return pos, true + } + return pos, false +} + +func readableStr(s string) string { + // don't quote tokens like & or } + if s != "" && s[0] >= 'a' && s[0] <= 'z' { + return strconv.Quote(s) + } + return s +} + +func (p *Parser) followErr(pos Pos, left, right string) { + leftStr := readableStr(left) + p.posErr(pos, "%s must be followed by %s", leftStr, right) +} + +func (p *Parser) followErrExp(pos Pos, left string) { + p.followErr(pos, left, "an expression") +} + +func (p *Parser) follow(lpos Pos, left string, tok token) { + if !p.got(tok) { + p.followErr(lpos, left, tok.String()) + } +} + +func (p *Parser) followRsrv(lpos Pos, left, val string) Pos { + pos, ok := p.gotRsrv(val) + if !ok { + p.followErr(lpos, left, fmt.Sprintf("%q", val)) + } + return pos +} + +func (p *Parser) followStmts(left string, lpos Pos, stops ...string) ([]*Stmt, []Comment) { + if p.got(semicolon) { + return nil, nil + } + newLine := p.got(_Newl) + stmts, last := p.stmtList(stops...) 
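+	// An empty statement list is only reported as an error when no newline
+	// was consumed right after the opening word.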
+ if len(stmts) < 1 && !newLine { + p.followErr(lpos, left, "a statement list") + } + return stmts, last +} + +func (p *Parser) followWordTok(tok token, pos Pos) *Word { + w := p.getWord() + if w == nil { + p.followErr(pos, tok.String(), "a word") + } + return w +} + +func (p *Parser) stmtEnd(n Node, start, end string) Pos { + pos, ok := p.gotRsrv(end) + if !ok { + p.posErr(n.Pos(), "%s statement must end with %q", start, end) + } + return pos +} + +func (p *Parser) quoteErr(lpos Pos, quote token) { + p.posErr(lpos, "reached %s without closing quote %s", + p.tok.String(), quote) +} + +func (p *Parser) matchingErr(lpos Pos, left, right interface{}) { + p.posErr(lpos, "reached %s without matching %s with %s", + p.tok.String(), left, right) +} + +func (p *Parser) matched(lpos Pos, left, right token) Pos { + pos := p.pos + if !p.got(right) { + p.matchingErr(lpos, left, right) + } + return pos +} + +func (p *Parser) errPass(err error) { + if p.err == nil { + p.err = err + p.bsp = len(p.bs) + 1 + p.r = utf8.RuneSelf + p.w = 1 + p.tok = _EOF + } +} + +// IsIncomplete reports whether a Parser error could have been avoided with +// extra input bytes. For example, if an io.EOF was encountered while there was +// an unclosed quote or parenthesis. +func IsIncomplete(err error) bool { + perr, ok := err.(ParseError) + return ok && perr.Incomplete +} + +// IsKeyword returns true if the given word is part of the language keywords. +func IsKeyword(word string) bool { + // This list has been copied from the bash 5.1 source code, file y.tab.c +4460 + switch word { + case + "!", + "[[", // only if COND_COMMAND is defined + "]]", // only if COND_COMMAND is defined + "case", + "coproc", // only if COPROCESS_SUPPORT is defined + "do", + "done", + "else", + "esac", + "fi", + "for", + "function", + "if", + "in", + "select", // only if SELECT_COMMAND is defined + "then", + "time", // only if COMMAND_TIMING is defined + "until", + "while", + "{", + "}": + return true + } + return false +} + +// ParseError represents an error found when parsing a source file, from which +// the parser cannot recover. +type ParseError struct { + Filename string + Pos + Text string + + Incomplete bool +} + +func (e ParseError) Error() string { + if e.Filename == "" { + return fmt.Sprintf("%s: %s", e.Pos.String(), e.Text) + } + return fmt.Sprintf("%s:%s: %s", e.Filename, e.Pos.String(), e.Text) +} + +// LangError is returned when the parser encounters code that is only valid in +// other shell language variants. The error includes what feature is not present +// in the current language variant, and what languages support it. +type LangError struct { + Filename string + Pos + Feature string + Langs []LangVariant +} + +func (e LangError) Error() string { + var buf bytes.Buffer + if e.Filename != "" { + buf.WriteString(e.Filename + ":") + } + buf.WriteString(e.Pos.String() + ": ") + buf.WriteString(e.Feature) + if strings.HasSuffix(e.Feature, "s") { + buf.WriteString(" are a ") + } else { + buf.WriteString(" is a ") + } + for i, lang := range e.Langs { + if i > 0 { + buf.WriteString("/") + } + buf.WriteString(lang.String()) + } + buf.WriteString(" feature") + return buf.String() +} + +func (p *Parser) posErr(pos Pos, format string, a ...interface{}) { + p.errPass(ParseError{ + Filename: p.f.Name, + Pos: pos, + Text: fmt.Sprintf(format, a...), + Incomplete: p.tok == _EOF && p.Incomplete(), + }) +} + +func (p *Parser) curErr(format string, a ...interface{}) { + p.posErr(p.pos, format, a...) 
+} + +func (p *Parser) langErr(pos Pos, feature string, langs ...LangVariant) { + p.errPass(LangError{ + Filename: p.f.Name, + Pos: pos, + Feature: feature, + Langs: langs, + }) +} + +func (p *Parser) stmts(fn func(*Stmt) bool, stops ...string) { + gotEnd := true +loop: + for p.tok != _EOF { + newLine := p.got(_Newl) + switch p.tok { + case _LitWord: + for _, stop := range stops { + if p.val == stop { + break loop + } + } + case rightParen: + if p.quote == subCmd { + break loop + } + case bckQuote: + if p.backquoteEnd() { + break loop + } + case dblSemicolon, semiAnd, dblSemiAnd, semiOr: + if p.quote == switchCase { + break loop + } + p.curErr("%s can only be used in a case clause", p.tok) + } + if !newLine && !gotEnd { + p.curErr("statements must be separated by &, ; or a newline") + } + if p.tok == _EOF { + break + } + p.openStmts++ + s := p.getStmt(true, false, false) + p.openStmts-- + if s == nil { + p.invalidStmtStart() + break + } + gotEnd = s.Semicolon.IsValid() + if !fn(s) { + break + } + } +} + +func (p *Parser) stmtList(stops ...string) ([]*Stmt, []Comment) { + var stmts []*Stmt + var last []Comment + fn := func(s *Stmt) bool { + stmts = append(stmts, s) + return true + } + p.stmts(fn, stops...) + split := len(p.accComs) + if p.tok == _LitWord && (p.val == "elif" || p.val == "else" || p.val == "fi") { + // Split the comments, so that any aligned with an opening token + // get attached to it. For example: + // + // if foo; then + // # inside the body + // # document the else + // else + // fi + // TODO(mvdan): look into deduplicating this with similar logic + // in caseItems. + for i := len(p.accComs) - 1; i >= 0; i-- { + c := p.accComs[i] + if c.Pos().Col() != p.pos.Col() { + break + } + split = i + } + } + if split > 0 { // keep last nil if empty + last = p.accComs[:split] + } + p.accComs = p.accComs[split:] + return stmts, last +} + +func (p *Parser) invalidStmtStart() { + switch p.tok { + case semicolon, and, or, andAnd, orOr: + p.curErr("%s can only immediately follow a statement", p.tok) + case rightParen: + p.curErr("%s can only be used to close a subshell", p.tok) + default: + p.curErr("%s is not a valid start for a statement", p.tok) + } +} + +func (p *Parser) getWord() *Word { + if w := p.wordAnyNumber(); len(w.Parts) > 0 && p.err == nil { + return w + } + return nil +} + +func (p *Parser) getLit() *Lit { + switch p.tok { + case _Lit, _LitWord, _LitRedir: + l := p.lit(p.pos, p.val) + p.next() + return l + } + return nil +} + +func (p *Parser) wordParts(wps []WordPart) []WordPart { + for { + n := p.wordPart() + if n == nil { + if len(wps) == 0 { + return nil // normalize empty lists into nil + } + return wps + } + wps = append(wps, n) + if p.spaced { + return wps + } + } +} + +func (p *Parser) ensureNoNested() { + if p.forbidNested { + p.curErr("expansions not allowed in heredoc words") + } +} + +func (p *Parser) wordPart() WordPart { + switch p.tok { + case _Lit, _LitWord, _LitRedir: + l := p.lit(p.pos, p.val) + p.next() + return l + case dollBrace: + p.ensureNoNested() + switch p.r { + case '|': + if p.lang != LangMirBSDKorn { + p.curErr(`"${|stmts;}" is a mksh feature`) + } + fallthrough + case ' ', '\t', '\n': + if p.lang != LangMirBSDKorn { + p.curErr(`"${ stmts;}" is a mksh feature`) + } + cs := &CmdSubst{ + Left: p.pos, + TempFile: p.r != '|', + ReplyVar: p.r == '|', + } + old := p.preNested(subCmd) + p.rune() // don't tokenize '|' + p.next() + cs.Stmts, cs.Last = p.stmtList("}") + p.postNested(old) + pos, ok := p.gotRsrv("}") + if !ok { + p.matchingErr(cs.Left, 
"${", "}") + } + cs.Right = pos + return cs + default: + return p.paramExp() + } + case dollDblParen, dollBrack: + p.ensureNoNested() + left := p.tok + ar := &ArithmExp{Left: p.pos, Bracket: left == dollBrack} + var old saveState + if ar.Bracket { + old = p.preNested(arithmExprBrack) + } else { + old = p.preNested(arithmExpr) + } + p.next() + if p.got(hash) { + if p.lang != LangMirBSDKorn { + p.langErr(ar.Pos(), "unsigned expressions", LangMirBSDKorn) + } + ar.Unsigned = true + } + ar.X = p.followArithm(left, ar.Left) + if ar.Bracket { + if p.tok != rightBrack { + p.arithmMatchingErr(ar.Left, dollBrack, rightBrack) + } + p.postNested(old) + ar.Right = p.pos + p.next() + } else { + ar.Right = p.arithmEnd(dollDblParen, ar.Left, old) + } + return ar + case dollParen: + p.ensureNoNested() + cs := &CmdSubst{Left: p.pos} + old := p.preNested(subCmd) + p.next() + cs.Stmts, cs.Last = p.stmtList() + p.postNested(old) + cs.Right = p.matched(cs.Left, leftParen, rightParen) + return cs + case dollar: + r := p.r + switch { + case singleRuneParam(r): + p.tok, p.val = _LitWord, string(r) + p.rune() + case 'a' <= r && r <= 'z', 'A' <= r && r <= 'Z', + '0' <= r && r <= '9', r == '_', r == '\\': + p.advanceNameCont(r) + default: + l := p.lit(p.pos, "$") + p.next() + return l + } + p.ensureNoNested() + pe := &ParamExp{Dollar: p.pos, Short: true} + p.pos = posAddCol(p.pos, 1) + pe.Param = p.getLit() + if pe.Param != nil && pe.Param.Value == "" { + l := p.lit(pe.Dollar, "$") + // e.g. "$\\\"" within double quotes, so we must + // keep the rest of the literal characters. + l.ValueEnd = posAddCol(l.ValuePos, 1) + return l + } + return pe + case cmdIn, cmdOut: + p.ensureNoNested() + ps := &ProcSubst{Op: ProcOperator(p.tok), OpPos: p.pos} + old := p.preNested(subCmd) + p.next() + ps.Stmts, ps.Last = p.stmtList() + p.postNested(old) + ps.Rparen = p.matched(ps.OpPos, token(ps.Op), rightParen) + return ps + case sglQuote, dollSglQuote: + sq := &SglQuoted{Left: p.pos, Dollar: p.tok == dollSglQuote} + r := p.r + for p.newLit(r); ; r = p.rune() { + switch r { + case '\\': + if sq.Dollar { + p.rune() + } + case '\'': + sq.Right = p.nextPos() + sq.Value = p.endLit() + + // restore openBquotes + p.openBquotes = p.buriedBquotes + p.buriedBquotes = 0 + + p.rune() + p.next() + return sq + case escNewl: + p.litBs = append(p.litBs, '\\', '\n') + case utf8.RuneSelf: + p.tok = _EOF + p.quoteErr(sq.Pos(), sglQuote) + return nil + } + } + case dblQuote, dollDblQuote: + if p.quote == dblQuotes { + // p.tok == dblQuote, as "foo$" puts $ in the lit + return nil + } + return p.dblQuoted() + case bckQuote: + if p.backquoteEnd() { + return nil + } + p.ensureNoNested() + cs := &CmdSubst{Left: p.pos, Backquotes: true} + old := p.preNested(subCmdBckquo) + p.openBquotes++ + + // The lexer didn't call p.rune for us, so that it could have + // the right p.openBquotes to properly handle backslashes. + p.rune() + + p.next() + cs.Stmts, cs.Last = p.stmtList() + if p.tok == bckQuote && p.lastBquoteEsc < p.openBquotes-1 { + // e.g. found ` before the nested backquote \` was closed. + p.tok = _EOF + p.quoteErr(cs.Pos(), bckQuote) + } + p.postNested(old) + p.openBquotes-- + cs.Right = p.pos + + // Like above, the lexer didn't call p.rune for us. 
+ p.rune() + if !p.got(bckQuote) { + p.quoteErr(cs.Pos(), bckQuote) + } + return cs + case globQuest, globStar, globPlus, globAt, globExcl: + if p.lang == LangPOSIX { + p.langErr(p.pos, "extended globs", LangBash, LangMirBSDKorn) + } + eg := &ExtGlob{Op: GlobOperator(p.tok), OpPos: p.pos} + lparens := 1 + r := p.r + globLoop: + for p.newLit(r); ; r = p.rune() { + switch r { + case utf8.RuneSelf: + break globLoop + case '(': + lparens++ + case ')': + if lparens--; lparens == 0 { + break globLoop + } + } + } + eg.Pattern = p.lit(posAddCol(eg.OpPos, 2), p.endLit()) + p.rune() + p.next() + if lparens != 0 { + p.matchingErr(eg.OpPos, eg.Op, rightParen) + } + return eg + default: + return nil + } +} + +func (p *Parser) dblQuoted() *DblQuoted { + alloc := &struct { + quoted DblQuoted + parts [1]WordPart + }{ + quoted: DblQuoted{Left: p.pos, Dollar: p.tok == dollDblQuote}, + } + q := &alloc.quoted + old := p.quote + p.quote = dblQuotes + p.next() + q.Parts = p.wordParts(alloc.parts[:0]) + p.quote = old + q.Right = p.pos + if !p.got(dblQuote) { + p.quoteErr(q.Pos(), dblQuote) + } + return q +} + +func singleRuneParam(r rune) bool { + switch r { + case '@', '*', '#', '$', '?', '!', '-', + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + return true + } + return false +} + +func (p *Parser) paramExp() *ParamExp { + pe := &ParamExp{Dollar: p.pos} + old := p.quote + p.quote = paramExpName + if p.r == '#' { + p.tok = hash + p.pos = p.nextPos() + p.rune() + } else { + p.next() + } + switch p.tok { + case hash: + if paramNameOp(p.r) { + pe.Length = true + p.next() + } + case perc: + if p.lang != LangMirBSDKorn { + p.posErr(pe.Pos(), `"${%%foo}" is a mksh feature`) + } + if paramNameOp(p.r) { + pe.Width = true + p.next() + } + case exclMark: + if paramNameOp(p.r) { + pe.Excl = true + p.next() + } + } + op := p.tok + switch p.tok { + case _Lit, _LitWord: + if !numberLiteral(p.val) && !ValidName(p.val) { + p.curErr("invalid parameter name") + } + pe.Param = p.lit(p.pos, p.val) + p.next() + case quest, minus: + if pe.Length && p.r != '}' { + // actually ${#-default}, not ${#-}; fix the ambiguity + pe.Length = false + pe.Param = p.lit(posAddCol(p.pos, -1), "#") + pe.Param.ValueEnd = p.pos + break + } + fallthrough + case at, star, hash, exclMark, dollar: + pe.Param = p.lit(p.pos, p.tok.String()) + p.next() + default: + p.curErr("parameter expansion requires a literal") + } + switch p.tok { + case _Lit, _LitWord: + p.curErr("%s cannot be followed by a word", op) + case rightBrace: + if pe.Excl && p.lang == LangPOSIX { + p.posErr(pe.Pos(), `"${!foo}" is a bash/mksh feature`) + } + pe.Rbrace = p.pos + p.quote = old + p.next() + return pe + case leftBrack: + if p.lang == LangPOSIX { + p.langErr(p.pos, "arrays", LangBash, LangMirBSDKorn) + } + if !ValidName(pe.Param.Value) { + p.curErr("cannot index a special parameter name") + } + pe.Index = p.eitherIndex() + } + if p.tok == rightBrace { + pe.Rbrace = p.pos + p.quote = old + p.next() + return pe + } + if p.tok != _EOF && (pe.Length || pe.Width) { + p.curErr("cannot combine multiple parameter expansion operators") + } + switch p.tok { + case slash, dblSlash: + // pattern search and replace + if p.lang == LangPOSIX { + p.langErr(p.pos, "search and replace", LangBash, LangMirBSDKorn) + } + pe.Repl = &Replace{All: p.tok == dblSlash} + p.quote = paramExpRepl + p.next() + pe.Repl.Orig = p.getWord() + p.quote = paramExpExp + if p.got(slash) { + pe.Repl.With = p.getWord() + } + case colon: + // slicing + if p.lang == LangPOSIX { + p.langErr(p.pos, "slicing", LangBash, 
LangMirBSDKorn) + } + pe.Slice = &Slice{} + colonPos := p.pos + p.quote = paramExpSlice + if p.next(); p.tok != colon { + pe.Slice.Offset = p.followArithm(colon, colonPos) + } + colonPos = p.pos + if p.got(colon) { + pe.Slice.Length = p.followArithm(colon, colonPos) + } + // Need to use a different matched style so arithm errors + // get reported correctly + p.quote = old + pe.Rbrace = p.pos + p.matchedArithm(pe.Dollar, dollBrace, rightBrace) + return pe + case caret, dblCaret, comma, dblComma: + // upper/lower case + if !p.lang.isBash() { + p.langErr(p.pos, "this expansion operator", LangBash) + } + pe.Exp = p.paramExpExp() + case at, star: + switch { + case p.tok == at && p.lang == LangPOSIX: + p.langErr(p.pos, "this expansion operator", LangBash, LangMirBSDKorn) + case p.tok == star && !pe.Excl: + p.curErr("not a valid parameter expansion operator: %v", p.tok) + case pe.Excl && p.r == '}': + if !p.lang.isBash() { + p.posErr(pe.Pos(), `"${!foo`+p.tok.String()+`}" is a bash feature`) + } + pe.Names = ParNamesOperator(p.tok) + p.next() + default: + pe.Exp = p.paramExpExp() + } + case plus, colPlus, minus, colMinus, quest, colQuest, assgn, colAssgn, + perc, dblPerc, hash, dblHash: + pe.Exp = p.paramExpExp() + case _EOF: + default: + p.curErr("not a valid parameter expansion operator: %v", p.tok) + } + p.quote = old + pe.Rbrace = p.pos + p.matched(pe.Dollar, dollBrace, rightBrace) + return pe +} + +func (p *Parser) paramExpExp() *Expansion { + op := ParExpOperator(p.tok) + p.quote = paramExpExp + p.next() + if op == OtherParamOps { + switch p.tok { + case _Lit, _LitWord: + default: + p.curErr("@ expansion operator requires a literal") + } + switch p.val { + case "a", "u", "A", "E", "K", "L", "P", "U": + if !p.lang.isBash() { + p.langErr(p.pos, "this expansion operator", LangBash) + } + case "#": + if p.lang != LangMirBSDKorn { + p.langErr(p.pos, "this expansion operator", LangMirBSDKorn) + } + case "Q": + default: + p.curErr("invalid @ expansion operator") + } + } + return &Expansion{Op: op, Word: p.getWord()} +} + +func (p *Parser) eitherIndex() ArithmExpr { + old := p.quote + lpos := p.pos + p.quote = arithmExprBrack + p.next() + if p.tok == star || p.tok == at { + p.tok, p.val = _LitWord, p.tok.String() + } + expr := p.followArithm(leftBrack, lpos) + p.quote = old + p.matchedArithm(lpos, leftBrack, rightBrack) + return expr +} + +func (p *Parser) stopToken() bool { + switch p.tok { + case _EOF, _Newl, semicolon, and, or, andAnd, orOr, orAnd, dblSemicolon, + semiAnd, dblSemiAnd, semiOr, rightParen: + return true + case bckQuote: + return p.backquoteEnd() + } + return false +} + +func (p *Parser) backquoteEnd() bool { + return p.lastBquoteEsc < p.openBquotes +} + +// ValidName returns whether val is a valid name as per the POSIX spec. 
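+//
+// For example, "foo", "_foo" and "foo2" are valid names, while "2foo",
+// "foo-bar" and the empty string are not.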
+func ValidName(val string) bool { + if val == "" { + return false + } + for i, r := range val { + switch { + case 'a' <= r && r <= 'z': + case 'A' <= r && r <= 'Z': + case r == '_': + case i > 0 && '0' <= r && r <= '9': + default: + return false + } + } + return true +} + +func numberLiteral(val string) bool { + for _, r := range val { + if '0' > r || r > '9' { + return false + } + } + return true +} + +func (p *Parser) hasValidIdent() bool { + if p.tok != _Lit && p.tok != _LitWord { + return false + } + if end := p.eqlOffs; end > 0 { + if p.val[end-1] == '+' && p.lang != LangPOSIX { + end-- // a+=x + } + if ValidName(p.val[:end]) { + return true + } + } else if !ValidName(p.val) { + return false // *[i]=x + } + return p.r == '[' // a[i]=x +} + +func (p *Parser) getAssign(needEqual bool) *Assign { + as := &Assign{} + if p.eqlOffs > 0 { // foo=bar + nameEnd := p.eqlOffs + if p.lang != LangPOSIX && p.val[p.eqlOffs-1] == '+' { + // a+=b + as.Append = true + nameEnd-- + } + as.Name = p.lit(p.pos, p.val[:nameEnd]) + // since we're not using the entire p.val + as.Name.ValueEnd = posAddCol(as.Name.ValuePos, nameEnd) + left := p.lit(posAddCol(p.pos, 1), p.val[p.eqlOffs+1:]) + if left.Value != "" { + left.ValuePos = posAddCol(left.ValuePos, p.eqlOffs) + as.Value = p.wordOne(left) + } + p.next() + } else { // foo[x]=bar + as.Name = p.lit(p.pos, p.val) + // hasValidIdent already checks p.r is '[' + p.rune() + p.pos = posAddCol(p.pos, 1) + as.Index = p.eitherIndex() + if p.spaced || p.stopToken() { + if needEqual { + p.followErr(as.Pos(), "a[b]", "=") + } else { + as.Naked = true + return as + } + } + if len(p.val) > 0 && p.val[0] == '+' { + as.Append = true + p.val = p.val[1:] + p.pos = posAddCol(p.pos, 1) + } + if len(p.val) < 1 || p.val[0] != '=' { + if as.Append { + p.followErr(as.Pos(), "a[b]+", "=") + } else { + p.followErr(as.Pos(), "a[b]", "=") + } + return nil + } + p.pos = posAddCol(p.pos, 1) + p.val = p.val[1:] + if p.val == "" { + p.next() + } + } + if p.spaced || p.stopToken() { + return as + } + if as.Value == nil && p.tok == leftParen { + if p.lang == LangPOSIX { + p.langErr(p.pos, "arrays", LangBash, LangMirBSDKorn) + } + if as.Index != nil { + p.curErr("arrays cannot be nested") + } + as.Array = &ArrayExpr{Lparen: p.pos} + newQuote := p.quote + if p.lang.isBash() { + newQuote = arrayElems + } + old := p.preNested(newQuote) + p.next() + p.got(_Newl) + for p.tok != _EOF && p.tok != rightParen { + ae := &ArrayElem{} + ae.Comments, p.accComs = p.accComs, nil + if p.tok == leftBrack { + left := p.pos + ae.Index = p.eitherIndex() + p.follow(left, `"[x]"`, assgn) + } + if ae.Value = p.getWord(); ae.Value == nil { + switch p.tok { + case leftParen: + p.curErr("arrays cannot be nested") + return nil + case _Newl, rightParen, leftBrack: + // TODO: support [index]=[ + default: + p.curErr("array element values must be words") + return nil + } + } + if len(p.accComs) > 0 { + c := p.accComs[0] + if c.Pos().Line() == ae.End().Line() { + ae.Comments = append(ae.Comments, c) + p.accComs = p.accComs[1:] + } + } + as.Array.Elems = append(as.Array.Elems, ae) + p.got(_Newl) + } + as.Array.Last, p.accComs = p.accComs, nil + p.postNested(old) + as.Array.Rparen = p.matched(as.Array.Lparen, leftParen, rightParen) + } else if w := p.getWord(); w != nil { + if as.Value == nil { + as.Value = w + } else { + as.Value.Parts = append(as.Value.Parts, w.Parts...) 
+ } + } + return as +} + +func (p *Parser) peekRedir() bool { + switch p.tok { + case rdrOut, appOut, rdrIn, dplIn, dplOut, clbOut, rdrInOut, + hdoc, dashHdoc, wordHdoc, rdrAll, appAll, _LitRedir: + return true + } + return false +} + +func (p *Parser) doRedirect(s *Stmt) { + var r *Redirect + if s.Redirs == nil { + var alloc struct { + redirs [4]*Redirect + redir Redirect + } + s.Redirs = alloc.redirs[:0] + r = &alloc.redir + s.Redirs = append(s.Redirs, r) + } else { + r = &Redirect{} + s.Redirs = append(s.Redirs, r) + } + r.N = p.getLit() + if !p.lang.isBash() && r.N != nil && r.N.Value[0] == '{' { + p.langErr(r.N.Pos(), "{varname} redirects", LangBash) + } + r.Op, r.OpPos = RedirOperator(p.tok), p.pos + p.next() + switch r.Op { + case Hdoc, DashHdoc: + old := p.quote + p.quote, p.forbidNested = hdocWord, true + p.heredocs = append(p.heredocs, r) + r.Word = p.followWordTok(token(r.Op), r.OpPos) + p.quote, p.forbidNested = old, false + if p.tok == _Newl { + if len(p.accComs) > 0 { + c := p.accComs[0] + if c.Pos().Line() == s.End().Line() { + s.Comments = append(s.Comments, c) + p.accComs = p.accComs[1:] + } + } + p.doHeredocs() + } + case WordHdoc: + if p.lang == LangPOSIX { + p.langErr(r.OpPos, "herestrings", LangBash, LangMirBSDKorn) + } + fallthrough + default: + r.Word = p.followWordTok(token(r.Op), r.OpPos) + } +} + +func (p *Parser) getStmt(readEnd, binCmd, fnBody bool) *Stmt { + pos, ok := p.gotRsrv("!") + s := p.stmt(pos) + if ok { + s.Negated = true + if p.stopToken() { + p.posErr(s.Pos(), `"!" cannot form a statement alone`) + } + if _, ok := p.gotRsrv("!"); ok { + p.posErr(s.Pos(), `cannot negate a command multiple times`) + } + } + if s = p.gotStmtPipe(s, false); s == nil || p.err != nil { + return nil + } + // instead of using recursion, iterate manually + for p.tok == andAnd || p.tok == orOr { + if binCmd { + // left associativity: in a list of BinaryCmds, the + // right recursion should only read a single element + return s + } + b := &BinaryCmd{ + OpPos: p.pos, + Op: BinCmdOperator(p.tok), + X: s, + } + p.next() + p.got(_Newl) + b.Y = p.getStmt(false, true, false) + if b.Y == nil || p.err != nil { + p.followErr(b.OpPos, b.Op.String(), "a statement") + return nil + } + s = p.stmt(s.Position) + s.Cmd = b + s.Comments, b.X.Comments = b.X.Comments, nil + } + if readEnd { + switch p.tok { + case semicolon: + s.Semicolon = p.pos + p.next() + case and: + s.Semicolon = p.pos + p.next() + s.Background = true + case orAnd: + s.Semicolon = p.pos + p.next() + s.Coprocess = true + } + } + if len(p.accComs) > 0 && !binCmd && !fnBody { + c := p.accComs[0] + if c.Pos().Line() == s.End().Line() { + s.Comments = append(s.Comments, c) + p.accComs = p.accComs[1:] + } + } + return s +} + +func (p *Parser) gotStmtPipe(s *Stmt, binCmd bool) *Stmt { + s.Comments, p.accComs = p.accComs, nil + switch p.tok { + case _LitWord: + switch p.val { + case "{": + p.block(s) + case "if": + p.ifClause(s) + case "while", "until": + p.whileClause(s, p.val == "until") + case "for": + p.forClause(s) + case "case": + p.caseClause(s) + case "}": + p.curErr(`%q can only be used to close a block`, p.val) + case "then": + p.curErr(`%q can only be used in an if`, p.val) + case "elif": + p.curErr(`%q can only be used in an if`, p.val) + case "fi": + p.curErr(`%q can only be used to end an if`, p.val) + case "do": + p.curErr(`%q can only be used in a loop`, p.val) + case "done": + p.curErr(`%q can only be used to end a loop`, p.val) + case "esac": + p.curErr(`%q can only be used to end a case`, p.val) + case "!": + if 
!s.Negated { + p.curErr(`"!" can only be used in full statements`) + break + } + case "[[": + if p.lang != LangPOSIX { + p.testClause(s) + } + case "]]": + if p.lang != LangPOSIX { + p.curErr(`%q can only be used to close a test`, p.val) + } + case "let": + if p.lang != LangPOSIX { + p.letClause(s) + } + case "function": + if p.lang != LangPOSIX { + p.bashFuncDecl(s) + } + case "declare": + if p.lang.isBash() { // Note that mksh lacks this one. + p.declClause(s) + } + case "local", "export", "readonly", "typeset", "nameref": + if p.lang != LangPOSIX { + p.declClause(s) + } + case "time": + if p.lang != LangPOSIX { + p.timeClause(s) + } + case "coproc": + if p.lang.isBash() { // Note that mksh lacks this one. + p.coprocClause(s) + } + case "select": + if p.lang != LangPOSIX { + p.selectClause(s) + } + case "@test": + if p.lang == LangBats { + p.testDecl(s) + } + } + if s.Cmd != nil { + break + } + if p.hasValidIdent() { + p.callExpr(s, nil, true) + break + } + name := p.lit(p.pos, p.val) + if p.next(); p.got(leftParen) { + p.follow(name.ValuePos, "foo(", rightParen) + if p.lang == LangPOSIX && !ValidName(name.Value) { + p.posErr(name.Pos(), "invalid func name") + } + p.funcDecl(s, name, name.ValuePos, true) + } else { + p.callExpr(s, p.wordOne(name), false) + } + case rdrOut, appOut, rdrIn, dplIn, dplOut, clbOut, rdrInOut, + hdoc, dashHdoc, wordHdoc, rdrAll, appAll, _LitRedir: + p.doRedirect(s) + p.callExpr(s, nil, false) + case bckQuote: + if p.backquoteEnd() { + return nil + } + fallthrough + case _Lit, dollBrace, dollDblParen, dollParen, dollar, cmdIn, cmdOut, + sglQuote, dollSglQuote, dblQuote, dollDblQuote, dollBrack, + globQuest, globStar, globPlus, globAt, globExcl: + if p.hasValidIdent() { + p.callExpr(s, nil, true) + break + } + w := p.wordAnyNumber() + if p.got(leftParen) { + p.posErr(w.Pos(), "invalid func name") + } + p.callExpr(s, w, false) + case leftParen: + p.subshell(s) + case dblLeftParen: + p.arithmExpCmd(s) + default: + if len(s.Redirs) == 0 { + return nil + } + } + for p.peekRedir() { + p.doRedirect(s) + } + // instead of using recursion, iterate manually + for p.tok == or || p.tok == orAnd { + if binCmd { + // left associativity: in a list of BinaryCmds, the + // right recursion should only read a single element + return s + } + if p.tok == orAnd && p.lang == LangMirBSDKorn { + // No need to check for LangPOSIX, as on that language + // we parse |& as two tokens. + break + } + b := &BinaryCmd{OpPos: p.pos, Op: BinCmdOperator(p.tok), X: s} + p.next() + p.got(_Newl) + if b.Y = p.gotStmtPipe(p.stmt(p.pos), true); b.Y == nil || p.err != nil { + p.followErr(b.OpPos, b.Op.String(), "a statement") + break + } + s = p.stmt(s.Position) + s.Cmd = b + s.Comments, b.X.Comments = b.X.Comments, nil + // in "! 
x | y", the bang applies to the entire pipeline + s.Negated = b.X.Negated + b.X.Negated = false + } + return s +} + +func (p *Parser) subshell(s *Stmt) { + sub := &Subshell{Lparen: p.pos} + old := p.preNested(subCmd) + p.next() + sub.Stmts, sub.Last = p.stmtList() + p.postNested(old) + sub.Rparen = p.matched(sub.Lparen, leftParen, rightParen) + s.Cmd = sub +} + +func (p *Parser) arithmExpCmd(s *Stmt) { + ar := &ArithmCmd{Left: p.pos} + old := p.preNested(arithmExprCmd) + p.next() + if p.got(hash) { + if p.lang != LangMirBSDKorn { + p.langErr(ar.Pos(), "unsigned expressions", LangMirBSDKorn) + } + ar.Unsigned = true + } + ar.X = p.followArithm(dblLeftParen, ar.Left) + ar.Right = p.arithmEnd(dblLeftParen, ar.Left, old) + s.Cmd = ar +} + +func (p *Parser) block(s *Stmt) { + b := &Block{Lbrace: p.pos} + p.next() + b.Stmts, b.Last = p.stmtList("}") + pos, ok := p.gotRsrv("}") + b.Rbrace = pos + if !ok { + p.matchingErr(b.Lbrace, "{", "}") + } + s.Cmd = b +} + +func (p *Parser) ifClause(s *Stmt) { + rootIf := &IfClause{Position: p.pos} + p.next() + rootIf.Cond, rootIf.CondLast = p.followStmts("if", rootIf.Position, "then") + rootIf.ThenPos = p.followRsrv(rootIf.Position, "if ", "then") + rootIf.Then, rootIf.ThenLast = p.followStmts("then", rootIf.ThenPos, "fi", "elif", "else") + curIf := rootIf + for p.tok == _LitWord && p.val == "elif" { + elf := &IfClause{Position: p.pos} + curIf.Last = p.accComs + p.accComs = nil + p.next() + elf.Cond, elf.CondLast = p.followStmts("elif", elf.Position, "then") + elf.ThenPos = p.followRsrv(elf.Position, "elif ", "then") + elf.Then, elf.ThenLast = p.followStmts("then", elf.ThenPos, "fi", "elif", "else") + curIf.Else = elf + curIf = elf + } + if elsePos, ok := p.gotRsrv("else"); ok { + curIf.Last = p.accComs + p.accComs = nil + els := &IfClause{Position: elsePos} + els.Then, els.ThenLast = p.followStmts("else", els.Position, "fi") + curIf.Else = els + curIf = els + } + curIf.Last = p.accComs + p.accComs = nil + rootIf.FiPos = p.stmtEnd(rootIf, "if", "fi") + for els := rootIf.Else; els != nil; els = els.Else { + // All the nested IfClauses share the same FiPos. + els.FiPos = rootIf.FiPos + } + s.Cmd = rootIf +} + +func (p *Parser) whileClause(s *Stmt, until bool) { + wc := &WhileClause{WhilePos: p.pos, Until: until} + rsrv := "while" + rsrvCond := "while " + if wc.Until { + rsrv = "until" + rsrvCond = "until " + } + p.next() + wc.Cond, wc.CondLast = p.followStmts(rsrv, wc.WhilePos, "do") + wc.DoPos = p.followRsrv(wc.WhilePos, rsrvCond, "do") + wc.Do, wc.DoLast = p.followStmts("do", wc.DoPos, "done") + wc.DonePos = p.stmtEnd(wc, rsrv, "done") + s.Cmd = wc +} + +func (p *Parser) forClause(s *Stmt) { + fc := &ForClause{ForPos: p.pos} + p.next() + fc.Loop = p.loop(fc.ForPos) + + start, end := "do", "done" + if pos, ok := p.gotRsrv("{"); ok { + if p.lang == LangPOSIX { + p.langErr(pos, "for loops with braces", LangBash, LangMirBSDKorn) + } + fc.DoPos = pos + fc.Braces = true + start, end = "{", "}" + } else { + fc.DoPos = p.followRsrv(fc.ForPos, "for foo [in words]", start) + } + + s.Comments = append(s.Comments, p.accComs...) 
+ p.accComs = nil + fc.Do, fc.DoLast = p.followStmts(start, fc.DoPos, end) + fc.DonePos = p.stmtEnd(fc, "for", end) + s.Cmd = fc +} + +func (p *Parser) loop(fpos Pos) Loop { + if !p.lang.isBash() { + switch p.tok { + case leftParen, dblLeftParen: + p.langErr(p.pos, "c-style fors", LangBash) + } + } + if p.tok == dblLeftParen { + cl := &CStyleLoop{Lparen: p.pos} + old := p.preNested(arithmExprCmd) + p.next() + cl.Init = p.arithmExpr(false) + if !p.got(dblSemicolon) { + p.follow(p.pos, "expr", semicolon) + cl.Cond = p.arithmExpr(false) + p.follow(p.pos, "expr", semicolon) + } + cl.Post = p.arithmExpr(false) + cl.Rparen = p.arithmEnd(dblLeftParen, cl.Lparen, old) + p.got(semicolon) + p.got(_Newl) + return cl + } + return p.wordIter("for", fpos) +} + +func (p *Parser) wordIter(ftok string, fpos Pos) *WordIter { + wi := &WordIter{} + if wi.Name = p.getLit(); wi.Name == nil { + p.followErr(fpos, ftok, "a literal") + } + if p.got(semicolon) { + p.got(_Newl) + return wi + } + p.got(_Newl) + if pos, ok := p.gotRsrv("in"); ok { + wi.InPos = pos + for !p.stopToken() { + if w := p.getWord(); w == nil { + p.curErr("word list can only contain words") + } else { + wi.Items = append(wi.Items, w) + } + } + p.got(semicolon) + p.got(_Newl) + } else if p.tok == _LitWord && p.val == "do" { + } else { + p.followErr(fpos, ftok+" foo", `"in", "do", ;, or a newline`) + } + return wi +} + +func (p *Parser) selectClause(s *Stmt) { + fc := &ForClause{ForPos: p.pos, Select: true} + p.next() + fc.Loop = p.wordIter("select", fc.ForPos) + fc.DoPos = p.followRsrv(fc.ForPos, "select foo [in words]", "do") + fc.Do, fc.DoLast = p.followStmts("do", fc.DoPos, "done") + fc.DonePos = p.stmtEnd(fc, "select", "done") + s.Cmd = fc +} + +func (p *Parser) caseClause(s *Stmt) { + cc := &CaseClause{Case: p.pos} + p.next() + cc.Word = p.getWord() + if cc.Word == nil { + p.followErr(cc.Case, "case", "a word") + } + end := "esac" + p.got(_Newl) + if pos, ok := p.gotRsrv("{"); ok { + cc.In = pos + cc.Braces = true + if p.lang != LangMirBSDKorn { + p.posErr(cc.Pos(), `"case i {" is a mksh feature`) + } + end = "}" + } else { + cc.In = p.followRsrv(cc.Case, "case x", "in") + } + cc.Items = p.caseItems(end) + cc.Last, p.accComs = p.accComs, nil + cc.Esac = p.stmtEnd(cc, "case", end) + s.Cmd = cc +} + +func (p *Parser) caseItems(stop string) (items []*CaseItem) { + p.got(_Newl) + for p.tok != _EOF && !(p.tok == _LitWord && p.val == stop) { + ci := &CaseItem{} + ci.Comments, p.accComs = p.accComs, nil + p.got(leftParen) + for p.tok != _EOF { + if w := p.getWord(); w == nil { + p.curErr("case patterns must consist of words") + } else { + ci.Patterns = append(ci.Patterns, w) + } + if p.tok == rightParen { + break + } + if !p.got(or) { + p.curErr("case patterns must be separated with |") + } + } + old := p.preNested(switchCase) + p.next() + ci.Stmts, ci.Last = p.stmtList(stop) + p.postNested(old) + switch p.tok { + case dblSemicolon, semiAnd, dblSemiAnd, semiOr: + default: + ci.Op = Break + items = append(items, ci) + return + } + ci.Last = append(ci.Last, p.accComs...) + p.accComs = nil + ci.OpPos = p.pos + ci.Op = CaseOperator(p.tok) + p.next() + p.got(_Newl) + + // Split the comments: + // + // case x in + // a) + // foo + // ;; + // # comment for a + // # comment for b + // b) + // [...] + split := len(p.accComs) + for i := len(p.accComs) - 1; i >= 0; i-- { + c := p.accComs[i] + if c.Pos().Col() != p.pos.Col() { + break + } + split = i + } + ci.Comments = append(ci.Comments, p.accComs[:split]...) 
+ p.accComs = p.accComs[split:] + + items = append(items, ci) + } + return +} + +func (p *Parser) testClause(s *Stmt) { + tc := &TestClause{Left: p.pos} + old := p.preNested(testExpr) + p.next() + if _, ok := p.gotRsrv("]]"); ok || p.tok == _EOF { + p.posErr(tc.Left, "test clause requires at least one expression") + } + tc.X = p.testExpr(dblLeftBrack, tc.Left, false) + if tc.X == nil { + p.followErrExp(tc.Left, "[[") + } + tc.Right = p.pos + if _, ok := p.gotRsrv("]]"); !ok { + p.matchingErr(tc.Left, "[[", "]]") + } + p.postNested(old) + s.Cmd = tc +} + +func (p *Parser) testExpr(ftok token, fpos Pos, pastAndOr bool) TestExpr { + p.got(_Newl) + var left TestExpr + if pastAndOr { + left = p.testExprBase() + } else { + left = p.testExpr(ftok, fpos, true) + } + if left == nil { + return left + } + p.got(_Newl) + switch p.tok { + case andAnd, orOr: + case _LitWord: + if p.val == "]]" { + return left + } + if p.tok = token(testBinaryOp(p.val)); p.tok == illegalTok { + p.curErr("not a valid test operator: %s", p.val) + } + case rdrIn, rdrOut: + case _EOF, rightParen: + return left + case _Lit: + p.curErr("test operator words must consist of a single literal") + default: + p.curErr("not a valid test operator: %v", p.tok) + } + b := &BinaryTest{ + OpPos: p.pos, + Op: BinTestOperator(p.tok), + X: left, + } + // Save the previous quoteState, since we change it in TsReMatch. + oldQuote := p.quote + + switch b.Op { + case AndTest, OrTest: + p.next() + if b.Y = p.testExpr(token(b.Op), b.OpPos, false); b.Y == nil { + p.followErrExp(b.OpPos, b.Op.String()) + } + case TsReMatch: + if !p.lang.isBash() { + p.langErr(p.pos, "regex tests", LangBash) + } + p.rxOpenParens = 0 + p.rxFirstPart = true + // TODO(mvdan): Using nested states within a regex will break in + // all sorts of ways. The better fix is likely to use a stop + // token, like we do with heredocs. 
+ p.quote = testExprRegexp + fallthrough + default: + if _, ok := b.X.(*Word); !ok { + p.posErr(b.OpPos, "expected %s, %s or %s after complex expr", + AndTest, OrTest, "]]") + } + p.next() + b.Y = p.followWordTok(token(b.Op), b.OpPos) + } + p.quote = oldQuote + return b +} + +func (p *Parser) testExprBase() TestExpr { + switch p.tok { + case _EOF, rightParen: + return nil + case _LitWord: + op := token(testUnaryOp(p.val)) + switch op { + case illegalTok: + case tsRefVar, tsModif: // not available in mksh + if p.lang.isBash() { + p.tok = op + } + default: + p.tok = op + } + } + switch p.tok { + case exclMark: + u := &UnaryTest{OpPos: p.pos, Op: TsNot} + p.next() + if u.X = p.testExpr(token(u.Op), u.OpPos, false); u.X == nil { + p.followErrExp(u.OpPos, u.Op.String()) + } + return u + case tsExists, tsRegFile, tsDirect, tsCharSp, tsBlckSp, tsNmPipe, + tsSocket, tsSmbLink, tsSticky, tsGIDSet, tsUIDSet, tsGrpOwn, + tsUsrOwn, tsModif, tsRead, tsWrite, tsExec, tsNoEmpty, + tsFdTerm, tsEmpStr, tsNempStr, tsOptSet, tsVarSet, tsRefVar: + u := &UnaryTest{OpPos: p.pos, Op: UnTestOperator(p.tok)} + p.next() + u.X = p.followWordTok(token(u.Op), u.OpPos) + return u + case leftParen: + pe := &ParenTest{Lparen: p.pos} + p.next() + if pe.X = p.testExpr(leftParen, pe.Lparen, false); pe.X == nil { + p.followErrExp(pe.Lparen, "(") + } + pe.Rparen = p.matched(pe.Lparen, leftParen, rightParen) + return pe + case _LitWord: + if p.val == "]]" { + return nil + } + fallthrough + default: + if w := p.getWord(); w != nil { + return w + } + // otherwise we'd return a typed nil above + return nil + } +} + +func (p *Parser) declClause(s *Stmt) { + ds := &DeclClause{Variant: p.lit(p.pos, p.val)} + p.next() + for !p.stopToken() && !p.peekRedir() { + if p.hasValidIdent() { + ds.Args = append(ds.Args, p.getAssign(false)) + } else if p.eqlOffs > 0 { + p.curErr("invalid var name") + } else if p.tok == _LitWord && ValidName(p.val) { + ds.Args = append(ds.Args, &Assign{ + Naked: true, + Name: p.getLit(), + }) + } else if w := p.getWord(); w != nil { + ds.Args = append(ds.Args, &Assign{ + Naked: true, + Value: w, + }) + } else { + p.followErr(p.pos, ds.Variant.Value, "names or assignments") + } + } + s.Cmd = ds +} + +func isBashCompoundCommand(tok token, val string) bool { + switch tok { + case leftParen, dblLeftParen: + return true + case _LitWord: + switch val { + case "{", "if", "while", "until", "for", "case", "[[", + "coproc", "let", "function", "declare", "local", + "export", "readonly", "typeset", "nameref": + return true + } + } + return false +} + +func (p *Parser) timeClause(s *Stmt) { + tc := &TimeClause{Time: p.pos} + p.next() + if _, ok := p.gotRsrv("-p"); ok { + tc.PosixFormat = true + } + tc.Stmt = p.gotStmtPipe(p.stmt(p.pos), false) + s.Cmd = tc +} + +func (p *Parser) coprocClause(s *Stmt) { + cc := &CoprocClause{Coproc: p.pos} + if p.next(); isBashCompoundCommand(p.tok, p.val) { + // has no name + cc.Stmt = p.gotStmtPipe(p.stmt(p.pos), false) + s.Cmd = cc + return + } + cc.Name = p.getWord() + cc.Stmt = p.gotStmtPipe(p.stmt(p.pos), false) + if cc.Stmt == nil { + if cc.Name == nil { + p.posErr(cc.Coproc, "coproc clause requires a command") + return + } + // name was in fact the stmt + cc.Stmt = p.stmt(cc.Name.Pos()) + cc.Stmt.Cmd = p.call(cc.Name) + cc.Name = nil + } else if cc.Name != nil { + if call, ok := cc.Stmt.Cmd.(*CallExpr); ok { + // name was in fact the start of a call + call.Args = append([]*Word{cc.Name}, call.Args...) 
+ cc.Name = nil + } + } + s.Cmd = cc +} + +func (p *Parser) letClause(s *Stmt) { + lc := &LetClause{Let: p.pos} + old := p.preNested(arithmExprLet) + p.next() + for !p.stopToken() && !p.peekRedir() { + x := p.arithmExpr(true) + if x == nil { + break + } + lc.Exprs = append(lc.Exprs, x) + } + if len(lc.Exprs) == 0 { + p.followErrExp(lc.Let, "let") + } + p.postNested(old) + s.Cmd = lc +} + +func (p *Parser) bashFuncDecl(s *Stmt) { + fpos := p.pos + if p.next(); p.tok != _LitWord { + p.followErr(fpos, "function", "a name") + } + name := p.lit(p.pos, p.val) + hasParens := false + if p.next(); p.got(leftParen) { + hasParens = true + p.follow(name.ValuePos, "foo(", rightParen) + } + p.funcDecl(s, name, fpos, hasParens) +} + +func (p *Parser) testDecl(s *Stmt) { + td := &TestDecl{Position: p.pos} + p.next() + if td.Description = p.getWord(); td.Description == nil { + p.followErr(td.Position, "@test", "a description word") + } + if td.Body = p.getStmt(false, false, true); td.Body == nil { + p.followErr(td.Position, `@test "desc"`, "a statement") + } + s.Cmd = td +} + +func (p *Parser) callExpr(s *Stmt, w *Word, assign bool) { + ce := p.call(w) + if w == nil { + ce.Args = ce.Args[:0] + } + if assign { + ce.Assigns = append(ce.Assigns, p.getAssign(true)) + } +loop: + for { + switch p.tok { + case _EOF, _Newl, semicolon, and, or, andAnd, orOr, orAnd, + dblSemicolon, semiAnd, dblSemiAnd, semiOr: + break loop + case _LitWord: + if len(ce.Args) == 0 && p.hasValidIdent() { + ce.Assigns = append(ce.Assigns, p.getAssign(true)) + break + } + ce.Args = append(ce.Args, p.wordOne(p.lit(p.pos, p.val))) + p.next() + case _Lit: + if len(ce.Args) == 0 && p.hasValidIdent() { + ce.Assigns = append(ce.Assigns, p.getAssign(true)) + break + } + ce.Args = append(ce.Args, p.wordAnyNumber()) + case bckQuote: + if p.backquoteEnd() { + break loop + } + fallthrough + case dollBrace, dollDblParen, dollParen, dollar, cmdIn, cmdOut, + sglQuote, dollSglQuote, dblQuote, dollDblQuote, dollBrack, + globQuest, globStar, globPlus, globAt, globExcl: + ce.Args = append(ce.Args, p.wordAnyNumber()) + case rdrOut, appOut, rdrIn, dplIn, dplOut, clbOut, rdrInOut, + hdoc, dashHdoc, wordHdoc, rdrAll, appAll, _LitRedir: + p.doRedirect(s) + case dblLeftParen: + p.curErr("%s can only be used to open an arithmetic cmd", p.tok) + case rightParen: + if p.quote == subCmd { + break loop + } + fallthrough + default: + // Note that we'll only keep the first error that happens. 
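+			// If the first argument matches isBashCompoundCommand while we
+			// are parsing POSIX, hint that the input was likely written for
+			// bash instead of only rejecting the stray token.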
+ if len(ce.Args) > 0 { + if cmd := ce.Args[0].Lit(); p.lang == LangPOSIX && isBashCompoundCommand(_LitWord, cmd) { + p.curErr("the %q builtin exists in bash; tried parsing as posix", cmd) + } + } + p.curErr("a command can only contain words and redirects; encountered %s", p.tok) + } + } + if len(ce.Assigns) == 0 && len(ce.Args) == 0 { + return + } + if len(ce.Args) == 0 { + ce.Args = nil + } else { + for _, asgn := range ce.Assigns { + if asgn.Index != nil || asgn.Array != nil { + p.posErr(asgn.Pos(), "inline variables cannot be arrays") + } + } + } + s.Cmd = ce +} + +func (p *Parser) funcDecl(s *Stmt, name *Lit, pos Pos, withParens bool) { + fd := &FuncDecl{ + Position: pos, + RsrvWord: pos != name.ValuePos, + Parens: withParens, + Name: name, + } + p.got(_Newl) + if fd.Body = p.getStmt(false, false, true); fd.Body == nil { + p.followErr(fd.Pos(), "foo()", "a statement") + } + s.Cmd = fd +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/parser_arithm.go b/vendor/mvdan.cc/sh/v3/syntax/parser_arithm.go new file mode 100644 index 00000000..a6d6a951 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/parser_arithm.go @@ -0,0 +1,353 @@ +package syntax + +// compact specifies whether we allow spaces between expressions. +// This is true for let +func (p *Parser) arithmExpr(compact bool) ArithmExpr { + return p.arithmExprComma(compact) +} + +// These function names are inspired by Bash's expr.c + +func (p *Parser) arithmExprComma(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprAssign, Comma) +} + +func (p *Parser) arithmExprAssign(compact bool) ArithmExpr { + // Assign is different from the other binary operators because it's + // right-associative and needs to check that it's placed after a name + value := p.arithmExprTernary(compact) + switch BinAritOperator(p.tok) { + case AddAssgn, SubAssgn, MulAssgn, QuoAssgn, RemAssgn, AndAssgn, + OrAssgn, XorAssgn, ShlAssgn, ShrAssgn, Assgn: + if compact && p.spaced { + return value + } + if !isArithName(value) { + p.posErr(p.pos, "%s must follow a name", p.tok.String()) + } + pos := p.pos + tok := p.tok + p.nextArithOp(compact) + y := p.arithmExprAssign(compact) + if y == nil { + p.followErrExp(pos, tok.String()) + } + return &BinaryArithm{ + OpPos: pos, + Op: BinAritOperator(tok), + X: value, + Y: y, + } + } + return value +} + +func (p *Parser) arithmExprTernary(compact bool) ArithmExpr { + value := p.arithmExprLor(compact) + if BinAritOperator(p.tok) != TernQuest || (compact && p.spaced) { + return value + } + + if value == nil { + p.curErr("%s must follow an expression", p.tok.String()) + } + questPos := p.pos + p.nextArithOp(compact) + if BinAritOperator(p.tok) == TernColon { + p.followErrExp(questPos, TernQuest.String()) + } + trueExpr := p.arithmExpr(compact) + if trueExpr == nil { + p.followErrExp(questPos, TernQuest.String()) + } + if BinAritOperator(p.tok) != TernColon { + p.posErr(questPos, "ternary operator missing : after ?") + } + colonPos := p.pos + p.nextArithOp(compact) + falseExpr := p.arithmExprTernary(compact) + if falseExpr == nil { + p.followErrExp(colonPos, TernColon.String()) + } + return &BinaryArithm{ + OpPos: questPos, + Op: TernQuest, + X: value, + Y: &BinaryArithm{ + OpPos: colonPos, + Op: TernColon, + X: trueExpr, + Y: falseExpr, + }, + } +} + +func (p *Parser) arithmExprLor(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprLand, OrArit) +} + +func (p *Parser) arithmExprLand(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprBor, AndArit) +} + +func (p 
*Parser) arithmExprBor(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprBxor, Or) +} + +func (p *Parser) arithmExprBxor(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprBand, Xor) +} + +func (p *Parser) arithmExprBand(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprEquality, And) +} + +func (p *Parser) arithmExprEquality(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprComparison, Eql, Neq) +} + +func (p *Parser) arithmExprComparison(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprShift, Lss, Gtr, Leq, Geq) +} + +func (p *Parser) arithmExprShift(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprAddition, Shl, Shr) +} + +func (p *Parser) arithmExprAddition(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprMultiplication, Add, Sub) +} + +func (p *Parser) arithmExprMultiplication(compact bool) ArithmExpr { + return p.arithmExprBinary(compact, p.arithmExprPower, Mul, Quo, Rem) +} + +func (p *Parser) arithmExprPower(compact bool) ArithmExpr { + // Power is different from the other binary operators because it's right-associative + value := p.arithmExprUnary(compact) + if BinAritOperator(p.tok) != Pow || (compact && p.spaced) { + return value + } + + if value == nil { + p.curErr("%s must follow an expression", p.tok.String()) + } + + op := p.tok + pos := p.pos + p.nextArithOp(compact) + y := p.arithmExprPower(compact) + if y == nil { + p.followErrExp(pos, op.String()) + } + return &BinaryArithm{ + OpPos: pos, + Op: BinAritOperator(op), + X: value, + Y: y, + } +} + +func (p *Parser) arithmExprUnary(compact bool) ArithmExpr { + if !compact { + p.got(_Newl) + } + + switch UnAritOperator(p.tok) { + case Not, BitNegation, Plus, Minus: + ue := &UnaryArithm{OpPos: p.pos, Op: UnAritOperator(p.tok)} + p.nextArithOp(compact) + if ue.X = p.arithmExprUnary(compact); ue.X == nil { + p.followErrExp(ue.OpPos, ue.Op.String()) + } + return ue + } + return p.arithmExprValue(compact) +} + +func (p *Parser) arithmExprValue(compact bool) ArithmExpr { + var x ArithmExpr + switch p.tok { + case addAdd, subSub: + ue := &UnaryArithm{OpPos: p.pos, Op: UnAritOperator(p.tok)} + p.nextArith(compact) + if p.tok != _LitWord { + p.followErr(ue.OpPos, token(ue.Op).String(), "a literal") + } + ue.X = p.arithmExprValue(compact) + return ue + case leftParen: + pe := &ParenArithm{Lparen: p.pos} + p.nextArithOp(compact) + pe.X = p.followArithm(leftParen, pe.Lparen) + pe.Rparen = p.matched(pe.Lparen, leftParen, rightParen) + x = pe + case leftBrack: + p.curErr("[ must follow a name") + case colon: + p.curErr("ternary operator missing ? 
before :") + case _LitWord: + l := p.getLit() + if p.tok != leftBrack { + x = p.wordOne(l) + break + } + pe := &ParamExp{Dollar: l.ValuePos, Short: true, Param: l} + pe.Index = p.eitherIndex() + x = p.wordOne(pe) + case bckQuote: + if p.quote == arithmExprLet && p.openBquotes > 0 { + return nil + } + fallthrough + default: + if w := p.getWord(); w != nil { + x = w + } else { + return nil + } + } + + if compact && p.spaced { + return x + } + if !compact { + p.got(_Newl) + } + + // we want real nil, not (*Word)(nil) as that + // sets the type to non-nil and then x != nil + if p.tok == addAdd || p.tok == subSub { + if !isArithName(x) { + p.curErr("%s must follow a name", p.tok.String()) + } + u := &UnaryArithm{ + Post: true, + OpPos: p.pos, + Op: UnAritOperator(p.tok), + X: x, + } + p.nextArith(compact) + return u + } + return x +} + +// nextArith consumes a token. +// It returns true if compact and the token was followed by spaces +func (p *Parser) nextArith(compact bool) bool { + p.next() + if compact && p.spaced { + return true + } + if !compact { + p.got(_Newl) + } + return false +} + +func (p *Parser) nextArithOp(compact bool) { + pos := p.pos + tok := p.tok + if p.nextArith(compact) { + p.followErrExp(pos, tok.String()) + } +} + +// arithmExprBinary is used for all left-associative binary operators +func (p *Parser) arithmExprBinary(compact bool, nextOp func(bool) ArithmExpr, operators ...BinAritOperator) ArithmExpr { + value := nextOp(compact) + for { + var foundOp BinAritOperator + for _, op := range operators { + if p.tok == token(op) { + foundOp = op + break + } + } + + if token(foundOp) == illegalTok || (compact && p.spaced) { + return value + } + + if value == nil { + p.curErr("%s must follow an expression", p.tok.String()) + } + + pos := p.pos + p.nextArithOp(compact) + y := nextOp(compact) + if y == nil { + p.followErrExp(pos, foundOp.String()) + } + + value = &BinaryArithm{ + OpPos: pos, + Op: foundOp, + X: value, + Y: y, + } + } +} + +func isArithName(left ArithmExpr) bool { + w, ok := left.(*Word) + if !ok || len(w.Parts) != 1 { + return false + } + switch x := w.Parts[0].(type) { + case *Lit: + return ValidName(x.Value) + case *ParamExp: + return x.nakedIndex() + default: + return false + } +} + +func (p *Parser) followArithm(ftok token, fpos Pos) ArithmExpr { + x := p.arithmExpr(false) + if x == nil { + p.followErrExp(fpos, ftok.String()) + } + return x +} + +func (p *Parser) peekArithmEnd() bool { + return p.tok == rightParen && p.r == ')' +} + +func (p *Parser) arithmMatchingErr(pos Pos, left, right token) { + switch p.tok { + case _Lit, _LitWord: + p.curErr("not a valid arithmetic operator: %s", p.val) + case leftBrack: + p.curErr("[ must follow a name") + case colon: + p.curErr("ternary operator missing ? 
before :") + case rightParen, _EOF: + p.matchingErr(pos, left, right) + default: + if p.quote == arithmExpr { + p.curErr("not a valid arithmetic operator: %v", p.tok) + } + p.matchingErr(pos, left, right) + } +} + +func (p *Parser) matchedArithm(lpos Pos, left, right token) { + if !p.got(right) { + p.arithmMatchingErr(lpos, left, right) + } +} + +func (p *Parser) arithmEnd(ltok token, lpos Pos, old saveState) Pos { + if !p.peekArithmEnd() { + p.arithmMatchingErr(lpos, ltok, dblRightParen) + } + p.rune() + p.postNested(old) + pos := p.pos + p.next() + return pos +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/printer.go b/vendor/mvdan.cc/sh/v3/syntax/printer.go new file mode 100644 index 00000000..6626b363 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/printer.go @@ -0,0 +1,1527 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strings" + "text/tabwriter" + "unicode" + + "mvdan.cc/sh/v3/fileutil" +) + +// PrinterOption is a function which can be passed to NewPrinter +// to alter its behavior. To apply option to existing Printer +// call it directly, for example KeepPadding(true)(printer). +type PrinterOption func(*Printer) + +// Indent sets the number of spaces used for indentation. If set to 0, +// tabs will be used instead. +func Indent(spaces uint) PrinterOption { + return func(p *Printer) { p.indentSpaces = spaces } +} + +// BinaryNextLine will make binary operators appear on the next line +// when a binary command, such as a pipe, spans multiple lines. A +// backslash will be used. +func BinaryNextLine(enabled bool) PrinterOption { + return func(p *Printer) { p.binNextLine = enabled } +} + +// SwitchCaseIndent will make switch cases be indented. As such, switch +// case bodies will be two levels deeper than the switch itself. +func SwitchCaseIndent(enabled bool) PrinterOption { + return func(p *Printer) { p.swtCaseIndent = enabled } +} + +// TODO(v4): consider turning this into a "space all operators" option, to also +// allow foo=( bar baz ), (( x + y )), and so on. + +// SpaceRedirects will put a space after most redirection operators. The +// exceptions are '>&', '<&', '>(', and '<('. +func SpaceRedirects(enabled bool) PrinterOption { + return func(p *Printer) { p.spaceRedirects = enabled } +} + +// KeepPadding will keep most nodes and tokens in the same column that +// they were in the original source. This allows the user to decide how +// to align and pad their code with spaces. +// +// Note that this feature is best-effort and will only keep the +// alignment stable, so it may need some human help the first time it is +// run. +func KeepPadding(enabled bool) PrinterOption { + return func(p *Printer) { + if enabled && !p.keepPadding { + // Enable the flag, and set up the writer wrapper. + p.keepPadding = true + p.cols.Writer = p.bufWriter.(*bufio.Writer) + p.bufWriter = &p.cols + + } else if !enabled && p.keepPadding { + // Ensure we reset the state to that of NewPrinter. + p.keepPadding = false + p.bufWriter = p.cols.Writer + p.cols = colCounter{} + } + } +} + +// Minify will print programs in a way to save the most bytes possible. +// For example, indentation and comments are skipped, and extra +// whitespace is avoided when possible. +func Minify(enabled bool) PrinterOption { + return func(p *Printer) { p.minify = enabled } +} + +// SingleLine will attempt to print programs in one line. For example, lists of +// commands or nested blocks do not use newlines in this mode. 
Note that some +// newlines must still appear, such as those following comments or around +// here-documents. +// +// Print's trailing newline when given a *File is not affected by this option. +func SingleLine(enabled bool) PrinterOption { + return func(p *Printer) { p.singleLine = enabled } +} + +// FunctionNextLine will place a function's opening braces on the next line. +func FunctionNextLine(enabled bool) PrinterOption { + return func(p *Printer) { p.funcNextLine = enabled } +} + +// NewPrinter allocates a new Printer and applies any number of options. +func NewPrinter(opts ...PrinterOption) *Printer { + p := &Printer{ + bufWriter: bufio.NewWriter(nil), + tabWriter: new(tabwriter.Writer), + } + for _, opt := range opts { + opt(p) + } + return p +} + +// Print "pretty-prints" the given syntax tree node to the given writer. Writes +// to w are buffered. +// +// The node types supported at the moment are *File, *Stmt, *Word, *Assign, any +// Command node, and any WordPart node. A trailing newline will only be printed +// when a *File is used. +func (p *Printer) Print(w io.Writer, node Node) error { + p.reset() + + if p.minify && p.singleLine { + return fmt.Errorf("Minify and SingleLine together are not supported yet; please file an issue describing your use case: https://github.com/mvdan/sh/issues") + } + + // TODO: consider adding a raw mode to skip the tab writer, much like in + // go/printer. + twmode := tabwriter.DiscardEmptyColumns | tabwriter.StripEscape + tabwidth := 8 + if p.indentSpaces == 0 { + // indenting with tabs + twmode |= tabwriter.TabIndent + } else { + // indenting with spaces + tabwidth = int(p.indentSpaces) + } + p.tabWriter.Init(w, 0, tabwidth, 1, ' ', twmode) + w = p.tabWriter + + p.bufWriter.Reset(w) + switch x := node.(type) { + case *File: + p.stmtList(x.Stmts, x.Last) + p.newline(Pos{}) + case *Stmt: + p.stmtList([]*Stmt{x}, nil) + case Command: + p.command(x, nil) + case *Word: + p.line = x.Pos().Line() + p.word(x) + case WordPart: + p.line = x.Pos().Line() + p.wordPart(x, nil) + case *Assign: + p.line = x.Pos().Line() + p.assigns([]*Assign{x}) + default: + return fmt.Errorf("unsupported node type: %T", x) + } + p.flushHeredocs() + p.flushComments() + + // flush the writers + if err := p.bufWriter.Flush(); err != nil { + return err + } + if tw, _ := w.(*tabwriter.Writer); tw != nil { + if err := tw.Flush(); err != nil { + return err + } + } + return nil +} + +type bufWriter interface { + Write([]byte) (int, error) + WriteString(string) (int, error) + WriteByte(byte) error + Reset(io.Writer) + Flush() error +} + +type colCounter struct { + *bufio.Writer + column int + lineStart bool +} + +func (c *colCounter) addByte(b byte) { + switch b { + case '\n': + c.column = 0 + c.lineStart = true + case '\t', ' ', tabwriter.Escape: + default: + c.lineStart = false + } + c.column++ +} + +func (c *colCounter) WriteByte(b byte) error { + c.addByte(b) + return c.Writer.WriteByte(b) +} + +func (c *colCounter) WriteString(s string) (int, error) { + for _, b := range []byte(s) { + c.addByte(b) + } + return c.Writer.WriteString(s) +} + +func (c *colCounter) Reset(w io.Writer) { + c.column = 1 + c.lineStart = true + c.Writer.Reset(w) +} + +// Printer holds the internal state of the printing mechanism of a +// program. 
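As a minimal usage sketch (assuming the NewParser/Parse API from the same vendored mvdan.cc/sh/v3/syntax package, and illustrative option values), the printer options and Print method documented above are typically combined like this:

package main

import (
	"os"
	"strings"

	"mvdan.cc/sh/v3/syntax"
)

func main() {
	// Hypothetical input script, purely for illustration.
	src := `if [ -n "$1" ]; then echo "hi $1"; fi`
	file, err := syntax.NewParser().Parse(strings.NewReader(src), "example.sh")
	if err != nil {
		panic(err)
	}
	// Two-space indentation; binary operators such as pipes break onto the next line.
	printer := syntax.NewPrinter(syntax.Indent(2), syntax.BinaryNextLine(true))
	if err := printer.Print(os.Stdout, file); err != nil {
		panic(err)
	}
}
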
+type Printer struct { + bufWriter + tabWriter *tabwriter.Writer + cols colCounter + + indentSpaces uint + binNextLine bool + swtCaseIndent bool + spaceRedirects bool + keepPadding bool + minify bool + singleLine bool + funcNextLine bool + + wantSpace wantSpaceState // whether space is required or has been written + + wantNewline bool // newline is wanted for pretty-printing; ignored by singleLine + mustNewline bool // newline is required to keep shell syntax valid + wroteSemi bool // wrote ';' for the current statement + + // pendingComments are any comments in the current line or statement + // that we have yet to print. This is useful because that way, we can + // ensure that all comments are written immediately before a newline. + // Otherwise, in some edge cases we might wrongly place words after a + // comment in the same line, breaking programs. + pendingComments []Comment + + // firstLine means we are still writing the first line + firstLine bool + // line is the current line number + line uint + + // lastLevel is the last level of indentation that was used. + lastLevel uint + // level is the current level of indentation. + level uint + // levelIncs records which indentation level increments actually + // took place, to revert them once their section ends. + levelIncs []bool + + nestedBinary bool + + // pendingHdocs is the list of pending heredocs to write. + pendingHdocs []*Redirect + + // used when printing <<- heredocs with tab indentation + tabsPrinter *Printer +} + +func (p *Printer) reset() { + p.wantSpace = spaceWritten + p.wantNewline, p.mustNewline = false, false + p.pendingComments = p.pendingComments[:0] + + // minification uses its own newline logic + p.firstLine = !p.minify + p.line = 0 + + p.lastLevel, p.level = 0, 0 + p.levelIncs = p.levelIncs[:0] + p.nestedBinary = false + p.pendingHdocs = p.pendingHdocs[:0] +} + +func (p *Printer) spaces(n uint) { + for i := uint(0); i < n; i++ { + p.WriteByte(' ') + } +} + +func (p *Printer) space() { + p.WriteByte(' ') + p.wantSpace = spaceWritten +} + +func (p *Printer) spacePad(pos Pos) { + if p.cols.lineStart && p.indentSpaces == 0 { + // Never add padding at the start of a line unless we are indenting + // with spaces, since this may result in mixing of spaces and tabs. + return + } + if p.wantSpace == spaceRequired { + p.WriteByte(' ') + p.wantSpace = spaceWritten + } + for p.cols.column > 0 && p.cols.column < int(pos.Col()) { + p.WriteByte(' ') + } +} + +// wantsNewline reports whether we want to print at least one newline before +// printing a node at a given position. A zero position can be given to simply +// tell if we want a newline following what's just been printed. +func (p *Printer) wantsNewline(pos Pos) bool { + if p.mustNewline { + // We must have a newline here. + return true + } + if p.singleLine && len(p.pendingComments) == 0 { + // The newline is optional, and singleLine skips it. + // Don't skip if there are any pending comments, + // as that might move them further down to the wrong place. + return false + } + // The newline is optional, and we want it via either wantNewline or via + // the position's line. 
+ return p.wantNewline || pos.Line() > p.line +} + +func (p *Printer) bslashNewl() { + if p.wantSpace == spaceRequired { + p.space() + } + p.WriteString("\\\n") + p.line++ + p.indent() +} + +func (p *Printer) spacedString(s string, pos Pos) { + p.spacePad(pos) + p.WriteString(s) + p.wantSpace = spaceRequired +} + +func (p *Printer) spacedToken(s string, pos Pos) { + if p.minify { + p.WriteString(s) + p.wantSpace = spaceNotRequired + return + } + p.spacePad(pos) + p.WriteString(s) + p.wantSpace = spaceRequired +} + +func (p *Printer) semiOrNewl(s string, pos Pos) { + if p.wantsNewline(Pos{}) { + p.newline(pos) + p.indent() + } else { + if !p.wroteSemi { + p.WriteByte(';') + } + if !p.minify { + p.space() + } + p.advanceLine(pos.Line()) + } + p.WriteString(s) + p.wantSpace = spaceRequired +} + +func (p *Printer) writeLit(s string) { + // If p.tabWriter is nil, this is the nested printer being used to print + // <<- heredoc bodies, so the parent printer will add the escape bytes + // later. + if p.tabWriter != nil && strings.Contains(s, "\t") { + p.WriteByte(tabwriter.Escape) + defer p.WriteByte(tabwriter.Escape) + } + p.WriteString(s) +} + +func (p *Printer) incLevel() { + inc := false + if p.level <= p.lastLevel || len(p.levelIncs) == 0 { + p.level++ + inc = true + } else if last := &p.levelIncs[len(p.levelIncs)-1]; *last { + *last = false + inc = true + } + p.levelIncs = append(p.levelIncs, inc) +} + +func (p *Printer) decLevel() { + if p.levelIncs[len(p.levelIncs)-1] { + p.level-- + } + p.levelIncs = p.levelIncs[:len(p.levelIncs)-1] +} + +func (p *Printer) indent() { + if p.minify { + return + } + p.lastLevel = p.level + switch { + case p.level == 0: + case p.indentSpaces == 0: + p.WriteByte(tabwriter.Escape) + for i := uint(0); i < p.level; i++ { + p.WriteByte('\t') + } + p.WriteByte(tabwriter.Escape) + default: + p.spaces(p.indentSpaces * p.level) + } +} + +// TODO(mvdan): add an indent call at the end of newline? + +// newline prints one newline and advances p.line to pos.Line(). +func (p *Printer) newline(pos Pos) { + p.flushHeredocs() + p.flushComments() + p.WriteByte('\n') + p.wantSpace = spaceWritten + p.wantNewline, p.mustNewline = false, false + p.advanceLine(pos.Line()) +} + +func (p *Printer) advanceLine(line uint) { + if p.line < line { + p.line = line + } +} + +func (p *Printer) flushHeredocs() { + if len(p.pendingHdocs) == 0 { + return + } + hdocs := p.pendingHdocs + p.pendingHdocs = p.pendingHdocs[:0] + coms := p.pendingComments + p.pendingComments = nil + if len(coms) > 0 { + c := coms[0] + if c.Pos().Line() == p.line { + p.pendingComments = append(p.pendingComments, c) + p.flushComments() + coms = coms[1:] + } + } + + // Reuse the last indentation level, as + // indentation levels are usually changed before + // newlines are printed along with their + // subsequent indentation characters. + newLevel := p.level + p.level = p.lastLevel + + for _, r := range hdocs { + p.line++ + p.WriteByte('\n') + p.wantSpace = spaceWritten + p.wantNewline, p.wantNewline = false, false + if r.Op == DashHdoc && p.indentSpaces == 0 && !p.minify { + if r.Hdoc != nil { + extra := extraIndenter{ + bufWriter: p.bufWriter, + baseIndent: int(p.level + 1), + firstIndent: -1, + } + p.tabsPrinter = &Printer{ + bufWriter: &extra, + + // The options need to persist. 
+ indentSpaces: p.indentSpaces, + binNextLine: p.binNextLine, + swtCaseIndent: p.swtCaseIndent, + spaceRedirects: p.spaceRedirects, + keepPadding: p.keepPadding, + minify: p.minify, + funcNextLine: p.funcNextLine, + + line: r.Hdoc.Pos().Line(), + } + p.tabsPrinter.wordParts(r.Hdoc.Parts, true) + } + p.indent() + } else if r.Hdoc != nil { + p.wordParts(r.Hdoc.Parts, true) + } + p.unquotedWord(r.Word) + if r.Hdoc != nil { + // Overwrite p.line, since printing r.Word again can set + // p.line to the beginning of the heredoc again. + p.advanceLine(r.Hdoc.End().Line()) + } + p.wantSpace = spaceNotRequired + } + p.level = newLevel + p.pendingComments = coms + p.mustNewline = true +} + +// newline prints between zero and two newlines. +// If any newlines are printed, it advances p.line to pos.Line(). +func (p *Printer) newlines(pos Pos) { + if p.firstLine && len(p.pendingComments) == 0 { + p.firstLine = false + return // no empty lines at the top + } + if !p.wantsNewline(pos) { + return + } + p.flushHeredocs() + p.flushComments() + p.WriteByte('\n') + p.wantSpace = spaceWritten + p.wantNewline, p.mustNewline = false, false + + l := pos.Line() + if l > p.line+1 && !p.minify { + p.WriteByte('\n') // preserve single empty lines + } + p.advanceLine(l) + p.indent() +} + +func (p *Printer) rightParen(pos Pos) { + if !p.minify { + p.newlines(pos) + } + p.WriteByte(')') + p.wantSpace = spaceRequired +} + +func (p *Printer) semiRsrv(s string, pos Pos) { + if p.wantsNewline(pos) { + p.newlines(pos) + } else { + if !p.wroteSemi { + p.WriteByte(';') + } + if !p.minify { + p.spacePad(pos) + } + } + p.WriteString(s) + p.wantSpace = spaceRequired +} + +func (p *Printer) flushComments() { + for i, c := range p.pendingComments { + if i == 0 { + // Flush any pending heredocs first. Otherwise, the + // comments would become part of a heredoc body. + p.flushHeredocs() + } + p.firstLine = false + // We can't call any of the newline methods, as they call this + // function and we'd recurse forever. + cline := c.Hash.Line() + switch { + case p.mustNewline, i > 0, cline > p.line && p.line > 0: + p.WriteByte('\n') + if cline > p.line+1 { + p.WriteByte('\n') + } + p.indent() + p.wantSpace = spaceWritten + p.spacePad(c.Pos()) + case p.wantSpace == spaceRequired: + if p.keepPadding { + p.spacePad(c.Pos()) + } else { + p.WriteByte('\t') + } + case p.wantSpace != spaceWritten: + p.space() + } + // don't go back one line, which may happen in some edge cases + p.advanceLine(cline) + p.WriteByte('#') + p.writeLit(strings.TrimRightFunc(c.Text, unicode.IsSpace)) + p.wantNewline = true + p.mustNewline = true + } + p.pendingComments = nil +} + +func (p *Printer) comments(comments ...Comment) { + if p.minify { + for _, c := range comments { + if fileutil.Shebang([]byte("#"+c.Text)) != "" && c.Hash.Col() == 1 && c.Hash.Line() == 1 { + p.WriteString(strings.TrimRightFunc("#"+c.Text, unicode.IsSpace)) + p.WriteString("\n") + p.line++ + } + } + return + } + p.pendingComments = append(p.pendingComments, comments...) +} + +func (p *Printer) wordParts(wps []WordPart, quoted bool) { + // We disallow unquoted escaped newlines between word parts below. + // However, we want to allow a leading escaped newline for cases such as: + // + // foo <<< \ + // "bar baz" + if !quoted && !p.singleLine && wps[0].Pos().Line() > p.line { + p.bslashNewl() + } + for i, wp := range wps { + var next WordPart + if i+1 < len(wps) { + next = wps[i+1] + } + // Keep escaped newlines separating word parts when quoted. 
+ // Note that those escaped newlines don't cause indentation. + // When not quoted, we strip them out consistently, + // because attempting to keep them would prevent indentation. + // Can't use p.wantsNewline here, since this is only about + // escaped newlines. + for quoted && !p.singleLine && wp.Pos().Line() > p.line { + p.WriteString("\\\n") + p.line++ + } + p.wordPart(wp, next) + p.advanceLine(wp.End().Line()) + } +} + +func (p *Printer) wordPart(wp, next WordPart) { + switch x := wp.(type) { + case *Lit: + p.writeLit(x.Value) + case *SglQuoted: + if x.Dollar { + p.WriteByte('$') + } + p.WriteByte('\'') + p.writeLit(x.Value) + p.WriteByte('\'') + p.advanceLine(x.End().Line()) + case *DblQuoted: + p.dblQuoted(x) + case *CmdSubst: + p.advanceLine(x.Pos().Line()) + switch { + case x.TempFile: + p.WriteString("${") + p.wantSpace = spaceRequired + p.nestedStmts(x.Stmts, x.Last, x.Right) + p.wantSpace = spaceNotRequired + p.semiRsrv("}", x.Right) + case x.ReplyVar: + p.WriteString("${|") + p.nestedStmts(x.Stmts, x.Last, x.Right) + p.wantSpace = spaceNotRequired + p.semiRsrv("}", x.Right) + // Special case: `# inline comment` + case x.Backquotes && len(x.Stmts) == 0 && + len(x.Last) == 1 && x.Right.Line() == p.line: + p.WriteString("`#") + p.WriteString(x.Last[0].Text) + p.WriteString("`") + default: + p.WriteString("$(") + if len(x.Stmts) > 0 && startsWithLparen(x.Stmts[0]) { + p.wantSpace = spaceRequired + } else { + p.wantSpace = spaceNotRequired + } + p.nestedStmts(x.Stmts, x.Last, x.Right) + p.rightParen(x.Right) + } + case *ParamExp: + litCont := ";" + if nextLit, ok := next.(*Lit); ok && nextLit.Value != "" { + litCont = nextLit.Value[:1] + } + name := x.Param.Value + switch { + case !p.minify: + case x.Excl, x.Length, x.Width: + case x.Index != nil, x.Slice != nil: + case x.Repl != nil, x.Exp != nil: + case len(name) > 1 && !ValidName(name): // ${10} + case ValidName(name + litCont): // ${var}cont + default: + x2 := *x + x2.Short = true + p.paramExp(&x2) + return + } + p.paramExp(x) + case *ArithmExp: + p.WriteString("$((") + if x.Unsigned { + p.WriteString("# ") + } + p.arithmExpr(x.X, false, false) + p.WriteString("))") + case *ExtGlob: + p.WriteString(x.Op.String()) + p.writeLit(x.Pattern.Value) + p.WriteByte(')') + case *ProcSubst: + // avoid conflict with << and others + if p.wantSpace == spaceRequired { + p.space() + } + p.WriteString(x.Op.String()) + p.nestedStmts(x.Stmts, x.Last, x.Rparen) + p.rightParen(x.Rparen) + } +} + +func (p *Printer) dblQuoted(dq *DblQuoted) { + if dq.Dollar { + p.WriteByte('$') + } + p.WriteByte('"') + if len(dq.Parts) > 0 { + p.wordParts(dq.Parts, true) + } + // Add any trailing escaped newlines. 
+ for p.line < dq.Right.Line() { + p.WriteString("\\\n") + p.line++ + } + p.WriteByte('"') +} + +func (p *Printer) wroteIndex(index ArithmExpr) bool { + if index == nil { + return false + } + p.WriteByte('[') + p.arithmExpr(index, false, false) + p.WriteByte(']') + return true +} + +func (p *Printer) paramExp(pe *ParamExp) { + if pe.nakedIndex() { // arr[x] + p.writeLit(pe.Param.Value) + p.wroteIndex(pe.Index) + return + } + if pe.Short { // $var + p.WriteByte('$') + p.writeLit(pe.Param.Value) + return + } + // ${var...} + p.WriteString("${") + switch { + case pe.Length: + p.WriteByte('#') + case pe.Width: + p.WriteByte('%') + case pe.Excl: + p.WriteByte('!') + } + p.writeLit(pe.Param.Value) + p.wroteIndex(pe.Index) + switch { + case pe.Slice != nil: + p.WriteByte(':') + p.arithmExpr(pe.Slice.Offset, true, true) + if pe.Slice.Length != nil { + p.WriteByte(':') + p.arithmExpr(pe.Slice.Length, true, false) + } + case pe.Repl != nil: + if pe.Repl.All { + p.WriteByte('/') + } + p.WriteByte('/') + if pe.Repl.Orig != nil { + p.word(pe.Repl.Orig) + } + p.WriteByte('/') + if pe.Repl.With != nil { + p.word(pe.Repl.With) + } + case pe.Names != 0: + p.writeLit(pe.Names.String()) + case pe.Exp != nil: + p.WriteString(pe.Exp.Op.String()) + if pe.Exp.Word != nil { + p.word(pe.Exp.Word) + } + } + p.WriteByte('}') +} + +func (p *Printer) loop(loop Loop) { + switch x := loop.(type) { + case *WordIter: + p.writeLit(x.Name.Value) + if x.InPos.IsValid() { + p.spacedString(" in", Pos{}) + p.wordJoin(x.Items) + } + case *CStyleLoop: + p.WriteString("((") + if x.Init == nil { + p.space() + } + p.arithmExpr(x.Init, false, false) + p.WriteString("; ") + p.arithmExpr(x.Cond, false, false) + p.WriteString("; ") + p.arithmExpr(x.Post, false, false) + p.WriteString("))") + } +} + +func (p *Printer) arithmExpr(expr ArithmExpr, compact, spacePlusMinus bool) { + if p.minify { + compact = true + } + switch x := expr.(type) { + case *Word: + p.word(x) + case *BinaryArithm: + if compact { + p.arithmExpr(x.X, compact, spacePlusMinus) + p.WriteString(x.Op.String()) + p.arithmExpr(x.Y, compact, false) + } else { + p.arithmExpr(x.X, compact, spacePlusMinus) + if x.Op != Comma { + p.space() + } + p.WriteString(x.Op.String()) + p.space() + p.arithmExpr(x.Y, compact, false) + } + case *UnaryArithm: + if x.Post { + p.arithmExpr(x.X, compact, spacePlusMinus) + p.WriteString(x.Op.String()) + } else { + if spacePlusMinus { + switch x.Op { + case Plus, Minus: + p.space() + } + } + p.WriteString(x.Op.String()) + p.arithmExpr(x.X, compact, false) + } + case *ParenArithm: + p.WriteByte('(') + p.arithmExpr(x.X, false, false) + p.WriteByte(')') + } +} + +func (p *Printer) testExpr(expr TestExpr) { + // Multi-line test expressions don't need to escape newlines. 
+ if expr.Pos().Line() > p.line { + p.newlines(expr.Pos()) + p.spacePad(expr.Pos()) + } else if p.wantSpace == spaceRequired { + p.space() + } + p.testExprSameLine(expr) +} + +func (p *Printer) testExprSameLine(expr TestExpr) { + p.advanceLine(expr.Pos().Line()) + switch x := expr.(type) { + case *Word: + p.word(x) + case *BinaryTest: + p.testExprSameLine(x.X) + p.space() + p.WriteString(x.Op.String()) + switch x.Op { + case AndTest, OrTest: + p.wantSpace = spaceRequired + p.testExpr(x.Y) + default: + p.space() + p.testExprSameLine(x.Y) + } + case *UnaryTest: + p.WriteString(x.Op.String()) + p.space() + p.testExprSameLine(x.X) + case *ParenTest: + p.WriteByte('(') + if startsWithLparen(x.X) { + p.wantSpace = spaceRequired + } else { + p.wantSpace = spaceNotRequired + } + p.testExpr(x.X) + p.WriteByte(')') + } +} + +func (p *Printer) word(w *Word) { + p.wordParts(w.Parts, false) + p.wantSpace = spaceRequired +} + +func (p *Printer) unquotedWord(w *Word) { + for _, wp := range w.Parts { + switch x := wp.(type) { + case *SglQuoted: + p.writeLit(x.Value) + case *DblQuoted: + p.wordParts(x.Parts, true) + case *Lit: + for i := 0; i < len(x.Value); i++ { + if b := x.Value[i]; b == '\\' { + if i++; i < len(x.Value) { + p.WriteByte(x.Value[i]) + } + } else { + p.WriteByte(b) + } + } + } + } +} + +func (p *Printer) wordJoin(ws []*Word) { + anyNewline := false + for _, w := range ws { + if pos := w.Pos(); pos.Line() > p.line && !p.singleLine { + if !anyNewline { + p.incLevel() + anyNewline = true + } + p.bslashNewl() + } + p.spacePad(w.Pos()) + p.word(w) + } + if anyNewline { + p.decLevel() + } +} + +func (p *Printer) casePatternJoin(pats []*Word) { + anyNewline := false + for i, w := range pats { + if i > 0 { + p.spacedToken("|", Pos{}) + } + if p.wantsNewline(w.Pos()) { + if !anyNewline { + p.incLevel() + anyNewline = true + } + p.bslashNewl() + } else { + p.spacePad(w.Pos()) + } + p.word(w) + } + if anyNewline { + p.decLevel() + } +} + +func (p *Printer) elemJoin(elems []*ArrayElem, last []Comment) { + p.incLevel() + for _, el := range elems { + var left []Comment + for _, c := range el.Comments { + if c.Pos().After(el.Pos()) { + left = append(left, c) + break + } + p.comments(c) + } + // Multi-line array expressions don't need to escape newlines. + if el.Pos().Line() > p.line { + p.newlines(el.Pos()) + p.spacePad(el.Pos()) + } else if p.wantSpace == spaceRequired { + p.space() + } + if p.wroteIndex(el.Index) { + p.WriteByte('=') + } + if el.Value != nil { + p.word(el.Value) + } + p.comments(left...) + } + if len(last) > 0 { + p.comments(last...) 
+ p.flushComments() + } + p.decLevel() +} + +func (p *Printer) stmt(s *Stmt) { + p.wroteSemi = false + if s.Negated { + p.spacedString("!", s.Pos()) + } + var startRedirs int + if s.Cmd != nil { + startRedirs = p.command(s.Cmd, s.Redirs) + } + p.incLevel() + for _, r := range s.Redirs[startRedirs:] { + if p.wantsNewline(r.OpPos) { + p.bslashNewl() + } + if p.wantSpace == spaceRequired { + p.spacePad(r.Pos()) + } + if r.N != nil { + p.writeLit(r.N.Value) + } + p.WriteString(r.Op.String()) + if p.spaceRedirects && (r.Op != DplIn && r.Op != DplOut) { + p.space() + } else { + p.wantSpace = spaceRequired + } + p.word(r.Word) + if r.Op == Hdoc || r.Op == DashHdoc { + p.pendingHdocs = append(p.pendingHdocs, r) + } + } + sep := s.Semicolon.IsValid() && s.Semicolon.Line() > p.line && !p.singleLine + if sep || s.Background || s.Coprocess { + if sep { + p.bslashNewl() + } else if !p.minify { + p.space() + } + if s.Background { + p.WriteString("&") + } else if s.Coprocess { + p.WriteString("|&") + } else { + p.WriteString(";") + } + p.wroteSemi = true + p.wantSpace = spaceRequired + } + p.decLevel() +} + +func (p *Printer) command(cmd Command, redirs []*Redirect) (startRedirs int) { + p.advanceLine(cmd.Pos().Line()) + p.spacePad(cmd.Pos()) + switch x := cmd.(type) { + case *CallExpr: + p.assigns(x.Assigns) + if len(x.Args) <= 1 { + p.wordJoin(x.Args) + return 0 + } + p.wordJoin(x.Args[:1]) + for _, r := range redirs { + if r.Pos().After(x.Args[1].Pos()) || r.Op == Hdoc || r.Op == DashHdoc { + break + } + if p.wantSpace == spaceRequired { + p.spacePad(r.Pos()) + } + if r.N != nil { + p.writeLit(r.N.Value) + } + p.WriteString(r.Op.String()) + if p.spaceRedirects && (r.Op != DplIn && r.Op != DplOut) { + p.space() + } else { + p.wantSpace = spaceRequired + } + p.word(r.Word) + startRedirs++ + } + p.wordJoin(x.Args[1:]) + case *Block: + p.WriteByte('{') + p.wantSpace = spaceRequired + // Forbid "foo()\n{ bar; }" + p.wantNewline = p.wantNewline || p.funcNextLine + p.nestedStmts(x.Stmts, x.Last, x.Rbrace) + p.semiRsrv("}", x.Rbrace) + case *IfClause: + p.ifClause(x, false) + case *Subshell: + p.WriteByte('(') + stmts := x.Stmts + if len(stmts) > 0 && startsWithLparen(stmts[0]) { + p.wantSpace = spaceRequired + // Add a space between nested parentheses if we're printing them in a single line, + // to avoid the ambiguity between `((` and `( (`. 
+ if (x.Lparen.Line() != stmts[0].Pos().Line() || len(stmts) > 1) && !p.singleLine { + p.wantSpace = spaceNotRequired + + if p.minify { + p.mustNewline = true + } + } + } else { + p.wantSpace = spaceNotRequired + } + + p.spacePad(stmtsPos(x.Stmts, x.Last)) + p.nestedStmts(x.Stmts, x.Last, x.Rparen) + p.wantSpace = spaceNotRequired + p.spacePad(x.Rparen) + p.rightParen(x.Rparen) + case *WhileClause: + if x.Until { + p.spacedString("until", x.Pos()) + } else { + p.spacedString("while", x.Pos()) + } + p.nestedStmts(x.Cond, x.CondLast, Pos{}) + p.semiOrNewl("do", x.DoPos) + p.nestedStmts(x.Do, x.DoLast, x.DonePos) + p.semiRsrv("done", x.DonePos) + case *ForClause: + if x.Select { + p.WriteString("select ") + } else { + p.WriteString("for ") + } + p.loop(x.Loop) + p.semiOrNewl("do", x.DoPos) + p.nestedStmts(x.Do, x.DoLast, x.DonePos) + p.semiRsrv("done", x.DonePos) + case *BinaryCmd: + p.stmt(x.X) + if p.minify || p.singleLine || x.Y.Pos().Line() <= p.line { + // leave p.nestedBinary untouched + p.spacedToken(x.Op.String(), x.OpPos) + p.advanceLine(x.Y.Pos().Line()) + p.stmt(x.Y) + break + } + indent := !p.nestedBinary + if indent { + p.incLevel() + } + if p.binNextLine { + if len(p.pendingHdocs) == 0 { + p.bslashNewl() + } + p.spacedToken(x.Op.String(), x.OpPos) + if len(x.Y.Comments) > 0 { + p.wantSpace = spaceNotRequired + p.newline(x.Y.Pos()) + p.indent() + p.comments(x.Y.Comments...) + p.newline(Pos{}) + p.indent() + } + } else { + p.spacedToken(x.Op.String(), x.OpPos) + p.advanceLine(x.OpPos.Line()) + p.comments(x.Y.Comments...) + p.newline(Pos{}) + p.indent() + } + p.advanceLine(x.Y.Pos().Line()) + _, p.nestedBinary = x.Y.Cmd.(*BinaryCmd) + p.stmt(x.Y) + if indent { + p.decLevel() + } + p.nestedBinary = false + case *FuncDecl: + if x.RsrvWord { + p.WriteString("function ") + } + p.writeLit(x.Name.Value) + if !x.RsrvWord || x.Parens { + p.WriteString("()") + } + if p.funcNextLine { + p.newline(Pos{}) + p.indent() + } else if !x.Parens || !p.minify { + p.space() + } + p.advanceLine(x.Body.Pos().Line()) + p.comments(x.Body.Comments...) + p.stmt(x.Body) + case *CaseClause: + p.WriteString("case ") + p.word(x.Word) + p.WriteString(" in") + p.advanceLine(x.In.Line()) + p.wantSpace = spaceRequired + if p.swtCaseIndent { + p.incLevel() + } + if len(x.Items) == 0 { + // Apparently "case x in; esac" is invalid shell. + p.mustNewline = true + } + for i, ci := range x.Items { + var last []Comment + for i, c := range ci.Comments { + if c.Pos().After(ci.Pos()) { + last = ci.Comments[i:] + break + } + p.comments(c) + } + p.newlines(ci.Pos()) + p.spacePad(ci.Pos()) + p.casePatternJoin(ci.Patterns) + p.WriteByte(')') + if !p.minify { + p.wantSpace = spaceRequired + } else { + p.wantSpace = spaceNotRequired + } + + bodyPos := stmtsPos(ci.Stmts, ci.Last) + bodyEnd := stmtsEnd(ci.Stmts, ci.Last) + sep := len(ci.Stmts) > 1 || bodyPos.Line() > p.line || + (bodyEnd.IsValid() && ci.OpPos.Line() > bodyEnd.Line()) + p.nestedStmts(ci.Stmts, ci.Last, ci.OpPos) + p.level++ + if !p.minify || i != len(x.Items)-1 { + if sep { + p.newlines(ci.OpPos) + p.wantNewline = true + } + p.spacedToken(ci.Op.String(), ci.OpPos) + p.advanceLine(ci.OpPos.Line()) + // avoid ; directly after tokens like ;; + p.wroteSemi = true + } + p.comments(last...) + p.flushComments() + p.level-- + } + p.comments(x.Last...) 
+ if p.swtCaseIndent { + p.flushComments() + p.decLevel() + } + p.semiRsrv("esac", x.Esac) + case *ArithmCmd: + p.WriteString("((") + if x.Unsigned { + p.WriteString("# ") + } + p.arithmExpr(x.X, false, false) + p.WriteString("))") + case *TestClause: + p.WriteString("[[ ") + p.incLevel() + p.testExpr(x.X) + p.decLevel() + p.spacedString("]]", x.Right) + case *DeclClause: + p.spacedString(x.Variant.Value, x.Pos()) + p.assigns(x.Args) + case *TimeClause: + p.spacedString("time", x.Pos()) + if x.PosixFormat { + p.spacedString("-p", x.Pos()) + } + if x.Stmt != nil { + p.stmt(x.Stmt) + } + case *CoprocClause: + p.spacedString("coproc", x.Pos()) + if x.Name != nil { + p.space() + p.word(x.Name) + } + p.space() + p.stmt(x.Stmt) + case *LetClause: + p.spacedString("let", x.Pos()) + for _, n := range x.Exprs { + p.space() + p.arithmExpr(n, true, false) + } + case *TestDecl: + p.spacedString("@test", x.Pos()) + p.space() + p.word(x.Description) + p.space() + p.stmt(x.Body) + default: + panic(fmt.Sprintf("syntax.Printer: unexpected node type %T", x)) + } + return startRedirs +} + +func (p *Printer) ifClause(ic *IfClause, elif bool) { + if !elif { + p.spacedString("if", ic.Pos()) + } + p.nestedStmts(ic.Cond, ic.CondLast, Pos{}) + p.semiOrNewl("then", ic.ThenPos) + thenEnd := ic.FiPos + el := ic.Else + if el != nil { + thenEnd = el.Position + } + p.nestedStmts(ic.Then, ic.ThenLast, thenEnd) + + if el != nil && el.ThenPos.IsValid() { + p.comments(ic.Last...) + p.semiRsrv("elif", el.Position) + p.ifClause(el, true) + return + } + if el == nil { + p.comments(ic.Last...) + } else { + var left []Comment + for _, c := range ic.Last { + if c.Pos().After(el.Position) { + left = append(left, c) + break + } + p.comments(c) + } + p.semiRsrv("else", el.Position) + p.comments(left...) + p.nestedStmts(el.Then, el.ThenLast, ic.FiPos) + p.comments(el.Last...) + } + p.semiRsrv("fi", ic.FiPos) +} + +func (p *Printer) stmtList(stmts []*Stmt, last []Comment) { + sep := p.wantNewline || (len(stmts) > 0 && stmts[0].Pos().Line() > p.line) + for i, s := range stmts { + if i > 0 && p.singleLine && p.wantNewline && !p.wroteSemi { + // In singleLine mode, ensure we use semicolons between + // statements. + p.WriteByte(';') + p.wantSpace = spaceRequired + } + pos := s.Pos() + var midComs, endComs []Comment + for _, c := range s.Comments { + // Comments after the end of this command. Note that + // this includes "< 1: + // Force a newline if we find: + // { stmt; stmt; } + p.wantNewline = true + case closing.Line() > p.line && len(stmts) > 0 && + stmtsEnd(stmts, last).Line() < closing.Line(): + // Force a newline if we find: + // { stmt + // } + p.wantNewline = true + case len(p.pendingComments) > 0 && len(stmts) > 0: + // Force a newline if we find: + // for i in a b # stmt + // do foo; done + p.wantNewline = true + } + p.stmtList(stmts, last) + if closing.IsValid() { + p.flushComments() + } + p.decLevel() +} + +func (p *Printer) assigns(assigns []*Assign) { + p.incLevel() + for _, a := range assigns { + if p.wantsNewline(a.Pos()) { + p.bslashNewl() + } else { + p.spacePad(a.Pos()) + } + if a.Name != nil { + p.writeLit(a.Name.Value) + p.wroteIndex(a.Index) + if a.Append { + p.WriteByte('+') + } + if !a.Naked { + p.WriteByte('=') + } + } + if a.Value != nil { + // Ensure we don't use an escaped newline after '=', + // because that can result in indentation, thus + // splitting "foo=bar" into "foo= bar". 
+ p.advanceLine(a.Value.Pos().Line()) + p.word(a.Value) + } else if a.Array != nil { + p.wantSpace = spaceNotRequired + p.WriteByte('(') + p.elemJoin(a.Array.Elems, a.Array.Last) + p.rightParen(a.Array.Rparen) + } + p.wantSpace = spaceRequired + } + p.decLevel() +} + +type wantSpaceState uint8 + +const ( + spaceNotRequired wantSpaceState = iota + spaceRequired // we should generally print a space or a newline next + spaceWritten // we have just written a space or newline +) + +// extraIndenter ensures that all lines in a '<<-' heredoc body have at least +// baseIndent leading tabs. Those that had more tab indentation than the first +// heredoc line will keep that relative indentation. +type extraIndenter struct { + bufWriter + baseIndent int + + firstIndent int + firstChange int + curLine []byte +} + +func (e *extraIndenter) WriteByte(b byte) error { + e.curLine = append(e.curLine, b) + if b != '\n' { + return nil + } + trimmed := bytes.TrimLeft(e.curLine, "\t") + if len(trimmed) == 1 { + // no tabs if this is an empty line, i.e. "\n" + e.bufWriter.Write(trimmed) + e.curLine = e.curLine[:0] + return nil + } + + lineIndent := len(e.curLine) - len(trimmed) + if e.firstIndent < 0 { + // This is the first heredoc line we add extra indentation to. + // Keep track of how much we indented. + e.firstIndent = lineIndent + e.firstChange = e.baseIndent - lineIndent + lineIndent = e.baseIndent + + } else if lineIndent < e.firstIndent { + // This line did not have enough indentation; simply indent it + // like the first line. + lineIndent = e.firstIndent + } else { + // This line had plenty of indentation. Add the extra + // indentation that the first line had, for consistency. + lineIndent += e.firstChange + } + e.bufWriter.WriteByte(tabwriter.Escape) + for i := 0; i < lineIndent; i++ { + e.bufWriter.WriteByte('\t') + } + e.bufWriter.WriteByte(tabwriter.Escape) + e.bufWriter.Write(trimmed) + e.curLine = e.curLine[:0] + return nil +} + +func (e *extraIndenter) WriteString(s string) (int, error) { + for i := 0; i < len(s); i++ { + e.WriteByte(s[i]) + } + return len(s), nil +} + +func startsWithLparen(node Node) bool { + switch node := node.(type) { + case *Stmt: + return startsWithLparen(node.Cmd) + case *BinaryCmd: + return startsWithLparen(node.X) + case *Subshell: + return true // keep ( ( + case *ArithmCmd: + return true // keep ( (( + } + return false +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/quote.go b/vendor/mvdan.cc/sh/v3/syntax/quote.go new file mode 100644 index 00000000..6f27eba1 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/quote.go @@ -0,0 +1,185 @@ +// Copyright (c) 2021, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" +) + +type QuoteError struct { + ByteOffset int + Message string +} + +func (e QuoteError) Error() string { + return fmt.Sprintf("cannot quote character at byte %d: %s", e.ByteOffset, e.Message) +} + +const ( + quoteErrNull = "shell strings cannot contain null bytes" + quoteErrPOSIX = "POSIX shell lacks escape sequences" + quoteErrRange = "rune out of range" + quoteErrMksh = "mksh cannot escape codepoints above 16 bits" +) + +// Quote returns a quoted version of the input string, +// so that the quoted version is expanded or interpreted +// as the original string in the given language variant. +// +// Quoting is necessary when using arbitrary literal strings +// as words in a shell script or command. 
+// Without quoting, one can run into syntax errors, +// as well as the possibility of running unintended code. +// +// An error is returned when a string cannot be quoted for a variant. +// For instance, POSIX lacks escape sequences for non-printable characters, +// and no language variant can represent a string containing null bytes. +// In such cases, the returned error type will be *QuoteError. +// +// The quoting strategy is chosen on a best-effort basis, +// to minimize the amount of extra bytes necessary. +// +// Some strings do not require any quoting and are returned unchanged. +// Those strings can be directly surrounded in single quotes as well. +func Quote(s string, lang LangVariant) (string, error) { + if s == "" { + // Special case; an empty string must always be quoted, + // as otherwise it expands to zero fields. + return "''", nil + } + shellChars := false + nonPrintable := false + offs := 0 + for rem := s; len(rem) > 0; { + r, size := utf8.DecodeRuneInString(rem) + switch r { + // Like regOps; token characters. + case ';', '"', '\'', '(', ')', '$', '|', '&', '>', '<', '`', + // Whitespace; might result in multiple fields. + ' ', '\t', '\r', '\n', + // Escape sequences would be expanded. + '\\', + // Would start a comment unless quoted. + '#', + // Might result in brace expansion. + '{', + // Might result in tilde expansion. + '~', + // Might result in globbing. + '*', '?', '[', + // Might result in an assignment. + '=': + shellChars = true + case '\x00': + return "", &QuoteError{ByteOffset: offs, Message: quoteErrNull} + } + if r == utf8.RuneError || !unicode.IsPrint(r) { + if lang == LangPOSIX { + return "", &QuoteError{ByteOffset: offs, Message: quoteErrPOSIX} + } + nonPrintable = true + } + rem = rem[size:] + offs += size + } + if !shellChars && !nonPrintable && !IsKeyword(s) { + // Nothing to quote; avoid allocating. + return s, nil + } + + // Single quotes are usually best, + // as they don't require any escaping of characters. + // If we have any invalid utf8 or non-printable runes, + // use $'' so that we can escape them. + // Note that we can't use double quotes for those. + var b strings.Builder + if nonPrintable { + b.WriteString("$'") + lastRequoteIfHex := false + offs := 0 + for rem := s; len(rem) > 0; { + nextRequoteIfHex := false + r, size := utf8.DecodeRuneInString(rem) + switch { + case r == '\'', r == '\\': + b.WriteByte('\\') + b.WriteRune(r) + case unicode.IsPrint(r) && r != utf8.RuneError: + if lastRequoteIfHex && isHex(r) { + b.WriteString("'$'") + } + b.WriteRune(r) + case r == '\a': + b.WriteString(`\a`) + case r == '\b': + b.WriteString(`\b`) + case r == '\f': + b.WriteString(`\f`) + case r == '\n': + b.WriteString(`\n`) + case r == '\r': + b.WriteString(`\r`) + case r == '\t': + b.WriteString(`\t`) + case r == '\v': + b.WriteString(`\v`) + case r < utf8.RuneSelf, r == utf8.RuneError && size == 1: + // \xXX, fixed at two hexadecimal characters. + fmt.Fprintf(&b, "\\x%02x", rem[0]) + // Unfortunately, mksh allows \x to consume more hex characters. + // Ensure that we don't allow it to read more than two. + if lang == LangMirBSDKorn { + nextRequoteIfHex = true + } + case r > utf8.MaxRune: + // Not a valid Unicode code point? + return "", &QuoteError{ByteOffset: offs, Message: quoteErrRange} + case lang == LangMirBSDKorn && r > 0xFFFD: + // From the CAVEATS section in R59's man page: + // + // mksh currently uses OPTU-16 internally, which is the same as + // UTF-8 and CESU-8 with 0000..FFFD being valid codepoints. 
+ return "", &QuoteError{ByteOffset: offs, Message: quoteErrMksh} + case r < 0x10000: + // \uXXXX, fixed at four hexadecimal characters. + fmt.Fprintf(&b, "\\u%04x", r) + default: + // \UXXXXXXXX, fixed at eight hexadecimal characters. + fmt.Fprintf(&b, "\\U%08x", r) + } + rem = rem[size:] + lastRequoteIfHex = nextRequoteIfHex + offs += size + } + b.WriteString("'") + return b.String(), nil + } + + // Single quotes without any need for escaping. + if !strings.Contains(s, "'") { + return "'" + s + "'", nil + } + + // The string contains single quotes, + // so fall back to double quotes. + b.WriteByte('"') + for _, r := range s { + switch r { + case '"', '\\', '`', '$': + b.WriteByte('\\') + } + b.WriteRune(r) + } + b.WriteByte('"') + return b.String(), nil +} + +func isHex(r rune) bool { + return (r >= '0' && r <= '9') || + (r >= 'a' && r <= 'f') || + (r >= 'A' && r <= 'F') +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/quotestate_string.go b/vendor/mvdan.cc/sh/v3/syntax/quotestate_string.go new file mode 100644 index 00000000..d43466f8 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/quotestate_string.go @@ -0,0 +1,61 @@ +// Code generated by "stringer -type=quoteState"; DO NOT EDIT. + +package syntax + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[noState-1] + _ = x[subCmd-2] + _ = x[subCmdBckquo-4] + _ = x[dblQuotes-8] + _ = x[hdocWord-16] + _ = x[hdocBody-32] + _ = x[hdocBodyTabs-64] + _ = x[arithmExpr-128] + _ = x[arithmExprLet-256] + _ = x[arithmExprCmd-512] + _ = x[arithmExprBrack-1024] + _ = x[testExpr-2048] + _ = x[testExprRegexp-4096] + _ = x[switchCase-8192] + _ = x[paramExpName-16384] + _ = x[paramExpSlice-32768] + _ = x[paramExpRepl-65536] + _ = x[paramExpExp-131072] + _ = x[arrayElems-262144] +} + +const _quoteState_name = "noStatesubCmdsubCmdBckquodblQuoteshdocWordhdocBodyhdocBodyTabsarithmExprarithmExprLetarithmExprCmdarithmExprBracktestExprtestExprRegexpswitchCaseparamExpNameparamExpSliceparamExpReplparamExpExparrayElems" + +var _quoteState_map = map[quoteState]string{ + 1: _quoteState_name[0:7], + 2: _quoteState_name[7:13], + 4: _quoteState_name[13:25], + 8: _quoteState_name[25:34], + 16: _quoteState_name[34:42], + 32: _quoteState_name[42:50], + 64: _quoteState_name[50:62], + 128: _quoteState_name[62:72], + 256: _quoteState_name[72:85], + 512: _quoteState_name[85:98], + 1024: _quoteState_name[98:113], + 2048: _quoteState_name[113:121], + 4096: _quoteState_name[121:135], + 8192: _quoteState_name[135:145], + 16384: _quoteState_name[145:157], + 32768: _quoteState_name[157:170], + 65536: _quoteState_name[170:182], + 131072: _quoteState_name[182:193], + 262144: _quoteState_name[193:203], +} + +func (i quoteState) String() string { + if str, ok := _quoteState_map[i]; ok { + return str + } + return "quoteState(" + strconv.FormatInt(int64(i), 10) + ")" +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/simplify.go b/vendor/mvdan.cc/sh/v3/syntax/simplify.go new file mode 100644 index 00000000..5a93966e --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/simplify.go @@ -0,0 +1,255 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import "bytes" + +// Simplify modifies a node to remove redundant pieces of syntax, and returns +// whether any changes were made. 
+// +// The changes currently applied are: +// +// Remove clearly useless parentheses $(( (expr) )) +// Remove dollars from vars in exprs (($var)) +// Remove duplicate subshells $( (stmts) ) +// Remove redundant quotes [[ "$var" == str ]] +// Merge negations with unary operators [[ ! -n $var ]] +// Use single quotes to shorten literals "\$foo" +// Remove redundant param expansion colons ${foo:-} +func Simplify(n Node) bool { + s := simplifier{} + Walk(n, s.visit) + return s.modified +} + +type simplifier struct { + modified bool +} + +func (s *simplifier) visit(node Node) bool { + switch x := node.(type) { + case *Assign: + x.Index = s.removeParensArithm(x.Index) + // Don't inline params, as x[i] and x[$i] mean + // different things when x is an associative + // array; the first means "i", the second "$i". + case *ParamExp: + x.Index = s.removeParensArithm(x.Index) + // don't inline params - same as above. + + if x.Exp != nil && x.Exp.Op == DefaultUnsetOrNull && x.Exp.Word == nil { + s.modified = true + x.Exp.Op = DefaultUnset + } + if x.Slice == nil { + break + } + x.Slice.Offset = s.removeParensArithm(x.Slice.Offset) + x.Slice.Offset = s.inlineSimpleParams(x.Slice.Offset) + x.Slice.Length = s.removeParensArithm(x.Slice.Length) + x.Slice.Length = s.inlineSimpleParams(x.Slice.Length) + case *ArithmExp: + x.X = s.removeParensArithm(x.X) + x.X = s.inlineSimpleParams(x.X) + case *ArithmCmd: + x.X = s.removeParensArithm(x.X) + x.X = s.inlineSimpleParams(x.X) + case *ParenArithm: + x.X = s.removeParensArithm(x.X) + x.X = s.inlineSimpleParams(x.X) + case *BinaryArithm: + x.X = s.inlineSimpleParams(x.X) + x.Y = s.inlineSimpleParams(x.Y) + case *CmdSubst: + x.Stmts = s.inlineSubshell(x.Stmts) + case *Subshell: + x.Stmts = s.inlineSubshell(x.Stmts) + case *Word: + x.Parts = s.simplifyWord(x.Parts) + case *TestClause: + x.X = s.removeParensTest(x.X) + x.X = s.removeNegateTest(x.X) + case *ParenTest: + x.X = s.removeParensTest(x.X) + x.X = s.removeNegateTest(x.X) + case *BinaryTest: + x.X = s.unquoteParams(x.X) + x.X = s.removeNegateTest(x.X) + if x.Op == TsMatchShort { + s.modified = true + x.Op = TsMatch + } + switch x.Op { + case TsMatch, TsNoMatch: + // unquoting enables globbing + default: + x.Y = s.unquoteParams(x.Y) + } + x.Y = s.removeNegateTest(x.Y) + case *UnaryTest: + x.X = s.unquoteParams(x.X) + } + return true +} + +func (s *simplifier) simplifyWord(wps []WordPart) []WordPart { +parts: + for i, wp := range wps { + dq, _ := wp.(*DblQuoted) + if dq == nil || len(dq.Parts) != 1 { + break + } + lit, _ := dq.Parts[0].(*Lit) + if lit == nil { + break + } + var buf bytes.Buffer + escaped := false + for _, r := range lit.Value { + switch r { + case '\\': + escaped = !escaped + if escaped { + continue + } + case '\'': + continue parts + case '$', '"', '`': + escaped = false + default: + if escaped { + continue parts + } + escaped = false + } + buf.WriteRune(r) + } + newVal := buf.String() + if newVal == lit.Value { + break + } + s.modified = true + wps[i] = &SglQuoted{ + Left: dq.Pos(), + Right: dq.End(), + Dollar: dq.Dollar, + Value: newVal, + } + } + return wps +} + +func (s *simplifier) removeParensArithm(x ArithmExpr) ArithmExpr { + for { + par, _ := x.(*ParenArithm) + if par == nil { + return x + } + s.modified = true + x = par.X + } +} + +func (s *simplifier) inlineSimpleParams(x ArithmExpr) ArithmExpr { + w, _ := x.(*Word) + if w == nil || len(w.Parts) != 1 { + return x + } + pe, _ := w.Parts[0].(*ParamExp) + if pe == nil || !ValidName(pe.Param.Value) { + // Not a parameter expansion, or 
not a valid name, like $3. + return x + } + if pe.Excl || pe.Length || pe.Width || pe.Slice != nil || + pe.Repl != nil || pe.Exp != nil || pe.Index != nil { + // A complex parameter expansion can't be simplified. + // + // Note that index expressions can't generally be simplified + // either. It's fine to turn ${a[0]} into a[0], but others like + // a[*] are invalid in many shells including Bash. + return x + } + s.modified = true + return &Word{Parts: []WordPart{pe.Param}} +} + +func (s *simplifier) inlineSubshell(stmts []*Stmt) []*Stmt { + for len(stmts) == 1 { + st := stmts[0] + if st.Negated || st.Background || st.Coprocess || + len(st.Redirs) > 0 { + break + } + sub, _ := st.Cmd.(*Subshell) + if sub == nil { + break + } + s.modified = true + stmts = sub.Stmts + } + return stmts +} + +func (s *simplifier) unquoteParams(x TestExpr) TestExpr { + w, _ := x.(*Word) + if w == nil || len(w.Parts) != 1 { + return x + } + dq, _ := w.Parts[0].(*DblQuoted) + if dq == nil || len(dq.Parts) != 1 { + return x + } + if _, ok := dq.Parts[0].(*ParamExp); !ok { + return x + } + s.modified = true + w.Parts = dq.Parts + return w +} + +func (s *simplifier) removeParensTest(x TestExpr) TestExpr { + for { + par, _ := x.(*ParenTest) + if par == nil { + return x + } + s.modified = true + x = par.X + } +} + +func (s *simplifier) removeNegateTest(x TestExpr) TestExpr { + u, _ := x.(*UnaryTest) + if u == nil || u.Op != TsNot { + return x + } + switch y := u.X.(type) { + case *UnaryTest: + switch y.Op { + case TsEmpStr: + y.Op = TsNempStr + s.modified = true + return y + case TsNempStr: + y.Op = TsEmpStr + s.modified = true + return y + case TsNot: + s.modified = true + return y.X + } + case *BinaryTest: + switch y.Op { + case TsMatch: + y.Op = TsNoMatch + s.modified = true + return y + case TsNoMatch: + y.Op = TsMatch + s.modified = true + return y + } + } + return x +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/token_string.go b/vendor/mvdan.cc/sh/v3/syntax/token_string.go new file mode 100644 index 00000000..ab5c83ac --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/token_string.go @@ -0,0 +1,149 @@ +// Code generated by "stringer -type token -linecomment -trimprefix _"; DO NOT EDIT. + +package syntax + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[illegalTok-0] + _ = x[_EOF-1] + _ = x[_Newl-2] + _ = x[_Lit-3] + _ = x[_LitWord-4] + _ = x[_LitRedir-5] + _ = x[sglQuote-6] + _ = x[dblQuote-7] + _ = x[bckQuote-8] + _ = x[and-9] + _ = x[andAnd-10] + _ = x[orOr-11] + _ = x[or-12] + _ = x[orAnd-13] + _ = x[dollar-14] + _ = x[dollSglQuote-15] + _ = x[dollDblQuote-16] + _ = x[dollBrace-17] + _ = x[dollBrack-18] + _ = x[dollParen-19] + _ = x[dollDblParen-20] + _ = x[leftBrack-21] + _ = x[dblLeftBrack-22] + _ = x[leftParen-23] + _ = x[dblLeftParen-24] + _ = x[rightBrace-25] + _ = x[rightBrack-26] + _ = x[rightParen-27] + _ = x[dblRightParen-28] + _ = x[semicolon-29] + _ = x[dblSemicolon-30] + _ = x[semiAnd-31] + _ = x[dblSemiAnd-32] + _ = x[semiOr-33] + _ = x[exclMark-34] + _ = x[tilde-35] + _ = x[addAdd-36] + _ = x[subSub-37] + _ = x[star-38] + _ = x[power-39] + _ = x[equal-40] + _ = x[nequal-41] + _ = x[lequal-42] + _ = x[gequal-43] + _ = x[addAssgn-44] + _ = x[subAssgn-45] + _ = x[mulAssgn-46] + _ = x[quoAssgn-47] + _ = x[remAssgn-48] + _ = x[andAssgn-49] + _ = x[orAssgn-50] + _ = x[xorAssgn-51] + _ = x[shlAssgn-52] + _ = x[shrAssgn-53] + _ = x[rdrOut-54] + _ = x[appOut-55] + _ = x[rdrIn-56] + _ = x[rdrInOut-57] + _ = x[dplIn-58] + _ = x[dplOut-59] + _ = x[clbOut-60] + _ = x[hdoc-61] + _ = x[dashHdoc-62] + _ = x[wordHdoc-63] + _ = x[rdrAll-64] + _ = x[appAll-65] + _ = x[cmdIn-66] + _ = x[cmdOut-67] + _ = x[plus-68] + _ = x[colPlus-69] + _ = x[minus-70] + _ = x[colMinus-71] + _ = x[quest-72] + _ = x[colQuest-73] + _ = x[assgn-74] + _ = x[colAssgn-75] + _ = x[perc-76] + _ = x[dblPerc-77] + _ = x[hash-78] + _ = x[dblHash-79] + _ = x[caret-80] + _ = x[dblCaret-81] + _ = x[comma-82] + _ = x[dblComma-83] + _ = x[at-84] + _ = x[slash-85] + _ = x[dblSlash-86] + _ = x[colon-87] + _ = x[tsExists-88] + _ = x[tsRegFile-89] + _ = x[tsDirect-90] + _ = x[tsCharSp-91] + _ = x[tsBlckSp-92] + _ = x[tsNmPipe-93] + _ = x[tsSocket-94] + _ = x[tsSmbLink-95] + _ = x[tsSticky-96] + _ = x[tsGIDSet-97] + _ = x[tsUIDSet-98] + _ = x[tsGrpOwn-99] + _ = x[tsUsrOwn-100] + _ = x[tsModif-101] + _ = x[tsRead-102] + _ = x[tsWrite-103] + _ = x[tsExec-104] + _ = x[tsNoEmpty-105] + _ = x[tsFdTerm-106] + _ = x[tsEmpStr-107] + _ = x[tsNempStr-108] + _ = x[tsOptSet-109] + _ = x[tsVarSet-110] + _ = x[tsRefVar-111] + _ = x[tsReMatch-112] + _ = x[tsNewer-113] + _ = x[tsOlder-114] + _ = x[tsDevIno-115] + _ = x[tsEql-116] + _ = x[tsNeq-117] + _ = x[tsLeq-118] + _ = x[tsGeq-119] + _ = x[tsLss-120] + _ = x[tsGtr-121] + _ = x[globQuest-122] + _ = x[globStar-123] + _ = x[globPlus-124] + _ = x[globAt-125] + _ = x[globExcl-126] +} + +const _token_name = "illegalTokEOFNewlLitLitWordLitRedir'\"`&&&||||&$$'$\"${$[$($(([[[(((}])));;;;&;;&;|!~++--***==!=<=>=+=-=*=/=%=&=|=^=<<=>>=>>><<><&>&>|<<<<-<<<&>&>><(>(+:+-:-?:?=:=%%%###^^^,,,@///:-e-f-d-c-b-p-S-L-k-g-u-G-O-N-r-w-x-s-t-z-n-o-v-R=~-nt-ot-ef-eq-ne-le-ge-lt-gt?(*(+(@(!(" + +var _token_index = [...]uint16{0, 10, 13, 17, 20, 27, 35, 36, 37, 38, 39, 41, 43, 44, 46, 47, 49, 51, 53, 55, 57, 60, 61, 63, 64, 66, 67, 68, 69, 71, 72, 74, 76, 79, 81, 82, 83, 85, 87, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 117, 120, 121, 123, 124, 126, 128, 130, 132, 134, 137, 140, 142, 145, 147, 149, 150, 152, 153, 155, 156, 158, 159, 161, 162, 164, 165, 167, 168, 170, 171, 173, 174, 175, 177, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204, 206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 231, 234, 237, 240, 243, 246, 249, 252, 255, 257, 259, 261, 263, 265} + +func (i token) String() 
string { + if i >= token(len(_token_index)-1) { + return "token(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _token_name[_token_index[i]:_token_index[i+1]] +} diff --git a/vendor/mvdan.cc/sh/v3/syntax/tokens.go b/vendor/mvdan.cc/sh/v3/syntax/tokens.go new file mode 100644 index 00000000..6a64b213 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/tokens.go @@ -0,0 +1,349 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +//go:generate stringer -type token -linecomment -trimprefix _ + +type token uint32 + +// The list of all possible tokens. +const ( + illegalTok token = iota + + _EOF + _Newl + _Lit + _LitWord + _LitRedir + + sglQuote // ' + dblQuote // " + bckQuote // ` + + and // & + andAnd // && + orOr // || + or // | + orAnd // |& + + dollar // $ + dollSglQuote // $' + dollDblQuote // $" + dollBrace // ${ + dollBrack // $[ + dollParen // $( + dollDblParen // $(( + leftBrack // [ + dblLeftBrack // [[ + leftParen // ( + dblLeftParen // (( + + rightBrace // } + rightBrack // ] + rightParen // ) + dblRightParen // )) + semicolon // ; + + dblSemicolon // ;; + semiAnd // ;& + dblSemiAnd // ;;& + semiOr // ;| + + exclMark // ! + tilde // ~ + addAdd // ++ + subSub // -- + star // * + power // ** + equal // == + nequal // != + lequal // <= + gequal // >= + + addAssgn // += + subAssgn // -= + mulAssgn // *= + quoAssgn // /= + remAssgn // %= + andAssgn // &= + orAssgn // |= + xorAssgn // ^= + shlAssgn // <<= + shrAssgn // >>= + + rdrOut // > + appOut // >> + rdrIn // < + rdrInOut // <> + dplIn // <& + dplOut // >& + clbOut // >| + hdoc // << + dashHdoc // <<- + wordHdoc // <<< + rdrAll // &> + appAll // &>> + + cmdIn // <( + cmdOut // >( + + plus // + + colPlus // :+ + minus // - + colMinus // :- + quest // ? + colQuest // :? 
+ assgn // = + colAssgn // := + perc // % + dblPerc // %% + hash // # + dblHash // ## + caret // ^ + dblCaret // ^^ + comma // , + dblComma // ,, + at // @ + slash // / + dblSlash // // + colon // : + + tsExists // -e + tsRegFile // -f + tsDirect // -d + tsCharSp // -c + tsBlckSp // -b + tsNmPipe // -p + tsSocket // -S + tsSmbLink // -L + tsSticky // -k + tsGIDSet // -g + tsUIDSet // -u + tsGrpOwn // -G + tsUsrOwn // -O + tsModif // -N + tsRead // -r + tsWrite // -w + tsExec // -x + tsNoEmpty // -s + tsFdTerm // -t + tsEmpStr // -z + tsNempStr // -n + tsOptSet // -o + tsVarSet // -v + tsRefVar // -R + + tsReMatch // =~ + tsNewer // -nt + tsOlder // -ot + tsDevIno // -ef + tsEql // -eq + tsNeq // -ne + tsLeq // -le + tsGeq // -ge + tsLss // -lt + tsGtr // -gt + + globQuest // ?( + globStar // *( + globPlus // +( + globAt // @( + globExcl // !( +) + +type RedirOperator token + +const ( + RdrOut = RedirOperator(rdrOut) + iota // > + AppOut // >> + RdrIn // < + RdrInOut // <> + DplIn // <& + DplOut // >& + ClbOut // >| + Hdoc // << + DashHdoc // <<- + WordHdoc // <<< + RdrAll // &> + AppAll // &>> +) + +type ProcOperator token + +const ( + CmdIn = ProcOperator(cmdIn) + iota // <( + CmdOut // >( +) + +type GlobOperator token + +const ( + GlobZeroOrOne = GlobOperator(globQuest) + iota // ?( + GlobZeroOrMore // *( + GlobOneOrMore // +( + GlobOne // @( + GlobExcept // !( +) + +type BinCmdOperator token + +const ( + AndStmt = BinCmdOperator(andAnd) + iota // && + OrStmt // || + Pipe // | + PipeAll // |& +) + +type CaseOperator token + +const ( + Break = CaseOperator(dblSemicolon) + iota // ;; + Fallthrough // ;& + Resume // ;;& + ResumeKorn // ;| +) + +type ParNamesOperator token + +const ( + NamesPrefix = ParNamesOperator(star) // * + NamesPrefixWords = ParNamesOperator(at) // @ +) + +type ParExpOperator token + +const ( + AlternateUnset = ParExpOperator(plus) + iota // + + AlternateUnsetOrNull // :+ + DefaultUnset // - + DefaultUnsetOrNull // :- + ErrorUnset // ? + ErrorUnsetOrNull // :? + AssignUnset // = + AssignUnsetOrNull // := + RemSmallSuffix // % + RemLargeSuffix // %% + RemSmallPrefix // # + RemLargePrefix // ## + UpperFirst // ^ + UpperAll // ^^ + LowerFirst // , + LowerAll // ,, + OtherParamOps // @ +) + +type UnAritOperator token + +const ( + Not = UnAritOperator(exclMark) + iota // ! + BitNegation // ~ + Inc // ++ + Dec // -- + Plus = UnAritOperator(plus) // + + Minus = UnAritOperator(minus) // - +) + +type BinAritOperator token + +const ( + Add = BinAritOperator(plus) // + + Sub = BinAritOperator(minus) // - + Mul = BinAritOperator(star) // * + Quo = BinAritOperator(slash) // / + Rem = BinAritOperator(perc) // % + Pow = BinAritOperator(power) // ** + Eql = BinAritOperator(equal) // == + Gtr = BinAritOperator(rdrOut) // > + Lss = BinAritOperator(rdrIn) // < + Neq = BinAritOperator(nequal) // != + Leq = BinAritOperator(lequal) // <= + Geq = BinAritOperator(gequal) // >= + And = BinAritOperator(and) // & + Or = BinAritOperator(or) // | + Xor = BinAritOperator(caret) // ^ + Shr = BinAritOperator(appOut) // >> + Shl = BinAritOperator(hdoc) // << + + AndArit = BinAritOperator(andAnd) // && + OrArit = BinAritOperator(orOr) // || + Comma = BinAritOperator(comma) // , + TernQuest = BinAritOperator(quest) // ? 
+ TernColon = BinAritOperator(colon) // : + + Assgn = BinAritOperator(assgn) // = + AddAssgn = BinAritOperator(addAssgn) // += + SubAssgn = BinAritOperator(subAssgn) // -= + MulAssgn = BinAritOperator(mulAssgn) // *= + QuoAssgn = BinAritOperator(quoAssgn) // /= + RemAssgn = BinAritOperator(remAssgn) // %= + AndAssgn = BinAritOperator(andAssgn) // &= + OrAssgn = BinAritOperator(orAssgn) // |= + XorAssgn = BinAritOperator(xorAssgn) // ^= + ShlAssgn = BinAritOperator(shlAssgn) // <<= + ShrAssgn = BinAritOperator(shrAssgn) // >>= +) + +type UnTestOperator token + +const ( + TsExists = UnTestOperator(tsExists) + iota // -e + TsRegFile // -f + TsDirect // -d + TsCharSp // -c + TsBlckSp // -b + TsNmPipe // -p + TsSocket // -S + TsSmbLink // -L + TsSticky // -k + TsGIDSet // -g + TsUIDSet // -u + TsGrpOwn // -G + TsUsrOwn // -O + TsModif // -N + TsRead // -r + TsWrite // -w + TsExec // -x + TsNoEmpty // -s + TsFdTerm // -t + TsEmpStr // -z + TsNempStr // -n + TsOptSet // -o + TsVarSet // -v + TsRefVar // -R + TsNot = UnTestOperator(exclMark) // ! +) + +type BinTestOperator token + +const ( + TsReMatch = BinTestOperator(tsReMatch) + iota // =~ + TsNewer // -nt + TsOlder // -ot + TsDevIno // -ef + TsEql // -eq + TsNeq // -ne + TsLeq // -le + TsGeq // -ge + TsLss // -lt + TsGtr // -gt + AndTest = BinTestOperator(andAnd) // && + OrTest = BinTestOperator(orOr) // || + TsMatchShort = BinTestOperator(assgn) // = + TsMatch = BinTestOperator(equal) // == + TsNoMatch = BinTestOperator(nequal) // != + TsBefore = BinTestOperator(rdrIn) // < + TsAfter = BinTestOperator(rdrOut) // > +) + +func (o RedirOperator) String() string { return token(o).String() } +func (o ProcOperator) String() string { return token(o).String() } +func (o GlobOperator) String() string { return token(o).String() } +func (o BinCmdOperator) String() string { return token(o).String() } +func (o CaseOperator) String() string { return token(o).String() } +func (o ParNamesOperator) String() string { return token(o).String() } +func (o ParExpOperator) String() string { return token(o).String() } +func (o UnAritOperator) String() string { return token(o).String() } +func (o BinAritOperator) String() string { return token(o).String() } +func (o UnTestOperator) String() string { return token(o).String() } +func (o BinTestOperator) String() string { return token(o).String() } diff --git a/vendor/mvdan.cc/sh/v3/syntax/walk.go b/vendor/mvdan.cc/sh/v3/syntax/walk.go new file mode 100644 index 00000000..be3f2090 --- /dev/null +++ b/vendor/mvdan.cc/sh/v3/syntax/walk.go @@ -0,0 +1,313 @@ +// Copyright (c) 2016, Daniel Martí +// See LICENSE for licensing information + +package syntax + +import ( + "fmt" + "io" + "reflect" +) + +func walkStmts(stmts []*Stmt, last []Comment, f func(Node) bool) { + for _, s := range stmts { + Walk(s, f) + } + for _, c := range last { + Walk(&c, f) + } +} + +func walkWords(words []*Word, f func(Node) bool) { + for _, w := range words { + Walk(w, f) + } +} + +// Walk traverses a syntax tree in depth-first order: It starts by calling +// f(node); node must not be nil. If f returns true, Walk invokes f +// recursively for each of the non-nil children of node, followed by +// f(nil). 
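A small sketch of how the Walk API described above is typically driven, assuming the parser from the same vendored package and an illustrative script; it prints every parameter expansion the traversal visits:

package main

import (
	"fmt"
	"strings"

	"mvdan.cc/sh/v3/syntax"
)

func main() {
	// Hypothetical input script, purely for illustration.
	src := `name="$1"; echo "hello $name" | tr a-z A-Z`
	file, err := syntax.NewParser().Parse(strings.NewReader(src), "")
	if err != nil {
		panic(err)
	}
	// The callback returns true to descend into the node's children.
	syntax.Walk(file, func(node syntax.Node) bool {
		if pe, ok := node.(*syntax.ParamExp); ok {
			fmt.Println("parameter expansion:", pe.Param.Value)
		}
		return true
	})
}
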
diff --git a/vendor/mvdan.cc/sh/v3/syntax/walk.go b/vendor/mvdan.cc/sh/v3/syntax/walk.go
new file mode 100644
index 00000000..be3f2090
--- /dev/null
+++ b/vendor/mvdan.cc/sh/v3/syntax/walk.go
@@ -0,0 +1,313 @@
+// Copyright (c) 2016, Daniel Martí
+// See LICENSE for licensing information
+
+package syntax
+
+import (
+	"fmt"
+	"io"
+	"reflect"
+)
+
+func walkStmts(stmts []*Stmt, last []Comment, f func(Node) bool) {
+	for _, s := range stmts {
+		Walk(s, f)
+	}
+	for _, c := range last {
+		Walk(&c, f)
+	}
+}
+
+func walkWords(words []*Word, f func(Node) bool) {
+	for _, w := range words {
+		Walk(w, f)
+	}
+}
+
+// Walk traverses a syntax tree in depth-first order: It starts by calling
+// f(node); node must not be nil. If f returns true, Walk invokes f
+// recursively for each of the non-nil children of node, followed by
+// f(nil).
+func Walk(node Node, f func(Node) bool) {
+	if !f(node) {
+		return
+	}
+
+	switch x := node.(type) {
+	case *File:
+		walkStmts(x.Stmts, x.Last, f)
+	case *Comment:
+	case *Stmt:
+		for _, c := range x.Comments {
+			if !x.End().After(c.Pos()) {
+				defer Walk(&c, f)
+				break
+			}
+			Walk(&c, f)
+		}
+		if x.Cmd != nil {
+			Walk(x.Cmd, f)
+		}
+		for _, r := range x.Redirs {
+			Walk(r, f)
+		}
+	case *Assign:
+		if x.Name != nil {
+			Walk(x.Name, f)
+		}
+		if x.Value != nil {
+			Walk(x.Value, f)
+		}
+		if x.Index != nil {
+			Walk(x.Index, f)
+		}
+		if x.Array != nil {
+			Walk(x.Array, f)
+		}
+	case *Redirect:
+		if x.N != nil {
+			Walk(x.N, f)
+		}
+		Walk(x.Word, f)
+		if x.Hdoc != nil {
+			Walk(x.Hdoc, f)
+		}
+	case *CallExpr:
+		for _, a := range x.Assigns {
+			Walk(a, f)
+		}
+		walkWords(x.Args, f)
+	case *Subshell:
+		walkStmts(x.Stmts, x.Last, f)
+	case *Block:
+		walkStmts(x.Stmts, x.Last, f)
+	case *IfClause:
+		walkStmts(x.Cond, x.CondLast, f)
+		walkStmts(x.Then, x.ThenLast, f)
+		if x.Else != nil {
+			Walk(x.Else, f)
+		}
+	case *WhileClause:
+		walkStmts(x.Cond, x.CondLast, f)
+		walkStmts(x.Do, x.DoLast, f)
+	case *ForClause:
+		Walk(x.Loop, f)
+		walkStmts(x.Do, x.DoLast, f)
+	case *WordIter:
+		Walk(x.Name, f)
+		walkWords(x.Items, f)
+	case *CStyleLoop:
+		if x.Init != nil {
+			Walk(x.Init, f)
+		}
+		if x.Cond != nil {
+			Walk(x.Cond, f)
+		}
+		if x.Post != nil {
+			Walk(x.Post, f)
+		}
+	case *BinaryCmd:
+		Walk(x.X, f)
+		Walk(x.Y, f)
+	case *FuncDecl:
+		Walk(x.Name, f)
+		Walk(x.Body, f)
+	case *Word:
+		for _, wp := range x.Parts {
+			Walk(wp, f)
+		}
+	case *Lit:
+	case *SglQuoted:
+	case *DblQuoted:
+		for _, wp := range x.Parts {
+			Walk(wp, f)
+		}
+	case *CmdSubst:
+		walkStmts(x.Stmts, x.Last, f)
+	case *ParamExp:
+		Walk(x.Param, f)
+		if x.Index != nil {
+			Walk(x.Index, f)
+		}
+		if x.Repl != nil {
+			if x.Repl.Orig != nil {
+				Walk(x.Repl.Orig, f)
+			}
+			if x.Repl.With != nil {
+				Walk(x.Repl.With, f)
+			}
+		}
+		if x.Exp != nil && x.Exp.Word != nil {
+			Walk(x.Exp.Word, f)
+		}
+	case *ArithmExp:
+		Walk(x.X, f)
+	case *ArithmCmd:
+		Walk(x.X, f)
+	case *BinaryArithm:
+		Walk(x.X, f)
+		Walk(x.Y, f)
+	case *BinaryTest:
+		Walk(x.X, f)
+		Walk(x.Y, f)
+	case *UnaryArithm:
+		Walk(x.X, f)
+	case *UnaryTest:
+		Walk(x.X, f)
+	case *ParenArithm:
+		Walk(x.X, f)
+	case *ParenTest:
+		Walk(x.X, f)
+	case *CaseClause:
+		Walk(x.Word, f)
+		for _, ci := range x.Items {
+			Walk(ci, f)
+		}
+		for _, c := range x.Last {
+			Walk(&c, f)
+		}
+	case *CaseItem:
+		for _, c := range x.Comments {
+			if c.Pos().After(x.Pos()) {
+				defer Walk(&c, f)
+				break
+			}
+			Walk(&c, f)
+		}
+		walkWords(x.Patterns, f)
+		walkStmts(x.Stmts, x.Last, f)
+	case *TestClause:
+		Walk(x.X, f)
+	case *DeclClause:
+		for _, a := range x.Args {
+			Walk(a, f)
+		}
+	case *ArrayExpr:
+		for _, el := range x.Elems {
+			Walk(el, f)
+		}
+		for _, c := range x.Last {
+			Walk(&c, f)
+		}
+	case *ArrayElem:
+		for _, c := range x.Comments {
+			if c.Pos().After(x.Pos()) {
+				defer Walk(&c, f)
+				break
+			}
+			Walk(&c, f)
+		}
+		if x.Index != nil {
+			Walk(x.Index, f)
+		}
+		if x.Value != nil {
+			Walk(x.Value, f)
+		}
+	case *ExtGlob:
+		Walk(x.Pattern, f)
+	case *ProcSubst:
+		walkStmts(x.Stmts, x.Last, f)
+	case *TimeClause:
+		if x.Stmt != nil {
+			Walk(x.Stmt, f)
+		}
+	case *CoprocClause:
+		if x.Name != nil {
+			Walk(x.Name, f)
+		}
+		Walk(x.Stmt, f)
+	case *LetClause:
+		for _, expr := range x.Exprs {
+			Walk(expr, f)
+		}
+	case *TestDecl:
+		Walk(x.Description, f)
+		Walk(x.Body, f)
+	default:
+		panic(fmt.Sprintf("syntax.Walk: unexpected node type %T", x))
+	}
+
+	f(nil)
+}
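Per the doc comment above, Walk visits nodes pre-order: f runs on every non-nil node, children are visited only while f keeps returning true, and f(nil) marks the end of a node's children. A minimal usage sketch, assuming the NewParser/Parse API from this same vendored package (illustrative only, not part of walk.go):

// Sketch: using Walk to list the first word of every simple command.
package main

import (
	"fmt"
	"strings"

	"mvdan.cc/sh/v3/syntax"
)

func main() {
	src := "if true; then echo hello; fi; date"
	f, err := syntax.NewParser().Parse(strings.NewReader(src), "example.sh")
	if err != nil {
		panic(err)
	}
	syntax.Walk(f, func(n syntax.Node) bool {
		if call, ok := n.(*syntax.CallExpr); ok && len(call.Args) > 0 {
			fmt.Println(call.Args[0].Lit()) // prints "true", "echo", "date"
		}
		return true // keep descending; returning false would skip the subtree
	})
}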
+
+// DebugPrint prints the provided syntax tree, spanning multiple lines and with
+// indentation. Can be useful to investigate the content of a syntax tree.
+func DebugPrint(w io.Writer, node Node) error {
+	p := debugPrinter{out: w}
+	p.print(reflect.ValueOf(node))
+	return p.err
+}
+
+type debugPrinter struct {
+	out io.Writer
+	level int
+	err error
+}
+
+func (p *debugPrinter) printf(format string, args ...interface{}) {
+	_, err := fmt.Fprintf(p.out, format, args...)
+	if err != nil && p.err == nil {
+		p.err = err
+	}
+}
+
+func (p *debugPrinter) newline() {
+	p.printf("\n")
+	for i := 0; i < p.level; i++ {
+		p.printf(". ")
+	}
+}
+
+func (p *debugPrinter) print(x reflect.Value) {
+	switch x.Kind() {
+	case reflect.Interface:
+		if x.IsNil() {
+			p.printf("nil")
+			return
+		}
+		p.print(x.Elem())
+	case reflect.Ptr:
+		if x.IsNil() {
+			p.printf("nil")
+			return
+		}
+		p.printf("*")
+		p.print(x.Elem())
+	case reflect.Slice:
+		p.printf("%s (len = %d) {", x.Type(), x.Len())
+		if x.Len() > 0 {
+			p.level++
+			p.newline()
+			for i := 0; i < x.Len(); i++ {
+				p.printf("%d: ", i)
+				p.print(x.Index(i))
+				if i == x.Len()-1 {
+					p.level--
+				}
+				p.newline()
+			}
+		}
+		p.printf("}")
+
+	case reflect.Struct:
+		if v, ok := x.Interface().(Pos); ok {
+			p.printf("%v:%v", v.Line(), v.Col())
+			return
+		}
+		t := x.Type()
+		p.printf("%s {", t)
+		p.level++
+		p.newline()
+		for i := 0; i < t.NumField(); i++ {
+			p.printf("%s: ", t.Field(i).Name)
+			p.print(x.Field(i))
+			if i == x.NumField()-1 {
+				p.level--
+			}
+			p.newline()
+		}
+		p.printf("}")
+	default:
+		p.printf("%#v", x.Interface())
+	}
+}
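DebugPrint, defined above, reflects over the tree and writes one node or field per line, indenting each nesting level with ". " and rendering positions as line:col. A companion sketch under the same assumptions as the examples above (illustrative only, not part of the vendored file):

// Sketch: dumping a parsed script's AST with DebugPrint for inspection.
package main

import (
	"os"
	"strings"

	"mvdan.cc/sh/v3/syntax"
)

func main() {
	f, err := syntax.NewParser().Parse(strings.NewReader("echo hi"), "example.sh")
	if err != nil {
		panic(err)
	}
	// Writes the indented, multi-line dump to stdout.
	if err := syntax.DebugPrint(os.Stdout, f); err != nil {
		panic(err)
	}
}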