diff --git a/go.mod b/go.mod index 14439978..08295ae9 100644 --- a/go.mod +++ b/go.mod @@ -6,16 +6,16 @@ require ( github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.14.9 github.com/aws/aws-sdk-go-v2/service/dynamodb v1.34.3 github.com/aws/smithy-go v1.20.3 - github.com/getkin/kin-openapi v0.123.0 github.com/go-openapi/spec v0.21.0 github.com/go-openapi/strfmt v0.23.0 github.com/go-openapi/validate v0.24.0 github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 - github.com/hashicorp/terraform-plugin-docs v0.18.0 + github.com/hashicorp/terraform-plugin-docs v0.19.4 github.com/hashicorp/terraform-plugin-sdk/v2 v2.34.0 ) require ( + github.com/BurntSushi/toml v1.2.1 // indirect github.com/Kunde21/markdownfmt/v3 v3.1.0 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.2.1 // indirect @@ -27,6 +27,7 @@ require ( github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.22.3 // indirect github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/bmatcuk/doublestar/v4 v4.6.1 // indirect github.com/cloudflare/circl v1.3.7 // indirect github.com/fatih/color v1.17.0 // indirect github.com/go-openapi/analysis v0.23.0 // indirect @@ -35,6 +36,7 @@ require ( github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect + github.com/go-test/deep v1.0.8 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/google/go-cmp v0.6.0 // indirect github.com/google/uuid v1.6.0 // indirect @@ -47,7 +49,7 @@ require ( github.com/hashicorp/go-plugin v1.6.1 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.7.0 // indirect - github.com/hashicorp/hc-install v0.6.4 // indirect + github.com/hashicorp/hc-install v0.7.0 // indirect github.com/hashicorp/hcl/v2 v2.21.0 // indirect 
github.com/hashicorp/logutils v1.0.0 // indirect github.com/hashicorp/terraform-exec v0.21.0 // indirect @@ -59,7 +61,6 @@ require ( github.com/hashicorp/yamux v0.1.1 // indirect github.com/huandu/xstrings v1.4.0 // indirect github.com/imdario/mergo v0.3.16 // indirect - github.com/invopop/yaml v0.2.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -70,20 +71,18 @@ require ( github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect - github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/oklog/run v1.1.0 // indirect github.com/oklog/ulid v1.3.1 // indirect - github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/posener/complete v1.2.3 // indirect - github.com/russross/blackfriday v1.6.0 // indirect github.com/shopspring/decimal v1.3.1 // indirect github.com/spf13/cast v1.6.0 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - github.com/yuin/goldmark v1.6.0 // indirect + github.com/yuin/goldmark v1.7.1 // indirect github.com/yuin/goldmark-meta v1.1.0 // indirect github.com/zclconf/go-cty v1.14.4 // indirect + go.abhg.dev/goldmark/frontmatter v0.2.0 // indirect go.mongodb.org/mongo-driver v1.16.0 // indirect golang.org/x/crypto v0.25.0 // indirect golang.org/x/exp v0.0.0-20230626212559-97b1e661b5df // indirect diff --git a/go.sum b/go.sum index ec63008a..c5491864 100644 --- a/go.sum +++ b/go.sum @@ -1,5 +1,7 @@ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= +github.com/BurntSushi/toml v1.2.1 h1:9F2/+DoOYIOksmaJFPw1tGFy1eDnIJXg+UHjuD8lTak= +github.com/BurntSushi/toml v1.2.1/go.mod 
h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/Kunde21/markdownfmt/v3 v3.1.0 h1:KiZu9LKs+wFFBQKhrZJrFZwtLnCCWJahL+S+E/3VnM0= github.com/Kunde21/markdownfmt/v3 v3.1.0/go.mod h1:tPXN1RTyOzJwhfHoon9wUr4HGYmWgVxSQN6VBJDkrVc= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= @@ -34,6 +36,8 @@ github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE= github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bmatcuk/doublestar/v4 v4.6.1 h1:FH9SifrbvJhnlQpztAx++wlkk70QBf0iBWDwNy7PA4I= +github.com/bmatcuk/doublestar/v4 v4.6.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bufbuild/protocompile v0.4.0 h1:LbFKd2XowZvQ/kajzguUp2DC9UEIQhIq77fZZlaQsNA= github.com/bufbuild/protocompile v0.4.0/go.mod h1:3v93+mbWn/v3xzN+31nwkJfrEpAUwp+BagBSZWx+TP8= github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= @@ -50,8 +54,6 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= -github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8= -github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= @@ -116,8 
+118,8 @@ github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/C github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.6.4 h1:QLqlM56/+SIIGvGcfFiwMY3z5WGXT066suo/v9Km8e0= -github.com/hashicorp/hc-install v0.6.4/go.mod h1:05LWLy8TD842OtgcfBbOT0WMoInBMUSHjmDx10zuBIA= +github.com/hashicorp/hc-install v0.7.0 h1:Uu9edVqjKQxxuD28mR5TikkKDd/p55S8vzPC1659aBk= +github.com/hashicorp/hc-install v0.7.0/go.mod h1:ELmmzZlGnEcqoUMKUuykHaPCIR1sYLYX+KSggWSKZuA= github.com/hashicorp/hcl/v2 v2.21.0 h1:lve4q/o/2rqwYOgUg3y3V2YPyD1/zkCLGjIV74Jit14= github.com/hashicorp/hcl/v2 v2.21.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= @@ -126,8 +128,8 @@ github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVW github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg= github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec= github.com/hashicorp/terraform-json v0.22.1/go.mod h1:JbWSQCLFSXFFhg42T7l9iJwdGXBYV8fmmD6o/ML4p3A= -github.com/hashicorp/terraform-plugin-docs v0.18.0 h1:2bINhzXc+yDeAcafurshCrIjtdu1XHn9zZ3ISuEhgpk= -github.com/hashicorp/terraform-plugin-docs v0.18.0/go.mod h1:iIUfaJpdUmpi+rI42Kgq+63jAjI8aZVTyxp3Bvk9Hg8= +github.com/hashicorp/terraform-plugin-docs v0.19.4 h1:G3Bgo7J22OMtegIgn8Cd/CaSeyEljqjH3G39w28JK4c= +github.com/hashicorp/terraform-plugin-docs v0.19.4/go.mod h1:4pLASsatTmRynVzsjEhbXZ6s7xBlUw/2Kt0zfrq8HxA= github.com/hashicorp/terraform-plugin-go v0.23.0 h1:AALVuU1gD1kPb48aPQUjug9Ir/125t+AAurhqphJ2Co= github.com/hashicorp/terraform-plugin-go v0.23.0/go.mod h1:1E3Cr9h2vMlahWMbsSEcNrOCxovCZhOOIXjFHbjc/lQ= 
github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= @@ -146,8 +148,6 @@ github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= -github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= -github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jhump/protoreflect v1.15.1 h1:HUMERORf3I3ZdX05WaQ6MIpd/NJ434hTp5YiKgfCL6c= @@ -188,14 +188,10 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= -github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA= github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= -github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= -github.com/perimeterx/marshmallow 
v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -204,8 +200,6 @@ github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXq github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= -github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= -github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= @@ -223,8 +217,6 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0= -github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI= github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod 
h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= @@ -235,14 +227,16 @@ github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yuin/goldmark v1.6.0 h1:boZcn2GTjpsynOsC0iJHnBWa4Bi0qzfJjthwauItG68= -github.com/yuin/goldmark v1.6.0/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +github.com/yuin/goldmark v1.7.1 h1:3bajkSilaCbjdKVsKdZjZCLBNPL9pYzrCakKaf4U49U= +github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRlaOzY1E= github.com/yuin/goldmark-meta v1.1.0 h1:pWw+JLHGZe8Rk0EGsMVssiNb/AaPMHfSRszZeUeiOUc= github.com/yuin/goldmark-meta v1.1.0/go.mod h1:U4spWENafuA7Zyg+Lj5RqK/MF+ovMYtBvXi1lBb2VP0= github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940 h1:4r45xpDWB6ZMSMNJFMOjqrGHynW3DIBuR2H9j0ug+Mo= github.com/zclconf/go-cty-debug v0.0.0-20240509010212-0d6042c53940/go.mod h1:CmBdvvj3nqzfzJ6nTCIwDTPZ56aVGvDrmztiO5g3qrM= +go.abhg.dev/goldmark/frontmatter v0.2.0 h1:P8kPG0YkL12+aYk2yU3xHv4tcXzeVnN+gU0tJ5JnxRw= +go.abhg.dev/goldmark/frontmatter v0.2.0/go.mod h1:XqrEkZuM57djk7zrlRUB02x8I5J0px76YjkOzhB4YlU= go.mongodb.org/mongo-driver v1.16.0 h1:tpRsfBJMROVHKpdGyc1BBEzzjDUWjItxbVSZ8Ls4BQ4= go.mongodb.org/mongo-driver v1.16.0/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= @@ -320,6 +314,5 @@ gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRN gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 
v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore new file mode 100644 index 00000000..fe79e3ad --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/.gitignore @@ -0,0 +1,2 @@ +/toml.test +/toml-test diff --git a/vendor/github.com/perimeterx/marshmallow/LICENSE b/vendor/github.com/BurntSushi/toml/COPYING similarity index 86% rename from vendor/github.com/perimeterx/marshmallow/LICENSE rename to vendor/github.com/BurntSushi/toml/COPYING index 8ffe8691..01b57432 100644 --- a/vendor/github.com/perimeterx/marshmallow/LICENSE +++ b/vendor/github.com/BurntSushi/toml/COPYING @@ -1,6 +1,6 @@ -MIT License +The MIT License (MIT) -Copyright (c) 2022 PerimeterX +Copyright (c) 2013 TOML authors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -9,13 +9,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md new file mode 100644 index 00000000..3651cfa9 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/README.md @@ -0,0 +1,120 @@ +TOML stands for Tom's Obvious, Minimal Language. This Go package provides a +reflection interface similar to Go's standard library `json` and `xml` packages. + +Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0). + +Documentation: https://godocs.io/github.com/BurntSushi/toml + +See the [releases page](https://github.com/BurntSushi/toml/releases) for a +changelog; this information is also in the git tag annotations (e.g. `git show +v0.4.0`). 
+ +This library requires Go 1.13 or newer; add it to your go.mod with: + + % go get github.com/BurntSushi/toml@latest + +It also comes with a TOML validator CLI tool: + + % go install github.com/BurntSushi/toml/cmd/tomlv@latest + % tomlv some-toml-file.toml + +### Examples +For the simplest example, consider some TOML file as just a list of keys and +values: + +```toml +Age = 25 +Cats = [ "Cauchy", "Plato" ] +Pi = 3.14 +Perfection = [ 6, 28, 496, 8128 ] +DOB = 1987-07-05T05:45:00Z +``` + +Which can be decoded with: + +```go +type Config struct { + Age int + Cats []string + Pi float64 + Perfection []int + DOB time.Time +} + +var conf Config +_, err := toml.Decode(tomlData, &conf) +``` + +You can also use struct tags if your struct field name doesn't map to a TOML key +value directly: + +```toml +some_key_NAME = "wat" +``` + +```go +type TOML struct { + ObscureKey string `toml:"some_key_NAME"` +} +``` + +Beware that like other decoders **only exported fields** are considered when +encoding and decoding; private fields are silently ignored. + +### Using the `Marshaler` and `encoding.TextUnmarshaler` interfaces +Here's an example that automatically parses values in a `mail.Address`: + +```toml +contacts = [ + "Donald Duck ", + "Scrooge McDuck ", +] +``` + +Can be decoded with: + +```go +// Create address type which satisfies the encoding.TextUnmarshaler interface. +type address struct { + *mail.Address +} + +func (a *address) UnmarshalText(text []byte) error { + var err error + a.Address, err = mail.ParseAddress(string(text)) + return err +} + +// Decode it. 
+func decode() { + blob := ` + contacts = [ + "Donald Duck ", + "Scrooge McDuck ", + ] + ` + + var contacts struct { + Contacts []address + } + + _, err := toml.Decode(blob, &contacts) + if err != nil { + log.Fatal(err) + } + + for _, c := range contacts.Contacts { + fmt.Printf("%#v\n", c.Address) + } + + // Output: + // &mail.Address{Name:"Donald Duck", Address:"donald@duckburg.com"} + // &mail.Address{Name:"Scrooge McDuck", Address:"scrooge@duckburg.com"} +} +``` + +To target TOML specifically you can implement `UnmarshalTOML` TOML interface in +a similar way. + +### More complex usage +See the [`_example/`](/_example) directory for a more complex example. diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go new file mode 100644 index 00000000..0ca1dc4f --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -0,0 +1,602 @@ +package toml + +import ( + "bytes" + "encoding" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "math" + "os" + "reflect" + "strconv" + "strings" + "time" +) + +// Unmarshaler is the interface implemented by objects that can unmarshal a +// TOML description of themselves. +type Unmarshaler interface { + UnmarshalTOML(interface{}) error +} + +// Unmarshal decodes the contents of data in TOML format into a pointer v. +// +// See [Decoder] for a description of the decoding process. +func Unmarshal(data []byte, v interface{}) error { + _, err := NewDecoder(bytes.NewReader(data)).Decode(v) + return err +} + +// Decode the TOML data in to the pointer v. +// +// See [Decoder] for a description of the decoding process. +func Decode(data string, v interface{}) (MetaData, error) { + return NewDecoder(strings.NewReader(data)).Decode(v) +} + +// DecodeFile reads the contents of a file and decodes it with [Decode]. 
+func DecodeFile(path string, v interface{}) (MetaData, error) { + fp, err := os.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} + +// Primitive is a TOML value that hasn't been decoded into a Go value. +// +// This type can be used for any value, which will cause decoding to be delayed. +// You can use [PrimitiveDecode] to "manually" decode these values. +// +// NOTE: The underlying representation of a `Primitive` value is subject to +// change. Do not rely on it. +// +// NOTE: Primitive values are still parsed, so using them will only avoid the +// overhead of reflection. They can be useful when you don't know the exact type +// of TOML data until runtime. +type Primitive struct { + undecoded interface{} + context Key +} + +// The significand precision for float32 and float64 is 24 and 53 bits; this is +// the range a natural number can be stored in a float without loss of data. +const ( + maxSafeFloat32Int = 16777215 // 2^24-1 + maxSafeFloat64Int = int64(9007199254740991) // 2^53-1 +) + +// Decoder decodes TOML data. +// +// TOML tables correspond to Go structs or maps; they can be used +// interchangeably, but structs offer better type safety. +// +// TOML table arrays correspond to either a slice of structs or a slice of maps. +// +// TOML datetimes correspond to [time.Time]. Local datetimes are parsed in the +// local timezone. +// +// [time.Duration] types are treated as nanoseconds if the TOML value is an +// integer, or they're parsed with time.ParseDuration() if they're strings. +// +// All other TOML types (float, string, int, bool and array) correspond to the +// obvious Go types. +// +// An exception to the above rules is if a type implements the TextUnmarshaler +// interface, in which case any primitive TOML value (floats, strings, integers, +// booleans, datetimes) will be converted to a []byte and given to the value's +// UnmarshalText method. 
See the Unmarshaler example for a demonstration with +// email addresses. +// +// ### Key mapping +// +// TOML keys can map to either keys in a Go map or field names in a Go struct. +// The special `toml` struct tag can be used to map TOML keys to struct fields +// that don't match the key name exactly (see the example). A case insensitive +// match to struct names will be tried if an exact match can't be found. +// +// The mapping between TOML values and Go values is loose. That is, there may +// exist TOML values that cannot be placed into your representation, and there +// may be parts of your representation that do not correspond to TOML values. +// This loose mapping can be made stricter by using the IsDefined and/or +// Undecoded methods on the MetaData returned. +// +// This decoder does not handle cyclic types. Decode will not terminate if a +// cyclic type is passed. +type Decoder struct { + r io.Reader +} + +// NewDecoder creates a new Decoder. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{r: r} +} + +var ( + unmarshalToml = reflect.TypeOf((*Unmarshaler)(nil)).Elem() + unmarshalText = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() + primitiveType = reflect.TypeOf((*Primitive)(nil)).Elem() +) + +// Decode TOML data in to the pointer `v`. +func (dec *Decoder) Decode(v interface{}) (MetaData, error) { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr { + s := "%q" + if reflect.TypeOf(v) == nil { + s = "%v" + } + + return MetaData{}, fmt.Errorf("toml: cannot decode to non-pointer "+s, reflect.TypeOf(v)) + } + if rv.IsNil() { + return MetaData{}, fmt.Errorf("toml: cannot decode to nil value of %q", reflect.TypeOf(v)) + } + + // Check if this is a supported type: struct, map, interface{}, or something + // that implements UnmarshalTOML or UnmarshalText. 
+ rv = indirect(rv) + rt := rv.Type() + if rv.Kind() != reflect.Struct && rv.Kind() != reflect.Map && + !(rv.Kind() == reflect.Interface && rv.NumMethod() == 0) && + !rt.Implements(unmarshalToml) && !rt.Implements(unmarshalText) { + return MetaData{}, fmt.Errorf("toml: cannot decode to type %s", rt) + } + + // TODO: parser should read from io.Reader? Or at the very least, make it + // read from []byte rather than string + data, err := ioutil.ReadAll(dec.r) + if err != nil { + return MetaData{}, err + } + + p, err := parse(string(data)) + if err != nil { + return MetaData{}, err + } + + md := MetaData{ + mapping: p.mapping, + keyInfo: p.keyInfo, + keys: p.ordered, + decoded: make(map[string]struct{}, len(p.ordered)), + context: nil, + data: data, + } + return md, md.unify(p.mapping, rv) +} + +// PrimitiveDecode is just like the other Decode* functions, except it decodes a +// TOML value that has already been parsed. Valid primitive values can *only* be +// obtained from values filled by the decoder functions, including this method. +// (i.e., v may contain more [Primitive] values.) +// +// Meta data for primitive values is included in the meta data returned by the +// Decode* functions with one exception: keys returned by the Undecoded method +// will only reflect keys that were decoded. Namely, any keys hidden behind a +// Primitive will be considered undecoded. Executing this method will update the +// undecoded keys in the meta data. (See the example.) +func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { + md.context = primValue.context + defer func() { md.context = nil }() + return md.unify(primValue.undecoded, rvalue(v)) +} + +// unify performs a sort of type unification based on the structure of `rv`, +// which is the client representation. +// +// Any type mismatch produces an error. Finding a type that we don't know +// how to handle produces an unsupported type error. 
+func (md *MetaData) unify(data interface{}, rv reflect.Value) error { + // Special case. Look for a `Primitive` value. + // TODO: #76 would make this superfluous after implemented. + if rv.Type() == primitiveType { + // Save the undecoded data and the key context into the primitive + // value. + context := make(Key, len(md.context)) + copy(context, md.context) + rv.Set(reflect.ValueOf(Primitive{ + undecoded: data, + context: context, + })) + return nil + } + + rvi := rv.Interface() + if v, ok := rvi.(Unmarshaler); ok { + return v.UnmarshalTOML(data) + } + if v, ok := rvi.(encoding.TextUnmarshaler); ok { + return md.unifyText(data, v) + } + + // TODO: + // The behavior here is incorrect whenever a Go type satisfies the + // encoding.TextUnmarshaler interface but also corresponds to a TOML hash or + // array. In particular, the unmarshaler should only be applied to primitive + // TOML values. But at this point, it will be applied to all kinds of values + // and produce an incorrect error whenever those values are hashes or arrays + // (including arrays of tables). + + k := rv.Kind() + + if k >= reflect.Int && k <= reflect.Uint64 { + return md.unifyInt(data, rv) + } + switch k { + case reflect.Ptr: + elem := reflect.New(rv.Type().Elem()) + err := md.unify(data, reflect.Indirect(elem)) + if err != nil { + return err + } + rv.Set(elem) + return nil + case reflect.Struct: + return md.unifyStruct(data, rv) + case reflect.Map: + return md.unifyMap(data, rv) + case reflect.Array: + return md.unifyArray(data, rv) + case reflect.Slice: + return md.unifySlice(data, rv) + case reflect.String: + return md.unifyString(data, rv) + case reflect.Bool: + return md.unifyBool(data, rv) + case reflect.Interface: + if rv.NumMethod() > 0 { // Only support empty interfaces are supported. 
+ return md.e("unsupported type %s", rv.Type()) + } + return md.unifyAnything(data, rv) + case reflect.Float32, reflect.Float64: + return md.unifyFloat64(data, rv) + } + return md.e("unsupported type %s", rv.Kind()) +} + +func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if mapping == nil { + return nil + } + return md.e("type mismatch for %s: expected table but found %T", + rv.Type().String(), mapping) + } + + for key, datum := range tmap { + var f *field + fields := cachedTypeFields(rv.Type()) + for i := range fields { + ff := &fields[i] + if ff.name == key { + f = ff + break + } + if f == nil && strings.EqualFold(ff.name, key) { + f = ff + } + } + if f != nil { + subv := rv + for _, i := range f.index { + subv = indirect(subv.Field(i)) + } + + if isUnifiable(subv) { + md.decoded[md.context.add(key).String()] = struct{}{} + md.context = append(md.context, key) + + err := md.unify(datum, subv) + if err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + } else if f.name != "" { + return md.e("cannot write unexported field %s.%s", rv.Type().String(), f.name) + } + } + } + return nil +} + +func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { + keyType := rv.Type().Key().Kind() + if keyType != reflect.String && keyType != reflect.Interface { + return fmt.Errorf("toml: cannot decode to a map with non-string key type (%s in %q)", + keyType, rv.Type()) + } + + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if tmap == nil { + return nil + } + return md.badtype("map", mapping) + } + if rv.IsNil() { + rv.Set(reflect.MakeMap(rv.Type())) + } + for k, v := range tmap { + md.decoded[md.context.add(k).String()] = struct{}{} + md.context = append(md.context, k) + + rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) + + err := md.unify(v, indirect(rvval)) + if err != nil { + return err + } + md.context = md.context[0 : 
len(md.context)-1] + + rvkey := indirect(reflect.New(rv.Type().Key())) + + switch keyType { + case reflect.Interface: + rvkey.Set(reflect.ValueOf(k)) + case reflect.String: + rvkey.SetString(k) + } + + rv.SetMapIndex(rvkey, rvval) + } + return nil +} + +func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return md.badtype("slice", data) + } + if l := datav.Len(); l != rv.Len() { + return md.e("expected array length %d; got TOML array of length %d", rv.Len(), l) + } + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return md.badtype("slice", data) + } + n := datav.Len() + if rv.IsNil() || rv.Cap() < n { + rv.Set(reflect.MakeSlice(rv.Type(), n, n)) + } + rv.SetLen(n) + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { + l := data.Len() + for i := 0; i < l; i++ { + err := md.unify(data.Index(i).Interface(), indirect(rv.Index(i))) + if err != nil { + return err + } + } + return nil +} + +func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { + _, ok := rv.Interface().(json.Number) + if ok { + if i, ok := data.(int64); ok { + rv.SetString(strconv.FormatInt(i, 10)) + } else if f, ok := data.(float64); ok { + rv.SetString(strconv.FormatFloat(f, 'f', -1, 64)) + } else { + return md.badtype("string", data) + } + return nil + } + + if s, ok := data.(string); ok { + rv.SetString(s) + return nil + } + return md.badtype("string", data) +} + +func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { + rvk := rv.Kind() + + if num, ok := data.(float64); ok { + switch rvk { + case reflect.Float32: + if num < -math.MaxFloat32 || num > math.MaxFloat32 { + return 
md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + fallthrough + case reflect.Float64: + rv.SetFloat(num) + default: + panic("bug") + } + return nil + } + + if num, ok := data.(int64); ok { + if (rvk == reflect.Float32 && (num < -maxSafeFloat32Int || num > maxSafeFloat32Int)) || + (rvk == reflect.Float64 && (num < -maxSafeFloat64Int || num > maxSafeFloat64Int)) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetFloat(float64(num)) + return nil + } + + return md.badtype("float", data) +} + +func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { + _, ok := rv.Interface().(time.Duration) + if ok { + // Parse as string duration, and fall back to regular integer parsing + // (as nanosecond) if this is not a string. + if s, ok := data.(string); ok { + dur, err := time.ParseDuration(s) + if err != nil { + return md.parseErr(errParseDuration{s}) + } + rv.SetInt(int64(dur)) + return nil + } + } + + num, ok := data.(int64) + if !ok { + return md.badtype("integer", data) + } + + rvk := rv.Kind() + switch { + case rvk >= reflect.Int && rvk <= reflect.Int64: + if (rvk == reflect.Int8 && (num < math.MinInt8 || num > math.MaxInt8)) || + (rvk == reflect.Int16 && (num < math.MinInt16 || num > math.MaxInt16)) || + (rvk == reflect.Int32 && (num < math.MinInt32 || num > math.MaxInt32)) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetInt(num) + case rvk >= reflect.Uint && rvk <= reflect.Uint64: + unum := uint64(num) + if rvk == reflect.Uint8 && (num < 0 || unum > math.MaxUint8) || + rvk == reflect.Uint16 && (num < 0 || unum > math.MaxUint16) || + rvk == reflect.Uint32 && (num < 0 || unum > math.MaxUint32) { + return md.parseErr(errParseRange{i: num, size: rvk.String()}) + } + rv.SetUint(unum) + default: + panic("unreachable") + } + return nil +} + +func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { + if b, ok := data.(bool); ok { + rv.SetBool(b) + return nil + } + return 
md.badtype("boolean", data) +} + +func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { + rv.Set(reflect.ValueOf(data)) + return nil +} + +func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error { + var s string + switch sdata := data.(type) { + case Marshaler: + text, err := sdata.MarshalTOML() + if err != nil { + return err + } + s = string(text) + case encoding.TextMarshaler: + text, err := sdata.MarshalText() + if err != nil { + return err + } + s = string(text) + case fmt.Stringer: + s = sdata.String() + case string: + s = sdata + case bool: + s = fmt.Sprintf("%v", sdata) + case int64: + s = fmt.Sprintf("%d", sdata) + case float64: + s = fmt.Sprintf("%f", sdata) + default: + return md.badtype("primitive (string-like)", data) + } + if err := v.UnmarshalText([]byte(s)); err != nil { + return err + } + return nil +} + +func (md *MetaData) badtype(dst string, data interface{}) error { + return md.e("incompatible types: TOML value has type %T; destination has type %s", data, dst) +} + +func (md *MetaData) parseErr(err error) error { + k := md.context.String() + return ParseError{ + LastKey: k, + Position: md.keyInfo[k].pos, + Line: md.keyInfo[k].pos.Line, + err: err, + input: string(md.data), + } +} + +func (md *MetaData) e(format string, args ...interface{}) error { + f := "toml: " + if len(md.context) > 0 { + f = fmt.Sprintf("toml: (last key %q): ", md.context) + p := md.keyInfo[md.context.String()].pos + if p.Line > 0 { + f = fmt.Sprintf("toml: line %d (last key %q): ", p.Line, md.context) + } + } + return fmt.Errorf(f+format, args...) +} + +// rvalue returns a reflect.Value of `v`. All pointers are resolved. +func rvalue(v interface{}) reflect.Value { + return indirect(reflect.ValueOf(v)) +} + +// indirect returns the value pointed to by a pointer. +// +// Pointers are followed until the value is not a pointer. New values are +// allocated for each nil pointer. 
+// +// An exception to this rule is if the value satisfies an interface of interest +// to us (like encoding.TextUnmarshaler). +func indirect(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Ptr { + if v.CanSet() { + pv := v.Addr() + pvi := pv.Interface() + if _, ok := pvi.(encoding.TextUnmarshaler); ok { + return pv + } + if _, ok := pvi.(Unmarshaler); ok { + return pv + } + } + return v + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + return indirect(reflect.Indirect(v)) +} + +func isUnifiable(rv reflect.Value) bool { + if rv.CanSet() { + return true + } + rvi := rv.Interface() + if _, ok := rvi.(encoding.TextUnmarshaler); ok { + return true + } + if _, ok := rvi.(Unmarshaler); ok { + return true + } + return false +} diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go new file mode 100644 index 00000000..086d0b68 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode_go116.go @@ -0,0 +1,19 @@ +//go:build go1.16 +// +build go1.16 + +package toml + +import ( + "io/fs" +) + +// DecodeFS reads the contents of a file from [fs.FS] and decodes it with +// [Decode]. +func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) { + fp, err := fsys.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} diff --git a/vendor/github.com/BurntSushi/toml/deprecated.go b/vendor/github.com/BurntSushi/toml/deprecated.go new file mode 100644 index 00000000..c6af3f23 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/deprecated.go @@ -0,0 +1,21 @@ +package toml + +import ( + "encoding" + "io" +) + +// Deprecated: use encoding.TextMarshaler +type TextMarshaler encoding.TextMarshaler + +// Deprecated: use encoding.TextUnmarshaler +type TextUnmarshaler encoding.TextUnmarshaler + +// Deprecated: use MetaData.PrimitiveDecode. 
+func PrimitiveDecode(primValue Primitive, v interface{}) error { + md := MetaData{decoded: make(map[string]struct{})} + return md.unify(primValue.undecoded, rvalue(v)) +} + +// Deprecated: use NewDecoder(reader).Decode(&value). +func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { return NewDecoder(r).Decode(v) } diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go new file mode 100644 index 00000000..81a7c0fe --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/doc.go @@ -0,0 +1,11 @@ +// Package toml implements decoding and encoding of TOML files. +// +// This package supports TOML v1.0.0, as specified at https://toml.io +// +// There is also support for delaying decoding with the Primitive type, and +// querying the set of keys in a TOML document with the MetaData type. +// +// The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator, +// and can be used to verify if TOML document is valid. It can also be used to +// print the type of each key. 
+package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go new file mode 100644 index 00000000..930e1d52 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -0,0 +1,750 @@ +package toml + +import ( + "bufio" + "encoding" + "encoding/json" + "errors" + "fmt" + "io" + "math" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/BurntSushi/toml/internal" +) + +type tomlEncodeError struct{ error } + +var ( + errArrayNilElement = errors.New("toml: cannot encode array with nil element") + errNonString = errors.New("toml: cannot encode a map with non-string key type") + errNoKey = errors.New("toml: top-level values must be Go maps or structs") + errAnything = errors.New("") // used in testing +) + +var dblQuotedReplacer = strings.NewReplacer( + "\"", "\\\"", + "\\", "\\\\", + "\x00", `\u0000`, + "\x01", `\u0001`, + "\x02", `\u0002`, + "\x03", `\u0003`, + "\x04", `\u0004`, + "\x05", `\u0005`, + "\x06", `\u0006`, + "\x07", `\u0007`, + "\b", `\b`, + "\t", `\t`, + "\n", `\n`, + "\x0b", `\u000b`, + "\f", `\f`, + "\r", `\r`, + "\x0e", `\u000e`, + "\x0f", `\u000f`, + "\x10", `\u0010`, + "\x11", `\u0011`, + "\x12", `\u0012`, + "\x13", `\u0013`, + "\x14", `\u0014`, + "\x15", `\u0015`, + "\x16", `\u0016`, + "\x17", `\u0017`, + "\x18", `\u0018`, + "\x19", `\u0019`, + "\x1a", `\u001a`, + "\x1b", `\u001b`, + "\x1c", `\u001c`, + "\x1d", `\u001d`, + "\x1e", `\u001e`, + "\x1f", `\u001f`, + "\x7f", `\u007f`, +) + +var ( + marshalToml = reflect.TypeOf((*Marshaler)(nil)).Elem() + marshalText = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() + timeType = reflect.TypeOf((*time.Time)(nil)).Elem() +) + +// Marshaler is the interface implemented by types that can marshal themselves +// into valid TOML. +type Marshaler interface { + MarshalTOML() ([]byte, error) +} + +// Encoder encodes a Go to a TOML document. 
+// +// The mapping between Go values and TOML values should be precisely the same as +// for [Decode]. +// +// time.Time is encoded as a RFC 3339 string, and time.Duration as its string +// representation. +// +// The [Marshaler] and [encoding.TextMarshaler] interfaces are supported to +// encoding the value as custom TOML. +// +// If you want to write arbitrary binary data then you will need to use +// something like base64 since TOML does not have any binary types. +// +// When encoding TOML hashes (Go maps or structs), keys without any sub-hashes +// are encoded first. +// +// Go maps will be sorted alphabetically by key for deterministic output. +// +// The toml struct tag can be used to provide the key name; if omitted the +// struct field name will be used. If the "omitempty" option is present the +// following value will be skipped: +// +// - arrays, slices, maps, and string with len of 0 +// - struct with all zero values +// - bool false +// +// If omitzero is given all int and float types with a value of 0 will be +// skipped. +// +// Encoding Go values without a corresponding TOML representation will return an +// error. Examples of this includes maps with non-string keys, slices with nil +// elements, embedded non-struct types, and nested slices containing maps or +// structs. (e.g. [][]map[string]string is not allowed but []map[string]string +// is okay, as is []map[string][]string). +// +// NOTE: only exported keys are encoded due to the use of reflection. Unexported +// keys are silently discarded. +type Encoder struct { + // String to use for a single indentation level; default is two spaces. + Indent string + + w *bufio.Writer + hasWritten bool // written any output to w yet? +} + +// NewEncoder create a new Encoder. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: bufio.NewWriter(w), + Indent: " ", + } +} + +// Encode writes a TOML representation of the Go value to the [Encoder]'s writer. 
+// +// An error is returned if the value given cannot be encoded to a valid TOML +// document. +func (enc *Encoder) Encode(v interface{}) error { + rv := eindirect(reflect.ValueOf(v)) + if err := enc.safeEncode(Key([]string{}), rv); err != nil { + return err + } + return enc.w.Flush() +} + +func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { + defer func() { + if r := recover(); r != nil { + if terr, ok := r.(tomlEncodeError); ok { + err = terr.error + return + } + panic(r) + } + }() + enc.encode(key, rv) + return nil +} + +func (enc *Encoder) encode(key Key, rv reflect.Value) { + // If we can marshal the type to text, then we use that. This prevents the + // encoder for handling these types as generic structs (or whatever the + // underlying type of a TextMarshaler is). + switch { + case isMarshaler(rv): + enc.writeKeyValue(key, rv, false) + return + case rv.Type() == primitiveType: // TODO: #76 would make this superfluous after implemented. + enc.encode(key, reflect.ValueOf(rv.Interface().(Primitive).undecoded)) + return + } + + k := rv.Kind() + switch k { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64, + reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: + enc.writeKeyValue(key, rv, false) + case reflect.Array, reflect.Slice: + if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { + enc.eArrayOfTables(key, rv) + } else { + enc.writeKeyValue(key, rv, false) + } + case reflect.Interface: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Map: + if rv.IsNil() { + return + } + enc.eTable(key, rv) + case reflect.Ptr: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Struct: + enc.eTable(key, rv) + default: + encPanic(fmt.Errorf("unsupported type for key '%s': %s", key, k)) + } +} + +// eElement encodes any value that can be an array element. 
+func (enc *Encoder) eElement(rv reflect.Value) { + switch v := rv.Interface().(type) { + case time.Time: // Using TextMarshaler adds extra quotes, which we don't want. + format := time.RFC3339Nano + switch v.Location() { + case internal.LocalDatetime: + format = "2006-01-02T15:04:05.999999999" + case internal.LocalDate: + format = "2006-01-02" + case internal.LocalTime: + format = "15:04:05.999999999" + } + switch v.Location() { + default: + enc.wf(v.Format(format)) + case internal.LocalDatetime, internal.LocalDate, internal.LocalTime: + enc.wf(v.In(time.UTC).Format(format)) + } + return + case Marshaler: + s, err := v.MarshalTOML() + if err != nil { + encPanic(err) + } + if s == nil { + encPanic(errors.New("MarshalTOML returned nil and no error")) + } + enc.w.Write(s) + return + case encoding.TextMarshaler: + s, err := v.MarshalText() + if err != nil { + encPanic(err) + } + if s == nil { + encPanic(errors.New("MarshalText returned nil and no error")) + } + enc.writeQuoted(string(s)) + return + case time.Duration: + enc.writeQuoted(v.String()) + return + case json.Number: + n, _ := rv.Interface().(json.Number) + + if n == "" { /// Useful zero value. 
+ enc.w.WriteByte('0') + return + } else if v, err := n.Int64(); err == nil { + enc.eElement(reflect.ValueOf(v)) + return + } else if v, err := n.Float64(); err == nil { + enc.eElement(reflect.ValueOf(v)) + return + } + encPanic(fmt.Errorf("unable to convert %q to int64 or float64", n)) + } + + switch rv.Kind() { + case reflect.Ptr: + enc.eElement(rv.Elem()) + return + case reflect.String: + enc.writeQuoted(rv.String()) + case reflect.Bool: + enc.wf(strconv.FormatBool(rv.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + enc.wf(strconv.FormatInt(rv.Int(), 10)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + enc.wf(strconv.FormatUint(rv.Uint(), 10)) + case reflect.Float32: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 32))) + } + case reflect.Float64: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 64))) + } + case reflect.Array, reflect.Slice: + enc.eArrayOrSliceElement(rv) + case reflect.Struct: + enc.eStruct(nil, rv, true) + case reflect.Map: + enc.eMap(nil, rv, true) + case reflect.Interface: + enc.eElement(rv.Elem()) + default: + encPanic(fmt.Errorf("unexpected type: %T", rv.Interface())) + } +} + +// By the TOML spec, all floats must have a decimal with at least one number on +// either side. 
+func floatAddDecimal(fstr string) string { + if !strings.Contains(fstr, ".") { + return fstr + ".0" + } + return fstr +} + +func (enc *Encoder) writeQuoted(s string) { + enc.wf("\"%s\"", dblQuotedReplacer.Replace(s)) +} + +func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { + length := rv.Len() + enc.wf("[") + for i := 0; i < length; i++ { + elem := eindirect(rv.Index(i)) + enc.eElement(elem) + if i != length-1 { + enc.wf(", ") + } + } + enc.wf("]") +} + +func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { + if len(key) == 0 { + encPanic(errNoKey) + } + for i := 0; i < rv.Len(); i++ { + trv := eindirect(rv.Index(i)) + if isNil(trv) { + continue + } + enc.newline() + enc.wf("%s[[%s]]", enc.indentStr(key), key) + enc.newline() + enc.eMapOrStruct(key, trv, false) + } +} + +func (enc *Encoder) eTable(key Key, rv reflect.Value) { + if len(key) == 1 { + // Output an extra newline between top-level tables. + // (The newline isn't written if nothing else has been written though.) + enc.newline() + } + if len(key) > 0 { + enc.wf("%s[%s]", enc.indentStr(key), key) + enc.newline() + } + enc.eMapOrStruct(key, rv, false) +} + +func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value, inline bool) { + switch rv.Kind() { + case reflect.Map: + enc.eMap(key, rv, inline) + case reflect.Struct: + enc.eStruct(key, rv, inline) + default: + // Should never happen? + panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) + } +} + +func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { + rt := rv.Type() + if rt.Key().Kind() != reflect.String { + encPanic(errNonString) + } + + // Sort keys so that we have deterministic output. And write keys directly + // underneath this key first, before writing sub-structs or sub-maps. 
+ var mapKeysDirect, mapKeysSub []string + for _, mapKey := range rv.MapKeys() { + k := mapKey.String() + if typeIsTable(tomlTypeOfGo(eindirect(rv.MapIndex(mapKey)))) { + mapKeysSub = append(mapKeysSub, k) + } else { + mapKeysDirect = append(mapKeysDirect, k) + } + } + + var writeMapKeys = func(mapKeys []string, trailC bool) { + sort.Strings(mapKeys) + for i, mapKey := range mapKeys { + val := eindirect(rv.MapIndex(reflect.ValueOf(mapKey))) + if isNil(val) { + continue + } + + if inline { + enc.writeKeyValue(Key{mapKey}, val, true) + if trailC || i != len(mapKeys)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(mapKey), val) + } + } + } + + if inline { + enc.wf("{") + } + writeMapKeys(mapKeysDirect, len(mapKeysSub) > 0) + writeMapKeys(mapKeysSub, false) + if inline { + enc.wf("}") + } +} + +const is32Bit = (32 << (^uint(0) >> 63)) == 32 + +func pointerTo(t reflect.Type) reflect.Type { + if t.Kind() == reflect.Ptr { + return pointerTo(t.Elem()) + } + return t +} + +func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { + // Write keys for fields directly under this key first, because if we write + // a field that creates a new table then all keys under it will be in that + // table (not the one we're writing here). + // + // Fields is a [][]int: for fieldsDirect this always has one entry (the + // struct index). For fieldsSub it contains two entries: the parent field + // index from tv, and the field indexes for the fields of the sub. + var ( + rt = rv.Type() + fieldsDirect, fieldsSub [][]int + addFields func(rt reflect.Type, rv reflect.Value, start []int) + ) + addFields = func(rt reflect.Type, rv reflect.Value, start []int) { + for i := 0; i < rt.NumField(); i++ { + f := rt.Field(i) + isEmbed := f.Anonymous && pointerTo(f.Type).Kind() == reflect.Struct + if f.PkgPath != "" && !isEmbed { /// Skip unexported fields. 
+ continue + } + opts := getOptions(f.Tag) + if opts.skip { + continue + } + + frv := eindirect(rv.Field(i)) + + // Treat anonymous struct fields with tag names as though they are + // not anonymous, like encoding/json does. + // + // Non-struct anonymous fields use the normal encoding logic. + if isEmbed { + if getOptions(f.Tag).name == "" && frv.Kind() == reflect.Struct { + addFields(frv.Type(), frv, append(start, f.Index...)) + continue + } + } + + if typeIsTable(tomlTypeOfGo(frv)) { + fieldsSub = append(fieldsSub, append(start, f.Index...)) + } else { + // Copy so it works correct on 32bit archs; not clear why this + // is needed. See #314, and https://www.reddit.com/r/golang/comments/pnx8v4 + // This also works fine on 64bit, but 32bit archs are somewhat + // rare and this is a wee bit faster. + if is32Bit { + copyStart := make([]int, len(start)) + copy(copyStart, start) + fieldsDirect = append(fieldsDirect, append(copyStart, f.Index...)) + } else { + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + } + } + } + } + addFields(rt, rv, nil) + + writeFields := func(fields [][]int) { + for _, fieldIndex := range fields { + fieldType := rt.FieldByIndex(fieldIndex) + fieldVal := eindirect(rv.FieldByIndex(fieldIndex)) + + if isNil(fieldVal) { /// Don't write anything for nil fields. + continue + } + + opts := getOptions(fieldType.Tag) + if opts.skip { + continue + } + keyName := fieldType.Name + if opts.name != "" { + keyName = opts.name + } + + if opts.omitempty && enc.isEmpty(fieldVal) { + continue + } + if opts.omitzero && isZero(fieldVal) { + continue + } + + if inline { + enc.writeKeyValue(Key{keyName}, fieldVal, true) + if fieldIndex[0] != len(fields)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(keyName), fieldVal) + } + } + } + + if inline { + enc.wf("{") + } + writeFields(fieldsDirect) + writeFields(fieldsSub) + if inline { + enc.wf("}") + } +} + +// tomlTypeOfGo returns the TOML type name of the Go value's type. 
+// +// It is used to determine whether the types of array elements are mixed (which +// is forbidden). If the Go value is nil, then it is illegal for it to be an +// array element, and valueIsNil is returned as true. +// +// The type may be `nil`, which means no concrete TOML type could be found. +func tomlTypeOfGo(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() { + return nil + } + + if rv.Kind() == reflect.Struct { + if rv.Type() == timeType { + return tomlDatetime + } + if isMarshaler(rv) { + return tomlString + } + return tomlHash + } + + if isMarshaler(rv) { + return tomlString + } + + switch rv.Kind() { + case reflect.Bool: + return tomlBool + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64: + return tomlInteger + case reflect.Float32, reflect.Float64: + return tomlFloat + case reflect.Array, reflect.Slice: + if isTableArray(rv) { + return tomlArrayHash + } + return tomlArray + case reflect.Ptr, reflect.Interface: + return tomlTypeOfGo(rv.Elem()) + case reflect.String: + return tomlString + case reflect.Map: + return tomlHash + default: + encPanic(errors.New("unsupported type: " + rv.Kind().String())) + panic("unreachable") + } +} + +func isMarshaler(rv reflect.Value) bool { + return rv.Type().Implements(marshalText) || rv.Type().Implements(marshalToml) +} + +// isTableArray reports if all entries in the array or slice are a table. +func isTableArray(arr reflect.Value) bool { + if isNil(arr) || !arr.IsValid() || arr.Len() == 0 { + return false + } + + ret := true + for i := 0; i < arr.Len(); i++ { + tt := tomlTypeOfGo(eindirect(arr.Index(i))) + // Don't allow nil. 
+ if tt == nil { + encPanic(errArrayNilElement) + } + + if ret && !typeEqual(tomlHash, tt) { + ret = false + } + } + return ret +} + +type tagOptions struct { + skip bool // "-" + name string + omitempty bool + omitzero bool +} + +func getOptions(tag reflect.StructTag) tagOptions { + t := tag.Get("toml") + if t == "-" { + return tagOptions{skip: true} + } + var opts tagOptions + parts := strings.Split(t, ",") + opts.name = parts[0] + for _, s := range parts[1:] { + switch s { + case "omitempty": + opts.omitempty = true + case "omitzero": + opts.omitzero = true + } + } + return opts +} + +func isZero(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return rv.Uint() == 0 + case reflect.Float32, reflect.Float64: + return rv.Float() == 0.0 + } + return false +} + +func (enc *Encoder) isEmpty(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Array, reflect.Slice, reflect.Map, reflect.String: + return rv.Len() == 0 + case reflect.Struct: + if rv.Type().Comparable() { + return reflect.Zero(rv.Type()).Interface() == rv.Interface() + } + // Need to also check if all the fields are empty, otherwise something + // like this with uncomparable types will always return true: + // + // type a struct{ field b } + // type b struct{ s []string } + // s := a{field: b{s: []string{"AAA"}}} + for i := 0; i < rv.NumField(); i++ { + if !enc.isEmpty(rv.Field(i)) { + return false + } + } + return true + case reflect.Bool: + return !rv.Bool() + } + return false +} + +func (enc *Encoder) newline() { + if enc.hasWritten { + enc.wf("\n") + } +} + +// Write a key/value pair: +// +// key = +// +// This is also used for "k = v" in inline tables; so something like this will +// be written in three calls: +// +// ┌───────────────────┐ +// │ ┌───┐ ┌────┐│ +// v v v v vv +// key = {k = 1, k2 = 2} +func (enc 
*Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { + if len(key) == 0 { + encPanic(errNoKey) + } + enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) + enc.eElement(val) + if !inline { + enc.newline() + } +} + +func (enc *Encoder) wf(format string, v ...interface{}) { + _, err := fmt.Fprintf(enc.w, format, v...) + if err != nil { + encPanic(err) + } + enc.hasWritten = true +} + +func (enc *Encoder) indentStr(key Key) string { + return strings.Repeat(enc.Indent, len(key)-1) +} + +func encPanic(err error) { + panic(tomlEncodeError{err}) +} + +// Resolve any level of pointers to the actual value (e.g. **string → string). +func eindirect(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Ptr && v.Kind() != reflect.Interface { + if isMarshaler(v) { + return v + } + if v.CanAddr() { /// Special case for marshalers; see #358. + if pv := v.Addr(); isMarshaler(pv) { + return pv + } + } + return v + } + + if v.IsNil() { + return v + } + + return eindirect(v.Elem()) +} + +func isNil(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return rv.IsNil() + default: + return false + } +} diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go new file mode 100644 index 00000000..f4f390e6 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/error.go @@ -0,0 +1,279 @@ +package toml + +import ( + "fmt" + "strings" +) + +// ParseError is returned when there is an error parsing the TOML syntax such as +// invalid syntax, duplicate keys, etc. +// +// In addition to the error message itself, you can also print detailed location +// information with context by using [ErrorWithPosition]: +// +// toml: error: Key 'fruit' was already created and cannot be used as an array. 
+// +// At line 4, column 2-7: +// +// 2 | fruit = [] +// 3 | +// 4 | [[fruit]] # Not allowed +// ^^^^^ +// +// [ErrorWithUsage] can be used to print the above with some more detailed usage +// guidance: +// +// toml: error: newlines not allowed within inline tables +// +// At line 1, column 18: +// +// 1 | x = [{ key = 42 # +// ^ +// +// Error help: +// +// Inline tables must always be on a single line: +// +// table = {key = 42, second = 43} +// +// It is invalid to split them over multiple lines like so: +// +// # INVALID +// table = { +// key = 42, +// second = 43 +// } +// +// Use regular for this: +// +// [table] +// key = 42 +// second = 43 +type ParseError struct { + Message string // Short technical message. + Usage string // Longer message with usage guidance; may be blank. + Position Position // Position of the error + LastKey string // Last parsed key, may be blank. + + // Line the error occurred. + // + // Deprecated: use [Position]. + Line int + + err error + input string +} + +// Position of an error. +type Position struct { + Line int // Line number, starting at 1. + Start int // Start of error, as byte offset starting at 0. + Len int // Lenght in bytes. +} + +func (pe ParseError) Error() string { + msg := pe.Message + if msg == "" { // Error from errorf() + msg = pe.err.Error() + } + + if pe.LastKey == "" { + return fmt.Sprintf("toml: line %d: %s", pe.Position.Line, msg) + } + return fmt.Sprintf("toml: line %d (last key %q): %s", + pe.Position.Line, pe.LastKey, msg) +} + +// ErrorWithUsage() returns the error with detailed location context. +// +// See the documentation on [ParseError]. +func (pe ParseError) ErrorWithPosition() string { + if pe.input == "" { // Should never happen, but just in case. + return pe.Error() + } + + var ( + lines = strings.Split(pe.input, "\n") + col = pe.column(lines) + b = new(strings.Builder) + ) + + msg := pe.Message + if msg == "" { + msg = pe.err.Error() + } + + // TODO: don't show control characters as literals? 
This may not show up + // well everywhere. + + if pe.Position.Len == 1 { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d:\n\n", + msg, pe.Position.Line, col+1) + } else { + fmt.Fprintf(b, "toml: error: %s\n\nAt line %d, column %d-%d:\n\n", + msg, pe.Position.Line, col, col+pe.Position.Len) + } + if pe.Position.Line > 2 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-2, lines[pe.Position.Line-3]) + } + if pe.Position.Line > 1 { + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line-1, lines[pe.Position.Line-2]) + } + fmt.Fprintf(b, "% 7d | %s\n", pe.Position.Line, lines[pe.Position.Line-1]) + fmt.Fprintf(b, "% 10s%s%s\n", "", strings.Repeat(" ", col), strings.Repeat("^", pe.Position.Len)) + return b.String() +} + +// ErrorWithUsage() returns the error with detailed location context and usage +// guidance. +// +// See the documentation on [ParseError]. +func (pe ParseError) ErrorWithUsage() string { + m := pe.ErrorWithPosition() + if u, ok := pe.err.(interface{ Usage() string }); ok && u.Usage() != "" { + lines := strings.Split(strings.TrimSpace(u.Usage()), "\n") + for i := range lines { + if lines[i] != "" { + lines[i] = " " + lines[i] + } + } + return m + "Error help:\n\n" + strings.Join(lines, "\n") + "\n" + } + return m +} + +func (pe ParseError) column(lines []string) int { + var pos, col int + for i := range lines { + ll := len(lines[i]) + 1 // +1 for the removed newline + if pos+ll >= pe.Position.Start { + col = pe.Position.Start - pos + if col < 0 { // Should never happen, but just in case. + col = 0 + } + break + } + pos += ll + } + + return col +} + +type ( + errLexControl struct{ r rune } + errLexEscape struct{ r rune } + errLexUTF8 struct{ b byte } + errLexInvalidNum struct{ v string } + errLexInvalidDate struct{ v string } + errLexInlineTableNL struct{} + errLexStringNL struct{} + errParseRange struct { + i interface{} // int or float + size string // "int64", "uint16", etc. 
+ } + errParseDuration struct{ d string } +) + +func (e errLexControl) Error() string { + return fmt.Sprintf("TOML files cannot contain control characters: '0x%02x'", e.r) +} +func (e errLexControl) Usage() string { return "" } + +func (e errLexEscape) Error() string { return fmt.Sprintf(`invalid escape in string '\%c'`, e.r) } +func (e errLexEscape) Usage() string { return usageEscape } +func (e errLexUTF8) Error() string { return fmt.Sprintf("invalid UTF-8 byte: 0x%02x", e.b) } +func (e errLexUTF8) Usage() string { return "" } +func (e errLexInvalidNum) Error() string { return fmt.Sprintf("invalid number: %q", e.v) } +func (e errLexInvalidNum) Usage() string { return "" } +func (e errLexInvalidDate) Error() string { return fmt.Sprintf("invalid date: %q", e.v) } +func (e errLexInvalidDate) Usage() string { return "" } +func (e errLexInlineTableNL) Error() string { return "newlines not allowed within inline tables" } +func (e errLexInlineTableNL) Usage() string { return usageInlineNewline } +func (e errLexStringNL) Error() string { return "strings cannot contain newlines" } +func (e errLexStringNL) Usage() string { return usageStringNewline } +func (e errParseRange) Error() string { return fmt.Sprintf("%v is out of range for %s", e.i, e.size) } +func (e errParseRange) Usage() string { return usageIntOverflow } +func (e errParseDuration) Error() string { return fmt.Sprintf("invalid duration: %q", e.d) } +func (e errParseDuration) Usage() string { return usageDuration } + +const usageEscape = ` +A '\' inside a "-delimited string is interpreted as an escape character. + +The following escape sequences are supported: +\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX + +To prevent a '\' from being recognized as an escape character, use either: + +- a ' or '''-delimited string; escape characters aren't processed in them; or +- write two backslashes to get a single backslash: '\\'. + +If you're trying to add a Windows path (e.g. 
"C:\Users\martin") then using '/' +instead of '\' will usually also work: "C:/Users/martin". +` + +const usageInlineNewline = ` +Inline tables must always be on a single line: + + table = {key = 42, second = 43} + +It is invalid to split them over multiple lines like so: + + # INVALID + table = { + key = 42, + second = 43 + } + +Use regular for this: + + [table] + key = 42 + second = 43 +` + +const usageStringNewline = ` +Strings must always be on a single line, and cannot span more than one line: + + # INVALID + string = "Hello, + world!" + +Instead use """ or ''' to split strings over multiple lines: + + string = """Hello, + world!""" +` + +const usageIntOverflow = ` +This number is too large; this may be an error in the TOML, but it can also be a +bug in the program that uses too small of an integer. + +The maximum and minimum values are: + + size │ lowest │ highest + ───────┼────────────────┼────────── + int8 │ -128 │ 127 + int16 │ -32,768 │ 32,767 + int32 │ -2,147,483,648 │ 2,147,483,647 + int64 │ -9.2 × 10¹⁷ │ 9.2 × 10¹⁷ + uint8 │ 0 │ 255 + uint16 │ 0 │ 65535 + uint32 │ 0 │ 4294967295 + uint64 │ 0 │ 1.8 × 10¹⁸ + +int refers to int32 on 32-bit systems and int64 on 64-bit systems. +` + +const usageDuration = ` +A duration must be as "number", without any spaces. Valid units are: + + ns nanoseconds (billionth of a second) + us, µs microseconds (millionth of a second) + ms milliseconds (thousands of a second) + s seconds + m minutes + h hours + +You can combine multiple units; for example "5m10s" for 5 minutes and 10 +seconds. +` diff --git a/vendor/github.com/BurntSushi/toml/internal/tz.go b/vendor/github.com/BurntSushi/toml/internal/tz.go new file mode 100644 index 00000000..022f15bc --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/internal/tz.go @@ -0,0 +1,36 @@ +package internal + +import "time" + +// Timezones used for local datetime, date, and time TOML types. 
+// +// The exact way times and dates without a timezone should be interpreted is not +// well-defined in the TOML specification and left to the implementation. These +// defaults to current local timezone offset of the computer, but this can be +// changed by changing these variables before decoding. +// +// TODO: +// Ideally we'd like to offer people the ability to configure the used timezone +// by setting Decoder.Timezone and Encoder.Timezone; however, this is a bit +// tricky: the reason we use three different variables for this is to support +// round-tripping – without these specific TZ names we wouldn't know which +// format to use. +// +// There isn't a good way to encode this right now though, and passing this sort +// of information also ties in to various related issues such as string format +// encoding, encoding of comments, etc. +// +// So, for the time being, just put this in internal until we can write a good +// comprehensive API for doing all of this. +// +// The reason they're exported is because they're referred from in e.g. +// internal/tag. +// +// Note that this behaviour is valid according to the TOML spec as the exact +// behaviour is left up to implementations. 
+var ( + localOffset = func() int { _, o := time.Now().Zone(); return o }() + LocalDatetime = time.FixedZone("datetime-local", localOffset) + LocalDate = time.FixedZone("date-local", localOffset) + LocalTime = time.FixedZone("time-local", localOffset) +) diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go new file mode 100644 index 00000000..d4d70871 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -0,0 +1,1233 @@ +package toml + +import ( + "fmt" + "reflect" + "runtime" + "strings" + "unicode" + "unicode/utf8" +) + +type itemType int + +const ( + itemError itemType = iota + itemNIL // used in the parser to indicate no type + itemEOF + itemText + itemString + itemRawString + itemMultilineString + itemRawMultilineString + itemBool + itemInteger + itemFloat + itemDatetime + itemArray // the start of an array + itemArrayEnd + itemTableStart + itemTableEnd + itemArrayTableStart + itemArrayTableEnd + itemKeyStart + itemKeyEnd + itemCommentStart + itemInlineTableStart + itemInlineTableEnd +) + +const eof = 0 + +type stateFn func(lx *lexer) stateFn + +func (p Position) String() string { + return fmt.Sprintf("at line %d; start %d; length %d", p.Line, p.Start, p.Len) +} + +type lexer struct { + input string + start int + pos int + line int + state stateFn + items chan item + + // Allow for backing up up to 4 runes. This is necessary because TOML + // contains 3-rune tokens (""" and '''). + prevWidths [4]int + nprev int // how many of prevWidths are in use + atEOF bool // If we emit an eof, we can still back up, but it is not OK to call next again. + + // A stack of state functions used to maintain context. + // + // The idea is to reuse parts of the state machine in various places. For + // example, values can appear at the top level or within arbitrarily nested + // arrays. The last state on the stack is used after a value has been lexed. + // Similarly for comments. 
+ stack []stateFn +} + +type item struct { + typ itemType + val string + err error + pos Position +} + +func (lx *lexer) nextItem() item { + for { + select { + case item := <-lx.items: + return item + default: + lx.state = lx.state(lx) + //fmt.Printf(" STATE %-24s current: %-10s stack: %s\n", lx.state, lx.current(), lx.stack) + } + } +} + +func lex(input string) *lexer { + lx := &lexer{ + input: input, + state: lexTop, + items: make(chan item, 10), + stack: make([]stateFn, 0, 10), + line: 1, + } + return lx +} + +func (lx *lexer) push(state stateFn) { + lx.stack = append(lx.stack, state) +} + +func (lx *lexer) pop() stateFn { + if len(lx.stack) == 0 { + return lx.errorf("BUG in lexer: no states to pop") + } + last := lx.stack[len(lx.stack)-1] + lx.stack = lx.stack[0 : len(lx.stack)-1] + return last +} + +func (lx *lexer) current() string { + return lx.input[lx.start:lx.pos] +} + +func (lx lexer) getPos() Position { + p := Position{ + Line: lx.line, + Start: lx.start, + Len: lx.pos - lx.start, + } + if p.Len <= 0 { + p.Len = 1 + } + return p +} + +func (lx *lexer) emit(typ itemType) { + // Needed for multiline strings ending with an incomplete UTF-8 sequence. 
+ if lx.start > lx.pos { + lx.error(errLexUTF8{lx.input[lx.pos]}) + return + } + lx.items <- item{typ: typ, pos: lx.getPos(), val: lx.current()} + lx.start = lx.pos +} + +func (lx *lexer) emitTrim(typ itemType) { + lx.items <- item{typ: typ, pos: lx.getPos(), val: strings.TrimSpace(lx.current())} + lx.start = lx.pos +} + +func (lx *lexer) next() (r rune) { + if lx.atEOF { + panic("BUG in lexer: next called after EOF") + } + if lx.pos >= len(lx.input) { + lx.atEOF = true + return eof + } + + if lx.input[lx.pos] == '\n' { + lx.line++ + } + lx.prevWidths[3] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[0] + if lx.nprev < 4 { + lx.nprev++ + } + + r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) + if r == utf8.RuneError { + lx.error(errLexUTF8{lx.input[lx.pos]}) + return utf8.RuneError + } + + // Note: don't use peek() here, as this calls next(). + if isControl(r) || (r == '\r' && (len(lx.input)-1 == lx.pos || lx.input[lx.pos+1] != '\n')) { + lx.errorControlChar(r) + return utf8.RuneError + } + + lx.prevWidths[0] = w + lx.pos += w + return r +} + +// ignore skips over the pending input before this point. +func (lx *lexer) ignore() { + lx.start = lx.pos +} + +// backup steps back one rune. Can be called 4 times between calls to next. +func (lx *lexer) backup() { + if lx.atEOF { + lx.atEOF = false + return + } + if lx.nprev < 1 { + panic("BUG in lexer: backed up too far") + } + w := lx.prevWidths[0] + lx.prevWidths[0] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[3] + lx.nprev-- + + lx.pos -= w + if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { + lx.line-- + } +} + +// accept consumes the next rune if it's equal to `valid`. +func (lx *lexer) accept(valid rune) bool { + if lx.next() == valid { + return true + } + lx.backup() + return false +} + +// peek returns but does not consume the next rune in the input. 
+func (lx *lexer) peek() rune { + r := lx.next() + lx.backup() + return r +} + +// skip ignores all input that matches the given predicate. +func (lx *lexer) skip(pred func(rune) bool) { + for { + r := lx.next() + if pred(r) { + continue + } + lx.backup() + lx.ignore() + return + } +} + +// error stops all lexing by emitting an error and returning `nil`. +// +// Note that any value that is a character is escaped if it's a special +// character (newlines, tabs, etc.). +func (lx *lexer) error(err error) stateFn { + if lx.atEOF { + return lx.errorPrevLine(err) + } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: err} + return nil +} + +// errorfPrevline is like error(), but sets the position to the last column of +// the previous line. +// +// This is so that unexpected EOF or NL errors don't show on a new blank line. +func (lx *lexer) errorPrevLine(err error) stateFn { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorPos is like error(), but allows explicitly setting the position. +func (lx *lexer) errorPos(start, length int, err error) stateFn { + pos := lx.getPos() + pos.Start = start + pos.Len = length + lx.items <- item{typ: itemError, pos: pos, err: err} + return nil +} + +// errorf is like error, and creates a new error. +func (lx *lexer) errorf(format string, values ...interface{}) stateFn { + if lx.atEOF { + pos := lx.getPos() + pos.Line-- + pos.Len = 1 + pos.Start = lx.pos - 1 + lx.items <- item{typ: itemError, pos: pos, err: fmt.Errorf(format, values...)} + return nil + } + lx.items <- item{typ: itemError, pos: lx.getPos(), err: fmt.Errorf(format, values...)} + return nil +} + +func (lx *lexer) errorControlChar(cc rune) stateFn { + return lx.errorPos(lx.pos-1, 1, errLexControl{cc}) +} + +// lexTop consumes elements at the top level of TOML data. 
+func lexTop(lx *lexer) stateFn { + r := lx.next() + if isWhitespace(r) || isNL(r) { + return lexSkip(lx, lexTop) + } + switch r { + case '#': + lx.push(lexTop) + return lexCommentStart + case '[': + return lexTableStart + case eof: + if lx.pos > lx.start { + return lx.errorf("unexpected EOF") + } + lx.emit(itemEOF) + return nil + } + + // At this point, the only valid item can be a key, so we back up + // and let the key lexer do the rest. + lx.backup() + lx.push(lexTopEnd) + return lexKeyStart +} + +// lexTopEnd is entered whenever a top-level item has been consumed. (A value +// or a table.) It must see only whitespace, and will turn back to lexTop +// upon a newline. If it sees EOF, it will quit the lexer successfully. +func lexTopEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case r == '#': + // a comment will read to a newline for us. + lx.push(lexTop) + return lexCommentStart + case isWhitespace(r): + return lexTopEnd + case isNL(r): + lx.ignore() + return lexTop + case r == eof: + lx.emit(itemEOF) + return nil + } + return lx.errorf( + "expected a top-level item to end with a newline, comment, or EOF, but got %q instead", + r) +} + +// lexTable lexes the beginning of a table. Namely, it makes sure that +// it starts with a character other than '.' and ']'. +// It assumes that '[' has already been consumed. +// It also handles the case that this is an item in an array of tables. +// e.g., '[[name]]'. 
+func lexTableStart(lx *lexer) stateFn { + if lx.peek() == '[' { + lx.next() + lx.emit(itemArrayTableStart) + lx.push(lexArrayTableEnd) + } else { + lx.emit(itemTableStart) + lx.push(lexTableEnd) + } + return lexTableNameStart +} + +func lexTableEnd(lx *lexer) stateFn { + lx.emit(itemTableEnd) + return lexTopEnd +} + +func lexArrayTableEnd(lx *lexer) stateFn { + if r := lx.next(); r != ']' { + return lx.errorf("expected end of table array name delimiter ']', but got %q instead", r) + } + lx.emit(itemArrayTableEnd) + return lexTopEnd +} + +func lexTableNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == ']' || r == eof: + return lx.errorf("unexpected end of table name (table names cannot be empty)") + case r == '.': + return lx.errorf("unexpected table separator (table names cannot be empty)") + case r == '"' || r == '\'': + lx.ignore() + lx.push(lexTableNameEnd) + return lexQuotedName + default: + lx.push(lexTableNameEnd) + return lexBareName + } +} + +// lexTableNameEnd reads the end of a piece of a table name, optionally +// consuming whitespace. +func lexTableNameEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexTableNameEnd + case r == '.': + lx.ignore() + return lexTableNameStart + case r == ']': + return lx.pop() + default: + return lx.errorf("expected '.' or ']' to end table name, but got %q instead", r) + } +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. only 'a' inside 'a.b'. +func lexBareName(lx *lexer) stateFn { + r := lx.next() + if isBareKeyChar(r) { + return lexBareName + } + lx.backup() + lx.emit(itemText) + return lx.pop() +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. 
only '"a"' inside '"a".b'. +func lexQuotedName(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case r == '"': + lx.ignore() // ignore the '"' + return lexString + case r == '\'': + lx.ignore() // ignore the "'" + return lexRawString + case r == eof: + return lx.errorf("unexpected EOF; expected value") + default: + return lx.errorf("expected value but found %q instead", r) + } +} + +// lexKeyStart consumes all key parts until a '='. +func lexKeyStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '=': key name appears blank") + case r == '.': + return lx.errorf("unexpected '.': keys cannot start with a '.'") + case r == '"' || r == '\'': + lx.ignore() + fallthrough + default: // Bare key + lx.emit(itemKeyStart) + return lexKeyNameStart + } +} + +func lexKeyNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '='") + case r == '.': + return lx.errorf("unexpected '.'") + case r == '"' || r == '\'': + lx.ignore() + lx.push(lexKeyEnd) + return lexQuotedName + default: + lx.push(lexKeyEnd) + return lexBareName + } +} + +// lexKeyEnd consumes the end of a key and trims whitespace (up to the key +// separator). +func lexKeyEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexSkip(lx, lexKeyEnd) + case r == eof: + return lx.errorf("unexpected EOF; expected key separator '='") + case r == '.': + lx.ignore() + return lexKeyNameStart + case r == '=': + lx.emit(itemKeyEnd) + return lexSkip(lx, lexValue) + default: + return lx.errorf("expected '.' or '=', but got %q instead", r) + } +} + +// lexValue starts the consumption of a value anywhere a value is expected. +// lexValue will ignore whitespace. +// After a value is lexed, the last state on the next is popped and returned. 
+func lexValue(lx *lexer) stateFn { + // We allow whitespace to precede a value, but NOT newlines. + // In array syntax, the array states are responsible for ignoring newlines. + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case isDigit(r): + lx.backup() // avoid an extra state and use the same as above + return lexNumberOrDateStart + } + switch r { + case '[': + lx.ignore() + lx.emit(itemArray) + return lexArrayValue + case '{': + lx.ignore() + lx.emit(itemInlineTableStart) + return lexInlineTableValue + case '"': + if lx.accept('"') { + if lx.accept('"') { + lx.ignore() // Ignore """ + return lexMultilineString + } + lx.backup() + } + lx.ignore() // ignore the '"' + return lexString + case '\'': + if lx.accept('\'') { + if lx.accept('\'') { + lx.ignore() // Ignore """ + return lexMultilineRawString + } + lx.backup() + } + lx.ignore() // ignore the "'" + return lexRawString + case '.': // special error case, be kind to users + return lx.errorf("floats must start with a digit, not '.'") + case 'i', 'n': + if (lx.accept('n') && lx.accept('f')) || (lx.accept('a') && lx.accept('n')) { + lx.emit(itemFloat) + return lx.pop() + } + case '-', '+': + return lexDecimalNumberStart + } + if unicode.IsLetter(r) { + // Be permissive here; lexBool will give a nice error if the + // user wrote something like + // x = foo + // (i.e. not 'true' or 'false' but is something else word-like.) + lx.backup() + return lexBool + } + if r == eof { + return lx.errorf("unexpected EOF; expected value") + } + return lx.errorf("expected value but found %q instead", r) +} + +// lexArrayValue consumes one value in an array. It assumes that '[' or ',' +// have already been consumed. All whitespace and newlines are ignored. 
+func lexArrayValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValue) + case r == '#': + lx.push(lexArrayValue) + return lexCommentStart + case r == ',': + return lx.errorf("unexpected comma") + case r == ']': + return lexArrayEnd + } + + lx.backup() + lx.push(lexArrayValueEnd) + return lexValue +} + +// lexArrayValueEnd consumes everything between the end of an array value and +// the next value (or the end of the array): it ignores whitespace and newlines +// and expects either a ',' or a ']'. +func lexArrayValueEnd(lx *lexer) stateFn { + switch r := lx.next(); { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValueEnd) + case r == '#': + lx.push(lexArrayValueEnd) + return lexCommentStart + case r == ',': + lx.ignore() + return lexArrayValue // move on to the next value + case r == ']': + return lexArrayEnd + default: + return lx.errorf("expected a comma (',') or array terminator (']'), but got %s", runeOrEOF(r)) + } +} + +// lexArrayEnd finishes the lexing of an array. +// It assumes that a ']' has just been consumed. +func lexArrayEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemArrayEnd) + return lx.pop() +} + +// lexInlineTableValue consumes one key/value pair in an inline table. +// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. 
+func lexInlineTableValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValue) + case isNL(r): + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': + lx.push(lexInlineTableValue) + return lexCommentStart + case r == ',': + return lx.errorf("unexpected comma") + case r == '}': + return lexInlineTableEnd + } + lx.backup() + lx.push(lexInlineTableValueEnd) + return lexKeyStart +} + +// lexInlineTableValueEnd consumes everything between the end of an inline table +// key/value pair and the next pair (or the end of the table): +// it ignores whitespace and expects either a ',' or a '}'. +func lexInlineTableValueEnd(lx *lexer) stateFn { + switch r := lx.next(); { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValueEnd) + case isNL(r): + return lx.errorPrevLine(errLexInlineTableNL{}) + case r == '#': + lx.push(lexInlineTableValueEnd) + return lexCommentStart + case r == ',': + lx.ignore() + lx.skip(isWhitespace) + if lx.peek() == '}' { + return lx.errorf("trailing comma not allowed in inline tables") + } + return lexInlineTableValue + case r == '}': + return lexInlineTableEnd + default: + return lx.errorf("expected a comma or an inline table terminator '}', but got %s instead", runeOrEOF(r)) + } +} + +func runeOrEOF(r rune) string { + if r == eof { + return "end of file" + } + return "'" + string(r) + "'" +} + +// lexInlineTableEnd finishes the lexing of an inline table. +// It assumes that a '}' has just been consumed. +func lexInlineTableEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemInlineTableEnd) + return lx.pop() +} + +// lexString consumes the inner contents of a string. It assumes that the +// beginning '"' has already been consumed and ignored. 
+func lexString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf(`unexpected EOF; expected '"'`) + case isNL(r): + return lx.errorPrevLine(errLexStringNL{}) + case r == '\\': + lx.push(lexString) + return lexStringEscape + case r == '"': + lx.backup() + lx.emit(itemString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexString +} + +// lexMultilineString consumes the inner contents of a string. It assumes that +// the beginning '"""' has already been consumed and ignored. +func lexMultilineString(lx *lexer) stateFn { + r := lx.next() + switch r { + default: + return lexMultilineString + case eof: + return lx.errorf(`unexpected EOF; expected '"""'`) + case '\\': + return lexMultilineStringEscape + case '"': + /// Found " → try to read two more "". + if lx.accept('"') { + if lx.accept('"') { + /// Peek ahead: the string can contain " and "", including at the + /// end: """str""""" + /// 6 or more at the end, however, is an error. + if lx.peek() == '"' { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + /// + /// Second check is for the edge case: + /// + /// two quotes allowed. + /// vv + /// """lol \"""""" + /// ^^ ^^^---- closing three + /// escaped + /// + /// But ugly, but it works + if strings.HasSuffix(lx.current(), `"""""`) && !strings.HasSuffix(lx.current(), `\"""""`) { + return lx.errorf(`unexpected '""""""'`) + } + lx.backup() + lx.backup() + return lexMultilineString + } + + lx.backup() /// backup: don't include the """ in the item. + lx.backup() + lx.backup() + lx.emit(itemMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + return lexMultilineString + } +} + +// lexRawString consumes a raw string. Nothing can be escaped in such a string. +// It assumes that the beginning "'" has already been consumed and ignored. 
+func lexRawString(lx *lexer) stateFn { + r := lx.next() + switch { + default: + return lexRawString + case r == eof: + return lx.errorf(`unexpected EOF; expected "'"`) + case isNL(r): + return lx.errorPrevLine(errLexStringNL{}) + case r == '\'': + lx.backup() + lx.emit(itemRawString) + lx.next() + lx.ignore() + return lx.pop() + } +} + +// lexMultilineRawString consumes a raw string. Nothing can be escaped in such +// a string. It assumes that the beginning ''' has already been consumed and +// ignored. +func lexMultilineRawString(lx *lexer) stateFn { + r := lx.next() + switch r { + default: + return lexMultilineRawString + case eof: + return lx.errorf(`unexpected EOF; expected "'''"`) + case '\'': + /// Found ' → try to read two more ''. + if lx.accept('\'') { + if lx.accept('\'') { + /// Peek ahead: the string can contain ' and '', including at the + /// end: '''str''''' + /// 6 or more at the end, however, is an error. + if lx.peek() == '\'' { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + if strings.HasSuffix(lx.current(), "'''''") { + return lx.errorf(`unexpected "''''''"`) + } + lx.backup() + lx.backup() + return lexMultilineRawString + } + + lx.backup() /// backup: don't include the ''' in the item. + lx.backup() + lx.backup() + lx.emit(itemRawMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + return lexMultilineRawString + } +} + +// lexMultilineStringEscape consumes an escaped character. It assumes that the +// preceding '\\' has already been consumed. +func lexMultilineStringEscape(lx *lexer) stateFn { + if isNL(lx.next()) { /// \ escaping newline. 
+ return lexMultilineString + } + lx.backup() + lx.push(lexMultilineString) + return lexStringEscape(lx) +} + +func lexStringEscape(lx *lexer) stateFn { + r := lx.next() + switch r { + case 'b': + fallthrough + case 't': + fallthrough + case 'n': + fallthrough + case 'f': + fallthrough + case 'r': + fallthrough + case '"': + fallthrough + case ' ', '\t': + // Inside """ .. """ strings you can use \ to escape newlines, and any + // amount of whitespace can be between the \ and \n. + fallthrough + case '\\': + return lx.pop() + case 'u': + return lexShortUnicodeEscape + case 'U': + return lexLongUnicodeEscape + } + return lx.error(errLexEscape{r}) +} + +func lexShortUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 4; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected four hexadecimal digits after '\u', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +func lexLongUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 8; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected eight hexadecimal digits after '\U', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +// lexNumberOrDateStart processes the first character of a value which begins +// with a digit. It exists to catch values starting with '0', so that +// lexBaseNumberOrDate can differentiate base prefixed integers from other +// types. +func lexNumberOrDateStart(lx *lexer) stateFn { + r := lx.next() + switch r { + case '0': + return lexBaseNumberOrDate + } + + if !isDigit(r) { + // The only way to reach this state is if the value starts + // with a digit, so specifically treat anything else as an + // error. + return lx.errorf("expected a digit but got %q", r) + } + + return lexNumberOrDate +} + +// lexNumberOrDate consumes either an integer, float or datetime. 
+func lexNumberOrDate(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '-', ':': + return lexDatetime + case '_': + return lexDecimalNumber + case '.', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDatetime consumes a Datetime, to a first approximation. +// The parser validates that it matches one of the accepted formats. +func lexDatetime(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDatetime + } + switch r { + case '-', ':', 'T', 't', ' ', '.', 'Z', 'z', '+': + return lexDatetime + } + + lx.backup() + lx.emitTrim(itemDatetime) + return lx.pop() +} + +// lexHexInteger consumes a hexadecimal integer after seeing the '0x' prefix. +func lexHexInteger(lx *lexer) stateFn { + r := lx.next() + if isHexadecimal(r) { + return lexHexInteger + } + switch r { + case '_': + return lexHexInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexOctalInteger consumes an octal integer after seeing the '0o' prefix. +func lexOctalInteger(lx *lexer) stateFn { + r := lx.next() + if isOctal(r) { + return lexOctalInteger + } + switch r { + case '_': + return lexOctalInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexBinaryInteger consumes a binary integer after seeing the '0b' prefix. +func lexBinaryInteger(lx *lexer) stateFn { + r := lx.next() + if isBinary(r) { + return lexBinaryInteger + } + switch r { + case '_': + return lexBinaryInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes a decimal float or integer. 
+func lexDecimalNumber(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDecimalNumber + } + switch r { + case '.', 'e', 'E': + return lexFloat + case '_': + return lexDecimalNumber + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes the first digit of a number beginning with a sign. +// It assumes the sign has already been consumed. Values which start with a sign +// are only allowed to be decimal integers or floats. +// +// The special "nan" and "inf" values are also recognized. +func lexDecimalNumberStart(lx *lexer) stateFn { + r := lx.next() + + // Special error cases to give users better error messages + switch r { + case 'i': + if !lx.accept('n') || !lx.accept('f') { + return lx.errorf("invalid float: '%s'", lx.current()) + } + lx.emit(itemFloat) + return lx.pop() + case 'n': + if !lx.accept('a') || !lx.accept('n') { + return lx.errorf("invalid float: '%s'", lx.current()) + } + lx.emit(itemFloat) + return lx.pop() + case '0': + p := lx.peek() + switch p { + case 'b', 'o', 'x': + return lx.errorf("cannot use sign with non-decimal numbers: '%s%c'", lx.current(), p) + } + case '.': + return lx.errorf("floats must start with a digit, not '.'") + } + + if isDigit(r) { + return lexDecimalNumber + } + + return lx.errorf("expected a digit but got %q", r) +} + +// lexBaseNumberOrDate differentiates between the possible values which +// start with '0'. It assumes that before reaching this state, the initial '0' +// has been consumed. +func lexBaseNumberOrDate(lx *lexer) stateFn { + r := lx.next() + // Note: All datetimes start with at least two digits, so we don't + // handle date characters (':', '-', etc.) here. + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '_': + // Can only be decimal, because there can't be an underscore + // between the '0' and the base designator, and dates can't + // contain underscores. 
+ return lexDecimalNumber + case '.', 'e', 'E': + return lexFloat + case 'b': + r = lx.peek() + if !isBinary(r) { + lx.errorf("not a binary number: '%s%c'", lx.current(), r) + } + return lexBinaryInteger + case 'o': + r = lx.peek() + if !isOctal(r) { + lx.errorf("not an octal number: '%s%c'", lx.current(), r) + } + return lexOctalInteger + case 'x': + r = lx.peek() + if !isHexadecimal(r) { + lx.errorf("not a hexidecimal number: '%s%c'", lx.current(), r) + } + return lexHexInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexFloat consumes the elements of a float. It allows any sequence of +// float-like characters, so floats emitted by the lexer are only a first +// approximation and must be validated by the parser. +func lexFloat(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexFloat + } + switch r { + case '_', '.', '-', '+', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemFloat) + return lx.pop() +} + +// lexBool consumes a bool string: 'true' or 'false. +func lexBool(lx *lexer) stateFn { + var rs []rune + for { + r := lx.next() + if !unicode.IsLetter(r) { + lx.backup() + break + } + rs = append(rs, r) + } + s := string(rs) + switch s { + case "true", "false": + lx.emit(itemBool) + return lx.pop() + } + return lx.errorf("expected value but found %q instead", s) +} + +// lexCommentStart begins the lexing of a comment. It will emit +// itemCommentStart and consume no characters, passing control to lexComment. +func lexCommentStart(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemCommentStart) + return lexComment +} + +// lexComment lexes an entire comment. It assumes that '#' has been consumed. +// It will consume *up to* the first newline character, and pass control +// back to the last state on the stack. 
+func lexComment(lx *lexer) stateFn { + switch r := lx.next(); { + case isNL(r) || r == eof: + lx.backup() + lx.emit(itemText) + return lx.pop() + default: + return lexComment + } +} + +// lexSkip ignores all slurped input and moves on to the next state. +func lexSkip(lx *lexer, nextState stateFn) stateFn { + lx.ignore() + return nextState +} + +func (s stateFn) String() string { + name := runtime.FuncForPC(reflect.ValueOf(s).Pointer()).Name() + if i := strings.LastIndexByte(name, '.'); i > -1 { + name = name[i+1:] + } + if s == nil { + name = "" + } + return name + "()" +} + +func (itype itemType) String() string { + switch itype { + case itemError: + return "Error" + case itemNIL: + return "NIL" + case itemEOF: + return "EOF" + case itemText: + return "Text" + case itemString, itemRawString, itemMultilineString, itemRawMultilineString: + return "String" + case itemBool: + return "Bool" + case itemInteger: + return "Integer" + case itemFloat: + return "Float" + case itemDatetime: + return "DateTime" + case itemTableStart: + return "TableStart" + case itemTableEnd: + return "TableEnd" + case itemKeyStart: + return "KeyStart" + case itemKeyEnd: + return "KeyEnd" + case itemArray: + return "Array" + case itemArrayEnd: + return "ArrayEnd" + case itemCommentStart: + return "CommentStart" + case itemInlineTableStart: + return "InlineTableStart" + case itemInlineTableEnd: + return "InlineTableEnd" + } + panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) +} + +func (item item) String() string { + return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) +} + +func isWhitespace(r rune) bool { return r == '\t' || r == ' ' } +func isNL(r rune) bool { return r == '\n' || r == '\r' } +func isControl(r rune) bool { // Control characters except \t, \r, \n + switch r { + case '\t', '\r', '\n': + return false + default: + return (r >= 0x00 && r <= 0x1f) || r == 0x7f + } +} +func isDigit(r rune) bool { return r >= '0' && r <= '9' } +func isBinary(r rune) bool { return r == 
'0' || r == '1' } +func isOctal(r rune) bool { return r >= '0' && r <= '7' } +func isHexadecimal(r rune) bool { + return (r >= '0' && r <= '9') || (r >= 'a' && r <= 'f') || (r >= 'A' && r <= 'F') +} +func isBareKeyChar(r rune) bool { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || r == '-' +} diff --git a/vendor/github.com/BurntSushi/toml/meta.go b/vendor/github.com/BurntSushi/toml/meta.go new file mode 100644 index 00000000..71847a04 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/meta.go @@ -0,0 +1,121 @@ +package toml + +import ( + "strings" +) + +// MetaData allows access to meta information about TOML data that's not +// accessible otherwise. +// +// It allows checking if a key is defined in the TOML data, whether any keys +// were undecoded, and the TOML type of a key. +type MetaData struct { + context Key // Used only during decoding. + + keyInfo map[string]keyInfo + mapping map[string]interface{} + keys []Key + decoded map[string]struct{} + data []byte // Input file; for errors. +} + +// IsDefined reports if the key exists in the TOML data. +// +// The key should be specified hierarchically, for example to access the TOML +// key "a.b.c" you would use IsDefined("a", "b", "c"). Keys are case sensitive. +// +// Returns false for an empty key. +func (md *MetaData) IsDefined(key ...string) bool { + if len(key) == 0 { + return false + } + + var ( + hash map[string]interface{} + ok bool + hashOrVal interface{} = md.mapping + ) + for _, k := range key { + if hash, ok = hashOrVal.(map[string]interface{}); !ok { + return false + } + if hashOrVal, ok = hash[k]; !ok { + return false + } + } + return true +} + +// Type returns a string representation of the type of the key specified. +// +// Type will return the empty string if given an empty key or a key that does +// not exist. Keys are case sensitive. 
+func (md *MetaData) Type(key ...string) string { + if ki, ok := md.keyInfo[Key(key).String()]; ok { + return ki.tomlType.typeString() + } + return "" +} + +// Keys returns a slice of every key in the TOML data, including key groups. +// +// Each key is itself a slice, where the first element is the top of the +// hierarchy and the last is the most specific. The list will have the same +// order as the keys appeared in the TOML data. +// +// All keys returned are non-empty. +func (md *MetaData) Keys() []Key { + return md.keys +} + +// Undecoded returns all keys that have not been decoded in the order in which +// they appear in the original TOML document. +// +// This includes keys that haven't been decoded because of a [Primitive] value. +// Once the Primitive value is decoded, the keys will be considered decoded. +// +// Also note that decoding into an empty interface will result in no decoding, +// and so no keys will be considered decoded. +// +// In this sense, the Undecoded keys correspond to keys in the TOML document +// that do not have a concrete type in your representation. +func (md *MetaData) Undecoded() []Key { + undecoded := make([]Key, 0, len(md.keys)) + for _, key := range md.keys { + if _, ok := md.decoded[key.String()]; !ok { + undecoded = append(undecoded, key) + } + } + return undecoded +} + +// Key represents any TOML key, including key groups. Use [MetaData.Keys] to get +// values of this type. 
+type Key []string + +func (k Key) String() string { + ss := make([]string, len(k)) + for i := range k { + ss[i] = k.maybeQuoted(i) + } + return strings.Join(ss, ".") +} + +func (k Key) maybeQuoted(i int) string { + if k[i] == "" { + return `""` + } + for _, c := range k[i] { + if !isBareKeyChar(c) { + return `"` + dblQuotedReplacer.Replace(k[i]) + `"` + } + } + return k[i] +} + +func (k Key) add(piece string) Key { + newKey := make(Key, len(k)+1) + copy(newKey, k) + newKey[len(k)] = piece + return newKey +} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go new file mode 100644 index 00000000..d2542d6f --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -0,0 +1,781 @@ +package toml + +import ( + "fmt" + "strconv" + "strings" + "time" + "unicode/utf8" + + "github.com/BurntSushi/toml/internal" +) + +type parser struct { + lx *lexer + context Key // Full key for the current hash in scope. + currentKey string // Base key name for everything except hashes. + pos Position // Current position in the TOML file. + + ordered []Key // List of keys in the order that they appear in the TOML data. + + keyInfo map[string]keyInfo // Map keyname → info about the TOML key. + mapping map[string]interface{} // Map keyname → key value. + implicits map[string]struct{} // Record implicit keys (e.g. "key.group.names"). +} + +type keyInfo struct { + pos Position + tomlType tomlType +} + +func parse(data string) (p *parser, err error) { + defer func() { + if r := recover(); r != nil { + if pErr, ok := r.(ParseError); ok { + pErr.input = data + err = pErr + return + } + panic(r) + } + }() + + // Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString() + // which mangles stuff. + if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { + data = data[2:] + } + + // Examine first few bytes for NULL bytes; this probably means it's a UTF-16 + // file (second byte in surrogate pair being NULL). 
Again, do this here to + // avoid having to deal with UTF-8/16 stuff in the lexer. + ex := 6 + if len(data) < 6 { + ex = len(data) + } + if i := strings.IndexRune(data[:ex], 0); i > -1 { + return nil, ParseError{ + Message: "files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8", + Position: Position{Line: 1, Start: i, Len: 1}, + Line: 1, + input: data, + } + } + + p = &parser{ + keyInfo: make(map[string]keyInfo), + mapping: make(map[string]interface{}), + lx: lex(data), + ordered: make([]Key, 0), + implicits: make(map[string]struct{}), + } + for { + item := p.next() + if item.typ == itemEOF { + break + } + p.topLevel(item) + } + + return p, nil +} + +func (p *parser) panicErr(it item, err error) { + panic(ParseError{ + err: err, + Position: it.pos, + Line: it.pos.Len, + LastKey: p.current(), + }) +} + +func (p *parser) panicItemf(it item, format string, v ...interface{}) { + panic(ParseError{ + Message: fmt.Sprintf(format, v...), + Position: it.pos, + Line: it.pos.Len, + LastKey: p.current(), + }) +} + +func (p *parser) panicf(format string, v ...interface{}) { + panic(ParseError{ + Message: fmt.Sprintf(format, v...), + Position: p.pos, + Line: p.pos.Line, + LastKey: p.current(), + }) +} + +func (p *parser) next() item { + it := p.lx.nextItem() + //fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.pos.Line, it.val) + if it.typ == itemError { + if it.err != nil { + panic(ParseError{ + Position: it.pos, + Line: it.pos.Line, + LastKey: p.current(), + err: it.err, + }) + } + + p.panicItemf(it, "%s", it.val) + } + return it +} + +func (p *parser) nextPos() item { + it := p.next() + p.pos = it.pos + return it +} + +func (p *parser) bug(format string, v ...interface{}) { + panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) +} + +func (p *parser) expect(typ itemType) item { + it := p.next() + p.assertEqual(typ, it.typ) + return it +} + +func (p *parser) assertEqual(expected, got itemType) { + if expected != got { + p.bug("Expected '%s' but got 
'%s'.", expected, got) + } +} + +func (p *parser) topLevel(item item) { + switch item.typ { + case itemCommentStart: // # .. + p.expect(itemText) + case itemTableStart: // [ .. ] + name := p.nextPos() + + var key Key + for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemTableEnd, name.typ) + + p.addContext(key, false) + p.setType("", tomlHash, item.pos) + p.ordered = append(p.ordered, key) + case itemArrayTableStart: // [[ .. ]] + name := p.nextPos() + + var key Key + for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemArrayTableEnd, name.typ) + + p.addContext(key, true) + p.setType("", tomlArrayHash, item.pos) + p.ordered = append(p.ordered, key) + case itemKeyStart: // key = .. + outerContext := p.context + /// Read all the key parts (e.g. 'a' and 'b' in 'a.b') + k := p.nextPos() + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. + context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set value. + vItem := p.next() + val, typ := p.value(vItem, false) + p.set(p.currentKey, val, typ, vItem.pos) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + + /// Remove the context we added (preserving any context from [tbl] lines). + p.context = outerContext + p.currentKey = "" + default: + p.bug("Unexpected type at top level: %s", item.typ) + } +} + +// Gets a string for a key (or part of a key in a table name). 
+func (p *parser) keyString(it item) string { + switch it.typ { + case itemText: + return it.val + case itemString, itemMultilineString, + itemRawString, itemRawMultilineString: + s, _ := p.value(it, false) + return s.(string) + default: + p.bug("Unexpected key type: %s", it.typ) + } + panic("unreachable") +} + +var datetimeRepl = strings.NewReplacer( + "z", "Z", + "t", "T", + " ", "T") + +// value translates an expected value from the lexer into a Go value wrapped +// as an empty interface. +func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { + switch it.typ { + case itemString: + return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) + case itemMultilineString: + return p.replaceEscapes(it, stripFirstNewline(p.stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + case itemRawString: + return it.val, p.typeOfPrimitive(it) + case itemRawMultilineString: + return stripFirstNewline(it.val), p.typeOfPrimitive(it) + case itemInteger: + return p.valueInteger(it) + case itemFloat: + return p.valueFloat(it) + case itemBool: + switch it.val { + case "true": + return true, p.typeOfPrimitive(it) + case "false": + return false, p.typeOfPrimitive(it) + default: + p.bug("Expected boolean value, but got '%s'.", it.val) + } + case itemDatetime: + return p.valueDatetime(it) + case itemArray: + return p.valueArray(it) + case itemInlineTableStart: + return p.valueInlineTable(it, parentIsArray) + default: + p.bug("Unexpected value type: %s", it.typ) + } + panic("unreachable") +} + +func (p *parser) valueInteger(it item) (interface{}, tomlType) { + if !numUnderscoresOK(it.val) { + p.panicItemf(it, "Invalid integer %q: underscores must be surrounded by digits", it.val) + } + if numHasLeadingZero(it.val) { + p.panicItemf(it, "Invalid integer %q: cannot have leading zeroes", it.val) + } + + num, err := strconv.ParseInt(it.val, 0, 64) + if err != nil { + // Distinguish integer values. 
Normally, it'd be a bug if the lexer + // provides an invalid integer, but it's possible that the number is + // out of range of valid values (which the lexer cannot determine). + // So mark the former as a bug but the latter as a legitimate user + // error. + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicErr(it, errParseRange{i: it.val, size: "int64"}) + } else { + p.bug("Expected integer value, but got '%s'.", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +func (p *parser) valueFloat(it item) (interface{}, tomlType) { + parts := strings.FieldsFunc(it.val, func(r rune) bool { + switch r { + case '.', 'e', 'E': + return true + } + return false + }) + for _, part := range parts { + if !numUnderscoresOK(part) { + p.panicItemf(it, "Invalid float %q: underscores must be surrounded by digits", it.val) + } + } + if len(parts) > 0 && numHasLeadingZero(parts[0]) { + p.panicItemf(it, "Invalid float %q: cannot have leading zeroes", it.val) + } + if !numPeriodsOK(it.val) { + // As a special case, numbers like '123.' or '1.e2', + // which are valid as far as Go/strconv are concerned, + // must be rejected because TOML says that a fractional + // part consists of '.' followed by 1+ digits. + p.panicItemf(it, "Invalid float %q: '.' must be followed by one or more digits", it.val) + } + val := strings.Replace(it.val, "_", "", -1) + if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does. 
+ val = "nan" + } + num, err := strconv.ParseFloat(val, 64) + if err != nil { + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicErr(it, errParseRange{i: it.val, size: "float64"}) + } else { + p.panicItemf(it, "Invalid float value: %q", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +var dtTypes = []struct { + fmt string + zone *time.Location +}{ + {time.RFC3339Nano, time.Local}, + {"2006-01-02T15:04:05.999999999", internal.LocalDatetime}, + {"2006-01-02", internal.LocalDate}, + {"15:04:05.999999999", internal.LocalTime}, +} + +func (p *parser) valueDatetime(it item) (interface{}, tomlType) { + it.val = datetimeRepl.Replace(it.val) + var ( + t time.Time + ok bool + err error + ) + for _, dt := range dtTypes { + t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone) + if err == nil { + ok = true + break + } + } + if !ok { + p.panicItemf(it, "Invalid TOML Datetime: %q.", it.val) + } + return t, p.typeOfPrimitive(it) +} + +func (p *parser) valueArray(it item) (interface{}, tomlType) { + p.setType(p.currentKey, tomlArray, it.pos) + + var ( + types []tomlType + + // Initialize to a non-nil empty slice. This makes it consistent with + // how S = [] decodes into a non-nil slice inside something like struct + // { S []string }. See #338 + array = []interface{}{} + ) + for it = p.next(); it.typ != itemArrayEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + val, typ := p.value(it, true) + array = append(array, val) + types = append(types, typ) + + // XXX: types isn't used here, we need it to record the accurate type + // information. + // + // Not entirely sure how to best store this; could use "key[0]", + // "key[1]" notation, or maybe store it on the Array type? 
+ } + return array, tomlArray +} + +func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tomlType) { + var ( + hash = make(map[string]interface{}) + outerContext = p.context + outerKey = p.currentKey + ) + + p.context = append(p.context, p.currentKey) + prevContext := p.context + p.currentKey = "" + + p.addImplicit(p.context) + p.addContext(p.context, parentIsArray) + + /// Loop over all table key/value pairs. + for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + /// Read all key parts. + k := p.nextPos() + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. + context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set the value. + val, typ := p.value(p.next(), false) + p.set(p.currentKey, val, typ, it.pos) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + hash[p.currentKey] = val + + /// Restore context. + p.context = prevContext + } + p.context = outerContext + p.currentKey = outerKey + return hash, tomlHash +} + +// numHasLeadingZero checks if this number has leading zeroes, allowing for '0', +// +/- signs, and base prefixes. +func numHasLeadingZero(s string) bool { + if len(s) > 1 && s[0] == '0' && !(s[1] == 'b' || s[1] == 'o' || s[1] == 'x') { // Allow 0b, 0o, 0x + return true + } + if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' { + return true + } + return false +} + +// numUnderscoresOK checks whether each underscore in s is surrounded by +// characters that are not underscores. 
+func numUnderscoresOK(s string) bool { + switch s { + case "nan", "+nan", "-nan", "inf", "-inf", "+inf": + return true + } + accept := false + for _, r := range s { + if r == '_' { + if !accept { + return false + } + } + + // isHexadecimal is a superset of all the permissable characters + // surrounding an underscore. + accept = isHexadecimal(r) + } + return accept +} + +// numPeriodsOK checks whether every period in s is followed by a digit. +func numPeriodsOK(s string) bool { + period := false + for _, r := range s { + if period && !isDigit(r) { + return false + } + period = r == '.' + } + return !period +} + +// Set the current context of the parser, where the context is either a hash or +// an array of hashes, depending on the value of the `array` parameter. +// +// Establishing the context also makes sure that the key isn't a duplicate, and +// will create implicit hashes automatically. +func (p *parser) addContext(key Key, array bool) { + var ok bool + + // Always start at the top level and drill down for our context. + hashContext := p.mapping + keyContext := make(Key, 0) + + // We only need implicit hashes for key[0:-1] + for _, k := range key[0 : len(key)-1] { + _, ok = hashContext[k] + keyContext = append(keyContext, k) + + // No key? Make an implicit hash and move on. + if !ok { + p.addImplicit(keyContext) + hashContext[k] = make(map[string]interface{}) + } + + // If the hash context is actually an array of tables, then set + // the hash context to the last element in that array. + // + // Otherwise, it better be a table, since this MUST be a key group (by + // virtue of it not being the last element in a key). 
+ switch t := hashContext[k].(type) { + case []map[string]interface{}: + hashContext = t[len(t)-1] + case map[string]interface{}: + hashContext = t + default: + p.panicf("Key '%s' was already created as a hash.", keyContext) + } + } + + p.context = keyContext + if array { + // If this is the first element for this array, then allocate a new + // list of tables for it. + k := key[len(key)-1] + if _, ok := hashContext[k]; !ok { + hashContext[k] = make([]map[string]interface{}, 0, 4) + } + + // Add a new table. But make sure the key hasn't already been used + // for something else. + if hash, ok := hashContext[k].([]map[string]interface{}); ok { + hashContext[k] = append(hash, make(map[string]interface{})) + } else { + p.panicf("Key '%s' was already created and cannot be used as an array.", key) + } + } else { + p.setValue(key[len(key)-1], make(map[string]interface{})) + } + p.context = append(p.context, key[len(key)-1]) +} + +// set calls setValue and setType. +func (p *parser) set(key string, val interface{}, typ tomlType, pos Position) { + p.setValue(key, val) + p.setType(key, typ, pos) + +} + +// setValue sets the given key to the given value in the current context. +// It will make sure that the key hasn't already been defined, account for +// implicit key groups. +func (p *parser) setValue(key string, value interface{}) { + var ( + tmpHash interface{} + ok bool + hash = p.mapping + keyContext Key + ) + for _, k := range p.context { + keyContext = append(keyContext, k) + if tmpHash, ok = hash[k]; !ok { + p.bug("Context for key '%s' has not been established.", keyContext) + } + switch t := tmpHash.(type) { + case []map[string]interface{}: + // The context is a table of hashes. Pick the most recent table + // defined as the current hash. 
+ hash = t[len(t)-1] + case map[string]interface{}: + hash = t + default: + p.panicf("Key '%s' has already been defined.", keyContext) + } + } + keyContext = append(keyContext, key) + + if _, ok := hash[key]; ok { + // Normally redefining keys isn't allowed, but the key could have been + // defined implicitly and it's allowed to be redefined concretely. (See + // the `valid/implicit-and-explicit-after.toml` in toml-test) + // + // But we have to make sure to stop marking it as an implicit. (So that + // another redefinition provokes an error.) + // + // Note that since it has already been defined (as a hash), we don't + // want to overwrite it. So our business is done. + if p.isArray(keyContext) { + p.removeImplicit(keyContext) + hash[key] = value + return + } + if p.isImplicit(keyContext) { + p.removeImplicit(keyContext) + return + } + + // Otherwise, we have a concrete key trying to override a previous + // key, which is *always* wrong. + p.panicf("Key '%s' has already been defined.", keyContext) + } + + hash[key] = value +} + +// setType sets the type of a particular value at a given key. It should be +// called immediately AFTER setValue. +// +// Note that if `key` is empty, then the type given will be applied to the +// current context (which is either a table or an array of tables). +func (p *parser) setType(key string, typ tomlType, pos Position) { + keyContext := make(Key, 0, len(p.context)+1) + keyContext = append(keyContext, p.context...) + if len(key) > 0 { // allow type setting for hashes + keyContext = append(keyContext, key) + } + // Special case to make empty keys ("" = 1) work. + // Without it it will set "" rather than `""`. + // TODO: why is this needed? And why is this only needed here? 
+ if len(keyContext) == 0 { + keyContext = Key{""} + } + p.keyInfo[keyContext.String()] = keyInfo{tomlType: typ, pos: pos} +} + +// Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and +// "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). +func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = struct{}{} } +func (p *parser) removeImplicit(key Key) { delete(p.implicits, key.String()) } +func (p *parser) isImplicit(key Key) bool { _, ok := p.implicits[key.String()]; return ok } +func (p *parser) isArray(key Key) bool { return p.keyInfo[key.String()].tomlType == tomlArray } +func (p *parser) addImplicitContext(key Key) { + p.addImplicit(key) + p.addContext(key, false) +} + +// current returns the full key name of the current context. +func (p *parser) current() string { + if len(p.currentKey) == 0 { + return p.context.String() + } + if len(p.context) == 0 { + return p.currentKey + } + return fmt.Sprintf("%s.%s", p.context, p.currentKey) +} + +func stripFirstNewline(s string) string { + if len(s) > 0 && s[0] == '\n' { + return s[1:] + } + if len(s) > 1 && s[0] == '\r' && s[1] == '\n' { + return s[2:] + } + return s +} + +// Remove newlines inside triple-quoted strings if a line ends with "\". +func (p *parser) stripEscapedNewlines(s string) string { + split := strings.Split(s, "\n") + if len(split) < 1 { + return s + } + + escNL := false // Keep track of the last non-blank line was escaped. 
+ for i, line := range split { + line = strings.TrimRight(line, " \t\r") + + if len(line) == 0 || line[len(line)-1] != '\\' { + split[i] = strings.TrimRight(split[i], "\r") + if !escNL && i != len(split)-1 { + split[i] += "\n" + } + continue + } + + escBS := true + for j := len(line) - 1; j >= 0 && line[j] == '\\'; j-- { + escBS = !escBS + } + if escNL { + line = strings.TrimLeft(line, " \t\r") + } + escNL = !escBS + + if escBS { + split[i] += "\n" + continue + } + + if i == len(split)-1 { + p.panicf("invalid escape: '\\ '") + } + + split[i] = line[:len(line)-1] // Remove \ + if len(split)-1 > i { + split[i+1] = strings.TrimLeft(split[i+1], " \t\r") + } + } + return strings.Join(split, "") +} + +func (p *parser) replaceEscapes(it item, str string) string { + replaced := make([]rune, 0, len(str)) + s := []byte(str) + r := 0 + for r < len(s) { + if s[r] != '\\' { + c, size := utf8.DecodeRune(s[r:]) + r += size + replaced = append(replaced, c) + continue + } + r += 1 + if r >= len(s) { + p.bug("Escape sequence at end of string.") + return "" + } + switch s[r] { + default: + p.bug("Expected valid escape code after \\, but got %q.", s[r]) + case ' ', '\t': + p.panicItemf(it, "invalid escape: '\\%c'", s[r]) + case 'b': + replaced = append(replaced, rune(0x0008)) + r += 1 + case 't': + replaced = append(replaced, rune(0x0009)) + r += 1 + case 'n': + replaced = append(replaced, rune(0x000A)) + r += 1 + case 'f': + replaced = append(replaced, rune(0x000C)) + r += 1 + case 'r': + replaced = append(replaced, rune(0x000D)) + r += 1 + case '"': + replaced = append(replaced, rune(0x0022)) + r += 1 + case '\\': + replaced = append(replaced, rune(0x005C)) + r += 1 + case 'u': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+5). (Because the lexer guarantees this + // for us.) 
+ escaped := p.asciiEscapeToUnicode(it, s[r+1:r+5]) + replaced = append(replaced, escaped) + r += 5 + case 'U': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+9). (Because the lexer guarantees this + // for us.) + escaped := p.asciiEscapeToUnicode(it, s[r+1:r+9]) + replaced = append(replaced, escaped) + r += 9 + } + } + return string(replaced) +} + +func (p *parser) asciiEscapeToUnicode(it item, bs []byte) rune { + s := string(bs) + hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) + if err != nil { + p.bug("Could not parse '%s' as a hexadecimal number, but the lexer claims it's OK: %s", s, err) + } + if !utf8.ValidRune(rune(hex)) { + p.panicItemf(it, "Escaped character '\\u%s' is not valid UTF-8.", s) + } + return rune(hex) +} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go new file mode 100644 index 00000000..254ca82e --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_fields.go @@ -0,0 +1,242 @@ +package toml + +// Struct field handling is adapted from code in encoding/json: +// +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the Go distribution. + +import ( + "reflect" + "sort" + "sync" +) + +// A field represents a single field found in a struct. +type field struct { + name string // the name of the field (`toml` tag included) + tag bool // whether field has a `toml` tag + index []int // represents the depth of an anonymous field + typ reflect.Type // the type of the field +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from toml tag", then +// breaking ties with index sequence. 
+type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that TOML should recognize for the given +// type. The algorithm is breadth-first search over the set of structs to +// include - the top struct and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + var count map[reflect.Type]int + var nextCount map[reflect.Type]int + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. 
+ for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" && !sf.Anonymous { // unexported + continue + } + opts := getOptions(sf.Tag) + if opts.skip { + continue + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := opts.name != "" + name := opts.name + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + f := field{name: ft.Name(), index: index, typ: ft} + next = append(next, f) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with TOML tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. 
+ fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// TOML tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. + length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. 
+func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} diff --git a/vendor/github.com/BurntSushi/toml/type_toml.go b/vendor/github.com/BurntSushi/toml/type_toml.go new file mode 100644 index 00000000..4e90d773 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_toml.go @@ -0,0 +1,70 @@ +package toml + +// tomlType represents any Go type that corresponds to a TOML type. +// While the first draft of the TOML spec has a simplistic type system that +// probably doesn't need this level of sophistication, we seem to be militating +// toward adding real composite types. +type tomlType interface { + typeString() string +} + +// typeEqual accepts any two types and returns true if they are equal. +func typeEqual(t1, t2 tomlType) bool { + if t1 == nil || t2 == nil { + return false + } + return t1.typeString() == t2.typeString() +} + +func typeIsTable(t tomlType) bool { + return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) +} + +type tomlBaseType string + +func (btype tomlBaseType) typeString() string { + return string(btype) +} + +func (btype tomlBaseType) String() string { + return btype.typeString() +} + +var ( + tomlInteger tomlBaseType = "Integer" + tomlFloat tomlBaseType = "Float" + tomlDatetime tomlBaseType = "Datetime" + tomlString tomlBaseType = "String" + tomlBool tomlBaseType = "Bool" + tomlArray tomlBaseType = "Array" + tomlHash tomlBaseType = "Hash" + tomlArrayHash tomlBaseType = "ArrayHash" +) + +// typeOfPrimitive returns a tomlType of any primitive value in TOML. +// Primitive values are: Integer, Float, Datetime, String and Bool. 
+// +// Passing a lexer item other than the following will cause a BUG message +// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. +func (p *parser) typeOfPrimitive(lexItem item) tomlType { + switch lexItem.typ { + case itemInteger: + return tomlInteger + case itemFloat: + return tomlFloat + case itemDatetime: + return tomlDatetime + case itemString: + return tomlString + case itemMultilineString: + return tomlString + case itemRawString: + return tomlString + case itemRawMultilineString: + return tomlString + case itemBool: + return tomlBool + } + p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) + panic("unreachable") +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml new file mode 100644 index 00000000..db6e504a --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/.codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + threshold: 1% + patch: + default: + target: 70% +ignore: + - globoptions.go diff --git a/vendor/github.com/mohae/deepcopy/.gitignore b/vendor/github.com/bmatcuk/doublestar/v4/.gitignore similarity index 84% rename from vendor/github.com/mohae/deepcopy/.gitignore rename to vendor/github.com/bmatcuk/doublestar/v4/.gitignore index 5846dd15..af212ecc 100644 --- a/vendor/github.com/mohae/deepcopy/.gitignore +++ b/vendor/github.com/bmatcuk/doublestar/v4/.gitignore @@ -1,3 +1,8 @@ +# vi +*~ +*.swp +*.swo + # Compiled Object files, Static and Dynamic libs (Shared Objects) *.o *.a @@ -21,6 +26,7 @@ _testmain.go *.exe *.test -*~ -*.out -*.log +*.prof + +# test directory +test/ diff --git a/vendor/github.com/mohae/deepcopy/LICENSE b/vendor/github.com/bmatcuk/doublestar/v4/LICENSE similarity index 97% rename from vendor/github.com/mohae/deepcopy/LICENSE rename to vendor/github.com/bmatcuk/doublestar/v4/LICENSE index 419673f0..309c9d1d 100644 --- a/vendor/github.com/mohae/deepcopy/LICENSE +++ 
b/vendor/github.com/bmatcuk/doublestar/v4/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2014 Joel +Copyright (c) 2014 Bob Matcuk Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,3 +19,4 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/github.com/bmatcuk/doublestar/v4/README.md b/vendor/github.com/bmatcuk/doublestar/v4/README.md new file mode 100644 index 00000000..70117eff --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/README.md @@ -0,0 +1,402 @@ +# doublestar + +Path pattern matching and globbing supporting `doublestar` (`**`) patterns. + +[![PkgGoDev](https://pkg.go.dev/badge/github.com/bmatcuk/doublestar)](https://pkg.go.dev/github.com/bmatcuk/doublestar/v4) +[![Release](https://img.shields.io/github/release/bmatcuk/doublestar.svg?branch=master)](https://github.com/bmatcuk/doublestar/releases) +[![Build Status](https://github.com/bmatcuk/doublestar/actions/workflows/test.yml/badge.svg)](https://github.com/bmatcuk/doublestar/actions) +[![codecov.io](https://img.shields.io/codecov/c/github/bmatcuk/doublestar.svg?branch=master)](https://codecov.io/github/bmatcuk/doublestar?branch=master) +[![Sponsor](https://img.shields.io/static/v1?label=Sponsor&message=%E2%9D%A4&logo=GitHub&color=%23fe8e86)](https://github.com/sponsors/bmatcuk) + +## About + +#### [Upgrading?](UPGRADING.md) + +**doublestar** is a [golang] implementation of path pattern matching and +globbing with support for "doublestar" (aka globstar: `**`) patterns. + +doublestar patterns match files and directories recursively. 
For example, if +you had the following directory structure: + +```bash +grandparent +`-- parent + |-- child1 + `-- child2 +``` + +You could find the children with patterns such as: `**/child*`, +`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will +return all files and directories recursively). + +Bash's globstar is doublestar's inspiration and, as such, works similarly. +Note that the doublestar must appear as a path component by itself. A pattern +such as `/path**` is invalid and will be treated the same as `/path*`, but +`/path*/**` should achieve the desired result. Additionally, `/path/**` will +match all directories and files under the path directory, but `/path/**/` will +only match directories. + +v4 is a complete rewrite with a focus on performance. Additionally, +[doublestar] has been updated to use the new [io/fs] package for filesystem +access. As a result, it is only supported by [golang] v1.16+. + +## Installation + +**doublestar** can be installed via `go get`: + +```bash +go get github.com/bmatcuk/doublestar/v4 +``` + +To use it in your code, you must import it: + +```go +import "github.com/bmatcuk/doublestar/v4" +``` + +## Usage + +### ErrBadPattern + +```go +doublestar.ErrBadPattern +``` + +Returned by various functions to report that the pattern is malformed. At the +moment, this value is equal to `path.ErrBadPattern`, but, for portability, this +equivalence should probably not be relied upon. + +### Match + +```go +func Match(pattern, name string) (bool, error) +``` + +Match returns true if `name` matches the file name `pattern` ([see +"patterns"]). `name` and `pattern` are split on forward slash (`/`) characters +and may be relative or absolute. + +Match requires pattern to match all of name, not just a substring. The only +possible returned error is `ErrBadPattern`, when pattern is malformed. + +Note: this is meant as a drop-in replacement for `path.Match()` which always +uses `'/'` as the path separator. 
If you want to support systems which use a +different path separator (such as Windows), what you want is `PathMatch()`. +Alternatively, you can run `filepath.ToSlash()` on both pattern and name and +then use this function. + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + + +### PathMatch + +```go +func PathMatch(pattern, name string) (bool, error) +``` + +PathMatch returns true if `name` matches the file name `pattern` ([see +"patterns"]). The difference between Match and PathMatch is that PathMatch will +automatically use your system's path separator to split `name` and `pattern`. +On systems where the path separator is `'\'`, escaping will be disabled. + +Note: this is meant as a drop-in replacement for `filepath.Match()`. It assumes +that both `pattern` and `name` are using the system's path separator. If you +can't be sure of that, use `filepath.ToSlash()` on both `pattern` and `name`, +and then use the `Match()` function instead. + +### GlobOption + +Options that may be passed to `Glob`, `GlobWalk`, or `FilepathGlob`. Any number +of options may be passed to these functions, and in any order, as the last +argument(s). + +```go +WithFailOnIOErrors() +``` + +If passed, doublestar will abort and return IO errors when encountered. Note +that if the glob pattern references a path that does not exist (such as +`nonexistent/path/*`), this is _not_ considered an IO error: it is considered a +pattern with no matches. + +```go +WithFailOnPatternNotExist() +``` + +If passed, doublestar will abort and return `doublestar.ErrPatternNotExist` if +the pattern references a path that does not exist before any meta characters +such as `nonexistent/path/*`. Note that alts (ie, `{...}`) are expanded before +this check. In other words, a pattern such as `{a,b}/*` may fail if either `a` +or `b` do not exist but `*/{a,b}` will never fail because the star may match +nothing. 
+ +```go +WithFilesOnly() +``` + +If passed, doublestar will only return "files" from `Glob`, `GlobWalk`, or +`FilepathGlob`. In this context, "files" are anything that is not a directory +or a symlink to a directory. + +Note: if combined with the WithNoFollow option, symlinks to directories _will_ +be included in the result since no attempt is made to follow the symlink. + +```go +WithNoFollow() +``` + +If passed, doublestar will not follow symlinks while traversing the filesystem. +However, due to io/fs's _very_ poor support for querying the filesystem about +symlinks, there's a caveat here: if part of the pattern before any meta +characters contains a reference to a symlink, it will be followed. For example, +a pattern such as `path/to/symlink/*` will be followed assuming it is a valid +symlink to a directory. However, from this same example, a pattern such as +`path/to/**` will not traverse the `symlink`, nor would `path/*/symlink/*` + +Note: if combined with the WithFilesOnly option, symlinks to directories _will_ +be included in the result since no attempt is made to follow the symlink. + +### Glob + +```go +func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error) +``` + +Glob returns the names of all files matching pattern or nil if there is no +matching file. The syntax of patterns is the same as in `Match()`. The pattern +may describe hierarchical names such as `usr/*/bin/ed`. + +Glob ignores file system errors such as I/O errors reading directories by +default. The only possible returned error is `ErrBadPattern`, reporting that +the pattern is malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. + +Note: this is meant as a drop-in replacement for `io/fs.Glob()`. Like +`io/fs.Glob()`, this function assumes that your pattern uses `/` as the path +separator even if that's not correct for your OS (like Windows). 
If you aren't +sure if that's the case, you can use `filepath.ToSlash()` on your pattern +before calling `Glob()`. + +Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/` +will return no results and no errors. This seems to be a [conscious +decision](https://github.com/golang/go/issues/44092#issuecomment-774132549), +even if counter-intuitive. You can use [SplitPattern] to divide a pattern into +a base path (to initialize an `FS` object) and pattern. + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + +### GlobWalk + +```go +type GlobWalkFunc func(path string, d fs.DirEntry) error + +func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error +``` + +GlobWalk calls the callback function `fn` for every file matching pattern. The +syntax of pattern is the same as in Match() and the behavior is the same as +Glob(), with regard to limitations (such as patterns containing `/./`, `/../`, +or starting with `/`). The pattern may describe hierarchical names such as +usr/*/bin/ed. + +GlobWalk may have a small performance benefit over Glob if you do not need a +slice of matches because it can avoid allocating memory for the matches. +Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for each +match, and lets you quit early by returning a non-nil error from your callback +function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`, GlobWalk +will skip the current directory. This means that if the current path _is_ a +directory, GlobWalk will not recurse into it. If the current path is not a +directory, the rest of the parent directory will be skipped. + +GlobWalk ignores file system errors such as I/O errors reading directories by +default. GlobWalk may return `ErrBadPattern`, reporting that the pattern is +malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. 
+ +Additionally, if the callback function `fn` returns an error, GlobWalk will +exit immediately and return that error. + +Like Glob(), this function assumes that your pattern uses `/` as the path +separator even if that's not correct for your OS (like Windows). If you aren't +sure if that's the case, you can use filepath.ToSlash() on your pattern before +calling GlobWalk(). + +Note: users should _not_ count on the returned error, +`doublestar.ErrBadPattern`, being equal to `path.ErrBadPattern`. + +### FilepathGlob + +```go +func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error) +``` + +FilepathGlob returns the names of all files matching pattern or nil if there is +no matching file. The syntax of pattern is the same as in Match(). The pattern +may describe hierarchical names such as usr/*/bin/ed. + +FilepathGlob ignores file system errors such as I/O errors reading directories +by default. The only possible returned error is `ErrBadPattern`, reporting that +the pattern is malformed. + +To enable aborting on I/O errors, the `WithFailOnIOErrors` option can be +passed. + +Note: FilepathGlob is a convenience function that is meant as a drop-in +replacement for `path/filepath.Glob()` for users who don't need the +complication of io/fs. Basically, it: + +* Runs `filepath.Clean()` and `ToSlash()` on the pattern +* Runs `SplitPattern()` to get a base path and a pattern to Glob +* Creates an FS object from the base path and `Glob()s` on the pattern +* Joins the base path with all of the matches from `Glob()` + +Returned paths will use the system's path separator, just like +`filepath.Glob()`. + +Note: the returned error `doublestar.ErrBadPattern` is not equal to +`filepath.ErrBadPattern`. + +### SplitPattern + +```go +func SplitPattern(p string) (base, pattern string) +``` + +SplitPattern is a utility function. 
Given a pattern, SplitPattern will return +two strings: the first string is everything up to the last slash (`/`) that +appears _before_ any unescaped "meta" characters (ie, `*?[{`). The second +string is everything after that slash. For example, given the pattern: + +``` +../../path/to/meta*/** + ^----------- split here +``` + +SplitPattern returns "../../path/to" and "meta*/**". This is useful for +initializing os.DirFS() to call Glob() because Glob() will silently fail if +your pattern includes `/./` or `/../`. For example: + +```go +base, pattern := SplitPattern("../../path/to/meta*/**") +fsys := os.DirFS(base) +matches, err := Glob(fsys, pattern) +``` + +If SplitPattern cannot find somewhere to split the pattern (for example, +`meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in this +example). + +Of course, it is your responsibility to decide if the returned base path is +"safe" in the context of your application. Perhaps you could use Match() to +validate against a list of approved base directories? + +### ValidatePattern + +```go +func ValidatePattern(s string) bool +``` + +Validate a pattern. Patterns are validated while they run in Match(), +PathMatch(), and Glob(), so, you normally wouldn't need to call this. However, +there are cases where this might be useful: for example, if your program allows +a user to enter a pattern that you'll run at a later time, you might want to +validate it. + +ValidatePattern assumes your pattern uses '/' as the path separator. + +### ValidatePathPattern + +```go +func ValidatePathPattern(s string) bool +``` + +Like ValidatePattern, only uses your OS path separator. In other words, use +ValidatePattern if you would normally use Match() or Glob(). Use +ValidatePathPattern if you would normally use PathMatch(). Keep in mind, Glob() +requires '/' separators, even if your OS uses something else. 
+ +### Patterns + +**doublestar** supports the following special terms in the patterns: + +Special Terms | Meaning +------------- | ------- +`*` | matches any sequence of non-path-separators +`/**/` | matches zero or more directories +`?` | matches any single non-path-separator character +`[class]` | matches any single non-path-separator character against a class of characters ([see "character classes"]) +`{alt1,...}` | matches a sequence of characters if one of the comma-separated alternatives matches + +Any character with a special meaning can be escaped with a backslash (`\`). + +A doublestar (`**`) should appear surrounded by path separators such as `/**/`. +A mid-pattern doublestar (`**`) behaves like bash's globstar option: a pattern +such as `path/to/**.txt` would return the same results as `path/to/*.txt`. The +pattern you're looking for is `path/to/**/*.txt`. + +#### Character Classes + +Character classes support the following: + +Class | Meaning +---------- | ------- +`[abc]` | matches any single character within the set +`[a-z]` | matches any single character in the range +`[^class]` | matches any single character which does *not* match the class +`[!class]` | same as `^`: negates the class + +## Performance + +``` +goos: darwin +goarch: amd64 +pkg: github.com/bmatcuk/doublestar/v4 +cpu: Intel(R) Core(TM) i7-4870HQ CPU @ 2.50GHz +BenchmarkMatch-8 285639 3868 ns/op 0 B/op 0 allocs/op +BenchmarkGoMatch-8 286945 3726 ns/op 0 B/op 0 allocs/op +BenchmarkPathMatch-8 320511 3493 ns/op 0 B/op 0 allocs/op +BenchmarkGoPathMatch-8 304236 3434 ns/op 0 B/op 0 allocs/op +BenchmarkGlob-8 466 2501123 ns/op 190225 B/op 2849 allocs/op +BenchmarkGlobWalk-8 476 2536293 ns/op 184017 B/op 2750 allocs/op +BenchmarkGoGlob-8 463 2574836 ns/op 194249 B/op 2929 allocs/op +``` + +These benchmarks (in `doublestar_test.go`) compare Match() to path.Match(), +PathMath() to filepath.Match(), and Glob() + GlobWalk() to io/fs.Glob(). 
They +only run patterns that the standard go packages can understand as well (so, no +`{alts}` or `**`) for a fair comparison. Of course, alts and doublestars will +be less performant than the other pattern meta characters. + +Alts are essentially like running multiple patterns, the number of which can +get large if your pattern has alts nested inside alts. This affects both +matching (ie, Match()) and globbing (Glob()). + +`**` performance in matching is actually pretty similar to a regular `*`, but +can cause a large number of reads when globbing as it will need to recursively +traverse your filesystem. + +## Sponsors +I started this project in 2014 in my spare time and have been maintaining it +ever since. In that time, it has grown into one of the most popular globbing +libraries in the Go ecosystem. So, if **doublestar** is a useful library in +your project, consider [sponsoring] my work! I'd really appreciate it! + +Thanks for sponsoring me! + +## License + +[MIT License](LICENSE) + +[SplitPattern]: #splitpattern +[doublestar]: https://github.com/bmatcuk/doublestar +[golang]: http://golang.org/ +[io/fs]: https://pkg.go.dev/io/fs +[see "character classes"]: #character-classes +[see "patterns"]: #patterns +[sponsoring]: https://github.com/sponsors/bmatcuk diff --git a/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md new file mode 100644 index 00000000..25aace3d --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/UPGRADING.md @@ -0,0 +1,63 @@ +# Upgrading from v3 to v4 + +v4 is a complete rewrite with a focus on performance. Additionally, +[doublestar] has been updated to use the new [io/fs] package for filesystem +access. As a result, it is only supported by [golang] v1.16+. + +`Match()` and `PathMatch()` mostly did not change, besides big performance +improvements. Their API is the same. 
However, note the following corner cases: + +* In previous versions of [doublestar], `PathMatch()` could accept patterns + that used either platform-specific path separators, or `/`. This was + undocumented and didn't match `filepath.Match()`. In v4, both `pattern` and + `name` must be using appropriate path separators for the platform. You can + use `filepath.FromSlash()` to change `/` to platform-specific separators if + you aren't sure. +* In previous versions of [doublestar], a pattern such as `path/to/a/**` would + _not_ match `path/to/a`. In v4, this pattern _will_ match because if `a` was + a directory, `Glob()` would return it. In other words, the following returns + true: `Match("path/to/a/**", "path/to/a")` + +`Glob()` changed from using a [doublestar]-specific filesystem abstraction (the +`OS` interface) to the [io/fs] package. As a result, it now takes a `fs.FS` as +its first argument. This change has a couple ramifications: + +* Like `io/fs.Glob`, `pattern` must use a `/` as path separator, even on + platforms that use something else. You can use `filepath.ToSlash()` on your + patterns if you aren't sure. +* Patterns that contain `/./` or `/../` are invalid. The [io/fs] package + rejects them, returning an IO error. Since `Glob()` ignores IO errors, it'll + end up being silently rejected. You can run `path.Clean()` to ensure they are + removed from the pattern. + +v4 also added a `GlobWalk()` function that is slightly more performant than +`Glob()` if you just need to iterate over the results and don't need a string +slice. You also get `fs.DirEntry` objects for each result, and can quit early +if your callback returns an error. + +# Upgrading from v2 to v3 + +v3 introduced using `!` to negate character classes, in addition to `^`. If any +of your patterns include a character class that starts with an exclamation mark +(ie, `[!...]`), you'll need to update the pattern to escape or move the +exclamation mark. 
Note that, like the caret (`^`), it only negates the +character class if it is the first character in the character class. + +# Upgrading from v1 to v2 + +The change from v1 to v2 was fairly minor: the return type of the `Open` method +on the `OS` interface was changed from `*os.File` to `File`, a new interface +exported by doublestar. The new `File` interface only defines the functionality +doublestar actually needs (`io.Closer` and `Readdir`), making it easier to use +doublestar with [go-billy], [afero], or something similar. If you were using +this functionality, updating should be as easy as updating `Open's` return +type, since `os.File` already implements `doublestar.File`. + +If you weren't using this functionality, updating should be as easy as changing +your dependencies to point to v2. + +[afero]: https://github.com/spf13/afero +[doublestar]: https://github.com/bmatcuk/doublestar +[go-billy]: https://github.com/src-d/go-billy +[golang]: http://golang.org/ +[io/fs]: https://golang.org/pkg/io/fs/ diff --git a/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go new file mode 100644 index 00000000..210fd40c --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/doublestar.go @@ -0,0 +1,13 @@ +package doublestar + +import ( + "errors" + "path" +) + +// ErrBadPattern indicates a pattern was malformed. +var ErrBadPattern = path.ErrBadPattern + +// ErrPatternNotExist indicates that the pattern passed to Glob, GlobWalk, or +// FilepathGlob references a path that does not exist. 
+var ErrPatternNotExist = errors.New("pattern does not exist") diff --git a/vendor/github.com/bmatcuk/doublestar/v4/glob.go b/vendor/github.com/bmatcuk/doublestar/v4/glob.go new file mode 100644 index 00000000..519601b1 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/glob.go @@ -0,0 +1,473 @@ +package doublestar + +import ( + "errors" + "io/fs" + "path" +) + +// Glob returns the names of all files matching pattern or nil if there is no +// matching file. The syntax of pattern is the same as in Match(). The pattern +// may describe hierarchical names such as usr/*/bin/ed. +// +// Glob ignores file system errors such as I/O errors reading directories by +// default. The only possible returned error is ErrBadPattern, reporting that +// the pattern is malformed. +// +// To enable aborting on I/O errors, the WithFailOnIOErrors option can be +// passed. +// +// Note: this is meant as a drop-in replacement for io/fs.Glob(). Like +// io/fs.Glob(), this function assumes that your pattern uses `/` as the path +// separator even if that's not correct for your OS (like Windows). If you +// aren't sure if that's the case, you can use filepath.ToSlash() on your +// pattern before calling Glob(). +// +// Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/` +// will return no results and no errors. You can use SplitPattern to divide a +// pattern into a base path (to initialize an `FS` object) and pattern. +// +// Note: users should _not_ count on the returned error, +// doublestar.ErrBadPattern, being equal to path.ErrBadPattern. +// +func Glob(fsys fs.FS, pattern string, opts ...GlobOption) ([]string, error) { + if !ValidatePattern(pattern) { + return nil, ErrBadPattern + } + + g := newGlob(opts...) + + if hasMidDoubleStar(pattern) { + // If the pattern has a `**` anywhere but the very end, GlobWalk is more + // performant because it can get away with less allocations. 
If the pattern + // ends in a `**`, both methods are pretty much the same, but Glob has a + // _very_ slight advantage because of lower function call overhead. + var matches []string + err := g.doGlobWalk(fsys, pattern, true, true, func(p string, d fs.DirEntry) error { + matches = append(matches, p) + return nil + }) + return matches, err + } + return g.doGlob(fsys, pattern, nil, true, true) +} + +// Does the actual globbin' +// - firstSegment is true if we're in the first segment of the pattern, ie, +// the right-most part where we can match files. If it's false, we're +// somewhere in the middle (or at the beginning) and can only match +// directories since there are path segments above us. +// - beforeMeta is true if we're exploring segments before any meta +// characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/` +// bit does not contain any meta characters. +func (g *glob) doGlob(fsys fs.FS, pattern string, m []string, firstSegment, beforeMeta bool) (matches []string, err error) { + matches = m + patternStart := indexMeta(pattern) + if patternStart == -1 { + // pattern doesn't contain any meta characters - does a file matching the + // pattern exist? + // The pattern may contain escaped wildcard characters for an exact path match. + path := unescapeMeta(pattern) + pathInfo, pathExists, pathErr := g.exists(fsys, path, beforeMeta) + if pathErr != nil { + return nil, pathErr + } + + if pathExists && (!firstSegment || !g.filesOnly || !pathInfo.IsDir()) { + matches = append(matches, path) + } + + return + } + + dir := "." + splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! 
+ splitIdx = lastIndexSlash(pattern[:splitIdx]) + if splitIdx != -1 { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } else { + // otherwise, we have to handle the alts: + return g.globAlts(fsys, pattern, openingIdx, splitIdx, matches, firstSegment, beforeMeta) + } + } else { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. + if splitIdx <= patternStart { + return g.globDir(fsys, dir, pattern, matches, firstSegment, beforeMeta) + } + + var dirs []string + dirs, err = g.doGlob(fsys, dir, matches, false, beforeMeta) + if err != nil { + return + } + for _, d := range dirs { + matches, err = g.globDir(fsys, d, pattern, matches, firstSegment, false) + if err != nil { + return + } + } + + return +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func (g *glob) globAlts(fsys fs.FS, pattern string, openingIdx, closingIdx int, m []string, firstSegment, beforeMeta bool) (matches []string, err error) { + matches = m + + var dirs []string + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + dirs = []string{""} + } else { + // our alts have a common prefix that we can process first + dirs, err = g.doGlob(fsys, pattern[:splitIdx], matches, false, beforeMeta) + if err != nil { + return + } + + startIdx = splitIdx + 1 + } + + for _, d := range dirs { + patIdx := openingIdx + 1 + altResultsStartIdx := len(matches) + thisResultStartIdx := altResultsStartIdx + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := 
buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + matches, err = g.doGlob(fsys, alt, matches, firstSegment, beforeMeta) + if err != nil { + return + } + + matchesLen := len(matches) + if altResultsStartIdx != thisResultStartIdx && thisResultStartIdx != matchesLen { + // Alts can result in matches that aren't sorted, or, worse, duplicates + // (consider the trivial pattern `path/to/{a,*}`). Since doGlob returns + // sorted results, we can do a sort of in-place merge and remove + // duplicates. But, we only need to do this if this isn't the first alt + // (ie, `altResultsStartIdx != thisResultsStartIdx`) and if the latest + // alt actually added some matches (`thisResultStartIdx != + // len(matches)`) + matches = sortAndRemoveDups(matches, altResultsStartIdx, thisResultStartIdx, matchesLen) + + // length of matches may have changed + thisResultStartIdx = len(matches) + } else { + thisResultStartIdx = matchesLen + } + + patIdx = nextIdx + 1 + } + } + + return +} + +// find files/subdirectories in the given `dir` that match `pattern` +func (g *glob) globDir(fsys fs.FS, dir, pattern string, matches []string, canMatchFiles, beforeMeta bool) (m []string, e error) { + m = matches + + if pattern == "" { + if !canMatchFiles || !g.filesOnly { + // pattern can be an empty string if the original pattern ended in a + // slash, in which case, we should just return dir, but only if it + // actually exists and it's a directory (or a symlink to a directory) + _, isDir, err := g.isPathDir(fsys, dir, beforeMeta) + if err != nil { + return nil, err + } + if isDir { + m = append(m, dir) + } + } + return + } + + if pattern == "**" { + return g.globDoubleStar(fsys, dir, m, canMatchFiles, beforeMeta) + } + + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + e = g.handlePatternNotExist(beforeMeta) + } else { + e = g.forwardErrIfFailOnIOErrors(err) + } + return + } + + var matched bool + for _, info := range dirs { + name := 
info.Name() + matched, e = matchWithSeparator(pattern, name, '/', false) + if e != nil { + return + } + if matched { + matched = canMatchFiles + if !matched || g.filesOnly { + matched, e = g.isDir(fsys, dir, name, info) + if e != nil { + return + } + if canMatchFiles { + // if we're here, it's because g.filesOnly + // is set and we don't want directories + matched = !matched + } + } + if matched { + m = append(m, path.Join(dir, name)) + } + } + } + + return +} + +func (g *glob) globDoubleStar(fsys fs.FS, dir string, matches []string, canMatchFiles, beforeMeta bool) ([]string, error) { + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return matches, g.handlePatternNotExist(beforeMeta) + } else { + return matches, g.forwardErrIfFailOnIOErrors(err) + } + } + + if !g.filesOnly { + // `**` can match *this* dir, so add it + matches = append(matches, dir) + } + + for _, info := range dirs { + name := info.Name() + isDir, err := g.isDir(fsys, dir, name, info) + if err != nil { + return nil, err + } + if isDir { + matches, err = g.globDoubleStar(fsys, path.Join(dir, name), matches, canMatchFiles, false) + if err != nil { + return nil, err + } + } else if canMatchFiles { + matches = append(matches, path.Join(dir, name)) + } + } + + return matches, nil +} + +// Returns true if the pattern has a doublestar in the middle of the pattern. +// In this case, GlobWalk is faster because it can get away with less +// allocations. However, Glob has a _very_ slight edge if the pattern ends in +// `**`. +func hasMidDoubleStar(p string) bool { + // subtract 3: 2 because we want to return false if the pattern ends in `**` + // (Glob is _very_ slightly faster in that case), and the extra 1 because our + // loop checks p[i] and p[i+1]. 
+ l := len(p) - 3 + for i := 0; i < l; i++ { + if p[i] == '\\' { + // escape next byte + i++ + } else if p[i] == '*' && p[i+1] == '*' { + return true + } + } + return false +} + +// Returns the index of the first unescaped meta character, or negative 1. +func indexMeta(s string) int { + var c byte + l := len(s) + for i := 0; i < l; i++ { + c = s[i] + if c == '*' || c == '?' || c == '[' || c == '{' { + return i + } else if c == '\\' { + // skip next byte + i++ + } + } + return -1 +} + +// Returns the index of the last unescaped slash or closing alt (`}`) in the +// string, or negative 1. +func lastIndexSlashOrAlt(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if (s[i] == '/' || s[i] == '}') && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Returns the index of the last unescaped slash in the string, or negative 1. +func lastIndexSlash(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '/' && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Assuming the byte after the end of `s` is a closing `}`, this function will +// find the index of the matching `{`. That is, it'll skip over any nested `{}` +// and account for escaping. 
+func indexMatchedOpeningAlt(s string) int { + alts := 1 + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '}' && (i == 0 || s[i-1] != '\\') { + alts++ + } else if s[i] == '{' && (i == 0 || s[i-1] != '\\') { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} + +// Returns true if the path exists +func (g *glob) exists(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) { + // name might end in a slash, but Stat doesn't like that + namelen := len(name) + if namelen > 1 && name[namelen-1] == '/' { + name = name[:namelen-1] + } + + info, err := fs.Stat(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + return nil, false, g.handlePatternNotExist(beforeMeta) + } + return info, err == nil, g.forwardErrIfFailOnIOErrors(err) +} + +// Returns true if the path exists and is a directory or a symlink to a +// directory +func (g *glob) isPathDir(fsys fs.FS, name string, beforeMeta bool) (fs.FileInfo, bool, error) { + info, err := fs.Stat(fsys, name) + if errors.Is(err, fs.ErrNotExist) { + return nil, false, g.handlePatternNotExist(beforeMeta) + } + return info, err == nil && info.IsDir(), g.forwardErrIfFailOnIOErrors(err) +} + +// Returns whether or not the given DirEntry is a directory. 
If the DirEntry +// represents a symbolic link, the link is followed by running fs.Stat() on +// `path.Join(dir, name)` (if dir is "", name will be used without joining) +func (g *glob) isDir(fsys fs.FS, dir, name string, info fs.DirEntry) (bool, error) { + if !g.noFollow && (info.Type()&fs.ModeSymlink) > 0 { + p := name + if dir != "" { + p = path.Join(dir, name) + } + finfo, err := fs.Stat(fsys, p) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + // this function is only ever called while expanding a glob, so it can + // never return ErrPatternNotExist + return false, nil + } + return false, g.forwardErrIfFailOnIOErrors(err) + } + return finfo.IsDir(), nil + } + return info.IsDir(), nil +} + +// Builds a string from an alt +func buildAlt(prefix, pattern string, startIdx, openingIdx, currentIdx, nextIdx, afterIdx int) string { + // pattern: + // ignored/start{alts,go,here}remaining - len = 36 + // | | | | ^--- afterIdx = 27 + // | | | \--------- nextIdx = 21 + // | | \----------- currentIdx = 19 + // | \----------------- openingIdx = 13 + // \---------------------- startIdx = 8 + // + // result: + // prefix/startgoremaining - len = 7 + 5 + 2 + 9 = 23 + var buf []byte + patLen := len(pattern) + size := (openingIdx - startIdx) + (nextIdx - currentIdx) + (patLen - afterIdx) + if prefix != "" && prefix != "." { + buf = make([]byte, 0, size+len(prefix)+1) + buf = append(buf, prefix...) + buf = append(buf, '/') + } else { + buf = make([]byte, 0, size) + } + buf = append(buf, pattern[startIdx:openingIdx]...) + buf = append(buf, pattern[currentIdx:nextIdx]...) + if afterIdx < patLen { + buf = append(buf, pattern[afterIdx:]...) + } + return string(buf) +} + +// Running alts can produce results that are not sorted, and, worse, can cause +// duplicates (consider the trivial pattern `path/to/{a,*}`). Since we know +// each run of doGlob is sorted, we can basically do the "merge" step of a +// merge sort in-place. 
+func sortAndRemoveDups(matches []string, idx1, idx2, l int) []string { + var tmp string + for ; idx1 < idx2; idx1++ { + if matches[idx1] < matches[idx2] { + // order is correct + continue + } else if matches[idx1] > matches[idx2] { + // need to swap and then re-sort matches above idx2 + tmp = matches[idx1] + matches[idx1] = matches[idx2] + + shft := idx2 + 1 + for ; shft < l && matches[shft] < tmp; shft++ { + matches[shft-1] = matches[shft] + } + matches[shft-1] = tmp + } else { + // duplicate - shift matches above idx2 down one and decrement l + for shft := idx2 + 1; shft < l; shft++ { + matches[shft-1] = matches[shft] + } + if l--; idx2 == l { + // nothing left to do... matches[idx2:] must have been full of dups + break + } + } + } + return matches[:l] +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go new file mode 100644 index 00000000..9483c4bb --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/globoptions.go @@ -0,0 +1,144 @@ +package doublestar + +import "strings" + +// glob is an internal type to store options during globbing. +type glob struct { + failOnIOErrors bool + failOnPatternNotExist bool + filesOnly bool + noFollow bool +} + +// GlobOption represents a setting that can be passed to Glob, GlobWalk, and +// FilepathGlob. +type GlobOption func(*glob) + +// Construct a new glob object with the given options +func newGlob(opts ...GlobOption) *glob { + g := &glob{} + for _, opt := range opts { + opt(g) + } + return g +} + +// WithFailOnIOErrors is an option that can be passed to Glob, GlobWalk, or +// FilepathGlob. If passed, doublestar will abort and return IO errors when +// encountered. Note that if the glob pattern references a path that does not +// exist (such as `nonexistent/path/*`), this is _not_ considered an IO error: +// it is considered a pattern with no matches. 
+// +func WithFailOnIOErrors() GlobOption { + return func(g *glob) { + g.failOnIOErrors = true + } +} + +// WithFailOnPatternNotExist is an option that can be passed to Glob, GlobWalk, +// or FilepathGlob. If passed, doublestar will abort and return +// ErrPatternNotExist if the pattern references a path that does not exist +// before any meta charcters such as `nonexistent/path/*`. Note that alts (ie, +// `{...}`) are expanded before this check. In other words, a pattern such as +// `{a,b}/*` may fail if either `a` or `b` do not exist but `*/{a,b}` will +// never fail because the star may match nothing. +// +func WithFailOnPatternNotExist() GlobOption { + return func(g *glob) { + g.failOnPatternNotExist = true + } +} + +// WithFilesOnly is an option that can be passed to Glob, GlobWalk, or +// FilepathGlob. If passed, doublestar will only return files that match the +// pattern, not directories. +// +// Note: if combined with the WithNoFollow option, symlinks to directories +// _will_ be included in the result since no attempt is made to follow the +// symlink. +// +func WithFilesOnly() GlobOption { + return func(g *glob) { + g.filesOnly = true + } +} + +// WithNoFollow is an option that can be passed to Glob, GlobWalk, or +// FilepathGlob. If passed, doublestar will not follow symlinks while +// traversing the filesystem. However, due to io/fs's _very_ poor support for +// querying the filesystem about symlinks, there's a caveat here: if part of +// the pattern before any meta characters contains a reference to a symlink, it +// will be followed. For example, a pattern such as `path/to/symlink/*` will be +// followed assuming it is a valid symlink to a directory. However, from this +// same example, a pattern such as `path/to/**` will not traverse the +// `symlink`, nor would `path/*/symlink/*` +// +// Note: if combined with the WithFilesOnly option, symlinks to directories +// _will_ be included in the result since no attempt is made to follow the +// symlink. 
+// +func WithNoFollow() GlobOption { + return func(g *glob) { + g.noFollow = true + } +} + +// forwardErrIfFailOnIOErrors is used to wrap the return values of I/O +// functions. When failOnIOErrors is enabled, it will return err; otherwise, it +// always returns nil. +// +func (g *glob) forwardErrIfFailOnIOErrors(err error) error { + if g.failOnIOErrors { + return err + } + return nil +} + +// handleErrNotExist handles fs.ErrNotExist errors. If +// WithFailOnPatternNotExist has been enabled and canFail is true, this will +// return ErrPatternNotExist. Otherwise, it will return nil. +// +func (g *glob) handlePatternNotExist(canFail bool) error { + if canFail && g.failOnPatternNotExist { + return ErrPatternNotExist + } + return nil +} + +// Format options for debugging/testing purposes +func (g *glob) GoString() string { + var b strings.Builder + b.WriteString("opts: ") + + hasOpts := false + if g.failOnIOErrors { + b.WriteString("WithFailOnIOErrors") + hasOpts = true + } + if g.failOnPatternNotExist { + if hasOpts { + b.WriteString(", ") + } + b.WriteString("WithFailOnPatternNotExist") + hasOpts = true + } + if g.filesOnly { + if hasOpts { + b.WriteString(", ") + } + b.WriteString("WithFilesOnly") + hasOpts = true + } + if g.noFollow { + if hasOpts { + b.WriteString(", ") + } + b.WriteString("WithNoFollow") + hasOpts = true + } + + if !hasOpts { + b.WriteString("nil") + } + return b.String() +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go new file mode 100644 index 00000000..84e764f0 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/globwalk.go @@ -0,0 +1,414 @@ +package doublestar + +import ( + "errors" + "io/fs" + "path" + "path/filepath" + "strings" +) + +// If returned from GlobWalkFunc, will cause GlobWalk to skip the current +// directory. In other words, if the current path is a directory, GlobWalk will +// not recurse into it. 
Otherwise, GlobWalk will skip the rest of the current +// directory. +var SkipDir = fs.SkipDir + +// Callback function for GlobWalk(). If the function returns an error, GlobWalk +// will end immediately and return the same error. +type GlobWalkFunc func(path string, d fs.DirEntry) error + +// GlobWalk calls the callback function `fn` for every file matching pattern. +// The syntax of pattern is the same as in Match() and the behavior is the same +// as Glob(), with regard to limitations (such as patterns containing `/./`, +// `/../`, or starting with `/`). The pattern may describe hierarchical names +// such as usr/*/bin/ed. +// +// GlobWalk may have a small performance benefit over Glob if you do not need a +// slice of matches because it can avoid allocating memory for the matches. +// Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for +// each match, and lets you quit early by returning a non-nil error from your +// callback function. Like `io/fs.WalkDir`, if your callback returns `SkipDir`, +// GlobWalk will skip the current directory. This means that if the current +// path _is_ a directory, GlobWalk will not recurse into it. If the current +// path is not a directory, the rest of the parent directory will be skipped. +// +// GlobWalk ignores file system errors such as I/O errors reading directories +// by default. GlobWalk may return ErrBadPattern, reporting that the pattern is +// malformed. +// +// To enable aborting on I/O errors, the WithFailOnIOErrors option can be +// passed. +// +// Additionally, if the callback function `fn` returns an error, GlobWalk will +// exit immediately and return that error. +// +// Like Glob(), this function assumes that your pattern uses `/` as the path +// separator even if that's not correct for your OS (like Windows). If you +// aren't sure if that's the case, you can use filepath.ToSlash() on your +// pattern before calling GlobWalk(). 
+// +// Note: users should _not_ count on the returned error, +// doublestar.ErrBadPattern, being equal to path.ErrBadPattern. +// +func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc, opts ...GlobOption) error { + if !ValidatePattern(pattern) { + return ErrBadPattern + } + + g := newGlob(opts...) + return g.doGlobWalk(fsys, pattern, true, true, fn) +} + +// Actually execute GlobWalk +// - firstSegment is true if we're in the first segment of the pattern, ie, +// the right-most part where we can match files. If it's false, we're +// somewhere in the middle (or at the beginning) and can only match +// directories since there are path segments above us. +// - beforeMeta is true if we're exploring segments before any meta +// characters, ie, in a pattern such as `path/to/file*.txt`, the `path/to/` +// bit does not contain any meta characters. +func (g *glob) doGlobWalk(fsys fs.FS, pattern string, firstSegment, beforeMeta bool, fn GlobWalkFunc) error { + patternStart := indexMeta(pattern) + if patternStart == -1 { + // pattern doesn't contain any meta characters - does a file matching the + // pattern exist? + // The pattern may contain escaped wildcard characters for an exact path match. + path := unescapeMeta(pattern) + info, pathExists, err := g.exists(fsys, path, beforeMeta) + if pathExists && (!firstSegment || !g.filesOnly || !info.IsDir()) { + err = fn(path, dirEntryFromFileInfo(info)) + if err == SkipDir { + err = nil + } + } + return err + } + + dir := "." + splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! 
+ splitIdx = lastIndexSlash(pattern[:splitIdx]) + if splitIdx != -1 { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } else { + // otherwise, we have to handle the alts: + return g.globAltsWalk(fsys, pattern, openingIdx, splitIdx, firstSegment, beforeMeta, fn) + } + } else { + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. + if splitIdx <= patternStart { + return g.globDirWalk(fsys, dir, pattern, firstSegment, beforeMeta, fn) + } + + return g.doGlobWalk(fsys, dir, false, beforeMeta, func(p string, d fs.DirEntry) error { + if err := g.globDirWalk(fsys, p, pattern, firstSegment, false, fn); err != nil { + return err + } + return nil + }) +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func (g *glob) globAltsWalk(fsys fs.FS, pattern string, openingIdx, closingIdx int, firstSegment, beforeMeta bool, fn GlobWalkFunc) (err error) { + var matches []DirEntryWithFullPath + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + matches, err = g.doGlobAltsWalk(fsys, "", pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, beforeMeta, matches) + if err != nil { + return + } + } else { + // our alts have a common prefix that we can process first + startIdx = splitIdx + 1 + innerBeforeMeta := beforeMeta && !hasMetaExceptAlts(pattern[:splitIdx]) + err = g.doGlobWalk(fsys, pattern[:splitIdx], false, beforeMeta, func(p string, d fs.DirEntry) (e error) { + matches, e = g.doGlobAltsWalk(fsys, p, pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, innerBeforeMeta, matches) + return e + }) + if err != nil { + 
return + } + } + + skip := "" + for _, m := range matches { + if skip != "" { + // Because matches are sorted, we know that descendants of the skipped + // item must come immediately after the skipped item. If we find an item + // that does not have a prefix matching the skipped item, we know we're + // done skipping. I'm using strings.HasPrefix here because + // filepath.HasPrefix has been marked deprecated (and just calls + // strings.HasPrefix anyway). The reason it's deprecated is because it + // doesn't handle case-insensitive paths, nor does it guarantee that the + // prefix is actually a parent directory. Neither is an issue here: the + // paths come from the system so their cases will match, and we guarantee + // a parent directory by appending a slash to the prefix. + // + // NOTE: m.Path will always use slashes as path separators. + if strings.HasPrefix(m.Path, skip) { + continue + } + skip = "" + } + if err = fn(m.Path, m.Entry); err != nil { + if err == SkipDir { + isDir, err := g.isDir(fsys, "", m.Path, m.Entry) + if err != nil { + return err + } + if isDir { + // append a slash to guarantee `skip` will be treated as a parent dir + skip = m.Path + "/" + } else { + // Dir() calls Clean() which calls FromSlash(), so we need to convert + // back to slashes + skip = filepath.ToSlash(filepath.Dir(m.Path)) + "/" + } + err = nil + continue + } + return + } + } + + return +} + +// runs actual matching for alts +func (g *glob) doGlobAltsWalk(fsys fs.FS, d, pattern string, startIdx, openingIdx, closingIdx, afterIdx int, firstSegment, beforeMeta bool, m []DirEntryWithFullPath) (matches []DirEntryWithFullPath, err error) { + matches = m + matchesLen := len(m) + patIdx := openingIdx + 1 + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + err = g.doGlobWalk(fsys, alt, 
firstSegment, beforeMeta, func(p string, d fs.DirEntry) error { + // insertion sort, ignoring dups + insertIdx := matchesLen + for insertIdx > 0 && matches[insertIdx-1].Path > p { + insertIdx-- + } + if insertIdx > 0 && matches[insertIdx-1].Path == p { + // dup + return nil + } + + // append to grow the slice, then insert + entry := DirEntryWithFullPath{d, p} + matches = append(matches, entry) + for i := matchesLen; i > insertIdx; i-- { + matches[i] = matches[i-1] + } + matches[insertIdx] = entry + matchesLen++ + + return nil + }) + if err != nil { + return + } + + patIdx = nextIdx + 1 + } + + return +} + +func (g *glob) globDirWalk(fsys fs.FS, dir, pattern string, canMatchFiles, beforeMeta bool, fn GlobWalkFunc) (e error) { + if pattern == "" { + if !canMatchFiles || !g.filesOnly { + // pattern can be an empty string if the original pattern ended in a + // slash, in which case, we should just return dir, but only if it + // actually exists and it's a directory (or a symlink to a directory) + info, isDir, err := g.isPathDir(fsys, dir, beforeMeta) + if err != nil { + return err + } + if isDir { + e = fn(dir, dirEntryFromFileInfo(info)) + if e == SkipDir { + e = nil + } + } + } + return + } + + if pattern == "**" { + // `**` can match *this* dir + info, dirExists, err := g.exists(fsys, dir, beforeMeta) + if err != nil { + return err + } + if !dirExists || !info.IsDir() { + return nil + } + if !canMatchFiles || !g.filesOnly { + if e = fn(dir, dirEntryFromFileInfo(info)); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + return g.globDoubleStarWalk(fsys, dir, canMatchFiles, fn) + } + + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + return g.handlePatternNotExist(beforeMeta) + } + return g.forwardErrIfFailOnIOErrors(err) + } + + var matched bool + for _, info := range dirs { + name := info.Name() + matched, e = matchWithSeparator(pattern, name, '/', false) + if e != nil { + return + } + if matched { + matched = 
canMatchFiles + if !matched || g.filesOnly { + matched, e = g.isDir(fsys, dir, name, info) + if e != nil { + return e + } + if canMatchFiles { + // if we're here, it's because g.filesOnly + // is set and we don't want directories + matched = !matched + } + } + if matched { + if e = fn(path.Join(dir, name), info); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + } + } + + return +} + +// recursively walk files/directories in a directory +func (g *glob) globDoubleStarWalk(fsys fs.FS, dir string, canMatchFiles bool, fn GlobWalkFunc) (e error) { + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + if errors.Is(err, fs.ErrNotExist) { + // This function is only ever called after we know the top-most directory + // exists, so, if we ever get here, we know we'll never return + // ErrPatternNotExist. + return nil + } + return g.forwardErrIfFailOnIOErrors(err) + } + + for _, info := range dirs { + name := info.Name() + isDir, err := g.isDir(fsys, dir, name, info) + if err != nil { + return err + } + + if isDir { + p := path.Join(dir, name) + if !canMatchFiles || !g.filesOnly { + // `**` can match *this* dir, so add it + if e = fn(p, info); e != nil { + if e == SkipDir { + e = nil + continue + } + return + } + } + if e = g.globDoubleStarWalk(fsys, p, canMatchFiles, fn); e != nil { + return + } + } else if canMatchFiles { + if e = fn(path.Join(dir, name), info); e != nil { + if e == SkipDir { + e = nil + } + return + } + } + } + + return +} + +type DirEntryFromFileInfo struct { + fi fs.FileInfo +} + +func (d *DirEntryFromFileInfo) Name() string { + return d.fi.Name() +} + +func (d *DirEntryFromFileInfo) IsDir() bool { + return d.fi.IsDir() +} + +func (d *DirEntryFromFileInfo) Type() fs.FileMode { + return d.fi.Mode().Type() +} + +func (d *DirEntryFromFileInfo) Info() (fs.FileInfo, error) { + return d.fi, nil +} + +func dirEntryFromFileInfo(fi fs.FileInfo) fs.DirEntry { + return &DirEntryFromFileInfo{fi} +} + +type DirEntryWithFullPath struct { + Entry 
fs.DirEntry + Path string +} + +func hasMetaExceptAlts(s string) bool { + var c byte + l := len(s) + for i := 0; i < l; i++ { + c = s[i] + if c == '*' || c == '?' || c == '[' { + return true + } else if c == '\\' { + // skip next byte + i++ + } + } + return false +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/match.go b/vendor/github.com/bmatcuk/doublestar/v4/match.go new file mode 100644 index 00000000..4232c79f --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/match.go @@ -0,0 +1,381 @@ +package doublestar + +import ( + "path/filepath" + "unicode/utf8" +) + +// Match reports whether name matches the shell pattern. +// The pattern syntax is: +// +// pattern: +// { term } +// term: +// '*' matches any sequence of non-path-separators +// '/**/' matches zero or more directories +// '?' matches any single non-path-separator character +// '[' [ '^' '!' ] { character-range } ']' +// character class (must be non-empty) +// starting with `^` or `!` negates the class +// '{' { term } [ ',' { term } ... ] '}' +// alternatives +// c matches character c (c != '*', '?', '\\', '[') +// '\\' c matches character c +// +// character-range: +// c matches character c (c != '\\', '-', ']') +// '\\' c matches character c +// lo '-' hi matches character c for lo <= c <= hi +// +// Match returns true if `name` matches the file name `pattern`. `name` and +// `pattern` are split on forward slash (`/`) characters and may be relative or +// absolute. +// +// Match requires pattern to match all of name, not just a substring. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// A doublestar (`**`) should appear surrounded by path separators such as +// `/**/`. A mid-pattern doublestar (`**`) behaves like bash's globstar +// option: a pattern such as `path/to/**.txt` would return the same results as +// `path/to/*.txt`. The pattern you're looking for is `path/to/**/*.txt`. 
+// +// Note: this is meant as a drop-in replacement for path.Match() which +// always uses '/' as the path separator. If you want to support systems +// which use a different path separator (such as Windows), what you want +// is PathMatch(). Alternatively, you can run filepath.ToSlash() on both +// pattern and name and then use this function. +// +// Note: users should _not_ count on the returned error, +// doublestar.ErrBadPattern, being equal to path.ErrBadPattern. +// +func Match(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, '/', true) +} + +// PathMatch returns true if `name` matches the file name `pattern`. The +// difference between Match and PathMatch is that PathMatch will automatically +// use your system's path separator to split `name` and `pattern`. On systems +// where the path separator is `'\'`, escaping will be disabled. +// +// Note: this is meant as a drop-in replacement for filepath.Match(). It +// assumes that both `pattern` and `name` are using the system's path +// separator. If you can't be sure of that, use filepath.ToSlash() on both +// `pattern` and `name`, and then use the Match() function instead. 
+// +func PathMatch(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, filepath.Separator, true) +} + +func matchWithSeparator(pattern, name string, separator rune, validate bool) (matched bool, err error) { + return doMatchWithSeparator(pattern, name, separator, validate, -1, -1, -1, -1, 0, 0) +} + +func doMatchWithSeparator(pattern, name string, separator rune, validate bool, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, patIdx, nameIdx int) (matched bool, err error) { + patLen := len(pattern) + nameLen := len(name) + startOfSegment := true +MATCH: + for nameIdx < nameLen { + if patIdx < patLen { + switch pattern[patIdx] { + case '*': + if patIdx++; patIdx < patLen && pattern[patIdx] == '*' { + // doublestar - must begin with a path separator, otherwise we'll + // treat it like a single star like bash + patIdx++ + if startOfSegment { + if patIdx >= patLen { + // pattern ends in `/**`: return true + return true, nil + } + + // doublestar must also end with a path separator, otherwise we're + // just going to treat the doublestar as a single star like bash + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + if patRune == separator { + patIdx += patRuneLen + + doublestarPatternBacktrack = patIdx + doublestarNameBacktrack = nameIdx + starPatternBacktrack = -1 + starNameBacktrack = -1 + continue + } + } + } + startOfSegment = false + + starPatternBacktrack = patIdx + starNameBacktrack = nameIdx + continue + + case '?': + startOfSegment = false + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if nameRune == separator { + // `?` cannot match the separator + break + } + + patIdx++ + nameIdx += nameRuneLen + continue + + case '[': + startOfSegment = false + if patIdx++; patIdx >= patLen { + // class didn't end + return false, ErrBadPattern + } + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + + matched := false + negate := pattern[patIdx] 
== '!' || pattern[patIdx] == '^' + if negate { + patIdx++ + } + + if patIdx >= patLen || pattern[patIdx] == ']' { + // class didn't end or empty character class + return false, ErrBadPattern + } + + last := utf8.MaxRune + for patIdx < patLen && pattern[patIdx] != ']' { + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + // match a range + if last < utf8.MaxRune && patRune == '-' && patIdx < patLen && pattern[patIdx] != ']' { + if pattern[patIdx] == '\\' { + // next character is escaped + patIdx++ + } + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + if last <= nameRune && nameRune <= patRune { + matched = true + break + } + + // didn't match range - reset `last` + last = utf8.MaxRune + continue + } + + // not a range - check if the next rune is escaped + if patRune == '\\' { + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + } + + // check if the rune matches + if patRune == nameRune { + matched = true + break + } + + // no matches yet + last = patRune + } + + if matched == negate { + // failed to match - if we reached the end of the pattern, that means + // we never found a closing `]` + if patIdx >= patLen { + return false, ErrBadPattern + } + break + } + + closingIdx := indexUnescapedByte(pattern[patIdx:], ']', true) + if closingIdx == -1 { + // no closing `]` + return false, ErrBadPattern + } + + patIdx += closingIdx + 1 + nameIdx += nameRuneLen + continue + + case '{': + startOfSegment = false + beforeIdx := patIdx + patIdx++ + closingIdx := indexMatchedClosingAlt(pattern[patIdx:], separator != '\\') + if closingIdx == -1 { + // no closing `}` + return false, ErrBadPattern + } + closingIdx += patIdx + + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + result, err := 
doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:commaIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx) + if result || err != nil { + return result, err + } + + patIdx = commaIdx + 1 + } + return doMatchWithSeparator(pattern[:beforeIdx]+pattern[patIdx:closingIdx]+pattern[closingIdx+1:], name, separator, validate, doublestarPatternBacktrack, doublestarNameBacktrack, starPatternBacktrack, starNameBacktrack, beforeIdx, nameIdx) + + case '\\': + if separator != '\\' { + // next rune is "escaped" in the pattern - literal match + if patIdx++; patIdx >= patLen { + // pattern ended + return false, ErrBadPattern + } + } + fallthrough + + default: + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if patRune != nameRune { + if separator != '\\' && patIdx > 0 && pattern[patIdx-1] == '\\' { + // if this rune was meant to be escaped, we need to move patIdx + // back to the backslash before backtracking or validating below + patIdx-- + } + break + } + + patIdx += patRuneLen + nameIdx += nameRuneLen + startOfSegment = patRune == separator + continue + } + } + + if starPatternBacktrack >= 0 { + // `*` backtrack, but only if the `name` rune isn't the separator + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[starNameBacktrack:]) + if nameRune != separator { + starNameBacktrack += nameRuneLen + patIdx = starPatternBacktrack + nameIdx = starNameBacktrack + startOfSegment = false + continue + } + } + + if doublestarPatternBacktrack >= 0 { + // `**` backtrack, advance `name` past next separator + nameIdx = doublestarNameBacktrack + for nameIdx < nameLen { + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + nameIdx += nameRuneLen + if nameRune == separator { + doublestarNameBacktrack = nameIdx + patIdx = doublestarPatternBacktrack + startOfSegment = true 
+ continue MATCH + } + } + } + + if validate && patIdx < patLen && !doValidatePattern(pattern[patIdx:], separator) { + return false, ErrBadPattern + } + return false, nil + } + + if nameIdx < nameLen { + // we reached the end of `pattern` before the end of `name` + return false, nil + } + + // we've reached the end of `name`; we've successfully matched if we've also + // reached the end of `pattern`, or if the rest of `pattern` can match a + // zero-length string + return isZeroLengthPattern(pattern[patIdx:], separator) +} + +func isZeroLengthPattern(pattern string, separator rune) (ret bool, err error) { + // `/**`, `**/`, and `/**/` are special cases - a pattern such as `path/to/a/**` or `path/to/a/**/` + // *should* match `path/to/a` because `a` might be a directory + if pattern == "" || + pattern == "*" || + pattern == "**" || + pattern == string(separator)+"**" || + pattern == "**"+string(separator) || + pattern == string(separator)+"**"+string(separator) { + return true, nil + } + + if pattern[0] == '{' { + closingIdx := indexMatchedClosingAlt(pattern[1:], separator != '\\') + if closingIdx == -1 { + // no closing '}' + return false, ErrBadPattern + } + closingIdx += 1 + + patIdx := 1 + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + ret, err = isZeroLengthPattern(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], separator) + if ret || err != nil { + return + } + + patIdx = commaIdx + 1 + } + return isZeroLengthPattern(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], separator) + } + + // no luck - validate the rest of the pattern + if !doValidatePattern(pattern, separator) { + return false, ErrBadPattern + } + return false, nil +} + +// Finds the index of the first unescaped byte `c`, or negative 1. 
+func indexUnescapedByte(s string, c byte, allowEscaping bool) int { + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == c { + return i + } + } + return -1 +} + +// Assuming the byte before the beginning of `s` is an opening `{`, this +// function will find the index of the matching `}`. That is, it'll skip over +// any nested `{}` and account for escaping +func indexMatchedClosingAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/utils.go b/vendor/github.com/bmatcuk/doublestar/v4/utils.go new file mode 100644 index 00000000..0ab1dc98 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/utils.go @@ -0,0 +1,147 @@ +package doublestar + +import ( + "errors" + "os" + "path" + "path/filepath" + "strings" +) + +// SplitPattern is a utility function. Given a pattern, SplitPattern will +// return two strings: the first string is everything up to the last slash +// (`/`) that appears _before_ any unescaped "meta" characters (ie, `*?[{`). +// The second string is everything after that slash. For example, given the +// pattern: +// +// ../../path/to/meta*/** +// ^----------- split here +// +// SplitPattern returns "../../path/to" and "meta*/**". This is useful for +// initializing os.DirFS() to call Glob() because Glob() will silently fail if +// your pattern includes `/./` or `/../`. For example: +// +// base, pattern := SplitPattern("../../path/to/meta*/**") +// fsys := os.DirFS(base) +// matches, err := Glob(fsys, pattern) +// +// If SplitPattern cannot find somewhere to split the pattern (for example, +// `meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in +// this example). 
+// +// Of course, it is your responsibility to decide if the returned base path is +// "safe" in the context of your application. Perhaps you could use Match() to +// validate against a list of approved base directories? +// +func SplitPattern(p string) (base, pattern string) { + base = "." + pattern = p + + splitIdx := -1 + for i := 0; i < len(p); i++ { + c := p[i] + if c == '\\' { + i++ + } else if c == '/' { + splitIdx = i + } else if c == '*' || c == '?' || c == '[' || c == '{' { + break + } + } + + if splitIdx == 0 { + return "/", p[1:] + } else if splitIdx > 0 { + return p[:splitIdx], p[splitIdx+1:] + } + + return +} + +// FilepathGlob returns the names of all files matching pattern or nil if there +// is no matching file. The syntax of pattern is the same as in Match(). The +// pattern may describe hierarchical names such as usr/*/bin/ed. +// +// FilepathGlob ignores file system errors such as I/O errors reading +// directories by default. The only possible returned error is ErrBadPattern, +// reporting that the pattern is malformed. +// +// To enable aborting on I/O errors, the WithFailOnIOErrors option can be +// passed. +// +// Note: FilepathGlob is a convenience function that is meant as a drop-in +// replacement for `path/filepath.Glob()` for users who don't need the +// complication of io/fs. Basically, it: +// - Runs `filepath.Clean()` and `ToSlash()` on the pattern +// - Runs `SplitPattern()` to get a base path and a pattern to Glob +// - Creates an FS object from the base path and `Glob()s` on the pattern +// - Joins the base path with all of the matches from `Glob()` +// +// Returned paths will use the system's path separator, just like +// `filepath.Glob()`. +// +// Note: the returned error doublestar.ErrBadPattern is not equal to +// filepath.ErrBadPattern. 
+// +func FilepathGlob(pattern string, opts ...GlobOption) (matches []string, err error) { + pattern = filepath.Clean(pattern) + pattern = filepath.ToSlash(pattern) + base, f := SplitPattern(pattern) + if f == "" || f == "." || f == ".." { + // some special cases to match filepath.Glob behavior + if !ValidatePathPattern(pattern) { + return nil, ErrBadPattern + } + + if filepath.Separator != '\\' { + pattern = unescapeMeta(pattern) + } + + if _, err = os.Lstat(pattern); err != nil { + g := newGlob(opts...) + if errors.Is(err, os.ErrNotExist) { + return nil, g.handlePatternNotExist(true) + } + return nil, g.forwardErrIfFailOnIOErrors(err) + } + return []string{filepath.FromSlash(pattern)}, nil + } + + fs := os.DirFS(base) + if matches, err = Glob(fs, f, opts...); err != nil { + return nil, err + } + for i := range matches { + // use path.Join because we used ToSlash above to ensure our paths are made + // of forward slashes, no matter what the system uses + matches[i] = filepath.FromSlash(path.Join(base, matches[i])) + } + return +} + +// Finds the next comma, but ignores any commas that appear inside nested `{}`. +// Assumes that each opening bracket has a corresponding closing bracket. 
+func indexNextAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + alts-- + } else if s[i] == ',' && alts == 1 { + return i + } + } + return -1 +} + +var metaReplacer = strings.NewReplacer("\\*", "*", "\\?", "?", "\\[", "[", "\\]", "]", "\\{", "{", "\\}", "}") + +// Unescapes meta characters (*?[]{}) +func unescapeMeta(pattern string) string { + return metaReplacer.Replace(pattern) +} diff --git a/vendor/github.com/bmatcuk/doublestar/v4/validate.go b/vendor/github.com/bmatcuk/doublestar/v4/validate.go new file mode 100644 index 00000000..c689b9eb --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/v4/validate.go @@ -0,0 +1,82 @@ +package doublestar + +import "path/filepath" + +// Validate a pattern. Patterns are validated while they run in Match(), +// PathMatch(), and Glob(), so, you normally wouldn't need to call this. +// However, there are cases where this might be useful: for example, if your +// program allows a user to enter a pattern that you'll run at a later time, +// you might want to validate it. +// +// ValidatePattern assumes your pattern uses '/' as the path separator. +// +func ValidatePattern(s string) bool { + return doValidatePattern(s, '/') +} + +// Like ValidatePattern, only uses your OS path separator. In other words, use +// ValidatePattern if you would normally use Match() or Glob(). Use +// ValidatePathPattern if you would normally use PathMatch(). Keep in mind, +// Glob() requires '/' separators, even if your OS uses something else. 
+// +func ValidatePathPattern(s string) bool { + return doValidatePattern(s, filepath.Separator) +} + +func doValidatePattern(s string, separator rune) bool { + altDepth := 0 + l := len(s) +VALIDATE: + for i := 0; i < l; i++ { + switch s[i] { + case '\\': + if separator != '\\' { + // skip the next byte - return false if there is no next byte + if i++; i >= l { + return false + } + } + continue + + case '[': + if i++; i >= l { + // class didn't end + return false + } + if s[i] == '^' || s[i] == '!' { + i++ + } + if i >= l || s[i] == ']' { + // class didn't end or empty character class + return false + } + + for ; i < l; i++ { + if separator != '\\' && s[i] == '\\' { + i++ + } else if s[i] == ']' { + // looks good + continue VALIDATE + } + } + + // class didn't end + return false + + case '{': + altDepth++ + continue + + case '}': + if altDepth == 0 { + // alt end without a corresponding start + return false + } + altDepth-- + continue + } + } + + // valid as long as all alts are closed + return altDepth == 0 +} diff --git a/vendor/github.com/getkin/kin-openapi/LICENSE b/vendor/github.com/getkin/kin-openapi/LICENSE deleted file mode 100644 index 992b9831..00000000 --- a/vendor/github.com/getkin/kin-openapi/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017-2018 the project authors. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/callback.go b/vendor/github.com/getkin/kin-openapi/openapi3/callback.go deleted file mode 100644 index 13532b15..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/callback.go +++ /dev/null @@ -1,54 +0,0 @@ -package openapi3 - -import ( - "context" - "sort" -) - -// Callback is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#callback-object -type Callback struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - m map[string]*PathItem -} - -// NewCallback builds a Callback object with path items in insertion order. -func NewCallback(opts ...NewCallbackOption) *Callback { - Callback := NewCallbackWithCapacity(len(opts)) - for _, opt := range opts { - opt(Callback) - } - return Callback -} - -// NewCallbackOption describes options to NewCallback func -type NewCallbackOption func(*Callback) - -// WithCallback adds Callback as an option to NewCallback -func WithCallback(cb string, pathItem *PathItem) NewCallbackOption { - return func(callback *Callback) { - if p := pathItem; p != nil && cb != "" { - callback.Set(cb, p) - } - } -} - -// Validate returns an error if Callback does not comply with the OpenAPI spec. -func (callback *Callback) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - keys := make([]string, 0, callback.Len()) - for key := range callback.Map() { - keys = append(keys, key) - } - sort.Strings(keys) - for _, key := range keys { - v := callback.Value(key) - if err := v.Validate(ctx); err != nil { - return err - } - } - - return validateExtensions(ctx, callback.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/components.go b/vendor/github.com/getkin/kin-openapi/openapi3/components.go deleted file mode 100644 index 656ea193..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/components.go +++ /dev/null @@ -1,361 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "fmt" - "sort" - - "github.com/go-openapi/jsonpointer" -) - -type ( - Callbacks map[string]*CallbackRef - Examples map[string]*ExampleRef - Headers map[string]*HeaderRef - Links map[string]*LinkRef - ParametersMap map[string]*ParameterRef - RequestBodies map[string]*RequestBodyRef - ResponseBodies map[string]*ResponseRef - Schemas map[string]*SchemaRef - SecuritySchemes map[string]*SecuritySchemeRef -) - -// Components is specified by OpenAPI/Swagger standard version 3. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#components-object -type Components struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Schemas Schemas `json:"schemas,omitempty" yaml:"schemas,omitempty"` - Parameters ParametersMap `json:"parameters,omitempty" yaml:"parameters,omitempty"` - Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` - RequestBodies RequestBodies `json:"requestBodies,omitempty" yaml:"requestBodies,omitempty"` - Responses ResponseBodies `json:"responses,omitempty" yaml:"responses,omitempty"` - SecuritySchemes SecuritySchemes `json:"securitySchemes,omitempty" yaml:"securitySchemes,omitempty"` - Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` - Links Links `json:"links,omitempty" yaml:"links,omitempty"` - Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"` -} - -func NewComponents() Components { - return Components{} -} - -// MarshalJSON returns the JSON encoding of Components. -func (components Components) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 9+len(components.Extensions)) - for k, v := range components.Extensions { - m[k] = v - } - if x := components.Schemas; len(x) != 0 { - m["schemas"] = x - } - if x := components.Parameters; len(x) != 0 { - m["parameters"] = x - } - if x := components.Headers; len(x) != 0 { - m["headers"] = x - } - if x := components.RequestBodies; len(x) != 0 { - m["requestBodies"] = x - } - if x := components.Responses; len(x) != 0 { - m["responses"] = x - } - if x := components.SecuritySchemes; len(x) != 0 { - m["securitySchemes"] = x - } - if x := components.Examples; len(x) != 0 { - m["examples"] = x - } - if x := components.Links; len(x) != 0 { - m["links"] = x - } - if x := components.Callbacks; len(x) != 0 { - m["callbacks"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Components to a copy of data. 
-func (components *Components) UnmarshalJSON(data []byte) error { - type ComponentsBis Components - var x ComponentsBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "schemas") - delete(x.Extensions, "parameters") - delete(x.Extensions, "headers") - delete(x.Extensions, "requestBodies") - delete(x.Extensions, "responses") - delete(x.Extensions, "securitySchemes") - delete(x.Extensions, "examples") - delete(x.Extensions, "links") - delete(x.Extensions, "callbacks") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *components = Components(x) - return nil -} - -// Validate returns an error if Components does not comply with the OpenAPI spec. -func (components *Components) Validate(ctx context.Context, opts ...ValidationOption) (err error) { - ctx = WithValidationOptions(ctx, opts...) - - schemas := make([]string, 0, len(components.Schemas)) - for name := range components.Schemas { - schemas = append(schemas, name) - } - sort.Strings(schemas) - for _, k := range schemas { - v := components.Schemas[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("schema %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("schema %q: %w", k, err) - } - } - - parameters := make([]string, 0, len(components.Parameters)) - for name := range components.Parameters { - parameters = append(parameters, name) - } - sort.Strings(parameters) - for _, k := range parameters { - v := components.Parameters[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("parameter %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("parameter %q: %w", k, err) - } - } - - requestBodies := make([]string, 0, len(components.RequestBodies)) - for name := range components.RequestBodies { - requestBodies = append(requestBodies, name) - } - sort.Strings(requestBodies) - for _, k := range requestBodies { - v := 
components.RequestBodies[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("request body %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("request body %q: %w", k, err) - } - } - - responses := make([]string, 0, len(components.Responses)) - for name := range components.Responses { - responses = append(responses, name) - } - sort.Strings(responses) - for _, k := range responses { - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("response %q: %w", k, err) - } - v := components.Responses[k] - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("response %q: %w", k, err) - } - } - - headers := make([]string, 0, len(components.Headers)) - for name := range components.Headers { - headers = append(headers, name) - } - sort.Strings(headers) - for _, k := range headers { - v := components.Headers[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("header %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("header %q: %w", k, err) - } - } - - securitySchemes := make([]string, 0, len(components.SecuritySchemes)) - for name := range components.SecuritySchemes { - securitySchemes = append(securitySchemes, name) - } - sort.Strings(securitySchemes) - for _, k := range securitySchemes { - v := components.SecuritySchemes[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("security scheme %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("security scheme %q: %w", k, err) - } - } - - examples := make([]string, 0, len(components.Examples)) - for name := range components.Examples { - examples = append(examples, name) - } - sort.Strings(examples) - for _, k := range examples { - v := components.Examples[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("example %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("example %q: %w", k, err) - } - } - - links := 
make([]string, 0, len(components.Links)) - for name := range components.Links { - links = append(links, name) - } - sort.Strings(links) - for _, k := range links { - v := components.Links[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("link %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("link %q: %w", k, err) - } - } - - callbacks := make([]string, 0, len(components.Callbacks)) - for name := range components.Callbacks { - callbacks = append(callbacks, name) - } - sort.Strings(callbacks) - for _, k := range callbacks { - v := components.Callbacks[k] - if err = ValidateIdentifier(k); err != nil { - return fmt.Errorf("callback %q: %w", k, err) - } - if err = v.Validate(ctx); err != nil { - return fmt.Errorf("callback %q: %w", k, err) - } - } - - return validateExtensions(ctx, components.Extensions) -} - -var _ jsonpointer.JSONPointable = (*Schemas)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m Schemas) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no schema %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*ParametersMap)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m ParametersMap) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no parameter %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*Headers)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m Headers) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no header 
%q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m RequestBodies) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no request body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*ResponseRef)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m ResponseBodies) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no response body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*SecuritySchemes)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m SecuritySchemes) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no security scheme body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*Examples)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m Examples) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no example body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*Links)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m Links) 
JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no link body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} - -var _ jsonpointer.JSONPointable = (*Callbacks)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (m Callbacks) JSONLookup(token string) (interface{}, error) { - if v, ok := m[token]; !ok || v == nil { - return nil, fmt.Errorf("no callback body %q", token) - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - return v.Value, nil - } -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/contact.go b/vendor/github.com/getkin/kin-openapi/openapi3/contact.go deleted file mode 100644 index e60d2818..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/contact.go +++ /dev/null @@ -1,59 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" -) - -// Contact is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#contact-object -type Contact struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Name string `json:"name,omitempty" yaml:"name,omitempty"` - URL string `json:"url,omitempty" yaml:"url,omitempty"` - Email string `json:"email,omitempty" yaml:"email,omitempty"` -} - -// MarshalJSON returns the JSON encoding of Contact. -func (contact Contact) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 3+len(contact.Extensions)) - for k, v := range contact.Extensions { - m[k] = v - } - if x := contact.Name; x != "" { - m["name"] = x - } - if x := contact.URL; x != "" { - m["url"] = x - } - if x := contact.Email; x != "" { - m["email"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Contact to a copy of data. 
-func (contact *Contact) UnmarshalJSON(data []byte) error { - type ContactBis Contact - var x ContactBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "name") - delete(x.Extensions, "url") - delete(x.Extensions, "email") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *contact = Contact(x) - return nil -} - -// Validate returns an error if Contact does not comply with the OpenAPI spec. -func (contact *Contact) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - return validateExtensions(ctx, contact.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/content.go b/vendor/github.com/getkin/kin-openapi/openapi3/content.go deleted file mode 100644 index 81b070ee..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/content.go +++ /dev/null @@ -1,124 +0,0 @@ -package openapi3 - -import ( - "context" - "sort" - "strings" -) - -// Content is specified by OpenAPI/Swagger 3.0 standard. 
-type Content map[string]*MediaType - -func NewContent() Content { - return make(map[string]*MediaType) -} - -func NewContentWithSchema(schema *Schema, consumes []string) Content { - if len(consumes) == 0 { - return Content{ - "*/*": NewMediaType().WithSchema(schema), - } - } - content := make(map[string]*MediaType, len(consumes)) - for _, mediaType := range consumes { - content[mediaType] = NewMediaType().WithSchema(schema) - } - return content -} - -func NewContentWithSchemaRef(schema *SchemaRef, consumes []string) Content { - if len(consumes) == 0 { - return Content{ - "*/*": NewMediaType().WithSchemaRef(schema), - } - } - content := make(map[string]*MediaType, len(consumes)) - for _, mediaType := range consumes { - content[mediaType] = NewMediaType().WithSchemaRef(schema) - } - return content -} - -func NewContentWithJSONSchema(schema *Schema) Content { - return Content{ - "application/json": NewMediaType().WithSchema(schema), - } -} -func NewContentWithJSONSchemaRef(schema *SchemaRef) Content { - return Content{ - "application/json": NewMediaType().WithSchemaRef(schema), - } -} - -func NewContentWithFormDataSchema(schema *Schema) Content { - return Content{ - "multipart/form-data": NewMediaType().WithSchema(schema), - } -} - -func NewContentWithFormDataSchemaRef(schema *SchemaRef) Content { - return Content{ - "multipart/form-data": NewMediaType().WithSchemaRef(schema), - } -} - -func (content Content) Get(mime string) *MediaType { - // If the mime is empty then short-circuit to the wildcard. - // We do this here so that we catch only the specific case of - // and empty mime rather than a present, but invalid, mime type. - if mime == "" { - return content["*/*"] - } - // Start by making the most specific match possible - // by using the mime type in full. - if v := content[mime]; v != nil { - return v - } - // If an exact match is not found then we strip all - // metadata from the mime type and only use the x/y - // portion. 
- i := strings.IndexByte(mime, ';') - if i < 0 { - // If there is no metadata then preserve the full mime type - // string for later wildcard searches. - i = len(mime) - } - mime = mime[:i] - if v := content[mime]; v != nil { - return v - } - // If the x/y pattern has no specific match then we - // try the x/* pattern. - i = strings.IndexByte(mime, '/') - if i < 0 { - // In the case that the given mime type is not valid because it is - // missing the subtype we return nil so that this does not accidentally - // resolve with the wildcard. - return nil - } - mime = mime[:i] + "/*" - if v := content[mime]; v != nil { - return v - } - // Finally, the most generic match of */* is returned - // as a catch-all. - return content["*/*"] -} - -// Validate returns an error if Content does not comply with the OpenAPI spec. -func (content Content) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - keys := make([]string, 0, len(content)) - for key := range content { - keys = append(keys, key) - } - sort.Strings(keys) - for _, k := range keys { - v := content[k] - if err := v.Validate(ctx); err != nil { - return err - } - } - return nil -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go b/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go deleted file mode 100644 index abb48074..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/discriminator.go +++ /dev/null @@ -1,52 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" -) - -// Discriminator is specified by OpenAPI/Swagger standard version 3. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#discriminator-object -type Discriminator struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - PropertyName string `json:"propertyName" yaml:"propertyName"` // required - Mapping map[string]string `json:"mapping,omitempty" yaml:"mapping,omitempty"` -} - -// MarshalJSON returns the JSON encoding of Discriminator. -func (discriminator Discriminator) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 2+len(discriminator.Extensions)) - for k, v := range discriminator.Extensions { - m[k] = v - } - m["propertyName"] = discriminator.PropertyName - if x := discriminator.Mapping; len(x) != 0 { - m["mapping"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Discriminator to a copy of data. -func (discriminator *Discriminator) UnmarshalJSON(data []byte) error { - type DiscriminatorBis Discriminator - var x DiscriminatorBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "propertyName") - delete(x.Extensions, "mapping") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *discriminator = Discriminator(x) - return nil -} - -// Validate returns an error if Discriminator does not comply with the OpenAPI spec. -func (discriminator *Discriminator) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - return validateExtensions(ctx, discriminator.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/doc.go b/vendor/github.com/getkin/kin-openapi/openapi3/doc.go deleted file mode 100644 index 41c9965c..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/doc.go +++ /dev/null @@ -1,4 +0,0 @@ -// Package openapi3 parses and writes OpenAPI 3 specification documents. 
-// -// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.3.md -package openapi3 diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go b/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go deleted file mode 100644 index 8e810279..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/encoding.go +++ /dev/null @@ -1,139 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "fmt" - "sort" -) - -// Encoding is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#encoding-object -type Encoding struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - ContentType string `json:"contentType,omitempty" yaml:"contentType,omitempty"` - Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` - Style string `json:"style,omitempty" yaml:"style,omitempty"` - Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"` - AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"` -} - -func NewEncoding() *Encoding { - return &Encoding{} -} - -func (encoding *Encoding) WithHeader(name string, header *Header) *Encoding { - return encoding.WithHeaderRef(name, &HeaderRef{ - Value: header, - }) -} - -func (encoding *Encoding) WithHeaderRef(name string, ref *HeaderRef) *Encoding { - headers := encoding.Headers - if headers == nil { - headers = make(map[string]*HeaderRef) - encoding.Headers = headers - } - headers[name] = ref - return encoding -} - -// MarshalJSON returns the JSON encoding of Encoding. 
-func (encoding Encoding) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 5+len(encoding.Extensions)) - for k, v := range encoding.Extensions { - m[k] = v - } - if x := encoding.ContentType; x != "" { - m["contentType"] = x - } - if x := encoding.Headers; len(x) != 0 { - m["headers"] = x - } - if x := encoding.Style; x != "" { - m["style"] = x - } - if x := encoding.Explode; x != nil { - m["explode"] = x - } - if x := encoding.AllowReserved; x { - m["allowReserved"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Encoding to a copy of data. -func (encoding *Encoding) UnmarshalJSON(data []byte) error { - type EncodingBis Encoding - var x EncodingBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "contentType") - delete(x.Extensions, "headers") - delete(x.Extensions, "style") - delete(x.Extensions, "explode") - delete(x.Extensions, "allowReserved") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *encoding = Encoding(x) - return nil -} - -// SerializationMethod returns a serialization method of request body. -// When serialization method is not defined the method returns the default serialization method. -func (encoding *Encoding) SerializationMethod() *SerializationMethod { - sm := &SerializationMethod{Style: SerializationForm, Explode: true} - if encoding != nil { - if encoding.Style != "" { - sm.Style = encoding.Style - } - if encoding.Explode != nil { - sm.Explode = *encoding.Explode - } - } - return sm -} - -// Validate returns an error if Encoding does not comply with the OpenAPI spec. -func (encoding *Encoding) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if encoding == nil { - return nil - } - - headers := make([]string, 0, len(encoding.Headers)) - for k := range encoding.Headers { - headers = append(headers, k) - } - sort.Strings(headers) - for _, k := range headers { - v := encoding.Headers[k] - if err := ValidateIdentifier(k); err != nil { - return nil - } - if err := v.Validate(ctx); err != nil { - return nil - } - } - - // Validate a media types's serialization method. - sm := encoding.SerializationMethod() - switch { - case sm.Style == SerializationForm && sm.Explode, - sm.Style == SerializationForm && !sm.Explode, - sm.Style == SerializationSpaceDelimited && sm.Explode, - sm.Style == SerializationSpaceDelimited && !sm.Explode, - sm.Style == SerializationPipeDelimited && sm.Explode, - sm.Style == SerializationPipeDelimited && !sm.Explode, - sm.Style == SerializationDeepObject && sm.Explode: - default: - return fmt.Errorf("serialization method with style=%q and explode=%v is not supported by media type", sm.Style, sm.Explode) - } - - return validateExtensions(ctx, encoding.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/errors.go b/vendor/github.com/getkin/kin-openapi/openapi3/errors.go deleted file mode 100644 index 74baab9a..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/errors.go +++ /dev/null @@ -1,59 +0,0 @@ -package openapi3 - -import ( - "bytes" - "errors" -) - -// MultiError is a collection of errors, intended for when -// multiple issues need to be reported upstream -type MultiError []error - -func (me MultiError) Error() string { - return spliceErr(" | ", me) -} - -func spliceErr(sep string, errs []error) string { - buff := &bytes.Buffer{} - for i, e := range errs { - buff.WriteString(e.Error()) - if i != len(errs)-1 { - buff.WriteString(sep) - } - } - return buff.String() -} - -// Is allows you to determine if a generic error is in fact a MultiError using `errors.Is()` -// It will also return true if any of the contained errors match target -func (me 
MultiError) Is(target error) bool { - if _, ok := target.(MultiError); ok { - return true - } - for _, e := range me { - if errors.Is(e, target) { - return true - } - } - return false -} - -// As allows you to use `errors.As()` to set target to the first error within the multi error that matches the target type -func (me MultiError) As(target interface{}) bool { - for _, e := range me { - if errors.As(e, target) { - return true - } - } - return false -} - -type multiErrorForOneOf MultiError - -func (meo multiErrorForOneOf) Error() string { - return spliceErr(" Or ", meo) -} - -func (meo multiErrorForOneOf) Unwrap() error { - return MultiError(meo) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/example.go b/vendor/github.com/getkin/kin-openapi/openapi3/example.go deleted file mode 100644 index 44e71d82..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/example.go +++ /dev/null @@ -1,76 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" -) - -// Example is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#example-object -type Example struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Value interface{} `json:"value,omitempty" yaml:"value,omitempty"` - ExternalValue string `json:"externalValue,omitempty" yaml:"externalValue,omitempty"` -} - -func NewExample(value interface{}) *Example { - return &Example{Value: value} -} - -// MarshalJSON returns the JSON encoding of Example. 
-func (example Example) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(example.Extensions)) - for k, v := range example.Extensions { - m[k] = v - } - if x := example.Summary; x != "" { - m["summary"] = x - } - if x := example.Description; x != "" { - m["description"] = x - } - if x := example.Value; x != nil { - m["value"] = x - } - if x := example.ExternalValue; x != "" { - m["externalValue"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Example to a copy of data. -func (example *Example) UnmarshalJSON(data []byte) error { - type ExampleBis Example - var x ExampleBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "summary") - delete(x.Extensions, "description") - delete(x.Extensions, "value") - delete(x.Extensions, "externalValue") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *example = Example(x) - return nil -} - -// Validate returns an error if Example does not comply with the OpenAPI spec. -func (example *Example) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if example.Value != nil && example.ExternalValue != "" { - return errors.New("value and externalValue are mutually exclusive") - } - if example.Value == nil && example.ExternalValue == "" { - return errors.New("no value or externalValue field") - } - - return validateExtensions(ctx, example.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go b/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go deleted file mode 100644 index fb7a1da1..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/example_validation.go +++ /dev/null @@ -1,16 +0,0 @@ -package openapi3 - -import "context" - -func validateExampleValue(ctx context.Context, input interface{}, schema *Schema) error { - opts := make([]SchemaValidationOption, 0, 2) - - if vo := getValidationOptions(ctx); vo.examplesValidationAsReq { - opts = append(opts, VisitAsRequest()) - } else if vo.examplesValidationAsRes { - opts = append(opts, VisitAsResponse()) - } - opts = append(opts, MultiErrors()) - - return schema.VisitJSON(input, opts...) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/extension.go b/vendor/github.com/getkin/kin-openapi/openapi3/extension.go deleted file mode 100644 index 37f6b01e..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/extension.go +++ /dev/null @@ -1,32 +0,0 @@ -package openapi3 - -import ( - "context" - "fmt" - "sort" - "strings" -) - -func validateExtensions(ctx context.Context, extensions map[string]interface{}) error { // FIXME: newtype + Validate(...) 
- allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - - var unknowns []string - for k := range extensions { - if strings.HasPrefix(k, "x-") { - continue - } - if allowed != nil { - if _, ok := allowed[k]; ok { - continue - } - } - unknowns = append(unknowns, k) - } - - if len(unknowns) != 0 { - sort.Strings(unknowns) - return fmt.Errorf("extra sibling fields: %+v", unknowns) - } - - return nil -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go b/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go deleted file mode 100644 index 7190be4b..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/external_docs.go +++ /dev/null @@ -1,64 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/url" -) - -// ExternalDocs is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#external-documentation-object -type ExternalDocs struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Description string `json:"description,omitempty" yaml:"description,omitempty"` - URL string `json:"url,omitempty" yaml:"url,omitempty"` -} - -// MarshalJSON returns the JSON encoding of ExternalDocs. -func (e ExternalDocs) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 2+len(e.Extensions)) - for k, v := range e.Extensions { - m[k] = v - } - if x := e.Description; x != "" { - m["description"] = x - } - if x := e.URL; x != "" { - m["url"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets ExternalDocs to a copy of data. 
-func (e *ExternalDocs) UnmarshalJSON(data []byte) error { - type ExternalDocsBis ExternalDocs - var x ExternalDocsBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "description") - delete(x.Extensions, "url") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *e = ExternalDocs(x) - return nil -} - -// Validate returns an error if ExternalDocs does not comply with the OpenAPI spec. -func (e *ExternalDocs) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if e.URL == "" { - return errors.New("url is required") - } - if _, err := url.Parse(e.URL); err != nil { - return fmt.Errorf("url is incorrect: %w", err) - } - - return validateExtensions(ctx, e.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/header.go b/vendor/github.com/getkin/kin-openapi/openapi3/header.go deleted file mode 100644 index e5eee6cc..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/header.go +++ /dev/null @@ -1,96 +0,0 @@ -package openapi3 - -import ( - "context" - "errors" - "fmt" - - "github.com/go-openapi/jsonpointer" -) - -// Header is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#header-object -type Header struct { - Parameter -} - -var _ jsonpointer.JSONPointable = (*Header)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (header Header) JSONLookup(token string) (interface{}, error) { - return header.Parameter.JSONLookup(token) -} - -// MarshalJSON returns the JSON encoding of Header. -func (header Header) MarshalJSON() ([]byte, error) { - return header.Parameter.MarshalJSON() -} - -// UnmarshalJSON sets Header to a copy of data. 
-func (header *Header) UnmarshalJSON(data []byte) error { - return header.Parameter.UnmarshalJSON(data) -} - -// MarshalYAML returns the JSON encoding of Header. -func (header Header) MarshalYAML() (interface{}, error) { - return header.Parameter, nil -} - -// SerializationMethod returns a header's serialization method. -func (header *Header) SerializationMethod() (*SerializationMethod, error) { - style := header.Style - if style == "" { - style = SerializationSimple - } - explode := false - if header.Explode != nil { - explode = *header.Explode - } - return &SerializationMethod{Style: style, Explode: explode}, nil -} - -// Validate returns an error if Header does not comply with the OpenAPI spec. -func (header *Header) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if header.Name != "" { - return errors.New("header 'name' MUST NOT be specified, it is given in the corresponding headers map") - } - if header.In != "" { - return errors.New("header 'in' MUST NOT be specified, it is implicitly in header") - } - - // Validate a parameter's serialization method. 
- sm, err := header.SerializationMethod() - if err != nil { - return err - } - if smSupported := false || - sm.Style == SerializationSimple && !sm.Explode || - sm.Style == SerializationSimple && sm.Explode; !smSupported { - e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a header parameter", sm.Style, sm.Explode) - return fmt.Errorf("header schema is invalid: %w", e) - } - - if (header.Schema == nil) == (len(header.Content) == 0) { - e := fmt.Errorf("parameter must contain exactly one of content and schema: %v", header) - return fmt.Errorf("header schema is invalid: %w", e) - } - if schema := header.Schema; schema != nil { - if err := schema.Validate(ctx); err != nil { - return fmt.Errorf("header schema is invalid: %w", err) - } - } - - if content := header.Content; content != nil { - e := errors.New("parameter content must only contain one entry") - if len(content) > 1 { - return fmt.Errorf("header content is invalid: %w", e) - } - - if err := content.Validate(ctx); err != nil { - return fmt.Errorf("header content is invalid: %w", err) - } - } - return nil -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go b/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go deleted file mode 100644 index d160eb1e..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/helpers.go +++ /dev/null @@ -1,41 +0,0 @@ -package openapi3 - -import ( - "fmt" - "regexp" -) - -const identifierPattern = `^[a-zA-Z0-9._-]+$` - -// IdentifierRegExp verifies whether Component object key matches 'identifierPattern' pattern, according to OpenAPI v3.x. -// However, to be able supporting legacy OpenAPI v2.x, there is a need to customize above pattern in order not to fail -// converted v2-v3 validation -var IdentifierRegExp = regexp.MustCompile(identifierPattern) - -// ValidateIdentifier returns an error if the given component name does not match IdentifierRegExp. 
-func ValidateIdentifier(value string) error { - if IdentifierRegExp.MatchString(value) { - return nil - } - return fmt.Errorf("identifier %q is not supported by OpenAPIv3 standard (regexp: %q)", value, identifierPattern) -} - -// Float64Ptr is a helper for defining OpenAPI schemas. -func Float64Ptr(value float64) *float64 { - return &value -} - -// BoolPtr is a helper for defining OpenAPI schemas. -func BoolPtr(value bool) *bool { - return &value -} - -// Int64Ptr is a helper for defining OpenAPI schemas. -func Int64Ptr(value int64) *int64 { - return &value -} - -// Uint64Ptr is a helper for defining OpenAPI schemas. -func Uint64Ptr(value uint64) *uint64 { - return &value -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/info.go b/vendor/github.com/getkin/kin-openapi/openapi3/info.go deleted file mode 100644 index ffcd3b0e..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/info.go +++ /dev/null @@ -1,91 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" -) - -// Info is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#info-object -type Info struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Title string `json:"title" yaml:"title"` // Required - Description string `json:"description,omitempty" yaml:"description,omitempty"` - TermsOfService string `json:"termsOfService,omitempty" yaml:"termsOfService,omitempty"` - Contact *Contact `json:"contact,omitempty" yaml:"contact,omitempty"` - License *License `json:"license,omitempty" yaml:"license,omitempty"` - Version string `json:"version" yaml:"version"` // Required -} - -// MarshalJSON returns the JSON encoding of Info. 
-func (info Info) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 6+len(info.Extensions)) - for k, v := range info.Extensions { - m[k] = v - } - m["title"] = info.Title - if x := info.Description; x != "" { - m["description"] = x - } - if x := info.TermsOfService; x != "" { - m["termsOfService"] = x - } - if x := info.Contact; x != nil { - m["contact"] = x - } - if x := info.License; x != nil { - m["license"] = x - } - m["version"] = info.Version - return json.Marshal(m) -} - -// UnmarshalJSON sets Info to a copy of data. -func (info *Info) UnmarshalJSON(data []byte) error { - type InfoBis Info - var x InfoBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "title") - delete(x.Extensions, "description") - delete(x.Extensions, "termsOfService") - delete(x.Extensions, "contact") - delete(x.Extensions, "license") - delete(x.Extensions, "version") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *info = Info(x) - return nil -} - -// Validate returns an error if Info does not comply with the OpenAPI spec. -func (info *Info) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if contact := info.Contact; contact != nil { - if err := contact.Validate(ctx); err != nil { - return err - } - } - - if license := info.License; license != nil { - if err := license.Validate(ctx); err != nil { - return err - } - } - - if info.Version == "" { - return errors.New("value of version must be a non-empty string") - } - - if info.Title == "" { - return errors.New("value of title must be a non-empty string") - } - - return validateExtensions(ctx, info.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go b/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go deleted file mode 100644 index e313e553..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/internalize_refs.go +++ /dev/null @@ -1,443 +0,0 @@ -package openapi3 - -import ( - "context" - "path/filepath" - "strings" -) - -type RefNameResolver func(string) string - -// DefaultRefResolver is a default implementation of refNameResolver for the -// InternalizeRefs function. -// -// If a reference points to an element inside a document, it returns the last -// element in the reference using filepath.Base. Otherwise if the reference points -// to a file, it returns the file name trimmed of all extensions. 
-func DefaultRefNameResolver(ref string) string { - if ref == "" { - return "" - } - split := strings.SplitN(ref, "#", 2) - if len(split) == 2 { - return filepath.Base(split[1]) - } - ref = split[0] - for ext := filepath.Ext(ref); len(ext) > 0; ext = filepath.Ext(ref) { - ref = strings.TrimSuffix(ref, ext) - } - return filepath.Base(ref) -} - -func schemaNames(s Schemas) []string { - out := make([]string, 0, len(s)) - for i := range s { - out = append(out, i) - } - return out -} - -func parametersMapNames(s ParametersMap) []string { - out := make([]string, 0, len(s)) - for i := range s { - out = append(out, i) - } - return out -} - -func isExternalRef(ref string, parentIsExternal bool) bool { - return ref != "" && (!strings.HasPrefix(ref, "#/components/") || parentIsExternal) -} - -func (doc *T) addSchemaToSpec(s *SchemaRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if s == nil || !isExternalRef(s.Ref, parentIsExternal) { - return false - } - - name := refNameResolver(s.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Schemas[name]; ok { - s.Ref = "#/components/schemas/" + name - return true - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Schemas == nil { - doc.Components.Schemas = make(Schemas) - } - doc.Components.Schemas[name] = s.Value.NewRef() - s.Ref = "#/components/schemas/" + name - return true -} - -func (doc *T) addParameterToSpec(p *ParameterRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if p == nil || !isExternalRef(p.Ref, parentIsExternal) { - return false - } - name := refNameResolver(p.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Parameters[name]; ok { - p.Ref = "#/components/parameters/" + name - return true - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Parameters == nil { - doc.Components.Parameters = make(ParametersMap) - } - doc.Components.Parameters[name] = 
&ParameterRef{Value: p.Value} - p.Ref = "#/components/parameters/" + name - return true -} - -func (doc *T) addHeaderToSpec(h *HeaderRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if h == nil || !isExternalRef(h.Ref, parentIsExternal) { - return false - } - name := refNameResolver(h.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Headers[name]; ok { - h.Ref = "#/components/headers/" + name - return true - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Headers == nil { - doc.Components.Headers = make(Headers) - } - doc.Components.Headers[name] = &HeaderRef{Value: h.Value} - h.Ref = "#/components/headers/" + name - return true -} - -func (doc *T) addRequestBodyToSpec(r *RequestBodyRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if r == nil || !isExternalRef(r.Ref, parentIsExternal) { - return false - } - name := refNameResolver(r.Ref) - if doc.Components != nil { - if _, ok := doc.Components.RequestBodies[name]; ok { - r.Ref = "#/components/requestBodies/" + name - return true - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.RequestBodies == nil { - doc.Components.RequestBodies = make(RequestBodies) - } - doc.Components.RequestBodies[name] = &RequestBodyRef{Value: r.Value} - r.Ref = "#/components/requestBodies/" + name - return true -} - -func (doc *T) addResponseToSpec(r *ResponseRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if r == nil || !isExternalRef(r.Ref, parentIsExternal) { - return false - } - name := refNameResolver(r.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Responses[name]; ok { - r.Ref = "#/components/responses/" + name - return true - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Responses == nil { - doc.Components.Responses = make(ResponseBodies) - } - doc.Components.Responses[name] = &ResponseRef{Value: 
r.Value} - r.Ref = "#/components/responses/" + name - return true -} - -func (doc *T) addSecuritySchemeToSpec(ss *SecuritySchemeRef, refNameResolver RefNameResolver, parentIsExternal bool) { - if ss == nil || !isExternalRef(ss.Ref, parentIsExternal) { - return - } - name := refNameResolver(ss.Ref) - if doc.Components != nil { - if _, ok := doc.Components.SecuritySchemes[name]; ok { - ss.Ref = "#/components/securitySchemes/" + name - return - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.SecuritySchemes == nil { - doc.Components.SecuritySchemes = make(SecuritySchemes) - } - doc.Components.SecuritySchemes[name] = &SecuritySchemeRef{Value: ss.Value} - ss.Ref = "#/components/securitySchemes/" + name - -} - -func (doc *T) addExampleToSpec(e *ExampleRef, refNameResolver RefNameResolver, parentIsExternal bool) { - if e == nil || !isExternalRef(e.Ref, parentIsExternal) { - return - } - name := refNameResolver(e.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Examples[name]; ok { - e.Ref = "#/components/examples/" + name - return - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Examples == nil { - doc.Components.Examples = make(Examples) - } - doc.Components.Examples[name] = &ExampleRef{Value: e.Value} - e.Ref = "#/components/examples/" + name - -} - -func (doc *T) addLinkToSpec(l *LinkRef, refNameResolver RefNameResolver, parentIsExternal bool) { - if l == nil || !isExternalRef(l.Ref, parentIsExternal) { - return - } - name := refNameResolver(l.Ref) - if doc.Components != nil { - if _, ok := doc.Components.Links[name]; ok { - l.Ref = "#/components/links/" + name - return - } - } - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Links == nil { - doc.Components.Links = make(Links) - } - doc.Components.Links[name] = &LinkRef{Value: l.Value} - l.Ref = "#/components/links/" + name - -} - -func (doc *T) addCallbackToSpec(c 
*CallbackRef, refNameResolver RefNameResolver, parentIsExternal bool) bool { - if c == nil || !isExternalRef(c.Ref, parentIsExternal) { - return false - } - name := refNameResolver(c.Ref) - - if doc.Components == nil { - doc.Components = &Components{} - } - if doc.Components.Callbacks == nil { - doc.Components.Callbacks = make(Callbacks) - } - c.Ref = "#/components/callbacks/" + name - doc.Components.Callbacks[name] = &CallbackRef{Value: c.Value} - return true -} - -func (doc *T) derefSchema(s *Schema, refNameResolver RefNameResolver, parentIsExternal bool) { - if s == nil || doc.isVisitedSchema(s) { - return - } - - for _, list := range []SchemaRefs{s.AllOf, s.AnyOf, s.OneOf} { - for _, s2 := range list { - isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal) - if s2 != nil { - doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal) - } - } - } - for _, s2 := range s.Properties { - isExternal := doc.addSchemaToSpec(s2, refNameResolver, parentIsExternal) - if s2 != nil { - doc.derefSchema(s2.Value, refNameResolver, isExternal || parentIsExternal) - } - } - for _, ref := range []*SchemaRef{s.Not, s.AdditionalProperties.Schema, s.Items} { - isExternal := doc.addSchemaToSpec(ref, refNameResolver, parentIsExternal) - if ref != nil { - doc.derefSchema(ref.Value, refNameResolver, isExternal || parentIsExternal) - } - } -} - -func (doc *T) derefHeaders(hs Headers, refNameResolver RefNameResolver, parentIsExternal bool) { - for _, h := range hs { - isExternal := doc.addHeaderToSpec(h, refNameResolver, parentIsExternal) - if doc.isVisitedHeader(h.Value) { - continue - } - doc.derefParameter(h.Value.Parameter, refNameResolver, parentIsExternal || isExternal) - } -} - -func (doc *T) derefExamples(es Examples, refNameResolver RefNameResolver, parentIsExternal bool) { - for _, e := range es { - doc.addExampleToSpec(e, refNameResolver, parentIsExternal) - } -} - -func (doc *T) derefContent(c Content, refNameResolver RefNameResolver, 
parentIsExternal bool) { - for _, mediatype := range c { - isExternal := doc.addSchemaToSpec(mediatype.Schema, refNameResolver, parentIsExternal) - if mediatype.Schema != nil { - doc.derefSchema(mediatype.Schema.Value, refNameResolver, isExternal || parentIsExternal) - } - doc.derefExamples(mediatype.Examples, refNameResolver, parentIsExternal) - for _, e := range mediatype.Encoding { - doc.derefHeaders(e.Headers, refNameResolver, parentIsExternal) - } - } -} - -func (doc *T) derefLinks(ls Links, refNameResolver RefNameResolver, parentIsExternal bool) { - for _, l := range ls { - doc.addLinkToSpec(l, refNameResolver, parentIsExternal) - } -} - -func (doc *T) derefResponse(r *ResponseRef, refNameResolver RefNameResolver, parentIsExternal bool) { - isExternal := doc.addResponseToSpec(r, refNameResolver, parentIsExternal) - if v := r.Value; v != nil { - doc.derefHeaders(v.Headers, refNameResolver, isExternal || parentIsExternal) - doc.derefContent(v.Content, refNameResolver, isExternal || parentIsExternal) - doc.derefLinks(v.Links, refNameResolver, isExternal || parentIsExternal) - } -} - -func (doc *T) derefResponses(rs *Responses, refNameResolver RefNameResolver, parentIsExternal bool) { - doc.derefResponseBodies(rs.Map(), refNameResolver, parentIsExternal) -} - -func (doc *T) derefResponseBodies(es ResponseBodies, refNameResolver RefNameResolver, parentIsExternal bool) { - for _, e := range es { - doc.derefResponse(e, refNameResolver, parentIsExternal) - } -} - -func (doc *T) derefParameter(p Parameter, refNameResolver RefNameResolver, parentIsExternal bool) { - isExternal := doc.addSchemaToSpec(p.Schema, refNameResolver, parentIsExternal) - doc.derefContent(p.Content, refNameResolver, parentIsExternal) - if p.Schema != nil { - doc.derefSchema(p.Schema.Value, refNameResolver, isExternal || parentIsExternal) - } -} - -func (doc *T) derefRequestBody(r RequestBody, refNameResolver RefNameResolver, parentIsExternal bool) { - doc.derefContent(r.Content, refNameResolver, 
parentIsExternal) -} - -func (doc *T) derefPaths(paths map[string]*PathItem, refNameResolver RefNameResolver, parentIsExternal bool) { - for _, ops := range paths { - pathIsExternal := isExternalRef(ops.Ref, parentIsExternal) - // inline full operations - ops.Ref = "" - - for _, param := range ops.Parameters { - doc.addParameterToSpec(param, refNameResolver, pathIsExternal) - } - - for _, op := range ops.Operations() { - isExternal := doc.addRequestBodyToSpec(op.RequestBody, refNameResolver, pathIsExternal) - if op.RequestBody != nil && op.RequestBody.Value != nil { - doc.derefRequestBody(*op.RequestBody.Value, refNameResolver, pathIsExternal || isExternal) - } - for _, cb := range op.Callbacks { - isExternal := doc.addCallbackToSpec(cb, refNameResolver, pathIsExternal) - if cb.Value != nil { - cbValue := (*cb.Value).Map() - doc.derefPaths(cbValue, refNameResolver, pathIsExternal || isExternal) - } - } - doc.derefResponses(op.Responses, refNameResolver, pathIsExternal) - for _, param := range op.Parameters { - isExternal := doc.addParameterToSpec(param, refNameResolver, pathIsExternal) - if param.Value != nil { - doc.derefParameter(*param.Value, refNameResolver, pathIsExternal || isExternal) - } - } - } - } -} - -// InternalizeRefs removes all references to external files from the spec and moves them -// to the components section. -// -// refNameResolver takes in references to returns a name to store the reference under locally. -// It MUST return a unique name for each reference type. -// A default implementation is provided that will suffice for most use cases. See the function -// documentation for more details. 
-// -// Example: -// -// doc.InternalizeRefs(context.Background(), nil) -func (doc *T) InternalizeRefs(ctx context.Context, refNameResolver func(ref string) string) { - doc.resetVisited() - - if refNameResolver == nil { - refNameResolver = DefaultRefNameResolver - } - - if components := doc.Components; components != nil { - names := schemaNames(components.Schemas) - for _, name := range names { - schema := components.Schemas[name] - isExternal := doc.addSchemaToSpec(schema, refNameResolver, false) - if schema != nil { - schema.Ref = "" // always dereference the top level - doc.derefSchema(schema.Value, refNameResolver, isExternal) - } - } - names = parametersMapNames(components.Parameters) - for _, name := range names { - p := components.Parameters[name] - isExternal := doc.addParameterToSpec(p, refNameResolver, false) - if p != nil && p.Value != nil { - p.Ref = "" // always dereference the top level - doc.derefParameter(*p.Value, refNameResolver, isExternal) - } - } - doc.derefHeaders(components.Headers, refNameResolver, false) - for _, req := range components.RequestBodies { - isExternal := doc.addRequestBodyToSpec(req, refNameResolver, false) - if req != nil && req.Value != nil { - req.Ref = "" // always dereference the top level - doc.derefRequestBody(*req.Value, refNameResolver, isExternal) - } - } - doc.derefResponseBodies(components.Responses, refNameResolver, false) - for _, ss := range components.SecuritySchemes { - doc.addSecuritySchemeToSpec(ss, refNameResolver, false) - } - doc.derefExamples(components.Examples, refNameResolver, false) - doc.derefLinks(components.Links, refNameResolver, false) - - for _, cb := range components.Callbacks { - isExternal := doc.addCallbackToSpec(cb, refNameResolver, false) - if cb != nil && cb.Value != nil { - cb.Ref = "" // always dereference the top level - cbValue := (*cb.Value).Map() - doc.derefPaths(cbValue, refNameResolver, isExternal) - } - } - } - - doc.derefPaths(doc.Paths.Map(), refNameResolver, false) -} diff 
--git a/vendor/github.com/getkin/kin-openapi/openapi3/license.go b/vendor/github.com/getkin/kin-openapi/openapi3/license.go deleted file mode 100644 index 3d2d2f06..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/license.go +++ /dev/null @@ -1,57 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" -) - -// License is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#license-object -type License struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Name string `json:"name" yaml:"name"` // Required - URL string `json:"url,omitempty" yaml:"url,omitempty"` -} - -// MarshalJSON returns the JSON encoding of License. -func (license License) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 2+len(license.Extensions)) - for k, v := range license.Extensions { - m[k] = v - } - m["name"] = license.Name - if x := license.URL; x != "" { - m["url"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets License to a copy of data. -func (license *License) UnmarshalJSON(data []byte) error { - type LicenseBis License - var x LicenseBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "name") - delete(x.Extensions, "url") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *license = License(x) - return nil -} - -// Validate returns an error if License does not comply with the OpenAPI spec. -func (license *License) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if license.Name == "" { - return errors.New("value of license name must be a non-empty string") - } - - return validateExtensions(ctx, license.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/link.go b/vendor/github.com/getkin/kin-openapi/openapi3/link.go deleted file mode 100644 index 23a8df41..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/link.go +++ /dev/null @@ -1,85 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" -) - -// Link is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#link-object -type Link struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - OperationRef string `json:"operationRef,omitempty" yaml:"operationRef,omitempty"` - OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Parameters map[string]interface{} `json:"parameters,omitempty" yaml:"parameters,omitempty"` - Server *Server `json:"server,omitempty" yaml:"server,omitempty"` - RequestBody interface{} `json:"requestBody,omitempty" yaml:"requestBody,omitempty"` -} - -// MarshalJSON returns the JSON encoding of Link. -func (link Link) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 6+len(link.Extensions)) - for k, v := range link.Extensions { - m[k] = v - } - - if x := link.OperationRef; x != "" { - m["operationRef"] = x - } - if x := link.OperationID; x != "" { - m["operationId"] = x - } - if x := link.Description; x != "" { - m["description"] = x - } - if x := link.Parameters; len(x) != 0 { - m["parameters"] = x - } - if x := link.Server; x != nil { - m["server"] = x - } - if x := link.RequestBody; x != nil { - m["requestBody"] = x - } - - return json.Marshal(m) -} - -// UnmarshalJSON sets Link to a copy of data. 
-func (link *Link) UnmarshalJSON(data []byte) error { - type LinkBis Link - var x LinkBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "operationRef") - delete(x.Extensions, "operationId") - delete(x.Extensions, "description") - delete(x.Extensions, "parameters") - delete(x.Extensions, "server") - delete(x.Extensions, "requestBody") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *link = Link(x) - return nil -} - -// Validate returns an error if Link does not comply with the OpenAPI spec. -func (link *Link) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if link.OperationID == "" && link.OperationRef == "" { - return errors.New("missing operationId or operationRef on link") - } - if link.OperationID != "" && link.OperationRef != "" { - return fmt.Errorf("operationId %q and operationRef %q are mutually exclusive", link.OperationID, link.OperationRef) - } - - return validateExtensions(ctx, link.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/loader.go b/vendor/github.com/getkin/kin-openapi/openapi3/loader.go deleted file mode 100644 index 1128aaa7..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/loader.go +++ /dev/null @@ -1,1109 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "io" - "net/url" - "os" - "path" - "path/filepath" - "reflect" - "sort" - "strconv" - "strings" -) - -var CircularReferenceError = "kin-openapi bug found: circular schema reference not handled" -var CircularReferenceCounter = 3 - -func foundUnresolvedRef(ref string) error { - return fmt.Errorf("found unresolved ref: %q", ref) -} - -func failedToResolveRefFragmentPart(value, what string) error { - return fmt.Errorf("failed to resolve %q in fragment in URI: %q", what, value) -} - -// Loader helps deserialize an OpenAPIv3 document -type Loader 
struct { - // IsExternalRefsAllowed enables visiting other files - IsExternalRefsAllowed bool - - // ReadFromURIFunc allows overriding the any file/URL reading func - ReadFromURIFunc ReadFromURIFunc - - Context context.Context - - rootDir string - rootLocation string - - visitedPathItemRefs map[string]struct{} - - visitedDocuments map[string]*T - - visitedCallback map[*Callback]struct{} - visitedExample map[*Example]struct{} - visitedHeader map[*Header]struct{} - visitedLink map[*Link]struct{} - visitedParameter map[*Parameter]struct{} - visitedRequestBody map[*RequestBody]struct{} - visitedResponse map[*Response]struct{} - visitedSchema map[*Schema]struct{} - visitedSecurityScheme map[*SecurityScheme]struct{} -} - -// NewLoader returns an empty Loader -func NewLoader() *Loader { - return &Loader{ - Context: context.Background(), - } -} - -func (loader *Loader) resetVisitedPathItemRefs() { - loader.visitedPathItemRefs = make(map[string]struct{}) -} - -// LoadFromURI loads a spec from a remote URL -func (loader *Loader) LoadFromURI(location *url.URL) (*T, error) { - loader.resetVisitedPathItemRefs() - return loader.loadFromURIInternal(location) -} - -// LoadFromFile loads a spec from a local file path -func (loader *Loader) LoadFromFile(location string) (*T, error) { - loader.rootDir = path.Dir(location) - return loader.LoadFromURI(&url.URL{Path: filepath.ToSlash(location)}) -} - -func (loader *Loader) loadFromURIInternal(location *url.URL) (*T, error) { - data, err := loader.readURL(location) - if err != nil { - return nil, err - } - return loader.loadFromDataWithPathInternal(data, location) -} - -func (loader *Loader) allowsExternalRefs(ref string) (err error) { - if !loader.IsExternalRefsAllowed { - err = fmt.Errorf("encountered disallowed external reference: %q", ref) - } - return -} - -func (loader *Loader) loadSingleElementFromURI(ref string, rootPath *url.URL, element interface{}) (*url.URL, error) { - if err := loader.allowsExternalRefs(ref); err != nil { - 
return nil, err - } - - resolvedPath, err := resolvePathWithRef(ref, rootPath) - if err != nil { - return nil, err - } - if frag := resolvedPath.Fragment; frag != "" { - return nil, fmt.Errorf("unexpected ref fragment %q", frag) - } - - data, err := loader.readURL(resolvedPath) - if err != nil { - return nil, err - } - if err := unmarshal(data, element); err != nil { - return nil, err - } - - return resolvedPath, nil -} - -func (loader *Loader) readURL(location *url.URL) ([]byte, error) { - if f := loader.ReadFromURIFunc; f != nil { - return f(loader, location) - } - return DefaultReadFromURI(loader, location) -} - -// LoadFromStdin loads a spec from stdin -func (loader *Loader) LoadFromStdin() (*T, error) { - data, err := io.ReadAll(os.Stdin) - if err != nil { - return nil, fmt.Errorf("read from stdin: %w", err) - } - return loader.LoadFromData(data) -} - -// LoadFromData loads a spec from a byte array -func (loader *Loader) LoadFromData(data []byte) (*T, error) { - loader.resetVisitedPathItemRefs() - doc := &T{} - if err := unmarshal(data, doc); err != nil { - return nil, err - } - if err := loader.ResolveRefsIn(doc, nil); err != nil { - return nil, err - } - return doc, nil -} - -// LoadFromDataWithPath takes the OpenAPI document data in bytes and a path where the resolver can find referred -// elements and returns a *T with all resolved data or an error if unable to load data or resolve refs. 
-func (loader *Loader) LoadFromDataWithPath(data []byte, location *url.URL) (*T, error) { - loader.resetVisitedPathItemRefs() - return loader.loadFromDataWithPathInternal(data, location) -} - -func (loader *Loader) loadFromDataWithPathInternal(data []byte, location *url.URL) (*T, error) { - if loader.visitedDocuments == nil { - loader.visitedDocuments = make(map[string]*T) - loader.rootLocation = location.Path - } - uri := location.String() - if doc, ok := loader.visitedDocuments[uri]; ok { - return doc, nil - } - - doc := &T{} - loader.visitedDocuments[uri] = doc - - if err := unmarshal(data, doc); err != nil { - return nil, err - } - if err := loader.ResolveRefsIn(doc, location); err != nil { - return nil, err - } - - return doc, nil -} - -// ResolveRefsIn expands references if for instance spec was just unmarshaled -func (loader *Loader) ResolveRefsIn(doc *T, location *url.URL) (err error) { - if loader.Context == nil { - loader.Context = context.Background() - } - - if loader.visitedPathItemRefs == nil { - loader.resetVisitedPathItemRefs() - } - - if components := doc.Components; components != nil { - for _, component := range components.Headers { - if err = loader.resolveHeaderRef(doc, component, location); err != nil { - return - } - } - for _, component := range components.Parameters { - if err = loader.resolveParameterRef(doc, component, location); err != nil { - return - } - } - for _, component := range components.RequestBodies { - if err = loader.resolveRequestBodyRef(doc, component, location); err != nil { - return - } - } - for _, component := range components.Responses { - if err = loader.resolveResponseRef(doc, component, location); err != nil { - return - } - } - for _, component := range components.Schemas { - if err = loader.resolveSchemaRef(doc, component, location, []string{}); err != nil { - return - } - } - for _, component := range components.SecuritySchemes { - if err = loader.resolveSecuritySchemeRef(doc, component, location); err != nil { 
- return - } - } - - examples := make([]string, 0, len(components.Examples)) - for name := range components.Examples { - examples = append(examples, name) - } - sort.Strings(examples) - for _, name := range examples { - component := components.Examples[name] - if err = loader.resolveExampleRef(doc, component, location); err != nil { - return - } - } - - for _, component := range components.Callbacks { - if err = loader.resolveCallbackRef(doc, component, location); err != nil { - return - } - } - } - - // Visit all operations - for _, pathItem := range doc.Paths.Map() { - if pathItem == nil { - continue - } - if err = loader.resolvePathItemRef(doc, pathItem, location); err != nil { - return - } - } - - return -} - -func join(basePath *url.URL, relativePath *url.URL) *url.URL { - if basePath == nil { - return relativePath - } - newPath := *basePath - newPath.Path = path.Join(path.Dir(newPath.Path), relativePath.Path) - return &newPath -} - -func resolvePath(basePath *url.URL, componentPath *url.URL) *url.URL { - if is_file(componentPath) { - // support absolute paths - if componentPath.Path[0] == '/' { - return componentPath - } - return join(basePath, componentPath) - } - return componentPath -} - -func resolvePathWithRef(ref string, rootPath *url.URL) (*url.URL, error) { - parsedURL, err := url.Parse(ref) - if err != nil { - return nil, fmt.Errorf("cannot parse reference: %q: %w", ref, err) - } - - resolvedPath := resolvePath(rootPath, parsedURL) - resolvedPath.Fragment = parsedURL.Fragment - return resolvedPath, nil -} - -func isSingleRefElement(ref string) bool { - return !strings.Contains(ref, "#") -} - -func (loader *Loader) resolveComponent(doc *T, ref string, path *url.URL, resolved interface{}) ( - componentDoc *T, - componentPath *url.URL, - err error, -) { - if componentDoc, ref, componentPath, err = loader.resolveRef(doc, ref, path); err != nil { - return nil, nil, err - } - - parsedURL, err := url.Parse(ref) - if err != nil { - return nil, nil, 
fmt.Errorf("cannot parse reference: %q: %v", ref, parsedURL) - } - fragment := parsedURL.Fragment - if fragment == "" { - fragment = "/" - } - if fragment[0] != '/' { - return nil, nil, fmt.Errorf("expected fragment prefix '#/' in URI %q", ref) - } - - drill := func(cursor interface{}) (interface{}, error) { - for _, pathPart := range strings.Split(fragment[1:], "/") { - pathPart = unescapeRefString(pathPart) - attempted := false - - switch c := cursor.(type) { - // Special case of T - // See issue856: a ref to doc => we assume that doc is a T => things live in T.Extensions - case *T: - if pathPart == "" { - cursor = c.Extensions - attempted = true - } - - // Special case due to multijson - case *SchemaRef: - if pathPart == "additionalProperties" { - if ap := c.Value.AdditionalProperties.Has; ap != nil { - cursor = *ap - } else { - cursor = c.Value.AdditionalProperties.Schema - } - attempted = true - } - - case *Responses: - cursor = c.m // m map[string]*ResponseRef - case *Callback: - cursor = c.m // m map[string]*PathItem - case *Paths: - cursor = c.m // m map[string]*PathItem - } - - if !attempted { - if cursor, err = drillIntoField(cursor, pathPart); err != nil { - e := failedToResolveRefFragmentPart(ref, pathPart) - return nil, fmt.Errorf("%s: %w", e, err) - } - } - - if cursor == nil { - return nil, failedToResolveRefFragmentPart(ref, pathPart) - } - } - return cursor, nil - } - var cursor interface{} - if cursor, err = drill(componentDoc); err != nil { - if path == nil { - return nil, nil, err - } - var err2 error - data, err2 := loader.readURL(path) - if err2 != nil { - return nil, nil, err - } - if err2 = unmarshal(data, &cursor); err2 != nil { - return nil, nil, err - } - if cursor, err2 = drill(cursor); err2 != nil || cursor == nil { - return nil, nil, err - } - err = nil - } - - switch { - case reflect.TypeOf(cursor) == reflect.TypeOf(resolved): - reflect.ValueOf(resolved).Elem().Set(reflect.ValueOf(cursor).Elem()) - return componentDoc, componentPath, 
nil - - case reflect.TypeOf(cursor) == reflect.TypeOf(map[string]interface{}{}): - codec := func(got, expect interface{}) error { - enc, err := json.Marshal(got) - if err != nil { - return err - } - if err = json.Unmarshal(enc, expect); err != nil { - return err - } - return nil - } - if err := codec(cursor, resolved); err != nil { - return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved)) - } - return componentDoc, componentPath, nil - - default: - return nil, nil, fmt.Errorf("bad data in %q (expecting %s)", ref, readableType(resolved)) - } -} - -func readableType(x interface{}) string { - switch x.(type) { - case *Callback: - return "callback object" - case *CallbackRef: - return "ref to callback object" - case *ExampleRef: - return "ref to example object" - case *HeaderRef: - return "ref to header object" - case *LinkRef: - return "ref to link object" - case *ParameterRef: - return "ref to parameter object" - case *PathItem: - return "pathItem object" - case *RequestBodyRef: - return "ref to requestBody object" - case *ResponseRef: - return "ref to response object" - case *SchemaRef: - return "ref to schema object" - case *SecuritySchemeRef: - return "ref to securityScheme object" - default: - panic(fmt.Sprintf("unreachable %T", x)) - } -} - -func drillIntoField(cursor interface{}, fieldName string) (interface{}, error) { - switch val := reflect.Indirect(reflect.ValueOf(cursor)); val.Kind() { - - case reflect.Map: - elementValue := val.MapIndex(reflect.ValueOf(fieldName)) - if !elementValue.IsValid() { - return nil, fmt.Errorf("map key %q not found", fieldName) - } - return elementValue.Interface(), nil - - case reflect.Slice: - i, err := strconv.ParseUint(fieldName, 10, 32) - if err != nil { - return nil, err - } - index := int(i) - if 0 > index || index >= val.Len() { - return nil, errors.New("slice index out of bounds") - } - return val.Index(index).Interface(), nil - - case reflect.Struct: - hasFields := false - for i := 0; i 
< val.NumField(); i++ { - hasFields = true - if yamlTag := val.Type().Field(i).Tag.Get("yaml"); yamlTag != "-" { - if tagName := strings.Split(yamlTag, ",")[0]; tagName != "" { - if fieldName == tagName { - return val.Field(i).Interface(), nil - } - } - } - } - - // if cursor is a "ref wrapper" struct (e.g. RequestBodyRef), - if _, ok := val.Type().FieldByName("Value"); ok { - // try digging into its Value field - return drillIntoField(val.FieldByName("Value").Interface(), fieldName) - } - if hasFields { - if ff := val.Type().Field(0); ff.PkgPath == "" && ff.Name == "Extensions" { - extensions := val.Field(0).Interface().(map[string]interface{}) - if enc, ok := extensions[fieldName]; ok { - return enc, nil - } - } - } - return nil, fmt.Errorf("struct field %q not found", fieldName) - - default: - return nil, errors.New("not a map, slice nor struct") - } -} - -func (loader *Loader) resolveRef(doc *T, ref string, path *url.URL) (*T, string, *url.URL, error) { - if ref != "" && ref[0] == '#' { - return doc, ref, path, nil - } - - if err := loader.allowsExternalRefs(ref); err != nil { - return nil, "", nil, err - } - - resolvedPath, err := resolvePathWithRef(ref, path) - if err != nil { - return nil, "", nil, err - } - fragment := "#" + resolvedPath.Fragment - resolvedPath.Fragment = "" - - if doc, err = loader.loadFromURIInternal(resolvedPath); err != nil { - return nil, "", nil, fmt.Errorf("error resolving reference %q: %w", ref, err) - } - - return doc, fragment, resolvedPath, nil -} - -var ( - errMUSTCallback = errors.New("invalid callback: value MUST be an object") - errMUSTExample = errors.New("invalid example: value MUST be an object") - errMUSTHeader = errors.New("invalid header: value MUST be an object") - errMUSTLink = errors.New("invalid link: value MUST be an object") - errMUSTParameter = errors.New("invalid parameter: value MUST be an object") - errMUSTPathItem = errors.New("invalid path item: value MUST be an object") - errMUSTRequestBody = 
errors.New("invalid requestBody: value MUST be an object") - errMUSTResponse = errors.New("invalid response: value MUST be an object") - errMUSTSchema = errors.New("invalid schema: value MUST be an object") - errMUSTSecurityScheme = errors.New("invalid securityScheme: value MUST be an object") -) - -func (loader *Loader) resolveHeaderRef(doc *T, component *HeaderRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTHeader - } - - if component.Value != nil { - if loader.visitedHeader == nil { - loader.visitedHeader = make(map[*Header]struct{}) - } - if _, ok := loader.visitedHeader[component.Value]; ok { - return nil - } - loader.visitedHeader[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var header Header - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &header); err != nil { - return err - } - component.Value = &header - } else { - var resolved HeaderRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveHeaderRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTHeader { - return nil - } - return err - } - component.Value = resolved.Value - } - } - value := component.Value - if value == nil { - return nil - } - - if schema := value.Schema; schema != nil { - if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { - return err - } - } - return nil -} - -func (loader *Loader) resolveParameterRef(doc *T, component *ParameterRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTParameter - } - - if component.Value != nil { - if loader.visitedParameter == nil { - loader.visitedParameter = make(map[*Parameter]struct{}) - } - if _, ok := loader.visitedParameter[component.Value]; ok { - return nil - } - loader.visitedParameter[component.Value] = struct{}{} - } - - if ref := 
component.Ref; ref != "" { - if isSingleRefElement(ref) { - var param Parameter - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, ¶m); err != nil { - return err - } - component.Value = ¶m - } else { - var resolved ParameterRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveParameterRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTParameter { - return nil - } - return err - } - component.Value = resolved.Value - } - } - value := component.Value - if value == nil { - return nil - } - - if value.Content != nil && value.Schema != nil { - return errors.New("cannot contain both schema and content in a parameter") - } - for _, contentType := range value.Content { - if schema := contentType.Schema; schema != nil { - if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { - return err - } - } - } - if schema := value.Schema; schema != nil { - if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { - return err - } - } - return nil -} - -func (loader *Loader) resolveRequestBodyRef(doc *T, component *RequestBodyRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTRequestBody - } - - if component.Value != nil { - if loader.visitedRequestBody == nil { - loader.visitedRequestBody = make(map[*RequestBody]struct{}) - } - if _, ok := loader.visitedRequestBody[component.Value]; ok { - return nil - } - loader.visitedRequestBody[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var requestBody RequestBody - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &requestBody); err != nil { - return err - } - component.Value = &requestBody - } else { - var resolved RequestBodyRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err 
!= nil { - return err - } - if err = loader.resolveRequestBodyRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTRequestBody { - return nil - } - return err - } - component.Value = resolved.Value - } - } - value := component.Value - if value == nil { - return nil - } - - for _, contentType := range value.Content { - if contentType == nil { - continue - } - examples := make([]string, 0, len(contentType.Examples)) - for name := range contentType.Examples { - examples = append(examples, name) - } - sort.Strings(examples) - for _, name := range examples { - example := contentType.Examples[name] - if err := loader.resolveExampleRef(doc, example, documentPath); err != nil { - return err - } - contentType.Examples[name] = example - } - if schema := contentType.Schema; schema != nil { - if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { - return err - } - } - } - return nil -} - -func (loader *Loader) resolveResponseRef(doc *T, component *ResponseRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTResponse - } - - if component.Value != nil { - if loader.visitedResponse == nil { - loader.visitedResponse = make(map[*Response]struct{}) - } - if _, ok := loader.visitedResponse[component.Value]; ok { - return nil - } - loader.visitedResponse[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var resp Response - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resp); err != nil { - return err - } - component.Value = &resp - } else { - var resolved ResponseRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveResponseRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTResponse { - return nil - } - return err - } - component.Value = resolved.Value - } - } - value := component.Value - if value == nil { 
- return nil - } - - for _, header := range value.Headers { - if err := loader.resolveHeaderRef(doc, header, documentPath); err != nil { - return err - } - } - for _, contentType := range value.Content { - if contentType == nil { - continue - } - examples := make([]string, 0, len(contentType.Examples)) - for name := range contentType.Examples { - examples = append(examples, name) - } - sort.Strings(examples) - for _, name := range examples { - example := contentType.Examples[name] - if err := loader.resolveExampleRef(doc, example, documentPath); err != nil { - return err - } - contentType.Examples[name] = example - } - if schema := contentType.Schema; schema != nil { - if err := loader.resolveSchemaRef(doc, schema, documentPath, []string{}); err != nil { - return err - } - contentType.Schema = schema - } - } - for _, link := range value.Links { - if err := loader.resolveLinkRef(doc, link, documentPath); err != nil { - return err - } - } - return nil -} - -func (loader *Loader) resolveSchemaRef(doc *T, component *SchemaRef, documentPath *url.URL, visited []string) (err error) { - if component.isEmpty() { - return errMUSTSchema - } - - if component.Value != nil { - if loader.visitedSchema == nil { - loader.visitedSchema = make(map[*Schema]struct{}) - } - if _, ok := loader.visitedSchema[component.Value]; ok { - return nil - } - loader.visitedSchema[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var schema Schema - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &schema); err != nil { - return err - } - component.Value = &schema - } else { - if visitedLimit(visited, ref) { - visited = append(visited, ref) - return fmt.Errorf("%s with length %d - %s", CircularReferenceError, len(visited), strings.Join(visited, " -> ")) - } - visited = append(visited, ref) - - var resolved SchemaRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil 
{ - return err - } - if err := loader.resolveSchemaRef(doc, &resolved, componentPath, visited); err != nil { - if err == errMUSTSchema { - return nil - } - return err - } - component.Value = resolved.Value - } - if loader.visitedSchema == nil { - loader.visitedSchema = make(map[*Schema]struct{}) - } - loader.visitedSchema[component.Value] = struct{}{} - } - value := component.Value - if value == nil { - return nil - } - - // ResolveRefs referred schemas - if v := value.Items; v != nil { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - for _, v := range value.Properties { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - if v := value.AdditionalProperties.Schema; v != nil { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - if v := value.Not; v != nil { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - for _, v := range value.AllOf { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - for _, v := range value.AnyOf { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - for _, v := range value.OneOf { - if err := loader.resolveSchemaRef(doc, v, documentPath, visited); err != nil { - return err - } - } - return nil -} - -func (loader *Loader) resolveSecuritySchemeRef(doc *T, component *SecuritySchemeRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTSecurityScheme - } - - if component.Value != nil { - if loader.visitedSecurityScheme == nil { - loader.visitedSecurityScheme = make(map[*SecurityScheme]struct{}) - } - if _, ok := loader.visitedSecurityScheme[component.Value]; ok { - return nil - } - loader.visitedSecurityScheme[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if 
isSingleRefElement(ref) { - var scheme SecurityScheme - if _, err = loader.loadSingleElementFromURI(ref, documentPath, &scheme); err != nil { - return err - } - component.Value = &scheme - } else { - var resolved SecuritySchemeRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveSecuritySchemeRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTSecurityScheme { - return nil - } - return err - } - component.Value = resolved.Value - } - } - return nil -} - -func (loader *Loader) resolveExampleRef(doc *T, component *ExampleRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTExample - } - - if component.Value != nil { - if loader.visitedExample == nil { - loader.visitedExample = make(map[*Example]struct{}) - } - if _, ok := loader.visitedExample[component.Value]; ok { - return nil - } - loader.visitedExample[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var example Example - if _, err = loader.loadSingleElementFromURI(ref, documentPath, &example); err != nil { - return err - } - component.Value = &example - } else { - var resolved ExampleRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveExampleRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTExample { - return nil - } - return err - } - component.Value = resolved.Value - } - } - return nil -} - -func (loader *Loader) resolveCallbackRef(doc *T, component *CallbackRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTCallback - } - - if component.Value != nil { - if loader.visitedCallback == nil { - loader.visitedCallback = make(map[*Callback]struct{}) - } - if _, ok := loader.visitedCallback[component.Value]; ok { - return nil - } - 
loader.visitedCallback[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var resolved Callback - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &resolved); err != nil { - return err - } - component.Value = &resolved - } else { - var resolved CallbackRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err = loader.resolveCallbackRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTCallback { - return nil - } - return err - } - component.Value = resolved.Value - } - } - value := component.Value - if value == nil { - return nil - } - - for _, pathItem := range value.Map() { - if err = loader.resolvePathItemRef(doc, pathItem, documentPath); err != nil { - return err - } - } - return nil -} - -func (loader *Loader) resolveLinkRef(doc *T, component *LinkRef, documentPath *url.URL) (err error) { - if component.isEmpty() { - return errMUSTLink - } - - if component.Value != nil { - if loader.visitedLink == nil { - loader.visitedLink = make(map[*Link]struct{}) - } - if _, ok := loader.visitedLink[component.Value]; ok { - return nil - } - loader.visitedLink[component.Value] = struct{}{} - } - - if ref := component.Ref; ref != "" { - if isSingleRefElement(ref) { - var link Link - if _, err = loader.loadSingleElementFromURI(ref, documentPath, &link); err != nil { - return err - } - component.Value = &link - } else { - var resolved LinkRef - doc, componentPath, err := loader.resolveComponent(doc, ref, documentPath, &resolved) - if err != nil { - return err - } - if err := loader.resolveLinkRef(doc, &resolved, componentPath); err != nil { - if err == errMUSTLink { - return nil - } - return err - } - component.Value = resolved.Value - } - } - return nil -} - -func (loader *Loader) resolvePathItemRef(doc *T, pathItem *PathItem, documentPath *url.URL) (err error) { - if pathItem == nil { - err = 
errMUSTPathItem - return - } - - if ref := pathItem.Ref; ref != "" { - if !pathItem.isEmpty() { - return - } - if isSingleRefElement(ref) { - var p PathItem - if documentPath, err = loader.loadSingleElementFromURI(ref, documentPath, &p); err != nil { - return - } - *pathItem = p - } else { - var resolved PathItem - if doc, documentPath, err = loader.resolveComponent(doc, ref, documentPath, &resolved); err != nil { - if err == errMUSTPathItem { - return nil - } - return - } - *pathItem = resolved - } - pathItem.Ref = ref - } - - for _, parameter := range pathItem.Parameters { - if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil { - return - } - } - for _, operation := range pathItem.Operations() { - for _, parameter := range operation.Parameters { - if err = loader.resolveParameterRef(doc, parameter, documentPath); err != nil { - return - } - } - if requestBody := operation.RequestBody; requestBody != nil { - if err = loader.resolveRequestBodyRef(doc, requestBody, documentPath); err != nil { - return - } - } - for _, response := range operation.Responses.Map() { - if err = loader.resolveResponseRef(doc, response, documentPath); err != nil { - return - } - } - for _, callback := range operation.Callbacks { - if err = loader.resolveCallbackRef(doc, callback, documentPath); err != nil { - return - } - } - } - return -} - -func unescapeRefString(ref string) string { - return strings.Replace(strings.Replace(ref, "~1", "/", -1), "~0", "~", -1) -} - -func visitedLimit(visited []string, ref string) bool { - visitedCount := 0 - for _, v := range visited { - if v == ref { - visitedCount++ - if visitedCount >= CircularReferenceCounter { - return true - } - } - } - return false -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go b/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go deleted file mode 100644 index ba7b5f24..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/loader_uri_reader.go +++ 
/dev/null @@ -1,116 +0,0 @@ -package openapi3 - -import ( - "errors" - "fmt" - "io" - "net/http" - "net/url" - "os" - "path/filepath" - "sync" -) - -// ReadFromURIFunc defines a function which reads the contents of a resource -// located at a URI. -type ReadFromURIFunc func(loader *Loader, url *url.URL) ([]byte, error) - -var uriMu = &sync.RWMutex{} - -// ErrURINotSupported indicates the ReadFromURIFunc does not know how to handle a -// given URI. -var ErrURINotSupported = errors.New("unsupported URI") - -// ReadFromURIs returns a ReadFromURIFunc which tries to read a URI using the -// given reader functions, in the same order. If a reader function does not -// support the URI and returns ErrURINotSupported, the next function is checked -// until a match is found, or the URI is not supported by any. -func ReadFromURIs(readers ...ReadFromURIFunc) ReadFromURIFunc { - return func(loader *Loader, url *url.URL) ([]byte, error) { - for i := range readers { - buf, err := readers[i](loader, url) - if err == ErrURINotSupported { - continue - } else if err != nil { - return nil, err - } - return buf, nil - } - return nil, ErrURINotSupported - } -} - -// DefaultReadFromURI returns a caching ReadFromURIFunc which can read remote -// HTTP URIs and local file URIs. -var DefaultReadFromURI = URIMapCache(ReadFromURIs(ReadFromHTTP(http.DefaultClient), ReadFromFile)) - -// ReadFromHTTP returns a ReadFromURIFunc which uses the given http.Client to -// read the contents from a remote HTTP URI. This client may be customized to -// implement timeouts, RFC 7234 caching, etc. 
-func ReadFromHTTP(cl *http.Client) ReadFromURIFunc { - return func(loader *Loader, location *url.URL) ([]byte, error) { - if location.Scheme == "" || location.Host == "" { - return nil, ErrURINotSupported - } - req, err := http.NewRequest("GET", location.String(), nil) - if err != nil { - return nil, err - } - resp, err := cl.Do(req) - if err != nil { - return nil, err - } - defer resp.Body.Close() - if resp.StatusCode > 399 { - return nil, fmt.Errorf("error loading %q: request returned status code %d", location.String(), resp.StatusCode) - } - return io.ReadAll(resp.Body) - } -} - -func is_file(location *url.URL) bool { - return location.Path != "" && - location.Host == "" && - (location.Scheme == "" || location.Scheme == "file") -} - -// ReadFromFile is a ReadFromURIFunc which reads local file URIs. -func ReadFromFile(loader *Loader, location *url.URL) ([]byte, error) { - if !is_file(location) { - return nil, ErrURINotSupported - } - return os.ReadFile(location.Path) -} - -// URIMapCache returns a ReadFromURIFunc that caches the contents read from URI -// locations in a simple map. This cache implementation is suitable for -// short-lived processes such as command-line tools which process OpenAPI -// documents. -func URIMapCache(reader ReadFromURIFunc) ReadFromURIFunc { - cache := map[string][]byte{} - return func(loader *Loader, location *url.URL) (buf []byte, err error) { - if location.Scheme == "" || location.Scheme == "file" { - if !filepath.IsAbs(location.Path) { - // Do not cache relative file paths; this can cause trouble if - // the current working directory changes when processing - // multiple top-level documents. 
- return reader(loader, location) - } - } - uri := location.String() - var ok bool - uriMu.RLock() - if buf, ok = cache[uri]; ok { - uriMu.RUnlock() - return - } - uriMu.RUnlock() - if buf, err = reader(loader, location); err != nil { - return - } - uriMu.Lock() - defer uriMu.Unlock() - cache[uri] = buf - return - } -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go b/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go deleted file mode 100644 index 1f438538..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/maplike.go +++ /dev/null @@ -1,345 +0,0 @@ -package openapi3 - -import ( - "encoding/json" - "sort" - "strings" - - "github.com/go-openapi/jsonpointer" -) - -// NewResponsesWithCapacity builds a responses object of the given capacity. -func NewResponsesWithCapacity(cap int) *Responses { - if cap == 0 { - return &Responses{m: make(map[string]*ResponseRef)} - } - return &Responses{m: make(map[string]*ResponseRef, cap)} -} - -// Value returns the responses for key or nil -func (responses *Responses) Value(key string) *ResponseRef { - if responses.Len() == 0 { - return nil - } - return responses.m[key] -} - -// Set adds or replaces key 'key' of 'responses' with 'value'. -// Note: 'responses' MUST be non-nil -func (responses *Responses) Set(key string, value *ResponseRef) { - if responses.m == nil { - responses.m = make(map[string]*ResponseRef) - } - responses.m[key] = value -} - -// Len returns the amount of keys in responses excluding responses.Extensions. -func (responses *Responses) Len() int { - if responses == nil || responses.m == nil { - return 0 - } - return len(responses.m) -} - -// Map returns responses as a 'map'. -// Note: iteration on Go maps is not ordered. 
-func (responses *Responses) Map() (m map[string]*ResponseRef) { - if responses == nil || len(responses.m) == 0 { - return make(map[string]*ResponseRef) - } - m = make(map[string]*ResponseRef, len(responses.m)) - for k, v := range responses.m { - m[k] = v - } - return -} - -var _ jsonpointer.JSONPointable = (*Responses)(nil) - -// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable -func (responses Responses) JSONLookup(token string) (interface{}, error) { - if v := responses.Value(token); v == nil { - vv, _, err := jsonpointer.GetForToken(responses.Extensions, token) - return vv, err - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - var vv *Response = v.Value - return vv, nil - } -} - -// MarshalJSON returns the JSON encoding of Responses. -func (responses *Responses) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, responses.Len()+len(responses.Extensions)) - for k, v := range responses.Extensions { - m[k] = v - } - for k, v := range responses.Map() { - m[k] = v - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Responses to a copy of data. -func (responses *Responses) UnmarshalJSON(data []byte) (err error) { - var m map[string]interface{} - if err = json.Unmarshal(data, &m); err != nil { - return - } - - ks := make([]string, 0, len(m)) - for k := range m { - ks = append(ks, k) - } - sort.Strings(ks) - - x := Responses{ - Extensions: make(map[string]interface{}), - m: make(map[string]*ResponseRef, len(m)), - } - - for _, k := range ks { - v := m[k] - if strings.HasPrefix(k, "x-") { - x.Extensions[k] = v - continue - } - - var data []byte - if data, err = json.Marshal(v); err != nil { - return - } - var vv ResponseRef - if err = vv.UnmarshalJSON(data); err != nil { - return - } - x.m[k] = &vv - } - *responses = x - return -} - -// NewCallbackWithCapacity builds a callback object of the given capacity. 
-func NewCallbackWithCapacity(cap int) *Callback { - if cap == 0 { - return &Callback{m: make(map[string]*PathItem)} - } - return &Callback{m: make(map[string]*PathItem, cap)} -} - -// Value returns the callback for key or nil -func (callback *Callback) Value(key string) *PathItem { - if callback.Len() == 0 { - return nil - } - return callback.m[key] -} - -// Set adds or replaces key 'key' of 'callback' with 'value'. -// Note: 'callback' MUST be non-nil -func (callback *Callback) Set(key string, value *PathItem) { - if callback.m == nil { - callback.m = make(map[string]*PathItem) - } - callback.m[key] = value -} - -// Len returns the amount of keys in callback excluding callback.Extensions. -func (callback *Callback) Len() int { - if callback == nil || callback.m == nil { - return 0 - } - return len(callback.m) -} - -// Map returns callback as a 'map'. -// Note: iteration on Go maps is not ordered. -func (callback *Callback) Map() (m map[string]*PathItem) { - if callback == nil || len(callback.m) == 0 { - return make(map[string]*PathItem) - } - m = make(map[string]*PathItem, len(callback.m)) - for k, v := range callback.m { - m[k] = v - } - return -} - -var _ jsonpointer.JSONPointable = (*Callback)(nil) - -// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable -func (callback Callback) JSONLookup(token string) (interface{}, error) { - if v := callback.Value(token); v == nil { - vv, _, err := jsonpointer.GetForToken(callback.Extensions, token) - return vv, err - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - var vv *PathItem = v - return vv, nil - } -} - -// MarshalJSON returns the JSON encoding of Callback. 
-func (callback *Callback) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, callback.Len()+len(callback.Extensions)) - for k, v := range callback.Extensions { - m[k] = v - } - for k, v := range callback.Map() { - m[k] = v - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Callback to a copy of data. -func (callback *Callback) UnmarshalJSON(data []byte) (err error) { - var m map[string]interface{} - if err = json.Unmarshal(data, &m); err != nil { - return - } - - ks := make([]string, 0, len(m)) - for k := range m { - ks = append(ks, k) - } - sort.Strings(ks) - - x := Callback{ - Extensions: make(map[string]interface{}), - m: make(map[string]*PathItem, len(m)), - } - - for _, k := range ks { - v := m[k] - if strings.HasPrefix(k, "x-") { - x.Extensions[k] = v - continue - } - - var data []byte - if data, err = json.Marshal(v); err != nil { - return - } - var vv PathItem - if err = vv.UnmarshalJSON(data); err != nil { - return - } - x.m[k] = &vv - } - *callback = x - return -} - -// NewPathsWithCapacity builds a paths object of the given capacity. -func NewPathsWithCapacity(cap int) *Paths { - if cap == 0 { - return &Paths{m: make(map[string]*PathItem)} - } - return &Paths{m: make(map[string]*PathItem, cap)} -} - -// Value returns the paths for key or nil -func (paths *Paths) Value(key string) *PathItem { - if paths.Len() == 0 { - return nil - } - return paths.m[key] -} - -// Set adds or replaces key 'key' of 'paths' with 'value'. -// Note: 'paths' MUST be non-nil -func (paths *Paths) Set(key string, value *PathItem) { - if paths.m == nil { - paths.m = make(map[string]*PathItem) - } - paths.m[key] = value -} - -// Len returns the amount of keys in paths excluding paths.Extensions. -func (paths *Paths) Len() int { - if paths == nil || paths.m == nil { - return 0 - } - return len(paths.m) -} - -// Map returns paths as a 'map'. -// Note: iteration on Go maps is not ordered. 
-func (paths *Paths) Map() (m map[string]*PathItem) { - if paths == nil || len(paths.m) == 0 { - return make(map[string]*PathItem) - } - m = make(map[string]*PathItem, len(paths.m)) - for k, v := range paths.m { - m[k] = v - } - return -} - -var _ jsonpointer.JSONPointable = (*Paths)(nil) - -// JSONLookup implements https://github.com/go-openapi/jsonpointer#JSONPointable -func (paths Paths) JSONLookup(token string) (interface{}, error) { - if v := paths.Value(token); v == nil { - vv, _, err := jsonpointer.GetForToken(paths.Extensions, token) - return vv, err - } else if ref := v.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } else { - var vv *PathItem = v - return vv, nil - } -} - -// MarshalJSON returns the JSON encoding of Paths. -func (paths *Paths) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, paths.Len()+len(paths.Extensions)) - for k, v := range paths.Extensions { - m[k] = v - } - for k, v := range paths.Map() { - m[k] = v - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Paths to a copy of data. 
-func (paths *Paths) UnmarshalJSON(data []byte) (err error) { - var m map[string]interface{} - if err = json.Unmarshal(data, &m); err != nil { - return - } - - ks := make([]string, 0, len(m)) - for k := range m { - ks = append(ks, k) - } - sort.Strings(ks) - - x := Paths{ - Extensions: make(map[string]interface{}), - m: make(map[string]*PathItem, len(m)), - } - - for _, k := range ks { - v := m[k] - if strings.HasPrefix(k, "x-") { - x.Extensions[k] = v - continue - } - - var data []byte - if data, err = json.Marshal(v); err != nil { - return - } - var vv PathItem - if err = vv.UnmarshalJSON(data); err != nil { - return - } - x.m[k] = &vv - } - *paths = x - return -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go b/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go deleted file mode 100644 index 18036ae7..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/marsh.go +++ /dev/null @@ -1,26 +0,0 @@ -package openapi3 - -import ( - "encoding/json" - "fmt" - "strings" - - "github.com/invopop/yaml" -) - -func unmarshalError(jsonUnmarshalErr error) error { - if before, after, found := strings.Cut(jsonUnmarshalErr.Error(), "Bis"); found && before != "" && after != "" { - before = strings.ReplaceAll(before, " Go struct ", " ") - return fmt.Errorf("%s%s", before, strings.ReplaceAll(after, "Bis", "")) - } - return jsonUnmarshalErr -} - -func unmarshal(data []byte, v interface{}) error { - // See https://github.com/getkin/kin-openapi/issues/680 - if err := json.Unmarshal(data, v); err != nil { - // UnmarshalStrict(data, v) TODO: investigate how ymlv3 handles duplicate map keys - return yaml.Unmarshal(data, v) - } - return nil -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go b/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go deleted file mode 100644 index e043a7c9..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/media_type.go +++ /dev/null @@ -1,170 +0,0 @@ -package openapi3 - -import ( - 
"context" - "encoding/json" - "errors" - "fmt" - "sort" - - "github.com/go-openapi/jsonpointer" -) - -// MediaType is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#media-type-object -type MediaType struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"` - Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` - Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` - Encoding map[string]*Encoding `json:"encoding,omitempty" yaml:"encoding,omitempty"` -} - -var _ jsonpointer.JSONPointable = (*MediaType)(nil) - -func NewMediaType() *MediaType { - return &MediaType{} -} - -func (mediaType *MediaType) WithSchema(schema *Schema) *MediaType { - if schema == nil { - mediaType.Schema = nil - } else { - mediaType.Schema = &SchemaRef{Value: schema} - } - return mediaType -} - -func (mediaType *MediaType) WithSchemaRef(schema *SchemaRef) *MediaType { - mediaType.Schema = schema - return mediaType -} - -func (mediaType *MediaType) WithExample(name string, value interface{}) *MediaType { - example := mediaType.Examples - if example == nil { - example = make(map[string]*ExampleRef) - mediaType.Examples = example - } - example[name] = &ExampleRef{ - Value: NewExample(value), - } - return mediaType -} - -func (mediaType *MediaType) WithEncoding(name string, enc *Encoding) *MediaType { - encoding := mediaType.Encoding - if encoding == nil { - encoding = make(map[string]*Encoding) - mediaType.Encoding = encoding - } - encoding[name] = enc - return mediaType -} - -// MarshalJSON returns the JSON encoding of MediaType. 
-func (mediaType MediaType) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(mediaType.Extensions)) - for k, v := range mediaType.Extensions { - m[k] = v - } - if x := mediaType.Schema; x != nil { - m["schema"] = x - } - if x := mediaType.Example; x != nil { - m["example"] = x - } - if x := mediaType.Examples; len(x) != 0 { - m["examples"] = x - } - if x := mediaType.Encoding; len(x) != 0 { - m["encoding"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets MediaType to a copy of data. -func (mediaType *MediaType) UnmarshalJSON(data []byte) error { - type MediaTypeBis MediaType - var x MediaTypeBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "schema") - delete(x.Extensions, "example") - delete(x.Extensions, "examples") - delete(x.Extensions, "encoding") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *mediaType = MediaType(x) - return nil -} - -// Validate returns an error if MediaType does not comply with the OpenAPI spec. -func (mediaType *MediaType) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if mediaType == nil { - return nil - } - if schema := mediaType.Schema; schema != nil { - if err := schema.Validate(ctx); err != nil { - return err - } - - if mediaType.Example != nil && mediaType.Examples != nil { - return errors.New("example and examples are mutually exclusive") - } - - if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { - if example := mediaType.Example; example != nil { - if err := validateExampleValue(ctx, example, schema.Value); err != nil { - return fmt.Errorf("invalid example: %w", err) - } - } - - if examples := mediaType.Examples; examples != nil { - names := make([]string, 0, len(examples)) - for name := range examples { - names = append(names, name) - } - sort.Strings(names) - for _, k := range names { - v := examples[k] - if err := v.Validate(ctx); err != nil { - return fmt.Errorf("example %s: %w", k, err) - } - if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil { - return fmt.Errorf("example %s: %w", k, err) - } - } - } - } - } - - return validateExtensions(ctx, mediaType.Extensions) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (mediaType MediaType) JSONLookup(token string) (interface{}, error) { - switch token { - case "schema": - if mediaType.Schema != nil { - if mediaType.Schema.Ref != "" { - return &Ref{Ref: mediaType.Schema.Ref}, nil - } - return mediaType.Schema.Value, nil - } - case "example": - return mediaType.Example, nil - case "examples": - return mediaType.Examples, nil - case "encoding": - return mediaType.Encoding, nil - } - v, _, err := jsonpointer.GetForToken(mediaType.Extensions, token) - return v, err -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go b/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go deleted file mode 100644 index 62e149dc..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/openapi3.go +++ /dev/null @@ -1,191 +0,0 @@ -package openapi3 - -import ( - 
"context" - "encoding/json" - "errors" - "fmt" - - "github.com/go-openapi/jsonpointer" -) - -// T is the root of an OpenAPI v3 document -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#openapi-object -type T struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - OpenAPI string `json:"openapi" yaml:"openapi"` // Required - Components *Components `json:"components,omitempty" yaml:"components,omitempty"` - Info *Info `json:"info" yaml:"info"` // Required - Paths *Paths `json:"paths" yaml:"paths"` // Required - Security SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"` - Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"` - Tags Tags `json:"tags,omitempty" yaml:"tags,omitempty"` - ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` - - visited visitedComponent -} - -var _ jsonpointer.JSONPointable = (*T)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (doc *T) JSONLookup(token string) (interface{}, error) { - switch token { - case "openapi": - return doc.OpenAPI, nil - case "components": - return doc.Components, nil - case "info": - return doc.Info, nil - case "paths": - return doc.Paths, nil - case "security": - return doc.Security, nil - case "servers": - return doc.Servers, nil - case "tags": - return doc.Tags, nil - case "externalDocs": - return doc.ExternalDocs, nil - } - - v, _, err := jsonpointer.GetForToken(doc.Extensions, token) - return v, err -} - -// MarshalJSON returns the JSON encoding of T. 
-func (doc T) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(doc.Extensions)) - for k, v := range doc.Extensions { - m[k] = v - } - m["openapi"] = doc.OpenAPI - if x := doc.Components; x != nil { - m["components"] = x - } - m["info"] = doc.Info - m["paths"] = doc.Paths - if x := doc.Security; len(x) != 0 { - m["security"] = x - } - if x := doc.Servers; len(x) != 0 { - m["servers"] = x - } - if x := doc.Tags; len(x) != 0 { - m["tags"] = x - } - if x := doc.ExternalDocs; x != nil { - m["externalDocs"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets T to a copy of data. -func (doc *T) UnmarshalJSON(data []byte) error { - type TBis T - var x TBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "openapi") - delete(x.Extensions, "components") - delete(x.Extensions, "info") - delete(x.Extensions, "paths") - delete(x.Extensions, "security") - delete(x.Extensions, "servers") - delete(x.Extensions, "tags") - delete(x.Extensions, "externalDocs") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *doc = T(x) - return nil -} - -func (doc *T) AddOperation(path string, method string, operation *Operation) { - if doc.Paths == nil { - doc.Paths = NewPaths() - } - pathItem := doc.Paths.Value(path) - if pathItem == nil { - pathItem = &PathItem{} - doc.Paths.Set(path, pathItem) - } - pathItem.SetOperation(method, operation) -} - -func (doc *T) AddServer(server *Server) { - doc.Servers = append(doc.Servers, server) -} - -func (doc *T) AddServers(servers ...*Server) { - doc.Servers = append(doc.Servers, servers...) -} - -// Validate returns an error if T does not comply with the OpenAPI spec. -// Validations Options can be provided to modify the validation behavior. -func (doc *T) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if doc.OpenAPI == "" { - return errors.New("value of openapi must be a non-empty string") - } - - var wrap func(error) error - - wrap = func(e error) error { return fmt.Errorf("invalid components: %w", e) } - if v := doc.Components; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } - - wrap = func(e error) error { return fmt.Errorf("invalid info: %w", e) } - if v := doc.Info; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } else { - return wrap(errors.New("must be an object")) - } - - wrap = func(e error) error { return fmt.Errorf("invalid paths: %w", e) } - if v := doc.Paths; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } else { - return wrap(errors.New("must be an object")) - } - - wrap = func(e error) error { return fmt.Errorf("invalid security: %w", e) } - if v := doc.Security; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } - - wrap = func(e error) error { return fmt.Errorf("invalid servers: %w", e) } - if v := doc.Servers; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } - - wrap = func(e error) error { return fmt.Errorf("invalid tags: %w", e) } - if v := doc.Tags; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } - - wrap = func(e error) error { return fmt.Errorf("invalid external docs: %w", e) } - if v := doc.ExternalDocs; v != nil { - if err := v.Validate(ctx); err != nil { - return wrap(err) - } - } - - return validateExtensions(ctx, doc.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/operation.go b/vendor/github.com/getkin/kin-openapi/openapi3/operation.go deleted file mode 100644 index d859a437..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/operation.go +++ /dev/null @@ -1,213 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "strconv" - - "github.com/go-openapi/jsonpointer" -) - -// 
Operation represents "operation" specified by" OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#operation-object -type Operation struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - // Optional tags for documentation. - Tags []string `json:"tags,omitempty" yaml:"tags,omitempty"` - - // Optional short summary. - Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` - - // Optional description. Should use CommonMark syntax. - Description string `json:"description,omitempty" yaml:"description,omitempty"` - - // Optional operation ID. - OperationID string `json:"operationId,omitempty" yaml:"operationId,omitempty"` - - // Optional parameters. - Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"` - - // Optional body parameter. - RequestBody *RequestBodyRef `json:"requestBody,omitempty" yaml:"requestBody,omitempty"` - - // Responses. - Responses *Responses `json:"responses" yaml:"responses"` // Required - - // Optional callbacks - Callbacks Callbacks `json:"callbacks,omitempty" yaml:"callbacks,omitempty"` - - Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` - - // Optional security requirements that overrides top-level security. - Security *SecurityRequirements `json:"security,omitempty" yaml:"security,omitempty"` - - // Optional servers that overrides top-level servers. - Servers *Servers `json:"servers,omitempty" yaml:"servers,omitempty"` - - ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` -} - -var _ jsonpointer.JSONPointable = (*Operation)(nil) - -func NewOperation() *Operation { - return &Operation{} -} - -// MarshalJSON returns the JSON encoding of Operation. 
-func (operation Operation) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 12+len(operation.Extensions)) - for k, v := range operation.Extensions { - m[k] = v - } - if x := operation.Tags; len(x) != 0 { - m["tags"] = x - } - if x := operation.Summary; x != "" { - m["summary"] = x - } - if x := operation.Description; x != "" { - m["description"] = x - } - if x := operation.OperationID; x != "" { - m["operationId"] = x - } - if x := operation.Parameters; len(x) != 0 { - m["parameters"] = x - } - if x := operation.RequestBody; x != nil { - m["requestBody"] = x - } - m["responses"] = operation.Responses - if x := operation.Callbacks; len(x) != 0 { - m["callbacks"] = x - } - if x := operation.Deprecated; x { - m["deprecated"] = x - } - if x := operation.Security; x != nil { - m["security"] = x - } - if x := operation.Servers; x != nil { - m["servers"] = x - } - if x := operation.ExternalDocs; x != nil { - m["externalDocs"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Operation to a copy of data. 
-func (operation *Operation) UnmarshalJSON(data []byte) error { - type OperationBis Operation - var x OperationBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "tags") - delete(x.Extensions, "summary") - delete(x.Extensions, "description") - delete(x.Extensions, "operationId") - delete(x.Extensions, "parameters") - delete(x.Extensions, "requestBody") - delete(x.Extensions, "responses") - delete(x.Extensions, "callbacks") - delete(x.Extensions, "deprecated") - delete(x.Extensions, "security") - delete(x.Extensions, "servers") - delete(x.Extensions, "externalDocs") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *operation = Operation(x) - return nil -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (operation Operation) JSONLookup(token string) (interface{}, error) { - switch token { - case "requestBody": - if operation.RequestBody != nil { - if operation.RequestBody.Ref != "" { - return &Ref{Ref: operation.RequestBody.Ref}, nil - } - return operation.RequestBody.Value, nil - } - case "tags": - return operation.Tags, nil - case "summary": - return operation.Summary, nil - case "description": - return operation.Description, nil - case "operationID": - return operation.OperationID, nil - case "parameters": - return operation.Parameters, nil - case "responses": - return operation.Responses, nil - case "callbacks": - return operation.Callbacks, nil - case "deprecated": - return operation.Deprecated, nil - case "security": - return operation.Security, nil - case "servers": - return operation.Servers, nil - case "externalDocs": - return operation.ExternalDocs, nil - } - - v, _, err := jsonpointer.GetForToken(operation.Extensions, token) - return v, err -} - -func (operation *Operation) AddParameter(p *Parameter) { - operation.Parameters = append(operation.Parameters, &ParameterRef{Value: p}) -} - -func 
(operation *Operation) AddResponse(status int, response *Response) { - code := "default" - if 0 < status && status < 1000 { - code = strconv.FormatInt(int64(status), 10) - } - if operation.Responses == nil { - operation.Responses = NewResponses() - } - operation.Responses.Set(code, &ResponseRef{Value: response}) -} - -// Validate returns an error if Operation does not comply with the OpenAPI spec. -func (operation *Operation) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if v := operation.Parameters; v != nil { - if err := v.Validate(ctx); err != nil { - return err - } - } - - if v := operation.RequestBody; v != nil { - if err := v.Validate(ctx); err != nil { - return err - } - } - - if v := operation.Responses; v != nil { - if err := v.Validate(ctx); err != nil { - return err - } - } else { - return errors.New("value of responses must be an object") - } - - if v := operation.ExternalDocs; v != nil { - if err := v.Validate(ctx); err != nil { - return fmt.Errorf("invalid external docs: %w", err) - } - } - - return validateExtensions(ctx, operation.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go b/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go deleted file mode 100644 index f5a157de..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/parameter.go +++ /dev/null @@ -1,407 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "sort" - "strconv" - - "github.com/go-openapi/jsonpointer" -) - -// Parameters is specified by OpenAPI/Swagger 3.0 standard. 
-type Parameters []*ParameterRef - -var _ jsonpointer.JSONPointable = (*Parameters)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (p Parameters) JSONLookup(token string) (interface{}, error) { - index, err := strconv.Atoi(token) - if err != nil { - return nil, err - } - if index < 0 || index >= len(p) { - return nil, fmt.Errorf("index %d out of bounds of array of length %d", index, len(p)) - } - - ref := p[index] - if ref != nil && ref.Ref != "" { - return &Ref{Ref: ref.Ref}, nil - } - return ref.Value, nil -} - -func NewParameters() Parameters { - return make(Parameters, 0, 4) -} - -func (parameters Parameters) GetByInAndName(in string, name string) *Parameter { - for _, item := range parameters { - if v := item.Value; v != nil { - if v.Name == name && v.In == in { - return v - } - } - } - return nil -} - -// Validate returns an error if Parameters does not comply with the OpenAPI spec. -func (parameters Parameters) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - dupes := make(map[string]struct{}) - for _, parameterRef := range parameters { - if v := parameterRef.Value; v != nil { - key := v.In + ":" + v.Name - if _, ok := dupes[key]; ok { - return fmt.Errorf("more than one %q parameter has name %q", v.In, v.Name) - } - dupes[key] = struct{}{} - } - - if err := parameterRef.Validate(ctx); err != nil { - return err - } - } - return nil -} - -// Parameter is specified by OpenAPI/Swagger 3.0 standard. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#parameter-object -type Parameter struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Name string `json:"name,omitempty" yaml:"name,omitempty"` - In string `json:"in,omitempty" yaml:"in,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Style string `json:"style,omitempty" yaml:"style,omitempty"` - Explode *bool `json:"explode,omitempty" yaml:"explode,omitempty"` - AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"` - AllowReserved bool `json:"allowReserved,omitempty" yaml:"allowReserved,omitempty"` - Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` - Required bool `json:"required,omitempty" yaml:"required,omitempty"` - Schema *SchemaRef `json:"schema,omitempty" yaml:"schema,omitempty"` - Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` - Examples Examples `json:"examples,omitempty" yaml:"examples,omitempty"` - Content Content `json:"content,omitempty" yaml:"content,omitempty"` -} - -var _ jsonpointer.JSONPointable = (*Parameter)(nil) - -const ( - ParameterInPath = "path" - ParameterInQuery = "query" - ParameterInHeader = "header" - ParameterInCookie = "cookie" -) - -func NewPathParameter(name string) *Parameter { - return &Parameter{ - Name: name, - In: ParameterInPath, - Required: true, - } -} - -func NewQueryParameter(name string) *Parameter { - return &Parameter{ - Name: name, - In: ParameterInQuery, - } -} - -func NewHeaderParameter(name string) *Parameter { - return &Parameter{ - Name: name, - In: ParameterInHeader, - } -} - -func NewCookieParameter(name string) *Parameter { - return &Parameter{ - Name: name, - In: ParameterInCookie, - } -} - -func (parameter *Parameter) WithDescription(value string) *Parameter { - parameter.Description = value - return parameter -} - -func (parameter *Parameter) WithRequired(value bool) *Parameter { - 
parameter.Required = value - return parameter -} - -func (parameter *Parameter) WithSchema(value *Schema) *Parameter { - if value == nil { - parameter.Schema = nil - } else { - parameter.Schema = &SchemaRef{ - Value: value, - } - } - return parameter -} - -// MarshalJSON returns the JSON encoding of Parameter. -func (parameter Parameter) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 13+len(parameter.Extensions)) - for k, v := range parameter.Extensions { - m[k] = v - } - - if x := parameter.Name; x != "" { - m["name"] = x - } - if x := parameter.In; x != "" { - m["in"] = x - } - if x := parameter.Description; x != "" { - m["description"] = x - } - if x := parameter.Style; x != "" { - m["style"] = x - } - if x := parameter.Explode; x != nil { - m["explode"] = x - } - if x := parameter.AllowEmptyValue; x { - m["allowEmptyValue"] = x - } - if x := parameter.AllowReserved; x { - m["allowReserved"] = x - } - if x := parameter.Deprecated; x { - m["deprecated"] = x - } - if x := parameter.Required; x { - m["required"] = x - } - if x := parameter.Schema; x != nil { - m["schema"] = x - } - if x := parameter.Example; x != nil { - m["example"] = x - } - if x := parameter.Examples; len(x) != 0 { - m["examples"] = x - } - if x := parameter.Content; len(x) != 0 { - m["content"] = x - } - - return json.Marshal(m) -} - -// UnmarshalJSON sets Parameter to a copy of data. 
-func (parameter *Parameter) UnmarshalJSON(data []byte) error { - type ParameterBis Parameter - var x ParameterBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - - delete(x.Extensions, "name") - delete(x.Extensions, "in") - delete(x.Extensions, "description") - delete(x.Extensions, "style") - delete(x.Extensions, "explode") - delete(x.Extensions, "allowEmptyValue") - delete(x.Extensions, "allowReserved") - delete(x.Extensions, "deprecated") - delete(x.Extensions, "required") - delete(x.Extensions, "schema") - delete(x.Extensions, "example") - delete(x.Extensions, "examples") - delete(x.Extensions, "content") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - - *parameter = Parameter(x) - return nil -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (parameter Parameter) JSONLookup(token string) (interface{}, error) { - switch token { - case "schema": - if parameter.Schema != nil { - if parameter.Schema.Ref != "" { - return &Ref{Ref: parameter.Schema.Ref}, nil - } - return parameter.Schema.Value, nil - } - case "name": - return parameter.Name, nil - case "in": - return parameter.In, nil - case "description": - return parameter.Description, nil - case "style": - return parameter.Style, nil - case "explode": - return parameter.Explode, nil - case "allowEmptyValue": - return parameter.AllowEmptyValue, nil - case "allowReserved": - return parameter.AllowReserved, nil - case "deprecated": - return parameter.Deprecated, nil - case "required": - return parameter.Required, nil - case "example": - return parameter.Example, nil - case "examples": - return parameter.Examples, nil - case "content": - return parameter.Content, nil - } - - v, _, err := jsonpointer.GetForToken(parameter.Extensions, token) - return v, err -} - -// SerializationMethod returns a parameter's serialization method. 
-// When a parameter's serialization method is not defined the method returns -// the default serialization method corresponding to a parameter's location. -func (parameter *Parameter) SerializationMethod() (*SerializationMethod, error) { - switch parameter.In { - case ParameterInPath, ParameterInHeader: - style := parameter.Style - if style == "" { - style = SerializationSimple - } - explode := false - if parameter.Explode != nil { - explode = *parameter.Explode - } - return &SerializationMethod{Style: style, Explode: explode}, nil - case ParameterInQuery, ParameterInCookie: - style := parameter.Style - if style == "" { - style = SerializationForm - } - explode := true - if parameter.Explode != nil { - explode = *parameter.Explode - } - return &SerializationMethod{Style: style, Explode: explode}, nil - default: - return nil, fmt.Errorf("unexpected parameter's 'in': %q", parameter.In) - } -} - -// Validate returns an error if Parameter does not comply with the OpenAPI spec. -func (parameter *Parameter) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if parameter.Name == "" { - return errors.New("parameter name can't be blank") - } - in := parameter.In - switch in { - case - ParameterInPath, - ParameterInQuery, - ParameterInHeader, - ParameterInCookie: - default: - return fmt.Errorf("parameter can't have 'in' value %q", parameter.In) - } - - if in == ParameterInPath && !parameter.Required { - return fmt.Errorf("path parameter %q must be required", parameter.Name) - } - - // Validate a parameter's serialization method. 
- sm, err := parameter.SerializationMethod() - if err != nil { - return err - } - var smSupported bool - switch { - case parameter.In == ParameterInPath && sm.Style == SerializationSimple && !sm.Explode, - parameter.In == ParameterInPath && sm.Style == SerializationSimple && sm.Explode, - parameter.In == ParameterInPath && sm.Style == SerializationLabel && !sm.Explode, - parameter.In == ParameterInPath && sm.Style == SerializationLabel && sm.Explode, - parameter.In == ParameterInPath && sm.Style == SerializationMatrix && !sm.Explode, - parameter.In == ParameterInPath && sm.Style == SerializationMatrix && sm.Explode, - - parameter.In == ParameterInQuery && sm.Style == SerializationForm && sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationForm && !sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationSpaceDelimited && !sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationPipeDelimited && !sm.Explode, - parameter.In == ParameterInQuery && sm.Style == SerializationDeepObject && sm.Explode, - - parameter.In == ParameterInHeader && sm.Style == SerializationSimple && !sm.Explode, - parameter.In == ParameterInHeader && sm.Style == SerializationSimple && sm.Explode, - - parameter.In == ParameterInCookie && sm.Style == SerializationForm && !sm.Explode, - parameter.In == ParameterInCookie && sm.Style == SerializationForm && sm.Explode: - smSupported = true - } - if !smSupported { - e := fmt.Errorf("serialization method with style=%q and explode=%v is not supported by a %s parameter", sm.Style, sm.Explode, in) - return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e) - } - - if (parameter.Schema == nil) == (len(parameter.Content) == 0) { - e := errors.New("parameter must contain exactly one of content and 
schema") - return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, e) - } - - if content := parameter.Content; content != nil { - e := errors.New("parameter content must only contain one entry") - if len(content) > 1 { - return fmt.Errorf("parameter %q content is invalid: %w", parameter.Name, e) - } - - if err := content.Validate(ctx); err != nil { - return fmt.Errorf("parameter %q content is invalid: %w", parameter.Name, err) - } - } - - if schema := parameter.Schema; schema != nil { - if err := schema.Validate(ctx); err != nil { - return fmt.Errorf("parameter %q schema is invalid: %w", parameter.Name, err) - } - if parameter.Example != nil && parameter.Examples != nil { - return fmt.Errorf("parameter %q example and examples are mutually exclusive", parameter.Name) - } - - if vo := getValidationOptions(ctx); vo.examplesValidationDisabled { - return nil - } - if example := parameter.Example; example != nil { - if err := validateExampleValue(ctx, example, schema.Value); err != nil { - return fmt.Errorf("invalid example: %w", err) - } - } else if examples := parameter.Examples; examples != nil { - names := make([]string, 0, len(examples)) - for name := range examples { - names = append(names, name) - } - sort.Strings(names) - for _, k := range names { - v := examples[k] - if err := v.Validate(ctx); err != nil { - return fmt.Errorf("%s: %w", k, err) - } - if err := validateExampleValue(ctx, v.Value.Value, schema.Value); err != nil { - return fmt.Errorf("%s: %w", k, err) - } - } - } - } - - return validateExtensions(ctx, parameter.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go b/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go deleted file mode 100644 index e5dd0fb6..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/path_item.go +++ /dev/null @@ -1,239 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "fmt" - "net/http" - "sort" -) - -// PathItem is specified by 
OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#path-item-object -type PathItem struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Ref string `json:"$ref,omitempty" yaml:"$ref,omitempty"` - Summary string `json:"summary,omitempty" yaml:"summary,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Connect *Operation `json:"connect,omitempty" yaml:"connect,omitempty"` - Delete *Operation `json:"delete,omitempty" yaml:"delete,omitempty"` - Get *Operation `json:"get,omitempty" yaml:"get,omitempty"` - Head *Operation `json:"head,omitempty" yaml:"head,omitempty"` - Options *Operation `json:"options,omitempty" yaml:"options,omitempty"` - Patch *Operation `json:"patch,omitempty" yaml:"patch,omitempty"` - Post *Operation `json:"post,omitempty" yaml:"post,omitempty"` - Put *Operation `json:"put,omitempty" yaml:"put,omitempty"` - Trace *Operation `json:"trace,omitempty" yaml:"trace,omitempty"` - Servers Servers `json:"servers,omitempty" yaml:"servers,omitempty"` - Parameters Parameters `json:"parameters,omitempty" yaml:"parameters,omitempty"` -} - -// MarshalJSON returns the JSON encoding of PathItem. 
-func (pathItem PathItem) MarshalJSON() ([]byte, error) { - if ref := pathItem.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - - m := make(map[string]interface{}, 13+len(pathItem.Extensions)) - for k, v := range pathItem.Extensions { - m[k] = v - } - if x := pathItem.Summary; x != "" { - m["summary"] = x - } - if x := pathItem.Description; x != "" { - m["description"] = x - } - if x := pathItem.Connect; x != nil { - m["connect"] = x - } - if x := pathItem.Delete; x != nil { - m["delete"] = x - } - if x := pathItem.Get; x != nil { - m["get"] = x - } - if x := pathItem.Head; x != nil { - m["head"] = x - } - if x := pathItem.Options; x != nil { - m["options"] = x - } - if x := pathItem.Patch; x != nil { - m["patch"] = x - } - if x := pathItem.Post; x != nil { - m["post"] = x - } - if x := pathItem.Put; x != nil { - m["put"] = x - } - if x := pathItem.Trace; x != nil { - m["trace"] = x - } - if x := pathItem.Servers; len(x) != 0 { - m["servers"] = x - } - if x := pathItem.Parameters; len(x) != 0 { - m["parameters"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets PathItem to a copy of data. 
-func (pathItem *PathItem) UnmarshalJSON(data []byte) error { - type PathItemBis PathItem - var x PathItemBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "$ref") - delete(x.Extensions, "summary") - delete(x.Extensions, "description") - delete(x.Extensions, "connect") - delete(x.Extensions, "delete") - delete(x.Extensions, "get") - delete(x.Extensions, "head") - delete(x.Extensions, "options") - delete(x.Extensions, "patch") - delete(x.Extensions, "post") - delete(x.Extensions, "put") - delete(x.Extensions, "trace") - delete(x.Extensions, "servers") - delete(x.Extensions, "parameters") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *pathItem = PathItem(x) - return nil -} - -func (pathItem *PathItem) Operations() map[string]*Operation { - operations := make(map[string]*Operation) - if v := pathItem.Connect; v != nil { - operations[http.MethodConnect] = v - } - if v := pathItem.Delete; v != nil { - operations[http.MethodDelete] = v - } - if v := pathItem.Get; v != nil { - operations[http.MethodGet] = v - } - if v := pathItem.Head; v != nil { - operations[http.MethodHead] = v - } - if v := pathItem.Options; v != nil { - operations[http.MethodOptions] = v - } - if v := pathItem.Patch; v != nil { - operations[http.MethodPatch] = v - } - if v := pathItem.Post; v != nil { - operations[http.MethodPost] = v - } - if v := pathItem.Put; v != nil { - operations[http.MethodPut] = v - } - if v := pathItem.Trace; v != nil { - operations[http.MethodTrace] = v - } - return operations -} - -func (pathItem *PathItem) GetOperation(method string) *Operation { - switch method { - case http.MethodConnect: - return pathItem.Connect - case http.MethodDelete: - return pathItem.Delete - case http.MethodGet: - return pathItem.Get - case http.MethodHead: - return pathItem.Head - case http.MethodOptions: - return pathItem.Options - case http.MethodPatch: - return pathItem.Patch - 
case http.MethodPost: - return pathItem.Post - case http.MethodPut: - return pathItem.Put - case http.MethodTrace: - return pathItem.Trace - default: - panic(fmt.Errorf("unsupported HTTP method %q", method)) - } -} - -func (pathItem *PathItem) SetOperation(method string, operation *Operation) { - switch method { - case http.MethodConnect: - pathItem.Connect = operation - case http.MethodDelete: - pathItem.Delete = operation - case http.MethodGet: - pathItem.Get = operation - case http.MethodHead: - pathItem.Head = operation - case http.MethodOptions: - pathItem.Options = operation - case http.MethodPatch: - pathItem.Patch = operation - case http.MethodPost: - pathItem.Post = operation - case http.MethodPut: - pathItem.Put = operation - case http.MethodTrace: - pathItem.Trace = operation - default: - panic(fmt.Errorf("unsupported HTTP method %q", method)) - } -} - -// Validate returns an error if PathItem does not comply with the OpenAPI spec. -func (pathItem *PathItem) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - operations := pathItem.Operations() - - methods := make([]string, 0, len(operations)) - for method := range operations { - methods = append(methods, method) - } - sort.Strings(methods) - for _, method := range methods { - operation := operations[method] - if err := operation.Validate(ctx); err != nil { - return fmt.Errorf("invalid operation %s: %v", method, err) - } - } - - if v := pathItem.Parameters; v != nil { - if err := v.Validate(ctx); err != nil { - return err - } - } - - return validateExtensions(ctx, pathItem.Extensions) -} - -// isEmpty's introduced in 546590b1 -func (pathItem *PathItem) isEmpty() bool { - // NOTE: ignores pathItem.Extensions - // NOTE: ignores pathItem.Ref - return pathItem.Summary == "" && - pathItem.Description == "" && - pathItem.Connect == nil && - pathItem.Delete == nil && - pathItem.Get == nil && - pathItem.Head == nil && - pathItem.Options == nil && - pathItem.Patch == nil && - pathItem.Post == nil && - pathItem.Put == nil && - pathItem.Trace == nil && - len(pathItem.Servers) == 0 && - len(pathItem.Parameters) == 0 -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/paths.go b/vendor/github.com/getkin/kin-openapi/openapi3/paths.go deleted file mode 100644 index ac4f58bb..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/paths.go +++ /dev/null @@ -1,268 +0,0 @@ -package openapi3 - -import ( - "context" - "fmt" - "sort" - "strings" -) - -// Paths is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object -type Paths struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - m map[string]*PathItem -} - -// NewPaths builds a paths object with path items in insertion order. 
-func NewPaths(opts ...NewPathsOption) *Paths { - paths := NewPathsWithCapacity(len(opts)) - for _, opt := range opts { - opt(paths) - } - return paths -} - -// NewPathsOption describes options to NewPaths func -type NewPathsOption func(*Paths) - -// WithPath adds a named path item -func WithPath(path string, pathItem *PathItem) NewPathsOption { - return func(paths *Paths) { - if p := pathItem; p != nil && path != "" { - paths.Set(path, p) - } - } -} - -// Validate returns an error if Paths does not comply with the OpenAPI spec. -func (paths *Paths) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - normalizedPaths := make(map[string]string, paths.Len()) - - keys := make([]string, 0, paths.Len()) - for key := range paths.Map() { - keys = append(keys, key) - } - sort.Strings(keys) - for _, path := range keys { - pathItem := paths.Value(path) - if path == "" || path[0] != '/' { - return fmt.Errorf("path %q does not start with a forward slash (/)", path) - } - - if pathItem == nil { - pathItem = &PathItem{} - paths.Set(path, pathItem) - } - - normalizedPath, _, varsInPath := normalizeTemplatedPath(path) - if oldPath, ok := normalizedPaths[normalizedPath]; ok { - return fmt.Errorf("conflicting paths %q and %q", path, oldPath) - } - normalizedPaths[path] = path - - var commonParams []string - for _, parameterRef := range pathItem.Parameters { - if parameterRef != nil { - if parameter := parameterRef.Value; parameter != nil && parameter.In == ParameterInPath { - commonParams = append(commonParams, parameter.Name) - } - } - } - operations := pathItem.Operations() - methods := make([]string, 0, len(operations)) - for method := range operations { - methods = append(methods, method) - } - sort.Strings(methods) - for _, method := range methods { - operation := operations[method] - var setParams []string - for _, parameterRef := range operation.Parameters { - if parameterRef != nil { - if parameter := 
parameterRef.Value; parameter != nil && parameter.In == ParameterInPath { - setParams = append(setParams, parameter.Name) - } - } - } - if expected := len(setParams) + len(commonParams); expected != len(varsInPath) { - expected -= len(varsInPath) - if expected < 0 { - expected *= -1 - } - missing := make(map[string]struct{}, expected) - definedParams := append(setParams, commonParams...) - for _, name := range definedParams { - if _, ok := varsInPath[name]; !ok { - missing[name] = struct{}{} - } - } - for name := range varsInPath { - got := false - for _, othername := range definedParams { - if othername == name { - got = true - break - } - } - if !got { - missing[name] = struct{}{} - } - } - if len(missing) != 0 { - missings := make([]string, 0, len(missing)) - for name := range missing { - missings = append(missings, name) - } - return fmt.Errorf("operation %s %s must define exactly all path parameters (missing: %v)", method, path, missings) - } - } - } - - if err := pathItem.Validate(ctx); err != nil { - return fmt.Errorf("invalid path %s: %v", path, err) - } - } - - if err := paths.validateUniqueOperationIDs(); err != nil { - return err - } - - return validateExtensions(ctx, paths.Extensions) -} - -// InMatchingOrder returns paths in the order they are matched against URLs. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#paths-object -// When matching URLs, concrete (non-templated) paths would be matched -// before their templated counterparts. -func (paths *Paths) InMatchingOrder() []string { - // NOTE: sorting by number of variables ASC then by descending lexicographical - // order seems to be a good heuristic. 
- if paths.Len() == 0 { - return nil - } - - vars := make(map[int][]string) - max := 0 - for path := range paths.Map() { - count := strings.Count(path, "}") - vars[count] = append(vars[count], path) - if count > max { - max = count - } - } - - ordered := make([]string, 0, paths.Len()) - for c := 0; c <= max; c++ { - if ps, ok := vars[c]; ok { - sort.Sort(sort.Reverse(sort.StringSlice(ps))) - ordered = append(ordered, ps...) - } - } - return ordered -} - -// Find returns a path that matches the key. -// -// The method ignores differences in template variable names (except possible "*" suffix). -// -// For example: -// -// paths := openapi3.Paths { -// "/person/{personName}": &openapi3.PathItem{}, -// } -// pathItem := path.Find("/person/{name}") -// -// would return the correct path item. -func (paths *Paths) Find(key string) *PathItem { - // Try directly access the map - pathItem := paths.Value(key) - if pathItem != nil { - return pathItem - } - - normalizedPath, expected, _ := normalizeTemplatedPath(key) - for path, pathItem := range paths.Map() { - pathNormalized, got, _ := normalizeTemplatedPath(path) - if got == expected && pathNormalized == normalizedPath { - return pathItem - } - } - return nil -} - -func (paths *Paths) validateUniqueOperationIDs() error { - operationIDs := make(map[string]string) - for urlPath, pathItem := range paths.Map() { - if pathItem == nil { - continue - } - for httpMethod, operation := range pathItem.Operations() { - if operation == nil || operation.OperationID == "" { - continue - } - endpoint := httpMethod + " " + urlPath - if endpointDup, ok := operationIDs[operation.OperationID]; ok { - if endpoint > endpointDup { // For make error message a bit more deterministic. May be useful for tests. 
- endpoint, endpointDup = endpointDup, endpoint - } - return fmt.Errorf("operations %q and %q have the same operation id %q", - endpoint, endpointDup, operation.OperationID) - } - operationIDs[operation.OperationID] = endpoint - } - } - return nil -} - -func normalizeTemplatedPath(path string) (string, uint, map[string]struct{}) { - if strings.IndexByte(path, '{') < 0 { - return path, 0, nil - } - - var buffTpl strings.Builder - buffTpl.Grow(len(path)) - - var ( - cc rune - count uint - isVariable bool - vars = make(map[string]struct{}) - buffVar strings.Builder - ) - for i, c := range path { - if isVariable { - if c == '}' { - // End path variable - isVariable = false - - vars[buffVar.String()] = struct{}{} - buffVar = strings.Builder{} - - // First append possible '*' before this character - // The character '}' will be appended - if i > 0 && cc == '*' { - buffTpl.WriteRune(cc) - } - } else { - buffVar.WriteRune(c) - continue - } - - } else if c == '{' { - // Begin path variable - isVariable = true - - // The character '{' will be appended - count++ - } - - // Append the character - buffTpl.WriteRune(c) - cc = c - } - return buffTpl.String(), count, vars -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/ref.go b/vendor/github.com/getkin/kin-openapi/openapi3/ref.go deleted file mode 100644 index a937de4a..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/ref.go +++ /dev/null @@ -1,7 +0,0 @@ -package openapi3 - -// Ref is specified by OpenAPI/Swagger 3.0 standard. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#reference-object -type Ref struct { - Ref string `json:"$ref" yaml:"$ref"` -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/refs.go b/vendor/github.com/getkin/kin-openapi/openapi3/refs.go deleted file mode 100644 index a7e1e368..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/refs.go +++ /dev/null @@ -1,713 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "fmt" - "sort" - - "github.com/go-openapi/jsonpointer" - "github.com/perimeterx/marshmallow" -) - -// CallbackRef represents either a Callback or a $ref to a Callback. -// When serializing and both fields are set, Ref is preferred over Value. -type CallbackRef struct { - Ref string - Value *Callback - extra []string -} - -var _ jsonpointer.JSONPointable = (*CallbackRef)(nil) - -func (x *CallbackRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of CallbackRef. -func (x CallbackRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of CallbackRef. -func (x CallbackRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return json.Marshal(x.Value) -} - -// UnmarshalJSON sets CallbackRef to a copy of data. -func (x *CallbackRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if CallbackRef does not comply with the OpenAPI spec. 
-func (x *CallbackRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *CallbackRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// ExampleRef represents either a Example or a $ref to a Example. -// When serializing and both fields are set, Ref is preferred over Value. -type ExampleRef struct { - Ref string - Value *Example - extra []string -} - -var _ jsonpointer.JSONPointable = (*ExampleRef)(nil) - -func (x *ExampleRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of ExampleRef. -func (x ExampleRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of ExampleRef. -func (x ExampleRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets ExampleRef to a copy of data. 
-func (x *ExampleRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if ExampleRef does not comply with the OpenAPI spec. -func (x *ExampleRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *ExampleRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// HeaderRef represents either a Header or a $ref to a Header. -// When serializing and both fields are set, Ref is preferred over Value. -type HeaderRef struct { - Ref string - Value *Header - extra []string -} - -var _ jsonpointer.JSONPointable = (*HeaderRef)(nil) - -func (x *HeaderRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of HeaderRef. 
-func (x HeaderRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of HeaderRef. -func (x HeaderRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets HeaderRef to a copy of data. -func (x *HeaderRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if HeaderRef does not comply with the OpenAPI spec. -func (x *HeaderRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *HeaderRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// LinkRef represents either a Link or a $ref to a Link. -// When serializing and both fields are set, Ref is preferred over Value. 
-type LinkRef struct { - Ref string - Value *Link - extra []string -} - -var _ jsonpointer.JSONPointable = (*LinkRef)(nil) - -func (x *LinkRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of LinkRef. -func (x LinkRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of LinkRef. -func (x LinkRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets LinkRef to a copy of data. -func (x *LinkRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if LinkRef does not comply with the OpenAPI spec. -func (x *LinkRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *LinkRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// ParameterRef represents either a Parameter or a $ref to a Parameter. -// When serializing and both fields are set, Ref is preferred over Value. -type ParameterRef struct { - Ref string - Value *Parameter - extra []string -} - -var _ jsonpointer.JSONPointable = (*ParameterRef)(nil) - -func (x *ParameterRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of ParameterRef. -func (x ParameterRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of ParameterRef. -func (x ParameterRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets ParameterRef to a copy of data. 
-func (x *ParameterRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if ParameterRef does not comply with the OpenAPI spec. -func (x *ParameterRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *ParameterRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// RequestBodyRef represents either a RequestBody or a $ref to a RequestBody. -// When serializing and both fields are set, Ref is preferred over Value. -type RequestBodyRef struct { - Ref string - Value *RequestBody - extra []string -} - -var _ jsonpointer.JSONPointable = (*RequestBodyRef)(nil) - -func (x *RequestBodyRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of RequestBodyRef. 
-func (x RequestBodyRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of RequestBodyRef. -func (x RequestBodyRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets RequestBodyRef to a copy of data. -func (x *RequestBodyRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if RequestBodyRef does not comply with the OpenAPI spec. -func (x *RequestBodyRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *RequestBodyRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// ResponseRef represents either a Response or a $ref to a Response. 
-// When serializing and both fields are set, Ref is preferred over Value. -type ResponseRef struct { - Ref string - Value *Response - extra []string -} - -var _ jsonpointer.JSONPointable = (*ResponseRef)(nil) - -func (x *ResponseRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of ResponseRef. -func (x ResponseRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of ResponseRef. -func (x ResponseRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets ResponseRef to a copy of data. -func (x *ResponseRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if ResponseRef does not comply with the OpenAPI spec. -func (x *ResponseRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *ResponseRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// SchemaRef represents either a Schema or a $ref to a Schema. -// When serializing and both fields are set, Ref is preferred over Value. -type SchemaRef struct { - Ref string - Value *Schema - extra []string -} - -var _ jsonpointer.JSONPointable = (*SchemaRef)(nil) - -func (x *SchemaRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of SchemaRef. -func (x SchemaRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of SchemaRef. -func (x SchemaRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets SchemaRef to a copy of data. 
-func (x *SchemaRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if SchemaRef does not comply with the OpenAPI spec. -func (x *SchemaRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *SchemaRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} - -// SecuritySchemeRef represents either a SecurityScheme or a $ref to a SecurityScheme. -// When serializing and both fields are set, Ref is preferred over Value. -type SecuritySchemeRef struct { - Ref string - Value *SecurityScheme - extra []string -} - -var _ jsonpointer.JSONPointable = (*SecuritySchemeRef)(nil) - -func (x *SecuritySchemeRef) isEmpty() bool { return x == nil || x.Ref == "" && x.Value == nil } - -// MarshalYAML returns the YAML encoding of SecuritySchemeRef. 
-func (x SecuritySchemeRef) MarshalYAML() (interface{}, error) { - if ref := x.Ref; ref != "" { - return &Ref{Ref: ref}, nil - } - return x.Value, nil -} - -// MarshalJSON returns the JSON encoding of SecuritySchemeRef. -func (x SecuritySchemeRef) MarshalJSON() ([]byte, error) { - if ref := x.Ref; ref != "" { - return json.Marshal(Ref{Ref: ref}) - } - return x.Value.MarshalJSON() -} - -// UnmarshalJSON sets SecuritySchemeRef to a copy of data. -func (x *SecuritySchemeRef) UnmarshalJSON(data []byte) error { - var refOnly Ref - if extra, err := marshmallow.Unmarshal(data, &refOnly, marshmallow.WithExcludeKnownFieldsFromMap(true)); err == nil && refOnly.Ref != "" { - x.Ref = refOnly.Ref - if len(extra) != 0 { - x.extra = make([]string, 0, len(extra)) - for key := range extra { - x.extra = append(x.extra, key) - } - sort.Strings(x.extra) - } - return nil - } - return json.Unmarshal(data, &x.Value) -} - -// Validate returns an error if SecuritySchemeRef does not comply with the OpenAPI spec. -func (x *SecuritySchemeRef) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- if extra := x.extra; len(extra) != 0 { - extras := make([]string, 0, len(extra)) - allowed := getValidationOptions(ctx).extraSiblingFieldsAllowed - for _, ex := range extra { - if allowed != nil { - if _, ok := allowed[ex]; ok { - continue - } - } - extras = append(extras, ex) - } - if len(extras) != 0 { - return fmt.Errorf("extra sibling fields: %+v", extras) - } - } - if v := x.Value; v != nil { - return v.Validate(ctx) - } - return foundUnresolvedRef(x.Ref) -} - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (x *SecuritySchemeRef) JSONLookup(token string) (interface{}, error) { - if token == "$ref" { - return x.Ref, nil - } - ptr, _, err := jsonpointer.GetForToken(x.Value, token) - return ptr, err -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go b/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go deleted file mode 100644 index acd2d0e8..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/request_body.go +++ /dev/null @@ -1,129 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" -) - -// RequestBody is specified by OpenAPI/Swagger 3.0 standard. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#request-body-object -type RequestBody struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Required bool `json:"required,omitempty" yaml:"required,omitempty"` - Content Content `json:"content" yaml:"content"` -} - -func NewRequestBody() *RequestBody { - return &RequestBody{} -} - -func (requestBody *RequestBody) WithDescription(value string) *RequestBody { - requestBody.Description = value - return requestBody -} - -func (requestBody *RequestBody) WithRequired(value bool) *RequestBody { - requestBody.Required = value - return requestBody -} - -func (requestBody *RequestBody) WithContent(content Content) *RequestBody { - requestBody.Content = content - return requestBody -} - -func (requestBody *RequestBody) WithSchemaRef(value *SchemaRef, consumes []string) *RequestBody { - requestBody.Content = NewContentWithSchemaRef(value, consumes) - return requestBody -} - -func (requestBody *RequestBody) WithSchema(value *Schema, consumes []string) *RequestBody { - requestBody.Content = NewContentWithSchema(value, consumes) - return requestBody -} - -func (requestBody *RequestBody) WithJSONSchemaRef(value *SchemaRef) *RequestBody { - requestBody.Content = NewContentWithJSONSchemaRef(value) - return requestBody -} - -func (requestBody *RequestBody) WithJSONSchema(value *Schema) *RequestBody { - requestBody.Content = NewContentWithJSONSchema(value) - return requestBody -} - -func (requestBody *RequestBody) WithFormDataSchemaRef(value *SchemaRef) *RequestBody { - requestBody.Content = NewContentWithFormDataSchemaRef(value) - return requestBody -} - -func (requestBody *RequestBody) WithFormDataSchema(value *Schema) *RequestBody { - requestBody.Content = NewContentWithFormDataSchema(value) - return requestBody -} - -func (requestBody *RequestBody) GetMediaType(mediaType string) *MediaType { - m := 
requestBody.Content - if m == nil { - return nil - } - return m[mediaType] -} - -// MarshalJSON returns the JSON encoding of RequestBody. -func (requestBody RequestBody) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 3+len(requestBody.Extensions)) - for k, v := range requestBody.Extensions { - m[k] = v - } - if x := requestBody.Description; x != "" { - m["description"] = requestBody.Description - } - if x := requestBody.Required; x { - m["required"] = x - } - if x := requestBody.Content; true { - m["content"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets RequestBody to a copy of data. -func (requestBody *RequestBody) UnmarshalJSON(data []byte) error { - type RequestBodyBis RequestBody - var x RequestBodyBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "description") - delete(x.Extensions, "required") - delete(x.Extensions, "content") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *requestBody = RequestBody(x) - return nil -} - -// Validate returns an error if RequestBody does not comply with the OpenAPI spec. -func (requestBody *RequestBody) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if requestBody.Content == nil { - return errors.New("content of the request body is required") - } - - if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { - vo.examplesValidationAsReq, vo.examplesValidationAsRes = true, false - } - - if err := requestBody.Content.Validate(ctx); err != nil { - return err - } - - return validateExtensions(ctx, requestBody.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/response.go b/vendor/github.com/getkin/kin-openapi/openapi3/response.go deleted file mode 100644 index d8b04725..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/response.go +++ /dev/null @@ -1,218 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "sort" - "strconv" -) - -// Responses is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#responses-object -type Responses struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - m map[string]*ResponseRef -} - -// NewResponses builds a responses object with response objects in insertion order. -// Given no arguments, NewResponses returns a valid responses object containing a default match-all reponse. 
-func NewResponses(opts ...NewResponsesOption) *Responses { - if len(opts) == 0 { - return NewResponses(WithName("default", NewResponse().WithDescription(""))) - } - responses := NewResponsesWithCapacity(len(opts)) - for _, opt := range opts { - opt(responses) - } - return responses -} - -// NewResponsesOption describes options to NewResponses func -type NewResponsesOption func(*Responses) - -// WithStatus adds a status code keyed ResponseRef -func WithStatus(status int, responseRef *ResponseRef) NewResponsesOption { - return func(responses *Responses) { - if r := responseRef; r != nil { - code := strconv.FormatInt(int64(status), 10) - responses.Set(code, r) - } - } -} - -// WithName adds a name-keyed Response -func WithName(name string, response *Response) NewResponsesOption { - return func(responses *Responses) { - if r := response; r != nil && name != "" { - responses.Set(name, &ResponseRef{Value: r}) - } - } -} - -// Default returns the default response -func (responses *Responses) Default() *ResponseRef { - return responses.Value("default") -} - -// Status returns a ResponseRef for the given status -// If an exact match isn't initially found a patterned field is checked using -// the first digit to determine the range (eg: 201 to 2XX) -// See https://spec.openapis.org/oas/v3.0.3#patterned-fields-0 -func (responses *Responses) Status(status int) *ResponseRef { - st := strconv.FormatInt(int64(status), 10) - if rref := responses.Value(st); rref != nil { - return rref - } - if 99 < status && status < 600 { - st = string(st[0]) + "XX" - switch st { - case "1XX", "2XX", "3XX", "4XX", "5XX": - return responses.Value(st) - } - } - return nil -} - -// Validate returns an error if Responses does not comply with the OpenAPI spec. -func (responses *Responses) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if responses.Len() == 0 { - return errors.New("the responses object MUST contain at least one response code") - } - - keys := make([]string, 0, responses.Len()) - for key := range responses.Map() { - keys = append(keys, key) - } - sort.Strings(keys) - for _, key := range keys { - v := responses.Value(key) - if err := v.Validate(ctx); err != nil { - return err - } - } - - return validateExtensions(ctx, responses.Extensions) -} - -// Response is specified by OpenAPI/Swagger 3.0 standard. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#response-object -type Response struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Description *string `json:"description,omitempty" yaml:"description,omitempty"` - Headers Headers `json:"headers,omitempty" yaml:"headers,omitempty"` - Content Content `json:"content,omitempty" yaml:"content,omitempty"` - Links Links `json:"links,omitempty" yaml:"links,omitempty"` -} - -func NewResponse() *Response { - return &Response{} -} - -func (response *Response) WithDescription(value string) *Response { - response.Description = &value - return response -} - -func (response *Response) WithContent(content Content) *Response { - response.Content = content - return response -} - -func (response *Response) WithJSONSchema(schema *Schema) *Response { - response.Content = NewContentWithJSONSchema(schema) - return response -} - -func (response *Response) WithJSONSchemaRef(schema *SchemaRef) *Response { - response.Content = NewContentWithJSONSchemaRef(schema) - return response -} - -// MarshalJSON returns the JSON encoding of Response. 
-func (response Response) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(response.Extensions)) - for k, v := range response.Extensions { - m[k] = v - } - if x := response.Description; x != nil { - m["description"] = x - } - if x := response.Headers; len(x) != 0 { - m["headers"] = x - } - if x := response.Content; len(x) != 0 { - m["content"] = x - } - if x := response.Links; len(x) != 0 { - m["links"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Response to a copy of data. -func (response *Response) UnmarshalJSON(data []byte) error { - type ResponseBis Response - var x ResponseBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "description") - delete(x.Extensions, "headers") - delete(x.Extensions, "content") - delete(x.Extensions, "links") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *response = Response(x) - return nil -} - -// Validate returns an error if Response does not comply with the OpenAPI spec. -func (response *Response) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if response.Description == nil { - return errors.New("a short description of the response is required") - } - if vo := getValidationOptions(ctx); !vo.examplesValidationDisabled { - vo.examplesValidationAsReq, vo.examplesValidationAsRes = false, true - } - - if content := response.Content; content != nil { - if err := content.Validate(ctx); err != nil { - return err - } - } - - headers := make([]string, 0, len(response.Headers)) - for name := range response.Headers { - headers = append(headers, name) - } - sort.Strings(headers) - for _, name := range headers { - header := response.Headers[name] - if err := header.Validate(ctx); err != nil { - return err - } - } - - links := make([]string, 0, len(response.Links)) - for name := range response.Links { - links = append(links, name) - } - sort.Strings(links) - for _, name := range links { - link := response.Links[name] - if err := link.Validate(ctx); err != nil { - return err - } - } - - return validateExtensions(ctx, response.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema.go deleted file mode 100644 index e8630b3c..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/schema.go +++ /dev/null @@ -1,2139 +0,0 @@ -package openapi3 - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "math" - "math/big" - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "sync" - "unicode/utf16" - - "github.com/go-openapi/jsonpointer" - "github.com/mohae/deepcopy" -) - -const ( - TypeArray = "array" - TypeBoolean = "boolean" - TypeInteger = "integer" - TypeNumber = "number" - TypeObject = "object" - TypeString = "string" - - // constants for integer formats - formatMinInt32 = float64(math.MinInt32) - formatMaxInt32 = float64(math.MaxInt32) - formatMinInt64 = float64(math.MinInt64) - formatMaxInt64 = float64(math.MaxInt64) -) - -var ( - // SchemaErrorDetailsDisabled disables printing of details about schema errors. 
- SchemaErrorDetailsDisabled = false - - errSchema = errors.New("input does not match the schema") - - // ErrOneOfConflict is the SchemaError Origin when data matches more than one oneOf schema - ErrOneOfConflict = errors.New("input matches more than one oneOf schemas") - - // ErrSchemaInputNaN may be returned when validating a number - ErrSchemaInputNaN = errors.New("floating point NaN is not allowed") - // ErrSchemaInputInf may be returned when validating a number - ErrSchemaInputInf = errors.New("floating point Inf is not allowed") - - compiledPatterns sync.Map -) - -// NewSchemaRef simply builds a SchemaRef -func NewSchemaRef(ref string, value *Schema) *SchemaRef { - return &SchemaRef{ - Ref: ref, - Value: value, - } -} - -type SchemaRefs []*SchemaRef - -var _ jsonpointer.JSONPointable = (*SchemaRefs)(nil) - -// JSONLookup implements https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (s SchemaRefs) JSONLookup(token string) (interface{}, error) { - i, err := strconv.ParseUint(token, 10, 64) - if err != nil { - return nil, err - } - - if i >= uint64(len(s)) { - return nil, fmt.Errorf("index out of range: %d", i) - } - - ref := s[i] - - if ref == nil || ref.Ref != "" { - return &Ref{Ref: ref.Ref}, nil - } - return ref.Value, nil -} - -// Schema is specified by OpenAPI/Swagger 3.0 standard. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object -type Schema struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - OneOf SchemaRefs `json:"oneOf,omitempty" yaml:"oneOf,omitempty"` - AnyOf SchemaRefs `json:"anyOf,omitempty" yaml:"anyOf,omitempty"` - AllOf SchemaRefs `json:"allOf,omitempty" yaml:"allOf,omitempty"` - Not *SchemaRef `json:"not,omitempty" yaml:"not,omitempty"` - Type string `json:"type,omitempty" yaml:"type,omitempty"` - Title string `json:"title,omitempty" yaml:"title,omitempty"` - Format string `json:"format,omitempty" yaml:"format,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Enum []interface{} `json:"enum,omitempty" yaml:"enum,omitempty"` - Default interface{} `json:"default,omitempty" yaml:"default,omitempty"` - Example interface{} `json:"example,omitempty" yaml:"example,omitempty"` - ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` - - // Array-related, here for struct compactness - UniqueItems bool `json:"uniqueItems,omitempty" yaml:"uniqueItems,omitempty"` - // Number-related, here for struct compactness - ExclusiveMin bool `json:"exclusiveMinimum,omitempty" yaml:"exclusiveMinimum,omitempty"` - ExclusiveMax bool `json:"exclusiveMaximum,omitempty" yaml:"exclusiveMaximum,omitempty"` - // Properties - Nullable bool `json:"nullable,omitempty" yaml:"nullable,omitempty"` - ReadOnly bool `json:"readOnly,omitempty" yaml:"readOnly,omitempty"` - WriteOnly bool `json:"writeOnly,omitempty" yaml:"writeOnly,omitempty"` - AllowEmptyValue bool `json:"allowEmptyValue,omitempty" yaml:"allowEmptyValue,omitempty"` - Deprecated bool `json:"deprecated,omitempty" yaml:"deprecated,omitempty"` - XML *XML `json:"xml,omitempty" yaml:"xml,omitempty"` - - // Number - Min *float64 `json:"minimum,omitempty" yaml:"minimum,omitempty"` - Max *float64 `json:"maximum,omitempty" yaml:"maximum,omitempty"` - MultipleOf *float64 
`json:"multipleOf,omitempty" yaml:"multipleOf,omitempty"` - - // String - MinLength uint64 `json:"minLength,omitempty" yaml:"minLength,omitempty"` - MaxLength *uint64 `json:"maxLength,omitempty" yaml:"maxLength,omitempty"` - Pattern string `json:"pattern,omitempty" yaml:"pattern,omitempty"` - - // Array - MinItems uint64 `json:"minItems,omitempty" yaml:"minItems,omitempty"` - MaxItems *uint64 `json:"maxItems,omitempty" yaml:"maxItems,omitempty"` - Items *SchemaRef `json:"items,omitempty" yaml:"items,omitempty"` - - // Object - Required []string `json:"required,omitempty" yaml:"required,omitempty"` - Properties Schemas `json:"properties,omitempty" yaml:"properties,omitempty"` - MinProps uint64 `json:"minProperties,omitempty" yaml:"minProperties,omitempty"` - MaxProps *uint64 `json:"maxProperties,omitempty" yaml:"maxProperties,omitempty"` - AdditionalProperties AdditionalProperties `json:"additionalProperties,omitempty" yaml:"additionalProperties,omitempty"` - Discriminator *Discriminator `json:"discriminator,omitempty" yaml:"discriminator,omitempty"` -} - -type AdditionalProperties struct { - Has *bool - Schema *SchemaRef -} - -// MarshalJSON returns the JSON encoding of AdditionalProperties. -func (addProps AdditionalProperties) MarshalJSON() ([]byte, error) { - if x := addProps.Has; x != nil { - if *x { - return []byte("true"), nil - } - return []byte("false"), nil - } - if x := addProps.Schema; x != nil { - return json.Marshal(x) - } - return nil, nil -} - -// UnmarshalJSON sets AdditionalProperties to a copy of data. 
-func (addProps *AdditionalProperties) UnmarshalJSON(data []byte) error { - var x interface{} - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - switch y := x.(type) { - case nil: - case bool: - addProps.Has = &y - case map[string]interface{}: - if len(y) == 0 { - addProps.Schema = &SchemaRef{Value: &Schema{}} - } else { - buf := new(bytes.Buffer) - json.NewEncoder(buf).Encode(y) - if err := json.NewDecoder(buf).Decode(&addProps.Schema); err != nil { - return err - } - } - default: - return errors.New("cannot unmarshal additionalProperties: value must be either a schema object or a boolean") - } - return nil -} - -var _ jsonpointer.JSONPointable = (*Schema)(nil) - -func NewSchema() *Schema { - return &Schema{} -} - -// MarshalJSON returns the JSON encoding of Schema. -func (schema Schema) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 36+len(schema.Extensions)) - for k, v := range schema.Extensions { - m[k] = v - } - - if x := schema.OneOf; len(x) != 0 { - m["oneOf"] = x - } - if x := schema.AnyOf; len(x) != 0 { - m["anyOf"] = x - } - if x := schema.AllOf; len(x) != 0 { - m["allOf"] = x - } - if x := schema.Not; x != nil { - m["not"] = x - } - if x := schema.Type; len(x) != 0 { - m["type"] = x - } - if x := schema.Title; len(x) != 0 { - m["title"] = x - } - if x := schema.Format; len(x) != 0 { - m["format"] = x - } - if x := schema.Description; len(x) != 0 { - m["description"] = x - } - if x := schema.Enum; len(x) != 0 { - m["enum"] = x - } - if x := schema.Default; x != nil { - m["default"] = x - } - if x := schema.Example; x != nil { - m["example"] = x - } - if x := schema.ExternalDocs; x != nil { - m["externalDocs"] = x - } - - // Array-related - if x := schema.UniqueItems; x { - m["uniqueItems"] = x - } - // Number-related - if x := schema.ExclusiveMin; x { - m["exclusiveMinimum"] = x - } - if x := schema.ExclusiveMax; x { - m["exclusiveMaximum"] = x - } - // Properties - if x := schema.Nullable; x { - 
m["nullable"] = x - } - if x := schema.ReadOnly; x { - m["readOnly"] = x - } - if x := schema.WriteOnly; x { - m["writeOnly"] = x - } - if x := schema.AllowEmptyValue; x { - m["allowEmptyValue"] = x - } - if x := schema.Deprecated; x { - m["deprecated"] = x - } - if x := schema.XML; x != nil { - m["xml"] = x - } - - // Number - if x := schema.Min; x != nil { - m["minimum"] = x - } - if x := schema.Max; x != nil { - m["maximum"] = x - } - if x := schema.MultipleOf; x != nil { - m["multipleOf"] = x - } - - // String - if x := schema.MinLength; x != 0 { - m["minLength"] = x - } - if x := schema.MaxLength; x != nil { - m["maxLength"] = x - } - if x := schema.Pattern; x != "" { - m["pattern"] = x - } - - // Array - if x := schema.MinItems; x != 0 { - m["minItems"] = x - } - if x := schema.MaxItems; x != nil { - m["maxItems"] = x - } - if x := schema.Items; x != nil { - m["items"] = x - } - - // Object - if x := schema.Required; len(x) != 0 { - m["required"] = x - } - if x := schema.Properties; len(x) != 0 { - m["properties"] = x - } - if x := schema.MinProps; x != 0 { - m["minProperties"] = x - } - if x := schema.MaxProps; x != nil { - m["maxProperties"] = x - } - if x := schema.AdditionalProperties; x.Has != nil || x.Schema != nil { - m["additionalProperties"] = &x - } - if x := schema.Discriminator; x != nil { - m["discriminator"] = x - } - - return json.Marshal(m) -} - -// UnmarshalJSON sets Schema to a copy of data. 
-func (schema *Schema) UnmarshalJSON(data []byte) error { - type SchemaBis Schema - var x SchemaBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - - delete(x.Extensions, "oneOf") - delete(x.Extensions, "anyOf") - delete(x.Extensions, "allOf") - delete(x.Extensions, "not") - delete(x.Extensions, "type") - delete(x.Extensions, "title") - delete(x.Extensions, "format") - delete(x.Extensions, "description") - delete(x.Extensions, "enum") - delete(x.Extensions, "default") - delete(x.Extensions, "example") - delete(x.Extensions, "externalDocs") - - // Array-related - delete(x.Extensions, "uniqueItems") - // Number-related - delete(x.Extensions, "exclusiveMinimum") - delete(x.Extensions, "exclusiveMaximum") - // Properties - delete(x.Extensions, "nullable") - delete(x.Extensions, "readOnly") - delete(x.Extensions, "writeOnly") - delete(x.Extensions, "allowEmptyValue") - delete(x.Extensions, "deprecated") - delete(x.Extensions, "xml") - - // Number - delete(x.Extensions, "minimum") - delete(x.Extensions, "maximum") - delete(x.Extensions, "multipleOf") - - // String - delete(x.Extensions, "minLength") - delete(x.Extensions, "maxLength") - delete(x.Extensions, "pattern") - - // Array - delete(x.Extensions, "minItems") - delete(x.Extensions, "maxItems") - delete(x.Extensions, "items") - - // Object - delete(x.Extensions, "required") - delete(x.Extensions, "properties") - delete(x.Extensions, "minProperties") - delete(x.Extensions, "maxProperties") - delete(x.Extensions, "additionalProperties") - delete(x.Extensions, "discriminator") - - if len(x.Extensions) == 0 { - x.Extensions = nil - } - - *schema = Schema(x) - - if schema.Format == "date" { - // This is a fix for: https://github.com/getkin/kin-openapi/issues/697 - if eg, ok := schema.Example.(string); ok { - schema.Example = strings.TrimSuffix(eg, "T00:00:00Z") - } - } - return nil -} - -// JSONLookup implements 
https://pkg.go.dev/github.com/go-openapi/jsonpointer#JSONPointable -func (schema Schema) JSONLookup(token string) (interface{}, error) { - switch token { - case "additionalProperties": - if addProps := schema.AdditionalProperties.Has; addProps != nil { - return *addProps, nil - } - if addProps := schema.AdditionalProperties.Schema; addProps != nil { - if addProps.Ref != "" { - return &Ref{Ref: addProps.Ref}, nil - } - return addProps.Value, nil - } - case "not": - if schema.Not != nil { - if schema.Not.Ref != "" { - return &Ref{Ref: schema.Not.Ref}, nil - } - return schema.Not.Value, nil - } - case "items": - if schema.Items != nil { - if schema.Items.Ref != "" { - return &Ref{Ref: schema.Items.Ref}, nil - } - return schema.Items.Value, nil - } - case "oneOf": - return schema.OneOf, nil - case "anyOf": - return schema.AnyOf, nil - case "allOf": - return schema.AllOf, nil - case "type": - return schema.Type, nil - case "title": - return schema.Title, nil - case "format": - return schema.Format, nil - case "description": - return schema.Description, nil - case "enum": - return schema.Enum, nil - case "default": - return schema.Default, nil - case "example": - return schema.Example, nil - case "externalDocs": - return schema.ExternalDocs, nil - case "uniqueItems": - return schema.UniqueItems, nil - case "exclusiveMin": - return schema.ExclusiveMin, nil - case "exclusiveMax": - return schema.ExclusiveMax, nil - case "nullable": - return schema.Nullable, nil - case "readOnly": - return schema.ReadOnly, nil - case "writeOnly": - return schema.WriteOnly, nil - case "allowEmptyValue": - return schema.AllowEmptyValue, nil - case "xml": - return schema.XML, nil - case "deprecated": - return schema.Deprecated, nil - case "min": - return schema.Min, nil - case "max": - return schema.Max, nil - case "multipleOf": - return schema.MultipleOf, nil - case "minLength": - return schema.MinLength, nil - case "maxLength": - return schema.MaxLength, nil - case "pattern": - return 
schema.Pattern, nil - case "minItems": - return schema.MinItems, nil - case "maxItems": - return schema.MaxItems, nil - case "required": - return schema.Required, nil - case "properties": - return schema.Properties, nil - case "minProps": - return schema.MinProps, nil - case "maxProps": - return schema.MaxProps, nil - case "discriminator": - return schema.Discriminator, nil - } - - v, _, err := jsonpointer.GetForToken(schema.Extensions, token) - return v, err -} - -func (schema *Schema) NewRef() *SchemaRef { - return &SchemaRef{ - Value: schema, - } -} - -func NewOneOfSchema(schemas ...*Schema) *Schema { - refs := make([]*SchemaRef, 0, len(schemas)) - for _, schema := range schemas { - refs = append(refs, &SchemaRef{Value: schema}) - } - return &Schema{ - OneOf: refs, - } -} - -func NewAnyOfSchema(schemas ...*Schema) *Schema { - refs := make([]*SchemaRef, 0, len(schemas)) - for _, schema := range schemas { - refs = append(refs, &SchemaRef{Value: schema}) - } - return &Schema{ - AnyOf: refs, - } -} - -func NewAllOfSchema(schemas ...*Schema) *Schema { - refs := make([]*SchemaRef, 0, len(schemas)) - for _, schema := range schemas { - refs = append(refs, &SchemaRef{Value: schema}) - } - return &Schema{ - AllOf: refs, - } -} - -func NewBoolSchema() *Schema { - return &Schema{ - Type: TypeBoolean, - } -} - -func NewFloat64Schema() *Schema { - return &Schema{ - Type: TypeNumber, - } -} - -func NewIntegerSchema() *Schema { - return &Schema{ - Type: TypeInteger, - } -} - -func NewInt32Schema() *Schema { - return &Schema{ - Type: TypeInteger, - Format: "int32", - } -} - -func NewInt64Schema() *Schema { - return &Schema{ - Type: TypeInteger, - Format: "int64", - } -} - -func NewStringSchema() *Schema { - return &Schema{ - Type: TypeString, - } -} - -func NewDateTimeSchema() *Schema { - return &Schema{ - Type: TypeString, - Format: "date-time", - } -} - -func NewUUIDSchema() *Schema { - return &Schema{ - Type: TypeString, - Format: "uuid", - } -} - -func NewBytesSchema() 
*Schema { - return &Schema{ - Type: TypeString, - Format: "byte", - } -} - -func NewArraySchema() *Schema { - return &Schema{ - Type: TypeArray, - } -} - -func NewObjectSchema() *Schema { - return &Schema{ - Type: TypeObject, - Properties: make(Schemas), - } -} - -func (schema *Schema) WithNullable() *Schema { - schema.Nullable = true - return schema -} - -func (schema *Schema) WithMin(value float64) *Schema { - schema.Min = &value - return schema -} - -func (schema *Schema) WithMax(value float64) *Schema { - schema.Max = &value - return schema -} - -func (schema *Schema) WithExclusiveMin(value bool) *Schema { - schema.ExclusiveMin = value - return schema -} - -func (schema *Schema) WithExclusiveMax(value bool) *Schema { - schema.ExclusiveMax = value - return schema -} - -func (schema *Schema) WithEnum(values ...interface{}) *Schema { - schema.Enum = values - return schema -} - -func (schema *Schema) WithDefault(defaultValue interface{}) *Schema { - schema.Default = defaultValue - return schema -} - -func (schema *Schema) WithFormat(value string) *Schema { - schema.Format = value - return schema -} - -func (schema *Schema) WithLength(i int64) *Schema { - n := uint64(i) - schema.MinLength = n - schema.MaxLength = &n - return schema -} - -func (schema *Schema) WithMinLength(i int64) *Schema { - n := uint64(i) - schema.MinLength = n - return schema -} - -func (schema *Schema) WithMaxLength(i int64) *Schema { - n := uint64(i) - schema.MaxLength = &n - return schema -} - -func (schema *Schema) WithLengthDecodedBase64(i int64) *Schema { - n := uint64(i) - v := (n*8 + 5) / 6 - schema.MinLength = v - schema.MaxLength = &v - return schema -} - -func (schema *Schema) WithMinLengthDecodedBase64(i int64) *Schema { - n := uint64(i) - schema.MinLength = (n*8 + 5) / 6 - return schema -} - -func (schema *Schema) WithMaxLengthDecodedBase64(i int64) *Schema { - n := uint64(i) - schema.MinLength = (n*8 + 5) / 6 - return schema -} - -func (schema *Schema) WithPattern(pattern string) 
*Schema { - schema.Pattern = pattern - return schema -} - -func (schema *Schema) WithItems(value *Schema) *Schema { - schema.Items = &SchemaRef{ - Value: value, - } - return schema -} - -func (schema *Schema) WithMinItems(i int64) *Schema { - n := uint64(i) - schema.MinItems = n - return schema -} - -func (schema *Schema) WithMaxItems(i int64) *Schema { - n := uint64(i) - schema.MaxItems = &n - return schema -} - -func (schema *Schema) WithUniqueItems(unique bool) *Schema { - schema.UniqueItems = unique - return schema -} - -func (schema *Schema) WithProperty(name string, propertySchema *Schema) *Schema { - return schema.WithPropertyRef(name, &SchemaRef{ - Value: propertySchema, - }) -} - -func (schema *Schema) WithPropertyRef(name string, ref *SchemaRef) *Schema { - properties := schema.Properties - if properties == nil { - properties = make(Schemas) - schema.Properties = properties - } - properties[name] = ref - return schema -} - -func (schema *Schema) WithProperties(properties map[string]*Schema) *Schema { - result := make(Schemas, len(properties)) - for k, v := range properties { - result[k] = &SchemaRef{ - Value: v, - } - } - schema.Properties = result - return schema -} - -func (schema *Schema) WithRequired(required []string) *Schema { - schema.Required = required - return schema -} - -func (schema *Schema) WithMinProperties(i int64) *Schema { - n := uint64(i) - schema.MinProps = n - return schema -} - -func (schema *Schema) WithMaxProperties(i int64) *Schema { - n := uint64(i) - schema.MaxProps = &n - return schema -} - -func (schema *Schema) WithAnyAdditionalProperties() *Schema { - schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(true)} - return schema -} - -func (schema *Schema) WithoutAdditionalProperties() *Schema { - schema.AdditionalProperties = AdditionalProperties{Has: BoolPtr(false)} - return schema -} - -func (schema *Schema) WithAdditionalProperties(v *Schema) *Schema { - schema.AdditionalProperties = AdditionalProperties{} - if v 
!= nil { - schema.AdditionalProperties.Schema = &SchemaRef{Value: v} - } - return schema -} - -// IsEmpty tells whether schema is equivalent to the empty schema `{}`. -func (schema *Schema) IsEmpty() bool { - if schema.Type != "" || schema.Format != "" || len(schema.Enum) != 0 || - schema.UniqueItems || schema.ExclusiveMin || schema.ExclusiveMax || - schema.Nullable || schema.ReadOnly || schema.WriteOnly || schema.AllowEmptyValue || - schema.Min != nil || schema.Max != nil || schema.MultipleOf != nil || - schema.MinLength != 0 || schema.MaxLength != nil || schema.Pattern != "" || - schema.MinItems != 0 || schema.MaxItems != nil || - len(schema.Required) != 0 || - schema.MinProps != 0 || schema.MaxProps != nil { - return false - } - if n := schema.Not; n != nil && n.Value != nil && !n.Value.IsEmpty() { - return false - } - if ap := schema.AdditionalProperties.Schema; ap != nil && ap.Value != nil && !ap.Value.IsEmpty() { - return false - } - if apa := schema.AdditionalProperties.Has; apa != nil && !*apa { - return false - } - if items := schema.Items; items != nil && items.Value != nil && !items.Value.IsEmpty() { - return false - } - for _, s := range schema.Properties { - if ss := s.Value; ss != nil && !ss.IsEmpty() { - return false - } - } - for _, s := range schema.OneOf { - if ss := s.Value; ss != nil && !ss.IsEmpty() { - return false - } - } - for _, s := range schema.AnyOf { - if ss := s.Value; ss != nil && !ss.IsEmpty() { - return false - } - } - for _, s := range schema.AllOf { - if ss := s.Value; ss != nil && !ss.IsEmpty() { - return false - } - } - return true -} - -// Validate returns an error if Schema does not comply with the OpenAPI spec. -func (schema *Schema) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - _, err := schema.validate(ctx, []*Schema{}) - return err -} - -// returns the updated stack and an error if Schema does not comply with the OpenAPI spec. 
-func (schema *Schema) validate(ctx context.Context, stack []*Schema) ([]*Schema, error) { - validationOpts := getValidationOptions(ctx) - - for _, existing := range stack { - if existing == schema { - return stack, nil - } - } - stack = append(stack, schema) - - if schema.ReadOnly && schema.WriteOnly { - return stack, errors.New("a property MUST NOT be marked as both readOnly and writeOnly being true") - } - - for _, item := range schema.OneOf { - v := item.Value - if v == nil { - return stack, foundUnresolvedRef(item.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - for _, item := range schema.AnyOf { - v := item.Value - if v == nil { - return stack, foundUnresolvedRef(item.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - for _, item := range schema.AllOf { - v := item.Value - if v == nil { - return stack, foundUnresolvedRef(item.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - if ref := schema.Not; ref != nil { - v := ref.Value - if v == nil { - return stack, foundUnresolvedRef(ref.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - schemaType := schema.Type - switch schemaType { - case "": - case TypeBoolean: - case TypeNumber: - if format := schema.Format; len(format) > 0 { - switch format { - case "float", "double": - default: - if validationOpts.schemaFormatValidationEnabled { - return stack, unsupportedFormat(format) - } - } - } - case TypeInteger: - if format := schema.Format; len(format) > 0 { - switch format { - case "int32", "int64": - default: - if validationOpts.schemaFormatValidationEnabled { - return stack, unsupportedFormat(format) - } - } - } - case TypeString: - if format := schema.Format; len(format) > 0 { - switch format { - // Supported by OpenAPIv3.0.3: - // 
https://spec.openapis.org/oas/v3.0.3 - case "byte", "binary", "date", "date-time", "password": - // In JSON Draft-07 (not validated yet though): - // https://json-schema.org/draft-07/json-schema-release-notes.html#formats - case "iri", "iri-reference", "uri-template", "idn-email", "idn-hostname": - case "json-pointer", "relative-json-pointer", "regex", "time": - // In JSON Draft 2019-09 (not validated yet though): - // https://json-schema.org/draft/2019-09/release-notes.html#format-vocabulary - case "duration", "uuid": - // Defined in some other specification - case "email", "hostname", "ipv4", "ipv6", "uri", "uri-reference": - default: - // Try to check for custom defined formats - if _, ok := SchemaStringFormats[format]; !ok && validationOpts.schemaFormatValidationEnabled { - return stack, unsupportedFormat(format) - } - } - } - if !validationOpts.schemaPatternValidationDisabled && schema.Pattern != "" { - if _, err := schema.compilePattern(); err != nil { - return stack, err - } - } - case TypeArray: - if schema.Items == nil { - return stack, errors.New("when schema type is 'array', schema 'items' must be non-null") - } - case TypeObject: - default: - return stack, fmt.Errorf("unsupported 'type' value %q", schemaType) - } - - if ref := schema.Items; ref != nil { - v := ref.Value - if v == nil { - return stack, foundUnresolvedRef(ref.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - properties := make([]string, 0, len(schema.Properties)) - for name := range schema.Properties { - properties = append(properties, name) - } - sort.Strings(properties) - for _, name := range properties { - ref := schema.Properties[name] - v := ref.Value - if v == nil { - return stack, foundUnresolvedRef(ref.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - if schema.AdditionalProperties.Has != nil && schema.AdditionalProperties.Schema != nil { - return stack, 
errors.New("additionalProperties are set to both boolean and schema") - } - if ref := schema.AdditionalProperties.Schema; ref != nil { - v := ref.Value - if v == nil { - return stack, foundUnresolvedRef(ref.Ref) - } - - var err error - if stack, err = v.validate(ctx, stack); err != nil { - return stack, err - } - } - - if v := schema.ExternalDocs; v != nil { - if err := v.Validate(ctx); err != nil { - return stack, fmt.Errorf("invalid external docs: %w", err) - } - } - - if v := schema.Default; v != nil && !validationOpts.schemaDefaultsValidationDisabled { - if err := schema.VisitJSON(v); err != nil { - return stack, fmt.Errorf("invalid default: %w", err) - } - } - - if x := schema.Example; x != nil && !validationOpts.examplesValidationDisabled { - if err := validateExampleValue(ctx, x, schema); err != nil { - return stack, fmt.Errorf("invalid example: %w", err) - } - } - - return stack, validateExtensions(ctx, schema.Extensions) -} - -func (schema *Schema) IsMatching(value interface{}) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) == nil -} - -func (schema *Schema) IsMatchingJSONBoolean(value bool) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) == nil -} - -func (schema *Schema) IsMatchingJSONNumber(value float64) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) == nil -} - -func (schema *Schema) IsMatchingJSONString(value string) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) == nil -} - -func (schema *Schema) IsMatchingJSONArray(value []interface{}) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) == nil -} - -func (schema *Schema) IsMatchingJSONObject(value map[string]interface{}) bool { - settings := newSchemaValidationSettings(FailFast()) - return schema.visitJSON(settings, value) 
== nil -} - -func (schema *Schema) VisitJSON(value interface{}, opts ...SchemaValidationOption) error { - settings := newSchemaValidationSettings(opts...) - return schema.visitJSON(settings, value) -} - -func (schema *Schema) visitJSON(settings *schemaValidationSettings, value interface{}) (err error) { - switch value := value.(type) { - case nil: - // Don't use VisitJSONNull, as we still want to reach 'visitXOFOperations', since - // those could allow for a nullable value even though this one doesn't - if schema.Nullable { - return - } - case float64: - if math.IsNaN(value) { - return ErrSchemaInputNaN - } - if math.IsInf(value, 0) { - return ErrSchemaInputInf - } - } - - if schema.IsEmpty() { - switch value.(type) { - case nil: - return schema.visitJSONNull(settings) - default: - return - } - } - - if err = schema.visitNotOperation(settings, value); err != nil { - return - } - var run bool - if err, run = schema.visitXOFOperations(settings, value); err != nil || !run { - return - } - if err = schema.visitEnumOperation(settings, value); err != nil { - return - } - - switch value := value.(type) { - case nil: - return schema.visitJSONNull(settings) - case bool: - return schema.visitJSONBoolean(settings, value) - case json.Number: - valueFloat64, err := value.Float64() - if err != nil { - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "type", - Reason: "cannot convert json.Number to float64", - customizeMessageError: settings.customizeMessageError, - Origin: err, - } - } - return schema.visitJSONNumber(settings, valueFloat64) - case int: - return schema.visitJSONNumber(settings, float64(value)) - case int32: - return schema.visitJSONNumber(settings, float64(value)) - case int64: - return schema.visitJSONNumber(settings, float64(value)) - case float64: - return schema.visitJSONNumber(settings, value) - case string: - return schema.visitJSONString(settings, value) - case []interface{}: - return schema.visitJSONArray(settings, value) - case 
map[string]interface{}: - return schema.visitJSONObject(settings, value) - case map[interface{}]interface{}: // for YAML cf. issue #444 - values := make(map[string]interface{}, len(value)) - for key, v := range value { - if k, ok := key.(string); ok { - values[k] = v - } - } - if len(value) == len(values) { - return schema.visitJSONObject(settings, values) - } - } - - // Catch slice of non-empty interface type - if reflect.TypeOf(value).Kind() == reflect.Slice { - valueR := reflect.ValueOf(value) - newValue := make([]interface{}, 0, valueR.Len()) - for i := 0; i < valueR.Len(); i++ { - newValue = append(newValue, valueR.Index(i).Interface()) - } - return schema.visitJSONArray(settings, newValue) - } - - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "type", - Reason: fmt.Sprintf("unhandled value of type %T", value), - customizeMessageError: settings.customizeMessageError, - } -} - -func (schema *Schema) visitEnumOperation(settings *schemaValidationSettings, value interface{}) (err error) { - if enum := schema.Enum; len(enum) != 0 { - for _, v := range enum { - switch c := value.(type) { - case json.Number: - var f float64 - if f, err = strconv.ParseFloat(c.String(), 64); err != nil { - return err - } - if v == f { - return - } - case int64: - if v == float64(c) { - return - } - default: - if reflect.DeepEqual(v, value) { - return - } - } - } - if settings.failfast { - return errSchema - } - allowedValues, _ := json.Marshal(enum) - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "enum", - Reason: fmt.Sprintf("value is not one of the allowed values %s", string(allowedValues)), - customizeMessageError: settings.customizeMessageError, - } - } - return -} - -func (schema *Schema) visitNotOperation(settings *schemaValidationSettings, value interface{}) (err error) { - if ref := schema.Not; ref != nil { - v := ref.Value - if v == nil { - return foundUnresolvedRef(ref.Ref) - } - if err := v.visitJSON(settings, value); err == nil 
{ - if settings.failfast { - return errSchema - } - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "not", - customizeMessageError: settings.customizeMessageError, - } - } - } - return -} - -// If the XOF operations pass successfully, abort further run of validation, as they will already be satisfied (unless the schema -// itself is badly specified -func (schema *Schema) visitXOFOperations(settings *schemaValidationSettings, value interface{}) (err error, run bool) { - var visitedOneOf, visitedAnyOf, visitedAllOf bool - if v := schema.OneOf; len(v) > 0 { - var discriminatorRef string - if schema.Discriminator != nil { - pn := schema.Discriminator.PropertyName - if valuemap, okcheck := value.(map[string]interface{}); okcheck { - discriminatorVal, okcheck := valuemap[pn] - if !okcheck { - return &SchemaError{ - Schema: schema, - SchemaField: "discriminator", - Reason: fmt.Sprintf("input does not contain the discriminator property %q", pn), - }, false - } - - discriminatorValString, okcheck := discriminatorVal.(string) - if !okcheck { - return &SchemaError{ - Value: discriminatorVal, - Schema: schema, - SchemaField: "discriminator", - Reason: fmt.Sprintf("value of discriminator property %q is not a string", pn), - }, false - } - - if discriminatorRef, okcheck = schema.Discriminator.Mapping[discriminatorValString]; len(schema.Discriminator.Mapping) > 0 && !okcheck { - return &SchemaError{ - Value: discriminatorVal, - Schema: schema, - SchemaField: "discriminator", - Reason: fmt.Sprintf("discriminator property %q has invalid value", pn), - }, false - } - } - } - - var ( - ok = 0 - validationErrors = multiErrorForOneOf{} - matchedOneOfIndices = make([]int, 0) - tempValue = value - ) - for idx, item := range v { - v := item.Value - if v == nil { - return foundUnresolvedRef(item.Ref), false - } - - if discriminatorRef != "" && discriminatorRef != item.Ref { - continue - } - - // make a deep copy to protect origin value from being injected default 
value that defined in mismatched oneOf schema - if settings.asreq || settings.asrep { - tempValue = deepcopy.Copy(value) - } - - if err := v.visitJSON(settings, tempValue); err != nil { - validationErrors = append(validationErrors, err) - continue - } - - matchedOneOfIndices = append(matchedOneOfIndices, idx) - ok++ - } - - if ok != 1 { - if settings.failfast { - return errSchema, false - } - e := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "oneOf", - customizeMessageError: settings.customizeMessageError, - } - if ok > 1 { - e.Origin = ErrOneOfConflict - e.Reason = fmt.Sprintf(`value matches more than one schema from "oneOf" (matches schemas at indices %v)`, matchedOneOfIndices) - } else { - e.Origin = fmt.Errorf("doesn't match schema due to: %w", validationErrors) - e.Reason = `value doesn't match any schema from "oneOf"` - } - - return e, false - } - - // run again to inject default value that defined in matched oneOf schema - if settings.asreq || settings.asrep { - _ = v[matchedOneOfIndices[0]].Value.visitJSON(settings, value) - } - visitedOneOf = true - } - - if v := schema.AnyOf; len(v) > 0 { - var ( - ok = false - matchedAnyOfIdx = 0 - tempValue = value - ) - for idx, item := range v { - v := item.Value - if v == nil { - return foundUnresolvedRef(item.Ref), false - } - // make a deep copy to protect origin value from being injected default value that defined in mismatched anyOf schema - if settings.asreq || settings.asrep { - tempValue = deepcopy.Copy(value) - } - if err := v.visitJSON(settings, tempValue); err == nil { - ok = true - matchedAnyOfIdx = idx - break - } - } - if !ok { - if settings.failfast { - return errSchema, false - } - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "anyOf", - Reason: `doesn't match any schema from "anyOf"`, - customizeMessageError: settings.customizeMessageError, - }, false - } - - _ = v[matchedAnyOfIdx].Value.visitJSON(settings, value) - visitedAnyOf = true - } - - for _, item := 
range schema.AllOf { - v := item.Value - if v == nil { - return foundUnresolvedRef(item.Ref), false - } - if err := v.visitJSON(settings, value); err != nil { - if settings.failfast { - return errSchema, false - } - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "allOf", - Reason: `doesn't match all schemas from "allOf"`, - Origin: err, - customizeMessageError: settings.customizeMessageError, - }, false - } - visitedAllOf = true - } - - run = !((visitedOneOf || visitedAnyOf || visitedAllOf) && value == nil) - return -} - -// The value is not considered in visitJSONNull because according to the spec -// "null is not supported as a type" unless `nullable` is also set to true -// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#data-types -// https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#schema-object -func (schema *Schema) visitJSONNull(settings *schemaValidationSettings) (err error) { - if schema.Nullable { - return - } - if settings.failfast { - return errSchema - } - return &SchemaError{ - Value: nil, - Schema: schema, - SchemaField: "nullable", - Reason: "Value is not nullable", - customizeMessageError: settings.customizeMessageError, - } -} - -func (schema *Schema) VisitJSONBoolean(value bool) error { - settings := newSchemaValidationSettings() - return schema.visitJSONBoolean(settings, value) -} - -func (schema *Schema) visitJSONBoolean(settings *schemaValidationSettings, value bool) (err error) { - if schemaType := schema.Type; schemaType != "" && schemaType != TypeBoolean { - return schema.expectedType(settings, value) - } - return -} - -func (schema *Schema) VisitJSONNumber(value float64) error { - settings := newSchemaValidationSettings() - return schema.visitJSONNumber(settings, value) -} - -func (schema *Schema) visitJSONNumber(settings *schemaValidationSettings, value float64) error { - var me MultiError - schemaType := schema.Type - if schemaType == TypeInteger { - if bigFloat := 
big.NewFloat(value); !bigFloat.IsInt() { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "type", - Reason: "value must be an integer", - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - } else if schemaType != "" && schemaType != TypeNumber { - return schema.expectedType(settings, value) - } - - // formats - if schemaType == TypeInteger && schema.Format != "" { - formatMin := float64(0) - formatMax := float64(0) - switch schema.Format { - case "int32": - formatMin = formatMinInt32 - formatMax = formatMaxInt32 - case "int64": - formatMin = formatMinInt64 - formatMax = formatMaxInt64 - default: - if settings.formatValidationEnabled { - return unsupportedFormat(schema.Format) - } - } - if formatMin != 0 && formatMax != 0 && !(formatMin <= value && value <= formatMax) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "format", - Reason: fmt.Sprintf("number must be an %s", schema.Format), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - } - - // "exclusiveMinimum" - if v := schema.ExclusiveMin; v && !(*schema.Min < value) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "exclusiveMinimum", - Reason: fmt.Sprintf("number must be more than %g", *schema.Min), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "exclusiveMaximum" - if v := schema.ExclusiveMax; v && !(*schema.Max > value) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "exclusiveMaximum", - Reason: fmt.Sprintf("number must be less than %g", *schema.Max), - 
customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "minimum" - if v := schema.Min; v != nil && !(*v <= value) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "minimum", - Reason: fmt.Sprintf("number must be at least %g", *v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "maximum" - if v := schema.Max; v != nil && !(*v >= value) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "maximum", - Reason: fmt.Sprintf("number must be at most %g", *v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "multipleOf" - if v := schema.MultipleOf; v != nil { - // "A numeric instance is valid only if division by this keyword's - // value results in an integer." 
- if bigFloat := big.NewFloat(value / *v); !bigFloat.IsInt() { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "multipleOf", - Reason: fmt.Sprintf("number must be a multiple of %g", *v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - } - - if len(me) > 0 { - return me - } - - return nil -} - -func (schema *Schema) VisitJSONString(value string) error { - settings := newSchemaValidationSettings() - return schema.visitJSONString(settings, value) -} - -func (schema *Schema) visitJSONString(settings *schemaValidationSettings, value string) error { - if schemaType := schema.Type; schemaType != "" && schemaType != TypeString { - return schema.expectedType(settings, value) - } - - var me MultiError - - // "minLength" and "maxLength" - minLength := schema.MinLength - maxLength := schema.MaxLength - if minLength != 0 || maxLength != nil { - // JSON schema string lengths are UTF-16, not UTF-8! 
- length := int64(0) - for _, r := range value { - if utf16.IsSurrogate(r) { - length += 2 - } else { - length++ - } - } - if minLength != 0 && length < int64(minLength) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "minLength", - Reason: fmt.Sprintf("minimum string length is %d", minLength), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - if maxLength != nil && length > int64(*maxLength) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "maxLength", - Reason: fmt.Sprintf("maximum string length is %d", *maxLength), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - } - - // "pattern" - if !settings.patternValidationDisabled && schema.Pattern != "" { - cpiface, _ := compiledPatterns.Load(schema.Pattern) - cp, _ := cpiface.(*regexp.Regexp) - if cp == nil { - var err error - if cp, err = schema.compilePattern(); err != nil { - if !settings.multiError { - return err - } - me = append(me, err) - } - } - if !cp.MatchString(value) { - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "pattern", - Reason: fmt.Sprintf(`string doesn't match the regular expression "%s"`, schema.Pattern), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - } - - // "format" - var formatStrErr string - var formatErr error - if format := schema.Format; format != "" { - if f, ok := SchemaStringFormats[format]; ok { - switch { - case f.regexp != nil && f.callback == nil: - if cp := f.regexp; !cp.MatchString(value) { - formatStrErr = fmt.Sprintf(`string doesn't match the format %q (regular expression "%s")`, format, cp.String()) - } - case f.regexp == nil && f.callback != nil: - if 
err := f.callback(value); err != nil { - var schemaErr = &SchemaError{} - if errors.As(err, &schemaErr) { - formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%s)`, format, schemaErr.Reason) - } else { - formatStrErr = fmt.Sprintf(`string doesn't match the format %q (%v)`, format, err) - } - formatErr = err - } - default: - formatStrErr = fmt.Sprintf("corrupted entry %q in SchemaStringFormats", format) - } - } - } - if formatStrErr != "" || formatErr != nil { - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "format", - Reason: formatStrErr, - Origin: formatErr, - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - - } - - if len(me) > 0 { - return me - } - - return nil -} - -func (schema *Schema) VisitJSONArray(value []interface{}) error { - settings := newSchemaValidationSettings() - return schema.visitJSONArray(settings, value) -} - -func (schema *Schema) visitJSONArray(settings *schemaValidationSettings, value []interface{}) error { - if schemaType := schema.Type; schemaType != "" && schemaType != TypeArray { - return schema.expectedType(settings, value) - } - - var me MultiError - - lenValue := int64(len(value)) - - // "minItems" - if v := schema.MinItems; v != 0 && lenValue < int64(v) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "minItems", - Reason: fmt.Sprintf("minimum number of items is %d", v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "maxItems" - if v := schema.MaxItems; v != nil && lenValue > int64(*v) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "maxItems", - Reason: fmt.Sprintf("maximum number of items is %d", *v), - customizeMessageError: settings.customizeMessageError, - } - if 
!settings.multiError { - return err - } - me = append(me, err) - } - - // "uniqueItems" - if sliceUniqueItemsChecker == nil { - sliceUniqueItemsChecker = isSliceOfUniqueItems - } - if v := schema.UniqueItems; v && !sliceUniqueItemsChecker(value) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "uniqueItems", - Reason: "duplicate items found", - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "items" - if itemSchemaRef := schema.Items; itemSchemaRef != nil { - itemSchema := itemSchemaRef.Value - if itemSchema == nil { - return foundUnresolvedRef(itemSchemaRef.Ref) - } - for i, item := range value { - if err := itemSchema.visitJSON(settings, item); err != nil { - err = markSchemaErrorIndex(err, i) - if !settings.multiError { - return err - } - if itemMe, ok := err.(MultiError); ok { - me = append(me, itemMe...) - } else { - me = append(me, err) - } - } - } - } - - if len(me) > 0 { - return me - } - - return nil -} - -func (schema *Schema) VisitJSONObject(value map[string]interface{}) error { - settings := newSchemaValidationSettings() - return schema.visitJSONObject(settings, value) -} - -func (schema *Schema) visitJSONObject(settings *schemaValidationSettings, value map[string]interface{}) error { - if schemaType := schema.Type; schemaType != "" && schemaType != TypeObject { - return schema.expectedType(settings, value) - } - - var me MultiError - - if settings.asreq || settings.asrep { - properties := make([]string, 0, len(schema.Properties)) - for propName := range schema.Properties { - properties = append(properties, propName) - } - sort.Strings(properties) - for _, propName := range properties { - propSchema := schema.Properties[propName] - reqRO := settings.asreq && propSchema.Value.ReadOnly && !settings.readOnlyValidationDisabled - repWO := settings.asrep && propSchema.Value.WriteOnly && 
!settings.writeOnlyValidationDisabled - - if f := settings.defaultsSet; f != nil && value[propName] == nil { - if dflt := propSchema.Value.Default; dflt != nil && !reqRO && !repWO { - value[propName] = dflt - settings.onceSettingDefaults.Do(f) - } - } - - if value[propName] != nil { - if reqRO { - me = append(me, fmt.Errorf("readOnly property %q in request", propName)) - } else if repWO { - me = append(me, fmt.Errorf("writeOnly property %q in response", propName)) - } - } - } - } - - // "properties" - properties := schema.Properties - lenValue := int64(len(value)) - - // "minProperties" - if v := schema.MinProps; v != 0 && lenValue < int64(v) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "minProperties", - Reason: fmt.Sprintf("there must be at least %d properties", v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "maxProperties" - if v := schema.MaxProps; v != nil && lenValue > int64(*v) { - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "maxProperties", - Reason: fmt.Sprintf("there must be at most %d properties", *v), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "additionalProperties" - var additionalProperties *Schema - if ref := schema.AdditionalProperties.Schema; ref != nil { - additionalProperties = ref.Value - } - keys := make([]string, 0, len(value)) - for k := range value { - keys = append(keys, k) - } - sort.Strings(keys) - for _, k := range keys { - v := value[k] - if properties != nil { - propertyRef := properties[k] - if propertyRef != nil { - p := propertyRef.Value - if p == nil { - return foundUnresolvedRef(propertyRef.Ref) - } - if err := p.visitJSON(settings, v); err != nil { - if settings.failfast { - return errSchema 
- } - err = markSchemaErrorKey(err, k) - if !settings.multiError { - return err - } - if v, ok := err.(MultiError); ok { - me = append(me, v...) - continue - } - me = append(me, err) - } - continue - } - } - if allowed := schema.AdditionalProperties.Has; allowed == nil || *allowed { - if additionalProperties != nil { - if err := additionalProperties.visitJSON(settings, v); err != nil { - if settings.failfast { - return errSchema - } - err = markSchemaErrorKey(err, k) - if !settings.multiError { - return err - } - if v, ok := err.(MultiError); ok { - me = append(me, v...) - continue - } - me = append(me, err) - } - } - continue - } - if settings.failfast { - return errSchema - } - err := &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "properties", - Reason: fmt.Sprintf("property %q is unsupported", k), - customizeMessageError: settings.customizeMessageError, - } - if !settings.multiError { - return err - } - me = append(me, err) - } - - // "required" - for _, k := range schema.Required { - if _, ok := value[k]; !ok { - if s := schema.Properties[k]; s != nil && s.Value.ReadOnly && settings.asreq { - continue - } - if s := schema.Properties[k]; s != nil && s.Value.WriteOnly && settings.asrep { - continue - } - if settings.failfast { - return errSchema - } - err := markSchemaErrorKey(&SchemaError{ - Value: value, - Schema: schema, - SchemaField: "required", - Reason: fmt.Sprintf("property %q is missing", k), - customizeMessageError: settings.customizeMessageError, - }, k) - if !settings.multiError { - return err - } - me = append(me, err) - } - } - - if len(me) > 0 { - return me - } - - return nil -} - -func (schema *Schema) expectedType(settings *schemaValidationSettings, value interface{}) error { - if settings.failfast { - return errSchema - } - - a := "a" - switch schema.Type { - case TypeArray, TypeObject, TypeInteger: - a = "an" - } - return &SchemaError{ - Value: value, - Schema: schema, - SchemaField: "type", - Reason: fmt.Sprintf("value must be 
%s %s", a, schema.Type), - customizeMessageError: settings.customizeMessageError, - } -} - -// SchemaError is an error that occurs during schema validation. -type SchemaError struct { - // Value is the value that failed validation. - Value interface{} - // reversePath is the path to the value that failed validation. - reversePath []string - // Schema is the schema that failed validation. - Schema *Schema - // SchemaField is the field of the schema that failed validation. - SchemaField string - // Reason is a human-readable message describing the error. - // The message should never include the original value to prevent leakage of potentially sensitive inputs in error messages. - Reason string - // Origin is the original error that caused this error. - Origin error - // customizeMessageError is a function that can be used to customize the error message. - customizeMessageError func(err *SchemaError) string -} - -var _ interface{ Unwrap() error } = SchemaError{} - -func markSchemaErrorKey(err error, key string) error { - var me multiErrorForOneOf - - if errors.As(err, &me) { - err = me.Unwrap() - } - - if v, ok := err.(*SchemaError); ok { - v.reversePath = append(v.reversePath, key) - return v - } - if v, ok := err.(MultiError); ok { - for _, e := range v { - _ = markSchemaErrorKey(e, key) - } - return v - } - return err -} - -func markSchemaErrorIndex(err error, index int) error { - return markSchemaErrorKey(err, strconv.FormatInt(int64(index), 10)) -} - -func (err *SchemaError) JSONPointer() []string { - reversePath := err.reversePath - path := append([]string(nil), reversePath...) 
- for left, right := 0, len(path)-1; left < right; left, right = left+1, right-1 { - path[left], path[right] = path[right], path[left] - } - return path -} - -func (err *SchemaError) Error() string { - if err.customizeMessageError != nil { - if msg := err.customizeMessageError(err); msg != "" { - return msg - } - } - - buf := bytes.NewBuffer(make([]byte, 0, 256)) - - if len(err.reversePath) > 0 { - buf.WriteString(`Error at "`) - reversePath := err.reversePath - for i := len(reversePath) - 1; i >= 0; i-- { - buf.WriteByte('/') - buf.WriteString(reversePath[i]) - } - buf.WriteString(`": `) - } - - if err.Origin != nil { - buf.WriteString(err.Origin.Error()) - - return buf.String() - } - - reason := err.Reason - if reason == "" { - buf.WriteString(`Doesn't match schema "`) - buf.WriteString(err.SchemaField) - buf.WriteString(`"`) - } else { - buf.WriteString(reason) - } - - if !SchemaErrorDetailsDisabled { - buf.WriteString("\nSchema:\n ") - encoder := json.NewEncoder(buf) - encoder.SetIndent(" ", " ") - if err := encoder.Encode(err.Schema); err != nil { - panic(err) - } - buf.WriteString("\nValue:\n ") - if err := encoder.Encode(err.Value); err != nil { - panic(err) - } - } - - return buf.String() -} - -func (err SchemaError) Unwrap() error { - return err.Origin -} - -func isSliceOfUniqueItems(xs []interface{}) bool { - s := len(xs) - m := make(map[string]struct{}, s) - for _, x := range xs { - // The input slice is converted from a JSON string, there shall - // have no error when convert it back. - key, _ := json.Marshal(&x) - m[string(key)] = struct{}{} - } - return s == len(m) -} - -// SliceUniqueItemsChecker is an function used to check if an given slice -// have unique items. -type SliceUniqueItemsChecker func(items []interface{}) bool - -// By default using predefined func isSliceOfUniqueItems which make use of -// json.Marshal to generate a key for map used to check if a given slice -// have unique items. 
-var sliceUniqueItemsChecker SliceUniqueItemsChecker = isSliceOfUniqueItems - -// RegisterArrayUniqueItemsChecker is used to register a customized function -// used to check if JSON array have unique items. -func RegisterArrayUniqueItemsChecker(fn SliceUniqueItemsChecker) { - sliceUniqueItemsChecker = fn -} - -func unsupportedFormat(format string) error { - return fmt.Errorf("unsupported 'format' value %q", format) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go deleted file mode 100644 index ea38400c..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/schema_formats.go +++ /dev/null @@ -1,106 +0,0 @@ -package openapi3 - -import ( - "fmt" - "net" - "regexp" - "strings" -) - -const ( - // FormatOfStringForUUIDOfRFC4122 is an optional predefined format for UUID v1-v5 as specified by RFC4122 - FormatOfStringForUUIDOfRFC4122 = `^(?:[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}|00000000-0000-0000-0000-000000000000)$` - - // FormatOfStringForEmail pattern catches only some suspiciously wrong-looking email addresses. - // Use DefineStringFormat(...) if you need something stricter. 
- FormatOfStringForEmail = `^[^@]+@[^@<>",\s]+$` -) - -// FormatCallback performs custom checks on exotic formats -type FormatCallback func(value string) error - -// Format represents a format validator registered by either DefineStringFormat or DefineStringFormatCallback -type Format struct { - regexp *regexp.Regexp - callback FormatCallback -} - -// SchemaStringFormats allows for validating string formats -var SchemaStringFormats = make(map[string]Format, 4) - -// DefineStringFormat defines a new regexp pattern for a given format -func DefineStringFormat(name string, pattern string) { - re, err := regexp.Compile(pattern) - if err != nil { - err := fmt.Errorf("format %q has invalid pattern %q: %w", name, pattern, err) - panic(err) - } - SchemaStringFormats[name] = Format{regexp: re} -} - -// DefineStringFormatCallback adds a validation function for a specific schema format entry -func DefineStringFormatCallback(name string, callback FormatCallback) { - SchemaStringFormats[name] = Format{callback: callback} -} - -func validateIP(ip string) error { - parsed := net.ParseIP(ip) - if parsed == nil { - return &SchemaError{ - Value: ip, - Reason: "Not an IP address", - } - } - return nil -} - -func validateIPv4(ip string) error { - if err := validateIP(ip); err != nil { - return err - } - - if !(strings.Count(ip, ":") < 2) { - return &SchemaError{ - Value: ip, - Reason: "Not an IPv4 address (it's IPv6)", - } - } - return nil -} - -func validateIPv6(ip string) error { - if err := validateIP(ip); err != nil { - return err - } - - if !(strings.Count(ip, ":") >= 2) { - return &SchemaError{ - Value: ip, - Reason: "Not an IPv6 address (it's IPv4)", - } - } - return nil -} - -func init() { - // Base64 - // The pattern supports base64 and b./ase64url. Padding ('=') is supported. 
- DefineStringFormat("byte", `(^$|^[a-zA-Z0-9+/\-_]*=*$)`) - - // date - DefineStringFormat("date", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)$`) - - // date-time - DefineStringFormat("date-time", `^[0-9]{4}-(0[0-9]|10|11|12)-([0-2][0-9]|30|31)T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?(Z|(\+|-)[0-9]{2}:[0-9]{2})?$`) - -} - -// DefineIPv4Format opts in ipv4 format validation on top of OAS 3 spec -func DefineIPv4Format() { - DefineStringFormatCallback("ipv4", validateIPv4) -} - -// DefineIPv6Format opts in ipv6 format validation on top of OAS 3 spec -func DefineIPv6Format() { - DefineStringFormatCallback("ipv6", validateIPv6) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go deleted file mode 100644 index ceb4ad15..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/schema_pattern.go +++ /dev/null @@ -1,29 +0,0 @@ -package openapi3 - -import ( - "fmt" - "regexp" -) - -var patRewriteCodepoints = regexp.MustCompile(`[\][u]([0-9A-F]{4})`) - -// See https://pkg.go.dev/regexp/syntax -func intoGoRegexp(re string) string { - return patRewriteCodepoints.ReplaceAllString(re, `x{$1}`) -} - -// NOTE: racey WRT [writes to schema.Pattern] vs [reads schema.Pattern then writes to compiledPatterns] -func (schema *Schema) compilePattern() (cp *regexp.Regexp, err error) { - pattern := schema.Pattern - if cp, err = regexp.Compile(intoGoRegexp(pattern)); err != nil { - err = &SchemaError{ - Schema: schema, - SchemaField: "pattern", - Origin: err, - Reason: fmt.Sprintf("cannot compile pattern %q: %v", pattern, err), - } - return - } - var _ bool = compiledPatterns.CompareAndSwap(pattern, nil, cp) - return -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go b/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go deleted file mode 100644 index 17aad2fa..00000000 --- 
a/vendor/github.com/getkin/kin-openapi/openapi3/schema_validation_settings.go +++ /dev/null @@ -1,79 +0,0 @@ -package openapi3 - -import ( - "sync" -) - -// SchemaValidationOption describes options a user has when validating request / response bodies. -type SchemaValidationOption func(*schemaValidationSettings) - -type schemaValidationSettings struct { - failfast bool - multiError bool - asreq, asrep bool // exclusive (XOR) fields - formatValidationEnabled bool - patternValidationDisabled bool - readOnlyValidationDisabled bool - writeOnlyValidationDisabled bool - - onceSettingDefaults sync.Once - defaultsSet func() - - customizeMessageError func(err *SchemaError) string -} - -// FailFast returns schema validation errors quicker. -func FailFast() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.failfast = true } -} - -func MultiErrors() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.multiError = true } -} - -func VisitAsRequest() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.asreq, s.asrep = true, false } -} - -func VisitAsResponse() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.asreq, s.asrep = false, true } -} - -// EnableFormatValidation setting makes Validate not return an error when validating documents that mention schema formats that are not defined by the OpenAPIv3 specification. -func EnableFormatValidation() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.formatValidationEnabled = true } -} - -// DisablePatternValidation setting makes Validate not return an error when validating patterns that are not supported by the Go regexp engine. 
-func DisablePatternValidation() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.patternValidationDisabled = true } -} - -// DisableReadOnlyValidation setting makes Validate not return an error when validating properties marked as read-only -func DisableReadOnlyValidation() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.readOnlyValidationDisabled = true } -} - -// DisableWriteOnlyValidation setting makes Validate not return an error when validating properties marked as write-only -func DisableWriteOnlyValidation() SchemaValidationOption { - return func(s *schemaValidationSettings) { s.writeOnlyValidationDisabled = true } -} - -// DefaultsSet executes the given callback (once) IFF schema validation set default values. -func DefaultsSet(f func()) SchemaValidationOption { - return func(s *schemaValidationSettings) { s.defaultsSet = f } -} - -// SetSchemaErrorMessageCustomizer allows to override the schema error message. -// If the passed function returns an empty string, it returns to the previous Error() implementation. 
-func SetSchemaErrorMessageCustomizer(f func(err *SchemaError) string) SchemaValidationOption { - return func(s *schemaValidationSettings) { s.customizeMessageError = f } -} - -func newSchemaValidationSettings(opts ...SchemaValidationOption) *schemaValidationSettings { - settings := &schemaValidationSettings{} - for _, opt := range opts { - opt(settings) - } - return settings -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go b/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go deleted file mode 100644 index 87891c95..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/security_requirements.go +++ /dev/null @@ -1,51 +0,0 @@ -package openapi3 - -import ( - "context" -) - -type SecurityRequirements []SecurityRequirement - -func NewSecurityRequirements() *SecurityRequirements { - return &SecurityRequirements{} -} - -func (srs *SecurityRequirements) With(securityRequirement SecurityRequirement) *SecurityRequirements { - *srs = append(*srs, securityRequirement) - return srs -} - -// Validate returns an error if SecurityRequirements does not comply with the OpenAPI spec. -func (srs SecurityRequirements) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - for _, security := range srs { - if err := security.Validate(ctx); err != nil { - return err - } - } - return nil -} - -// SecurityRequirement is specified by OpenAPI/Swagger standard version 3. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-requirement-object -type SecurityRequirement map[string][]string - -func NewSecurityRequirement() SecurityRequirement { - return make(SecurityRequirement) -} - -func (security SecurityRequirement) Authenticate(provider string, scopes ...string) SecurityRequirement { - if len(scopes) == 0 { - scopes = []string{} // Forces the variable to be encoded as an array instead of null - } - security[provider] = scopes - return security -} - -// Validate returns an error if SecurityRequirement does not comply with the OpenAPI spec. -func (security *SecurityRequirement) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - return nil -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go b/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go deleted file mode 100644 index c07bfb61..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/security_scheme.go +++ /dev/null @@ -1,402 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "net/url" -) - -// SecurityScheme is specified by OpenAPI/Swagger standard version 3. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#security-scheme-object -type SecurityScheme struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Type string `json:"type,omitempty" yaml:"type,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Name string `json:"name,omitempty" yaml:"name,omitempty"` - In string `json:"in,omitempty" yaml:"in,omitempty"` - Scheme string `json:"scheme,omitempty" yaml:"scheme,omitempty"` - BearerFormat string `json:"bearerFormat,omitempty" yaml:"bearerFormat,omitempty"` - Flows *OAuthFlows `json:"flows,omitempty" yaml:"flows,omitempty"` - OpenIdConnectUrl string `json:"openIdConnectUrl,omitempty" yaml:"openIdConnectUrl,omitempty"` -} - -func NewSecurityScheme() *SecurityScheme { - return &SecurityScheme{} -} - -func NewCSRFSecurityScheme() *SecurityScheme { - return &SecurityScheme{ - Type: "apiKey", - In: "header", - Name: "X-XSRF-TOKEN", - } -} - -func NewOIDCSecurityScheme(oidcUrl string) *SecurityScheme { - return &SecurityScheme{ - Type: "openIdConnect", - OpenIdConnectUrl: oidcUrl, - } -} - -func NewJWTSecurityScheme() *SecurityScheme { - return &SecurityScheme{ - Type: "http", - Scheme: "bearer", - BearerFormat: "JWT", - } -} - -// MarshalJSON returns the JSON encoding of SecurityScheme. 
-func (ss SecurityScheme) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 8+len(ss.Extensions)) - for k, v := range ss.Extensions { - m[k] = v - } - if x := ss.Type; x != "" { - m["type"] = x - } - if x := ss.Description; x != "" { - m["description"] = x - } - if x := ss.Name; x != "" { - m["name"] = x - } - if x := ss.In; x != "" { - m["in"] = x - } - if x := ss.Scheme; x != "" { - m["scheme"] = x - } - if x := ss.BearerFormat; x != "" { - m["bearerFormat"] = x - } - if x := ss.Flows; x != nil { - m["flows"] = x - } - if x := ss.OpenIdConnectUrl; x != "" { - m["openIdConnectUrl"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets SecurityScheme to a copy of data. -func (ss *SecurityScheme) UnmarshalJSON(data []byte) error { - type SecuritySchemeBis SecurityScheme - var x SecuritySchemeBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "type") - delete(x.Extensions, "description") - delete(x.Extensions, "name") - delete(x.Extensions, "in") - delete(x.Extensions, "scheme") - delete(x.Extensions, "bearerFormat") - delete(x.Extensions, "flows") - delete(x.Extensions, "openIdConnectUrl") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *ss = SecurityScheme(x) - return nil -} - -func (ss *SecurityScheme) WithType(value string) *SecurityScheme { - ss.Type = value - return ss -} - -func (ss *SecurityScheme) WithDescription(value string) *SecurityScheme { - ss.Description = value - return ss -} - -func (ss *SecurityScheme) WithName(value string) *SecurityScheme { - ss.Name = value - return ss -} - -func (ss *SecurityScheme) WithIn(value string) *SecurityScheme { - ss.In = value - return ss -} - -func (ss *SecurityScheme) WithScheme(value string) *SecurityScheme { - ss.Scheme = value - return ss -} - -func (ss *SecurityScheme) WithBearerFormat(value string) *SecurityScheme { - ss.BearerFormat = value - return ss -} - -// Validate 
returns an error if SecurityScheme does not comply with the OpenAPI spec. -func (ss *SecurityScheme) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - hasIn := false - hasBearerFormat := false - hasFlow := false - switch ss.Type { - case "apiKey": - hasIn = true - case "http": - scheme := ss.Scheme - switch scheme { - case "bearer": - hasBearerFormat = true - case "basic", "negotiate", "digest": - default: - return fmt.Errorf("security scheme of type 'http' has invalid 'scheme' value %q", scheme) - } - case "oauth2": - hasFlow = true - case "openIdConnect": - if ss.OpenIdConnectUrl == "" { - return fmt.Errorf("no OIDC URL found for openIdConnect security scheme %q", ss.Name) - } - default: - return fmt.Errorf("security scheme 'type' can't be %q", ss.Type) - } - - // Validate "in" and "name" - if hasIn { - switch ss.In { - case "query", "header", "cookie": - default: - return fmt.Errorf("security scheme of type 'apiKey' should have 'in'. 
It can be 'query', 'header' or 'cookie', not %q", ss.In) - } - if ss.Name == "" { - return errors.New("security scheme of type 'apiKey' should have 'name'") - } - } else if len(ss.In) > 0 { - return fmt.Errorf("security scheme of type %q can't have 'in'", ss.Type) - } else if len(ss.Name) > 0 { - return fmt.Errorf("security scheme of type %q can't have 'name'", ss.Type) - } - - // Validate "format" - // "bearerFormat" is an arbitrary string so we only check if the scheme supports it - if !hasBearerFormat && len(ss.BearerFormat) > 0 { - return fmt.Errorf("security scheme of type %q can't have 'bearerFormat'", ss.Type) - } - - // Validate "flow" - if hasFlow { - flow := ss.Flows - if flow == nil { - return fmt.Errorf("security scheme of type %q should have 'flows'", ss.Type) - } - if err := flow.Validate(ctx); err != nil { - return fmt.Errorf("security scheme 'flow' is invalid: %w", err) - } - } else if ss.Flows != nil { - return fmt.Errorf("security scheme of type %q can't have 'flows'", ss.Type) - } - - return validateExtensions(ctx, ss.Extensions) -} - -// OAuthFlows is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flows-object -type OAuthFlows struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Implicit *OAuthFlow `json:"implicit,omitempty" yaml:"implicit,omitempty"` - Password *OAuthFlow `json:"password,omitempty" yaml:"password,omitempty"` - ClientCredentials *OAuthFlow `json:"clientCredentials,omitempty" yaml:"clientCredentials,omitempty"` - AuthorizationCode *OAuthFlow `json:"authorizationCode,omitempty" yaml:"authorizationCode,omitempty"` -} - -type oAuthFlowType int - -const ( - oAuthFlowTypeImplicit oAuthFlowType = iota - oAuthFlowTypePassword - oAuthFlowTypeClientCredentials - oAuthFlowAuthorizationCode -) - -// MarshalJSON returns the JSON encoding of OAuthFlows. 
-func (flows OAuthFlows) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(flows.Extensions)) - for k, v := range flows.Extensions { - m[k] = v - } - if x := flows.Implicit; x != nil { - m["implicit"] = x - } - if x := flows.Password; x != nil { - m["password"] = x - } - if x := flows.ClientCredentials; x != nil { - m["clientCredentials"] = x - } - if x := flows.AuthorizationCode; x != nil { - m["authorizationCode"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets OAuthFlows to a copy of data. -func (flows *OAuthFlows) UnmarshalJSON(data []byte) error { - type OAuthFlowsBis OAuthFlows - var x OAuthFlowsBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "implicit") - delete(x.Extensions, "password") - delete(x.Extensions, "clientCredentials") - delete(x.Extensions, "authorizationCode") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *flows = OAuthFlows(x) - return nil -} - -// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec. -func (flows *OAuthFlows) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if v := flows.Implicit; v != nil { - if err := v.validate(ctx, oAuthFlowTypeImplicit, opts...); err != nil { - return fmt.Errorf("the OAuth flow 'implicit' is invalid: %w", err) - } - } - - if v := flows.Password; v != nil { - if err := v.validate(ctx, oAuthFlowTypePassword, opts...); err != nil { - return fmt.Errorf("the OAuth flow 'password' is invalid: %w", err) - } - } - - if v := flows.ClientCredentials; v != nil { - if err := v.validate(ctx, oAuthFlowTypeClientCredentials, opts...); err != nil { - return fmt.Errorf("the OAuth flow 'clientCredentials' is invalid: %w", err) - } - } - - if v := flows.AuthorizationCode; v != nil { - if err := v.validate(ctx, oAuthFlowAuthorizationCode, opts...); err != nil { - return fmt.Errorf("the OAuth flow 'authorizationCode' is invalid: %w", err) - } - } - - return validateExtensions(ctx, flows.Extensions) -} - -// OAuthFlow is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#oauth-flow-object -type OAuthFlow struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - AuthorizationURL string `json:"authorizationUrl,omitempty" yaml:"authorizationUrl,omitempty"` - TokenURL string `json:"tokenUrl,omitempty" yaml:"tokenUrl,omitempty"` - RefreshURL string `json:"refreshUrl,omitempty" yaml:"refreshUrl,omitempty"` - Scopes map[string]string `json:"scopes" yaml:"scopes"` // required -} - -// MarshalJSON returns the JSON encoding of OAuthFlow. -func (flow OAuthFlow) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(flow.Extensions)) - for k, v := range flow.Extensions { - m[k] = v - } - if x := flow.AuthorizationURL; x != "" { - m["authorizationUrl"] = x - } - if x := flow.TokenURL; x != "" { - m["tokenUrl"] = x - } - if x := flow.RefreshURL; x != "" { - m["refreshUrl"] = x - } - m["scopes"] = flow.Scopes - return json.Marshal(m) -} - -// UnmarshalJSON sets OAuthFlow to a copy of data. 
-func (flow *OAuthFlow) UnmarshalJSON(data []byte) error { - type OAuthFlowBis OAuthFlow - var x OAuthFlowBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "authorizationUrl") - delete(x.Extensions, "tokenUrl") - delete(x.Extensions, "refreshUrl") - delete(x.Extensions, "scopes") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *flow = OAuthFlow(x) - return nil -} - -// Validate returns an error if OAuthFlows does not comply with the OpenAPI spec. -func (flow *OAuthFlow) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if v := flow.RefreshURL; v != "" { - if _, err := url.Parse(v); err != nil { - return fmt.Errorf("field 'refreshUrl' is invalid: %w", err) - } - } - - if flow.Scopes == nil { - return errors.New("field 'scopes' is missing") - } - - return validateExtensions(ctx, flow.Extensions) -} - -func (flow *OAuthFlow) validate(ctx context.Context, typ oAuthFlowType, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - typeIn := func(types ...oAuthFlowType) bool { - for _, ty := range types { - if ty == typ { - return true - } - } - return false - } - - if in := typeIn(oAuthFlowTypeImplicit, oAuthFlowAuthorizationCode); true { - switch { - case flow.AuthorizationURL == "" && in: - return errors.New("field 'authorizationUrl' is empty or missing") - case flow.AuthorizationURL != "" && !in: - return errors.New("field 'authorizationUrl' should not be set") - case flow.AuthorizationURL != "": - if _, err := url.Parse(flow.AuthorizationURL); err != nil { - return fmt.Errorf("field 'authorizationUrl' is invalid: %w", err) - } - } - } - - if in := typeIn(oAuthFlowTypePassword, oAuthFlowTypeClientCredentials, oAuthFlowAuthorizationCode); true { - switch { - case flow.TokenURL == "" && in: - return errors.New("field 'tokenUrl' is empty or missing") - case flow.TokenURL != "" && !in: - return errors.New("field 'tokenUrl' should not be set") - case flow.TokenURL != "": - if _, err := url.Parse(flow.TokenURL); err != nil { - return fmt.Errorf("field 'tokenUrl' is invalid: %w", err) - } - } - } - - return flow.Validate(ctx, opts...) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go b/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go deleted file mode 100644 index 2ec8bd2d..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/serialization_method.go +++ /dev/null @@ -1,17 +0,0 @@ -package openapi3 - -const ( - SerializationSimple = "simple" - SerializationLabel = "label" - SerializationMatrix = "matrix" - SerializationForm = "form" - SerializationSpaceDelimited = "spaceDelimited" - SerializationPipeDelimited = "pipeDelimited" - SerializationDeepObject = "deepObject" -) - -// SerializationMethod describes a serialization method of HTTP request's parameters and body. 
-type SerializationMethod struct { - Style string - Explode bool -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/server.go b/vendor/github.com/getkin/kin-openapi/openapi3/server.go deleted file mode 100644 index 04e233d5..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/server.go +++ /dev/null @@ -1,284 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "math" - "net/url" - "sort" - "strings" -) - -// Servers is specified by OpenAPI/Swagger standard version 3. -type Servers []*Server - -// Validate returns an error if Servers does not comply with the OpenAPI spec. -func (servers Servers) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - for _, v := range servers { - if err := v.Validate(ctx); err != nil { - return err - } - } - return nil -} - -// BasePath returns the base path of the first server in the list, or /. -func (servers Servers) BasePath() (string, error) { - for _, server := range servers { - return server.BasePath() - } - return "/", nil -} - -func (servers Servers) MatchURL(parsedURL *url.URL) (*Server, []string, string) { - rawURL := parsedURL.String() - if i := strings.IndexByte(rawURL, '?'); i >= 0 { - rawURL = rawURL[:i] - } - for _, server := range servers { - pathParams, remaining, ok := server.MatchRawURL(rawURL) - if ok { - return server, pathParams, remaining - } - } - return nil, nil, "" -} - -// Server is specified by OpenAPI/Swagger standard version 3. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-object -type Server struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - URL string `json:"url" yaml:"url"` // Required - Description string `json:"description,omitempty" yaml:"description,omitempty"` - Variables map[string]*ServerVariable `json:"variables,omitempty" yaml:"variables,omitempty"` -} - -// BasePath returns the base path extracted from the default values of variables, if any. -// Assumes a valid struct (per Validate()). -func (server *Server) BasePath() (string, error) { - if server == nil { - return "/", nil - } - - uri := server.URL - for name, svar := range server.Variables { - uri = strings.ReplaceAll(uri, "{"+name+"}", svar.Default) - } - - u, err := url.ParseRequestURI(uri) - if err != nil { - return "", err - } - - if bp := u.Path; bp != "" { - return bp, nil - } - - return "/", nil -} - -// MarshalJSON returns the JSON encoding of Server. -func (server Server) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 3+len(server.Extensions)) - for k, v := range server.Extensions { - m[k] = v - } - m["url"] = server.URL - if x := server.Description; x != "" { - m["description"] = x - } - if x := server.Variables; len(x) != 0 { - m["variables"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Server to a copy of data. 
-func (server *Server) UnmarshalJSON(data []byte) error { - type ServerBis Server - var x ServerBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "url") - delete(x.Extensions, "description") - delete(x.Extensions, "variables") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *server = Server(x) - return nil -} - -func (server Server) ParameterNames() ([]string, error) { - pattern := server.URL - var params []string - for len(pattern) > 0 { - i := strings.IndexByte(pattern, '{') - if i < 0 { - break - } - pattern = pattern[i+1:] - i = strings.IndexByte(pattern, '}') - if i < 0 { - return nil, errors.New("missing '}'") - } - params = append(params, strings.TrimSpace(pattern[:i])) - pattern = pattern[i+1:] - } - return params, nil -} - -func (server Server) MatchRawURL(input string) ([]string, string, bool) { - pattern := server.URL - var params []string - for len(pattern) > 0 { - c := pattern[0] - if len(pattern) == 1 && c == '/' { - break - } - if c == '{' { - // Find end of pattern - i := strings.IndexByte(pattern, '}') - if i < 0 { - return nil, "", false - } - pattern = pattern[i+1:] - - // Find next matching pattern character or next '/' whichever comes first - np := -1 - if len(pattern) > 0 { - np = strings.IndexByte(input, pattern[0]) - } - ns := strings.IndexByte(input, '/') - - if np < 0 { - i = ns - } else if ns < 0 { - i = np - } else { - i = int(math.Min(float64(np), float64(ns))) - } - if i < 0 { - i = len(input) - } - params = append(params, input[:i]) - input = input[i:] - continue - } - if len(input) == 0 || input[0] != c { - return nil, "", false - } - pattern = pattern[1:] - input = input[1:] - } - if input == "" { - input = "/" - } - if input[0] != '/' { - return nil, "", false - } - return params, input, true -} - -// Validate returns an error if Server does not comply with the OpenAPI spec. 
-func (server *Server) Validate(ctx context.Context, opts ...ValidationOption) (err error) { - ctx = WithValidationOptions(ctx, opts...) - - if server.URL == "" { - return errors.New("value of url must be a non-empty string") - } - - opening, closing := strings.Count(server.URL, "{"), strings.Count(server.URL, "}") - if opening != closing { - return errors.New("server URL has mismatched { and }") - } - - if opening != len(server.Variables) { - return errors.New("server has undeclared variables") - } - - variables := make([]string, 0, len(server.Variables)) - for name := range server.Variables { - variables = append(variables, name) - } - sort.Strings(variables) - for _, name := range variables { - v := server.Variables[name] - if !strings.Contains(server.URL, "{"+name+"}") { - return errors.New("server has undeclared variables") - } - if err = v.Validate(ctx); err != nil { - return - } - } - - return validateExtensions(ctx, server.Extensions) -} - -// ServerVariable is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#server-variable-object -type ServerVariable struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Enum []string `json:"enum,omitempty" yaml:"enum,omitempty"` - Default string `json:"default,omitempty" yaml:"default,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` -} - -// MarshalJSON returns the JSON encoding of ServerVariable. -func (serverVariable ServerVariable) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 4+len(serverVariable.Extensions)) - for k, v := range serverVariable.Extensions { - m[k] = v - } - if x := serverVariable.Enum; len(x) != 0 { - m["enum"] = x - } - if x := serverVariable.Default; x != "" { - m["default"] = x - } - if x := serverVariable.Description; x != "" { - m["description"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets ServerVariable to a copy of data. 
-func (serverVariable *ServerVariable) UnmarshalJSON(data []byte) error { - type ServerVariableBis ServerVariable - var x ServerVariableBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "enum") - delete(x.Extensions, "default") - delete(x.Extensions, "description") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *serverVariable = ServerVariable(x) - return nil -} - -// Validate returns an error if ServerVariable does not comply with the OpenAPI spec. -func (serverVariable *ServerVariable) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - if serverVariable.Default == "" { - data, err := serverVariable.MarshalJSON() - if err != nil { - return err - } - return fmt.Errorf("field default is required in %s", data) - } - - return validateExtensions(ctx, serverVariable.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/tag.go b/vendor/github.com/getkin/kin-openapi/openapi3/tag.go deleted file mode 100644 index eea6462f..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/tag.go +++ /dev/null @@ -1,90 +0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" - "fmt" -) - -// Tags is specified by OpenAPI/Swagger 3.0 standard. -type Tags []*Tag - -func (tags Tags) Get(name string) *Tag { - for _, tag := range tags { - if tag.Name == name { - return tag - } - } - return nil -} - -// Validate returns an error if Tags does not comply with the OpenAPI spec. -func (tags Tags) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) - - for _, v := range tags { - if err := v.Validate(ctx); err != nil { - return err - } - } - return nil -} - -// Tag is specified by OpenAPI/Swagger 3.0 standard. 
-// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#tag-object -type Tag struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Name string `json:"name,omitempty" yaml:"name,omitempty"` - Description string `json:"description,omitempty" yaml:"description,omitempty"` - ExternalDocs *ExternalDocs `json:"externalDocs,omitempty" yaml:"externalDocs,omitempty"` -} - -// MarshalJSON returns the JSON encoding of Tag. -func (t Tag) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 3+len(t.Extensions)) - for k, v := range t.Extensions { - m[k] = v - } - if x := t.Name; x != "" { - m["name"] = x - } - if x := t.Description; x != "" { - m["description"] = x - } - if x := t.ExternalDocs; x != nil { - m["externalDocs"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets Tag to a copy of data. -func (t *Tag) UnmarshalJSON(data []byte) error { - type TagBis Tag - var x TagBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "name") - delete(x.Extensions, "description") - delete(x.Extensions, "externalDocs") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *t = Tag(x) - return nil -} - -// Validate returns an error if Tag does not comply with the OpenAPI spec. -func (t *Tag) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - if v := t.ExternalDocs; v != nil { - if err := v.Validate(ctx); err != nil { - return fmt.Errorf("invalid external docs: %w", err) - } - } - - return validateExtensions(ctx, t.Extensions) -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml deleted file mode 100644 index ff8240eb..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/base.yml +++ /dev/null @@ -1,16 +0,0 @@ -openapi: "3.0.3" -info: - title: Recursive cyclic refs example - version: "1.0" -components: - schemas: - Foo: - properties: - foo2: - $ref: "other.yml#/components/schemas/Foo2" - bar: - $ref: "#/components/schemas/Bar" - Bar: - properties: - foo: - $ref: "#/components/schemas/Foo" diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml deleted file mode 100644 index 29b72d98..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/circularRef/other.yml +++ /dev/null @@ -1,10 +0,0 @@ -openapi: "3.0.3" -info: - title: Recursive cyclic refs example - version: "1.0" -components: - schemas: - Foo2: - properties: - id: - type: string diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml deleted file mode 100644 index cc59fc27..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Bar.yml +++ /dev/null @@ -1,2 +0,0 @@ -type: string -example: bar diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml deleted file mode 100644 index c476aa1a..00000000 --- 
a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Cat.yml +++ /dev/null @@ -1,4 +0,0 @@ -type: object -properties: - cat: - $ref: ../openapi.yml#/components/schemas/Cat diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml deleted file mode 100644 index 53a23366..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo.yml +++ /dev/null @@ -1,4 +0,0 @@ -type: object -properties: - bar: - $ref: ../openapi.yml#/components/schemas/Bar diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml deleted file mode 100644 index aeac81f4..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/Foo/Foo2.yml +++ /dev/null @@ -1,4 +0,0 @@ -type: object -properties: - foo: - $ref: ../../openapi.yml#/components/schemas/Foo diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml deleted file mode 100644 index b4d40479..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/components/models/error.yaml +++ /dev/null @@ -1,2 +0,0 @@ -type: object -title: ErrorDetails diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml deleted file mode 100644 index d1370e32..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/issue615.yml +++ /dev/null @@ -1,60 +0,0 @@ -openapi: "3.0.3" -info: - title: Deep recursive cyclic refs example - version: "1.0" -paths: - /foo: - $ref: 
./paths/foo.yml -components: - schemas: - FilterColumnIncludes: - type: object - properties: - $includes: - $ref: '#/components/schemas/FilterPredicate' - additionalProperties: false - maxProperties: 1 - minProperties: 1 - FilterPredicate: - oneOf: - - $ref: '#/components/schemas/FilterValue' - - type: array - items: - $ref: '#/components/schemas/FilterPredicate' - minLength: 1 - - $ref: '#/components/schemas/FilterPredicateOp' - - $ref: '#/components/schemas/FilterPredicateRangeOp' - FilterPredicateOp: - type: object - properties: - $any: - oneOf: - - type: array - items: - $ref: '#/components/schemas/FilterPredicate' - $none: - oneOf: - - $ref: '#/components/schemas/FilterPredicate' - - type: array - items: - $ref: '#/components/schemas/FilterPredicate' - additionalProperties: false - maxProperties: 1 - minProperties: 1 - FilterPredicateRangeOp: - type: object - properties: - $lt: - $ref: '#/components/schemas/FilterRangeValue' - additionalProperties: false - maxProperties: 2 - minProperties: 2 - FilterRangeValue: - oneOf: - - type: number - - type: string - FilterValue: - oneOf: - - type: number - - type: string - - type: boolean \ No newline at end of file diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml deleted file mode 100644 index 9f884c71..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml +++ /dev/null @@ -1,33 +0,0 @@ -openapi: "3.0.3" -info: - title: Recursive refs example - version: "1.0" -paths: - /foo: - $ref: ./paths/foo.yml - /double-ref-foo: - get: - summary: Double ref response - description: Reference response with double reference. 
- responses: - "400": - $ref: "#/components/responses/400" -components: - schemas: - Foo: - $ref: ./components/Foo.yml - Foo2: - $ref: ./components/Foo/Foo2.yml - Bar: - $ref: ./components/Bar.yml - Cat: - $ref: ./components/Cat.yml - Error: - $ref: ./components/models/error.yaml - responses: - "400": - description: 400 Bad Request - content: - application/json: - schema: - $ref: "#/components/schemas/Error" diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml deleted file mode 100644 index 0d508527..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/openapi.yml.internalized.yml +++ /dev/null @@ -1,110 +0,0 @@ -{ - "components": { - "parameters": { - "number": { - "in": "query", - "name": "someNumber", - "schema": { - "type": "string" - } - } - }, - "responses": { - "400": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/Error" - } - } - }, - "description": "400 Bad Request" - } - }, - "schemas": { - "Bar": { - "example": "bar", - "type": "string" - }, - "Error":{ - "title":"ErrorDetails", - "type":"object" - }, - "Foo": { - "properties": { - "bar": { - "$ref": "#/components/schemas/Bar" - } - }, - "type": "object" - }, - "Foo2": { - "properties": { - "foo": { - "$ref": "#/components/schemas/Foo" - } - }, - "type": "object" - }, - "error":{ - "title":"ErrorDetails", - "type":"object" - }, - "Cat": { - "properties": { - "cat": { - "$ref": "#/components/schemas/Cat" - } - }, - "type": "object" - } - } - }, - "info": { - "title": "Recursive refs example", - "version": "1.0" - }, - "openapi": "3.0.3", - "paths": { - "/double-ref-foo": { - "get": { - "description": "Reference response with double reference.", - "responses": { - "400": { - "$ref": "#/components/responses/400" - } - }, - "summary": "Double ref response" - } - }, - "/foo": { - "get": 
{ - "responses": { - "200": { - "content": { - "application/json": { - "schema": { - "properties": { - "foo2": { - "$ref": "#/components/schemas/Foo2" - } - }, - "type": "object" - } - } - }, - "description": "OK" - }, - "400": { - "$ref": "#/components/responses/400" - } - } - }, - "parameters": [ - { - "$ref": "#/components/parameters/number" - } - ] - } - } -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml deleted file mode 100644 index 29f0f264..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/parameters/number.yml +++ /dev/null @@ -1,4 +0,0 @@ -name: someNumber -in: query -schema: - type: string diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml b/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml deleted file mode 100644 index 4c845b53..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/testdata/recursiveRef/paths/foo.yml +++ /dev/null @@ -1,15 +0,0 @@ -parameters: - - $ref: ../parameters/number.yml -get: - responses: - "200": - description: OK - content: - application/json: - schema: - type: object - properties: - foo2: - $ref: ../openapi.yml#/components/schemas/Foo2 - "400": - $ref: "../openapi.yml#/components/responses/400" diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go b/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go deleted file mode 100644 index 8982594b..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/validation_options.go +++ /dev/null @@ -1,112 +0,0 @@ -package openapi3 - -import "context" - -// ValidationOption allows the modification of how the OpenAPI document is validated. -type ValidationOption func(options *ValidationOptions) - -// ValidationOptions provides configuration for validating OpenAPI documents. 
-type ValidationOptions struct { - examplesValidationAsReq, examplesValidationAsRes bool - examplesValidationDisabled bool - schemaDefaultsValidationDisabled bool - schemaFormatValidationEnabled bool - schemaPatternValidationDisabled bool - extraSiblingFieldsAllowed map[string]struct{} -} - -type validationOptionsKey struct{} - -// AllowExtraSiblingFields called as AllowExtraSiblingFields("description") makes Validate not return an error when said field appears next to a $ref. -func AllowExtraSiblingFields(fields ...string) ValidationOption { - return func(options *ValidationOptions) { - if options.extraSiblingFieldsAllowed == nil && len(fields) != 0 { - options.extraSiblingFieldsAllowed = make(map[string]struct{}, len(fields)) - } - for _, field := range fields { - options.extraSiblingFieldsAllowed[field] = struct{}{} - } - } -} - -// EnableSchemaFormatValidation makes Validate not return an error when validating documents that mention schema formats that are not defined by the OpenAPIv3 specification. -// By default, schema format validation is disabled. -func EnableSchemaFormatValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaFormatValidationEnabled = true - } -} - -// DisableSchemaFormatValidation does the opposite of EnableSchemaFormatValidation. -// By default, schema format validation is disabled. -func DisableSchemaFormatValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaFormatValidationEnabled = false - } -} - -// EnableSchemaPatternValidation does the opposite of DisableSchemaPatternValidation. -// By default, schema pattern validation is enabled. -func EnableSchemaPatternValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaPatternValidationDisabled = false - } -} - -// DisableSchemaPatternValidation makes Validate not return an error when validating patterns that are not supported by the Go regexp engine. 
-func DisableSchemaPatternValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaPatternValidationDisabled = true - } -} - -// EnableSchemaDefaultsValidation does the opposite of DisableSchemaDefaultsValidation. -// By default, schema default values are validated against their schema. -func EnableSchemaDefaultsValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaDefaultsValidationDisabled = false - } -} - -// DisableSchemaDefaultsValidation disables schemas' default field validation. -// By default, schema default values are validated against their schema. -func DisableSchemaDefaultsValidation() ValidationOption { - return func(options *ValidationOptions) { - options.schemaDefaultsValidationDisabled = true - } -} - -// EnableExamplesValidation does the opposite of DisableExamplesValidation. -// By default, all schema examples are validated. -func EnableExamplesValidation() ValidationOption { - return func(options *ValidationOptions) { - options.examplesValidationDisabled = false - } -} - -// DisableExamplesValidation disables all example schema validation. -// By default, all schema examples are validated. -func DisableExamplesValidation() ValidationOption { - return func(options *ValidationOptions) { - options.examplesValidationDisabled = true - } -} - -// WithValidationOptions allows adding validation options to a context object that can be used when validating any OpenAPI type. 
-func WithValidationOptions(ctx context.Context, opts ...ValidationOption) context.Context { - if len(opts) == 0 { - return ctx - } - options := &ValidationOptions{} - for _, opt := range opts { - opt(options) - } - return context.WithValue(ctx, validationOptionsKey{}, options) -} - -func getValidationOptions(ctx context.Context) *ValidationOptions { - if options, ok := ctx.Value(validationOptionsKey{}).(*ValidationOptions); ok { - return options - } - return &ValidationOptions{} -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/visited.go b/vendor/github.com/getkin/kin-openapi/openapi3/visited.go deleted file mode 100644 index 67f970e3..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/visited.go +++ /dev/null @@ -1,41 +0,0 @@ -package openapi3 - -func newVisited() visitedComponent { - return visitedComponent{ - header: make(map[*Header]struct{}), - schema: make(map[*Schema]struct{}), - } -} - -type visitedComponent struct { - header map[*Header]struct{} - schema map[*Schema]struct{} -} - -// resetVisited clears visitedComponent map -// should be called before recursion over doc *T -func (doc *T) resetVisited() { - doc.visited = newVisited() -} - -// isVisitedHeader returns `true` if the *Header pointer was already visited -// otherwise it returns `false` -func (doc *T) isVisitedHeader(h *Header) bool { - if _, ok := doc.visited.header[h]; ok { - return true - } - - doc.visited.header[h] = struct{}{} - return false -} - -// isVisitedHeader returns `true` if the *Schema pointer was already visited -// otherwise it returns `false` -func (doc *T) isVisitedSchema(s *Schema) bool { - if _, ok := doc.visited.schema[s]; ok { - return true - } - - doc.visited.schema[s] = struct{}{} - return false -} diff --git a/vendor/github.com/getkin/kin-openapi/openapi3/xml.go b/vendor/github.com/getkin/kin-openapi/openapi3/xml.go deleted file mode 100644 index 604b607d..00000000 --- a/vendor/github.com/getkin/kin-openapi/openapi3/xml.go +++ /dev/null @@ -1,69 
+0,0 @@ -package openapi3 - -import ( - "context" - "encoding/json" -) - -// XML is specified by OpenAPI/Swagger standard version 3. -// See https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.3.md#xml-object -type XML struct { - Extensions map[string]interface{} `json:"-" yaml:"-"` - - Name string `json:"name,omitempty" yaml:"name,omitempty"` - Namespace string `json:"namespace,omitempty" yaml:"namespace,omitempty"` - Prefix string `json:"prefix,omitempty" yaml:"prefix,omitempty"` - Attribute bool `json:"attribute,omitempty" yaml:"attribute,omitempty"` - Wrapped bool `json:"wrapped,omitempty" yaml:"wrapped,omitempty"` -} - -// MarshalJSON returns the JSON encoding of XML. -func (xml XML) MarshalJSON() ([]byte, error) { - m := make(map[string]interface{}, 5+len(xml.Extensions)) - for k, v := range xml.Extensions { - m[k] = v - } - if x := xml.Name; x != "" { - m["name"] = x - } - if x := xml.Namespace; x != "" { - m["namespace"] = x - } - if x := xml.Prefix; x != "" { - m["prefix"] = x - } - if x := xml.Attribute; x { - m["attribute"] = x - } - if x := xml.Wrapped; x { - m["wrapped"] = x - } - return json.Marshal(m) -} - -// UnmarshalJSON sets XML to a copy of data. -func (xml *XML) UnmarshalJSON(data []byte) error { - type XMLBis XML - var x XMLBis - if err := json.Unmarshal(data, &x); err != nil { - return unmarshalError(err) - } - _ = json.Unmarshal(data, &x.Extensions) - delete(x.Extensions, "name") - delete(x.Extensions, "namespace") - delete(x.Extensions, "prefix") - delete(x.Extensions, "attribute") - delete(x.Extensions, "wrapped") - if len(x.Extensions) == 0 { - x.Extensions = nil - } - *xml = XML(x) - return nil -} - -// Validate returns an error if XML does not comply with the OpenAPI spec. -func (xml *XML) Validate(ctx context.Context, opts ...ValidationOption) error { - ctx = WithValidationOptions(ctx, opts...) 
- - return validateExtensions(ctx, xml.Extensions) -} diff --git a/vendor/github.com/hashicorp/hc-install/README.md b/vendor/github.com/hashicorp/hc-install/README.md index 6e78b5a6..0d55191b 100644 --- a/vendor/github.com/hashicorp/hc-install/README.md +++ b/vendor/github.com/hashicorp/hc-install/README.md @@ -14,55 +14,55 @@ the library in ad-hoc or CI shell scripting outside of Go. `hc-install` does **not**: - - Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system. - - Deal with execution of installed binaries (via service files or otherwise). - - Upgrade existing binaries on your system. - - Add nor link downloaded binaries to your `$PATH`. +- Determine suitable installation path based on target system. e.g. in `/usr/bin` or `/usr/local/bin` on Unix based system. +- Deal with execution of installed binaries (via service files or otherwise). +- Upgrade existing binaries on your system. +- Add nor link downloaded binaries to your `$PATH`. ## API The `Installer` offers a few high-level methods: - - `Ensure(context.Context, []src.Source)` to find, install, or build a product version - - `Install(context.Context, []src.Installable)` to install a product version +- `Ensure(context.Context, []src.Source)` to find, install, or build a product version +- `Install(context.Context, []src.Installable)` to install a product version ### Sources The `Installer` methods accept number of different `Source` types. Each comes with different trade-offs described below. - - `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths) - - **Pros:** - - This is most convenient when you already have the product installed on your system +- `fs.{AnyVersion,ExactVersion,Version}` - Finds a binary in `$PATH` (or additional paths) + - **Pros:** + - This is most convenient when you already have the product installed on your system which you already manage. 
- - **Cons:** - - Only relies on a single version, expects _you_ to manage the installation - - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. default GitHub Actions image managed by GitHub) - - `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com` - - **Pros:** - - Fast and reliable way of obtaining any pre-built version of any product - - Allows installation of enterprise versions - - **Cons:** - - Installation may consume some bandwidth, disk space and a little time - - Potentially less stable builds (see `checkpoint` below) - - `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint - - **Pros:** - - Checkpoint typically contains only product versions considered stable - - **Cons:** - - Installation may consume some bandwidth, disk space and a little time - - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above) - - `build.GitRevision` - Clones raw source code and builds the product from it - - **Pros:** - - Useful for catching bugs and incompatibilities as early as possible (prior to product release). - - **Cons:** - - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwith, disk space) - - There are no guarantees that build instructions will always be up-to-date - - There's increased likelihood of build containing bugs prior to release - - Any CI builds relying on this are likely to be fragile + - **Cons:** + - Only relies on a single version, expects _you_ to manage the installation + - _Not recommended_ for any environment where product installation is not controlled or managed by you (e.g. 
default GitHub Actions image managed by GitHub) +- `releases.{LatestVersion,ExactVersion}` - Downloads, verifies & installs any known product from `releases.hashicorp.com` +  - **Pros:** +    - Fast and reliable way of obtaining any pre-built version of any product +    - Allows installation of enterprise versions +  - **Cons:** +    - Installation may consume some bandwidth, disk space and a little time +    - Potentially less stable builds (see `checkpoint` below) +- `checkpoint.LatestVersion` - Downloads, verifies & installs any known product available in HashiCorp Checkpoint +  - **Pros:** +    - Checkpoint typically contains only product versions considered stable +  - **Cons:** +    - Installation may consume some bandwidth, disk space and a little time +    - Currently doesn't allow installation of old versions or enterprise versions (see `releases` above) +- `build.GitRevision` - Clones raw source code and builds the product from it +  - **Pros:** +    - Useful for catching bugs and incompatibilities as early as possible (prior to product release). +  - **Cons:** +    - Building from scratch can consume significant amount of time & resources (CPU, memory, bandwidth, disk space) +    - There are no guarantees that build instructions will always be up-to-date +    - There's increased likelihood of build containing bugs prior to release +    - Any CI builds relying on this are likely to be fragile ## Example Usage -See examples at https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer. +See examples at <https://pkg.go.dev/github.com/hashicorp/hc-install#example-Installer>. 
## CLI @@ -70,9 +70,9 @@ In addition to the Go library, which is the intended primary use case of `hc-ins The CLI comes with some trade-offs: - - more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`) - - minimal environment pre-requisites (no need to compile Go code) - - see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager) +- more limited interface compared to the flexible Go API (installs specific versions of products via `releases.ExactVersion`) +- minimal environment pre-requisites (no need to compile Go code) +- see ["hc-install is not a package manager"](https://github.com/hashicorp/hc-install#hc-install-is-not-a-package-manager) ### Installation @@ -82,7 +82,7 @@ Given that one of the key roles of the CLI/library is integrity checking, you sh [Homebrew](https://brew.sh) -``` +```sh brew install hashicorp/tap/hc-install ``` @@ -102,19 +102,23 @@ You can follow the instructions in the [Official Packaging Guide](https://www.ha ### Usage -``` +```text Usage: hc-install install [options] -version This command installs a HashiCorp product. Options: -version [REQUIRED] Version of product to install. - -path Path to directory where the product will be installed. Defaults - to current working directory. + -path Path to directory where the product will be installed. + Defaults to current working directory. + -log-file Path to file where logs will be written. /dev/stdout + or /dev/stderr can be used to log to STDOUT/STDERR. 
``` + ```sh hc-install install -version 1.3.7 terraform ``` -``` + +```sh hc-install: will install terraform@1.3.7 installed terraform@1.3.7 to /current/working/dir/terraform ``` diff --git a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go index 2cd5379f..7a8aa3d9 100644 --- a/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go +++ b/vendor/github.com/hashicorp/hc-install/checkpoint/latest_version.go @@ -6,7 +6,7 @@ package checkpoint import ( "context" "fmt" - "io/ioutil" + "io" "log" "os" "path/filepath" @@ -24,7 +24,7 @@ import ( var ( defaultTimeout = 30 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) // LatestVersion installs the latest version known to Checkpoint @@ -101,7 +101,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", lv.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -126,9 +126,9 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if lv.ArmoredPublicKey != "" { d.ArmoredPublicKey = lv.ArmoredPublicKey } - zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, "") - if zipFilePath != "" { - lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, pv, dstDir, "") + if up != nil { + lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...) 
} if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs.go b/vendor/github.com/hashicorp/hc-install/fs/fs.go index 216df2c2..ac6f5cf9 100644 --- a/vendor/github.com/hashicorp/hc-install/fs/fs.go +++ b/vendor/github.com/hashicorp/hc-install/fs/fs.go @@ -4,14 +4,14 @@ package fs import ( - "io/ioutil" + "io" "log" "time" ) var ( defaultTimeout = 10 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) type fileCheckFunc func(path string) error diff --git a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go index eebd98b8..5aed8444 100644 --- a/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go +++ b/vendor/github.com/hashicorp/hc-install/fs/fs_unix.go @@ -16,9 +16,7 @@ import ( func lookupDirs(extraDirs []string) []string { pathVar := os.Getenv("PATH") dirs := filepath.SplitList(pathVar) - for _, ep := range extraDirs { - dirs = append(dirs, ep) - } + dirs = append(dirs, extraDirs...) 
return dirs } diff --git a/vendor/github.com/hashicorp/hc-install/installer.go b/vendor/github.com/hashicorp/hc-install/installer.go index 6c704eed..01c1fdee 100644 --- a/vendor/github.com/hashicorp/hc-install/installer.go +++ b/vendor/github.com/hashicorp/hc-install/installer.go @@ -6,7 +6,7 @@ package install import ( "context" "fmt" - "io/ioutil" + "io" "log" "github.com/hashicorp/go-multierror" @@ -23,7 +23,7 @@ type Installer struct { type RemoveFunc func(ctx context.Context) error func NewInstaller() *Installer { - discardLogger := log.New(ioutil.Discard, "", 0) + discardLogger := log.New(io.Discard, "", 0) return &Installer{ logger: discardLogger, } diff --git a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go index 504bf45a..6eef755b 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go +++ b/vendor/github.com/hashicorp/hc-install/internal/build/go_build.go @@ -7,7 +7,7 @@ import ( "bytes" "context" "fmt" - "io/ioutil" + "io" "log" "os" "os/exec" @@ -17,7 +17,7 @@ import ( "golang.org/x/mod/modfile" ) -var discardLogger = log.New(ioutil.Discard, "", 0) +var discardLogger = log.New(io.Discard, "", 0) // GoBuild represents a Go builder (to run "go build") type GoBuild struct { @@ -161,7 +161,7 @@ type CleanupFunc func(context.Context) func guessRequiredGoVersion(repoDir string) (*version.Version, bool) { goEnvFile := filepath.Join(repoDir, ".go-version") if fi, err := os.Stat(goEnvFile); err == nil && !fi.IsDir() { - b, err := ioutil.ReadFile(goEnvFile) + b, err := os.ReadFile(goEnvFile) if err != nil { return nil, false } @@ -174,7 +174,7 @@ func guessRequiredGoVersion(repoDir string) (*version.Version, bool) { goModFile := filepath.Join(repoDir, "go.mod") if fi, err := os.Stat(goModFile); err == nil && !fi.IsDir() { - b, err := ioutil.ReadFile(goModFile) + b, err := os.ReadFile(goModFile) if err != nil { return nil, false } diff --git 
a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go index 843de8cd..59dd1a1f 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/checksum_downloader.go @@ -55,7 +55,7 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C client := httpclient.NewHTTPClient() sigURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL, url.PathEscape(cd.ProductVersion.Name), - url.PathEscape(cd.ProductVersion.RawVersion), + url.PathEscape(cd.ProductVersion.Version.String()), url.PathEscape(sigFilename)) cd.Logger.Printf("downloading signature from %s", sigURL) @@ -76,7 +76,7 @@ func (cd *ChecksumDownloader) DownloadAndVerifyChecksums(ctx context.Context) (C shasumsURL := fmt.Sprintf("%s/%s/%s/%s", cd.BaseURL, url.PathEscape(cd.ProductVersion.Name), - url.PathEscape(cd.ProductVersion.RawVersion), + url.PathEscape(cd.ProductVersion.Version.String()), url.PathEscape(cd.ProductVersion.SHASUMS)) cd.Logger.Printf("downloading checksums from %s", shasumsURL) diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go index 146c1cf0..a1139b58 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/downloader.go @@ -10,7 +10,6 @@ import ( "crypto/sha256" "fmt" "io" - "io/ioutil" "log" "net/http" "net/url" @@ -29,14 +28,18 @@ type Downloader struct { BaseURL string } -func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, binDir string, licenseDir string) (zipFilePath string, err error) { +type UnpackedProduct struct { + PathsToRemove []string +} + +func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv 
*ProductVersion, binDir string, licenseDir string) (up *UnpackedProduct, err error) { if len(pv.Builds) == 0 { - return "", fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version) + return nil, fmt.Errorf("no builds found for %s %s", pv.Name, pv.Version) } pb, ok := pv.Builds.FilterBuild(runtime.GOOS, runtime.GOARCH, "zip") if !ok { - return "", fmt.Errorf("no ZIP archive found for %s %s %s/%s", + return nil, fmt.Errorf("no ZIP archive found for %s %s %s/%s", pv.Name, pv.Version, runtime.GOOS, runtime.GOARCH) } @@ -50,12 +53,12 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } verifiedChecksums, err := v.DownloadAndVerifyChecksums(ctx) if err != nil { - return "", err + return nil, err } var ok bool verifiedChecksum, ok = verifiedChecksums[pb.Filename] if !ok { - return "", fmt.Errorf("no checksum found for %q", pb.Filename) + return nil, fmt.Errorf("no checksum found for %q", pb.Filename) } } @@ -63,16 +66,17 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, archiveURL := pb.URL if d.BaseURL != "" { - // ensure that absolute download links from mocked responses - // are still pointing to the mock server if one is set + // If custom URL is set, use that instead of the one from the JSON. + // Also ensures that absolute download links from mocked responses + // are still pointing to the mock server if one is set. 
baseURL, err := url.Parse(d.BaseURL) if err != nil { - return "", err + return nil, err } u, err := url.Parse(archiveURL) if err != nil { - return "", err + return nil, err } u.Scheme = baseURL.Scheme u.Host = baseURL.Host @@ -83,15 +87,15 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, req, err := http.NewRequestWithContext(ctx, http.MethodGet, archiveURL, nil) if err != nil { - return "", fmt.Errorf("failed to create request for %q: %w", archiveURL, err) + return nil, fmt.Errorf("failed to create request for %q: %w", archiveURL, err) } resp, err := client.Do(req) if err != nil { - return "", err + return nil, err } if resp.StatusCode != 200 { - return "", fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status) + return nil, fmt.Errorf("failed to download ZIP archive from %q: %s", archiveURL, resp.Status) } defer resp.Body.Close() @@ -100,19 +104,22 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, contentType := resp.Header.Get("content-type") if !contentTypeIsZip(contentType) { - return "", fmt.Errorf("unexpected content-type: %s (expected any of %q)", + return nil, fmt.Errorf("unexpected content-type: %s (expected any of %q)", contentType, zipMimeTypes) } expectedSize := resp.ContentLength - pkgFile, err := ioutil.TempFile("", pb.Filename) + pkgFile, err := os.CreateTemp("", pb.Filename) if err != nil { - return "", err + return nil, err } defer pkgFile.Close() pkgFilePath, err := filepath.Abs(pkgFile.Name()) + up = &UnpackedProduct{} + up.PathsToRemove = append(up.PathsToRemove, pkgFilePath) + d.Logger.Printf("copying %q (%d bytes) to %s", pb.Filename, expectedSize, pkgFile.Name()) var bytesCopied int64 @@ -123,12 +130,12 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, bytesCopied, err = io.Copy(h, r) if err != nil { - return "", err + return nil, err } calculatedSum := h.Sum(nil) if !bytes.Equal(calculatedSum, verifiedChecksum) { - 
return pkgFilePath, fmt.Errorf( + return up, fmt.Errorf( "checksum mismatch (expected: %x, got: %x)", verifiedChecksum, calculatedSum, ) @@ -136,14 +143,14 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } else { bytesCopied, err = io.Copy(pkgFile, pkgReader) if err != nil { - return pkgFilePath, err + return up, err } } d.Logger.Printf("copied %d bytes to %s", bytesCopied, pkgFile.Name()) if expectedSize != 0 && bytesCopied != int64(expectedSize) { - return pkgFilePath, fmt.Errorf( + return up, fmt.Errorf( "unexpected size (downloaded: %d, expected: %d)", bytesCopied, expectedSize, ) @@ -151,7 +158,7 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, r, err := zip.OpenReader(pkgFile.Name()) if err != nil { - return pkgFilePath, err + return up, err } defer r.Close() @@ -163,7 +170,7 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, } srcFile, err := f.Open() if err != nil { - return pkgFilePath, err + return up, err } // Determine the appropriate destination file path @@ -174,20 +181,25 @@ func (d *Downloader) DownloadAndUnpack(ctx context.Context, pv *ProductVersion, d.Logger.Printf("unpacking %s to %s", f.Name, dstDir) dstPath := filepath.Join(dstDir, f.Name) + + if isLicenseFile(f.Name) { + up.PathsToRemove = append(up.PathsToRemove, dstPath) + } + dstFile, err := os.Create(dstPath) if err != nil { - return pkgFilePath, err + return up, err } _, err = io.Copy(dstFile, srcFile) if err != nil { - return pkgFilePath, err + return up, err } srcFile.Close() dstFile.Close() } - return pkgFilePath, nil + return up, nil } // The production release site uses consistent single mime type @@ -207,11 +219,13 @@ func contentTypeIsZip(contentType string) bool { return false } -// Enterprise products have a few additional license files -// that need to be extracted to a separate directory +// Product archives may have a few license files +// which may be extracted to a separate 
directory +// and may need to be tracked for later cleanup. var licenseFiles = []string{ "EULA.txt", "TermsOfEvaluation.txt", + "LICENSE.txt", } func isLicenseFile(filename string) bool { diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go index 99b811a6..94152b13 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/product_version.go @@ -9,8 +9,7 @@ import "github.com/hashicorp/go-version" // "consul 0.5.1". A ProductVersion may have one or more builds. type ProductVersion struct { Name string `json:"name"` - RawVersion string `json:"version"` - Version *version.Version `json:"-"` + Version *version.Version `json:"version"` SHASUMS string `json:"shasums,omitempty"` SHASUMSSig string `json:"shasums_signature,omitempty"` SHASUMSSigs []string `json:"shasums_signatures,omitempty"` diff --git a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go index 755019f2..4c0bab00 100644 --- a/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go +++ b/vendor/github.com/hashicorp/hc-install/internal/releasesjson/releases.go @@ -7,7 +7,7 @@ import ( "context" "encoding/json" "fmt" - "io/ioutil" + "io" "log" "net/http" "net/url" @@ -55,7 +55,7 @@ type Releases struct { func NewReleases() *Releases { return &Releases{ - logger: log.New(ioutil.Discard, "", 0), + logger: log.New(io.Discard, "", 0), BaseURL: defaultBaseURL, } } @@ -95,7 +95,7 @@ func (r *Releases) ListProductVersions(ctx context.Context, productName string) r.logger.Printf("received %s", resp.Status) - body, err := ioutil.ReadAll(resp.Body) + body, err := io.ReadAll(resp.Body) if err != nil { return nil, err } @@ -153,7 +153,7 @@ func (r *Releases) GetProductVersion(ctx 
context.Context, product string, versio r.logger.Printf("received %s", resp.Status) - body, err := ioutil.ReadAll(resp.Body) + body, err := io.ReadAll(resp.Body) if err != nil { return nil, err } diff --git a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go index e42f4d23..179f0b4b 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/exact_version.go +++ b/vendor/github.com/hashicorp/hc-install/releases/exact_version.go @@ -6,7 +6,6 @@ package releases import ( "context" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -37,7 +36,10 @@ type ExactVersion struct { // instead of built-in pubkey to verify signature of downloaded checksums ArmoredPublicKey string - apiBaseURL string + // ApiBaseURL is an optional field that specifies a custom URL to download the product from. + // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site. + // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files). 
+ ApiBaseURL string logger *log.Logger pathsToRemove []string } @@ -93,7 +95,7 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", ev.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -103,8 +105,8 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { ev.log().Printf("will install into dir at %s", dstDir) rels := rjson.NewReleases() - if ev.apiBaseURL != "" { - rels.BaseURL = ev.apiBaseURL + if ev.ApiBaseURL != "" { + rels.BaseURL = ev.ApiBaseURL } rels.SetLogger(ev.log()) installVersion := ev.Version @@ -125,17 +127,17 @@ func (ev *ExactVersion) Install(ctx context.Context) (string, error) { if ev.ArmoredPublicKey != "" { d.ArmoredPublicKey = ev.ArmoredPublicKey } - if ev.apiBaseURL != "" { - d.BaseURL = ev.apiBaseURL + if ev.ApiBaseURL != "" { + d.BaseURL = ev.ApiBaseURL } licenseDir := "" if ev.Enterprise != nil { licenseDir = ev.Enterprise.LicenseDir } - zipFilePath, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir) - if zipFilePath != "" { - ev.pathsToRemove = append(ev.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, pv, dstDir, licenseDir) + if up != nil { + ev.pathsToRemove = append(ev.pathsToRemove, up.PathsToRemove...) 
} if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go index 9893b223..c4888f4a 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/latest_version.go +++ b/vendor/github.com/hashicorp/hc-install/releases/latest_version.go @@ -6,7 +6,6 @@ package releases import ( "context" "fmt" - "io/ioutil" "log" "os" "path/filepath" @@ -37,7 +36,10 @@ type LatestVersion struct { // instead of built-in pubkey to verify signature of downloaded checksums ArmoredPublicKey string - apiBaseURL string + // ApiBaseURL is an optional field that specifies a custom URL to download the product from. + // If ApiBaseURL is set, the product will be downloaded from this base URL instead of the default site. + // Note: The directory structure of the custom URL must match the HashiCorp releases site (including the index.json files). + ApiBaseURL string logger *log.Logger pathsToRemove []string } @@ -89,7 +91,7 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if dstDir == "" { var err error dirName := fmt.Sprintf("%s_*", lv.Product.Name) - dstDir, err = ioutil.TempDir("", dirName) + dstDir, err = os.MkdirTemp("", dirName) if err != nil { return "", err } @@ -99,8 +101,8 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { lv.log().Printf("will install into dir at %s", dstDir) rels := rjson.NewReleases() - if lv.apiBaseURL != "" { - rels.BaseURL = lv.apiBaseURL + if lv.ApiBaseURL != "" { + rels.BaseURL = lv.ApiBaseURL } rels.SetLogger(lv.log()) versions, err := rels.ListProductVersions(ctx, lv.Product.Name) @@ -126,16 +128,16 @@ func (lv *LatestVersion) Install(ctx context.Context) (string, error) { if lv.ArmoredPublicKey != "" { d.ArmoredPublicKey = lv.ArmoredPublicKey } - if lv.apiBaseURL != "" { - d.BaseURL = lv.apiBaseURL + if lv.ApiBaseURL != "" { + d.BaseURL = lv.ApiBaseURL } licenseDir := "" if lv.Enterprise != 
nil { licenseDir = lv.Enterprise.LicenseDir } - zipFilePath, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir) - if zipFilePath != "" { - lv.pathsToRemove = append(lv.pathsToRemove, zipFilePath) + up, err := d.DownloadAndUnpack(ctx, versionToInstall, dstDir, licenseDir) + if up != nil { + lv.pathsToRemove = append(lv.pathsToRemove, up.PathsToRemove...) } if err != nil { return "", err diff --git a/vendor/github.com/hashicorp/hc-install/releases/releases.go b/vendor/github.com/hashicorp/hc-install/releases/releases.go index 7bef49ba..a24db6c6 100644 --- a/vendor/github.com/hashicorp/hc-install/releases/releases.go +++ b/vendor/github.com/hashicorp/hc-install/releases/releases.go @@ -4,7 +4,7 @@ package releases import ( - "io/ioutil" + "io" "log" "time" ) @@ -12,5 +12,5 @@ import ( var ( defaultInstallTimeout = 30 * time.Second defaultListTimeout = 10 * time.Second - discardLogger = log.New(ioutil.Discard, "", 0) + discardLogger = log.New(io.Discard, "", 0) ) diff --git a/vendor/github.com/hashicorp/hc-install/version/VERSION b/vendor/github.com/hashicorp/hc-install/version/VERSION index d2b13eb6..faef31a4 100644 --- a/vendor/github.com/hashicorp/hc-install/version/VERSION +++ b/vendor/github.com/hashicorp/hc-install/version/VERSION @@ -1 +1 @@ -0.6.4 +0.7.0 diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go new file mode 100644 index 00000000..3c3183b7 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/directory.go @@ -0,0 +1,174 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "path/filepath" +) + +const ( + CdktfIndexDirectory = `cdktf` + + LegacyIndexDirectory = `website/docs` + LegacyDataSourcesDirectory = `d` + LegacyGuidesDirectory = `guides` + LegacyResourcesDirectory = `r` + LegacyFunctionsDirectory = `functions` + + RegistryIndexDirectory = `docs` + RegistryDataSourcesDirectory = `data-sources` + RegistryGuidesDirectory = `guides` + RegistryResourcesDirectory = `resources` + RegistryFunctionsDirectory = `functions` + + // Terraform Registry Storage Limits + // https://www.terraform.io/docs/registry/providers/docs.html#storage-limits + RegistryMaximumNumberOfFiles = 2000 + RegistryMaximumSizeOfFile = 500000 // 500KB + +) + +var ValidLegacyDirectories = []string{ + LegacyIndexDirectory, + LegacyIndexDirectory + "/" + LegacyDataSourcesDirectory, + LegacyIndexDirectory + "/" + LegacyGuidesDirectory, + LegacyIndexDirectory + "/" + LegacyResourcesDirectory, + LegacyIndexDirectory + "/" + LegacyFunctionsDirectory, +} + +var ValidRegistryDirectories = []string{ + RegistryIndexDirectory, + RegistryIndexDirectory + "/" + RegistryDataSourcesDirectory, + RegistryIndexDirectory + "/" + RegistryGuidesDirectory, + RegistryIndexDirectory + "/" + RegistryResourcesDirectory, + RegistryIndexDirectory + "/" + RegistryFunctionsDirectory, +} + +var ValidCdktfLanguages = []string{ + "csharp", + "go", + "java", + "python", + "typescript", +} + +var ValidLegacySubdirectories = []string{ + LegacyIndexDirectory, + LegacyDataSourcesDirectory, + LegacyGuidesDirectory, + LegacyResourcesDirectory, +} + +var ValidRegistrySubdirectories = []string{ + RegistryIndexDirectory, + RegistryDataSourcesDirectory, + RegistryGuidesDirectory, + RegistryResourcesDirectory, +} + +func InvalidDirectoriesCheck(dirPath string) error { + if IsValidRegistryDirectory(dirPath) { + return nil + } + + if IsValidLegacyDirectory(dirPath) { + return nil + } + + if IsValidCdktfDirectory(dirPath) { + 
return nil + } + + return fmt.Errorf("invalid Terraform Provider documentation directory found: %s", dirPath) + +} + +func MixedDirectoriesCheck(docFiles []string) error { + var legacyDirectoryFound bool + var registryDirectoryFound bool + err := fmt.Errorf("mixed Terraform Provider documentation directory layouts found, must use only legacy or registry layout") + + for _, file := range docFiles { + directory := filepath.Dir(file) + log.Printf("[DEBUG] Found directory: %s", directory) + + // Allow docs/ with other files + if IsValidRegistryDirectory(directory) && directory != RegistryIndexDirectory { + registryDirectoryFound = true + + if legacyDirectoryFound { + log.Printf("[DEBUG] Found mixed directories") + return err + } + } + + if IsValidLegacyDirectory(directory) { + legacyDirectoryFound = true + + if registryDirectoryFound { + log.Printf("[DEBUG] Found mixed directories") + return err + } + } + } + + return nil +} + +func IsValidLegacyDirectory(directory string) bool { + for _, validLegacyDirectory := range ValidLegacyDirectories { + if directory == filepath.FromSlash(validLegacyDirectory) { + return true + } + } + + return false +} + +func IsValidRegistryDirectory(directory string) bool { + for _, validRegistryDirectory := range ValidRegistryDirectories { + if directory == filepath.FromSlash(validRegistryDirectory) { + return true + } + } + + return false +} + +func IsValidCdktfDirectory(directory string) bool { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s", LegacyIndexDirectory, CdktfIndexDirectory)) { + return true + } + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s", RegistryIndexDirectory, CdktfIndexDirectory)) { + return true + } + + for _, validCdktfLanguage := range ValidCdktfLanguages { + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s", LegacyIndexDirectory, CdktfIndexDirectory, validCdktfLanguage)) { + return true + } + + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s", RegistryIndexDirectory, 
CdktfIndexDirectory, validCdktfLanguage)) { + return true + } + + for _, validLegacySubdirectory := range ValidLegacySubdirectories { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s/%s", LegacyIndexDirectory, CdktfIndexDirectory, validCdktfLanguage, validLegacySubdirectory)) { + return true + } + } + + for _, validRegistrySubdirectory := range ValidRegistrySubdirectories { + if directory == filepath.FromSlash(fmt.Sprintf("%s/%s/%s/%s", RegistryIndexDirectory, CdktfIndexDirectory, validCdktfLanguage, validRegistrySubdirectory)) { + return true + } + } + } + + return false +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go new file mode 100644 index 00000000..cb079b3a --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file.go @@ -0,0 +1,39 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "os" + "path/filepath" +) + +type FileOptions struct { + BasePath string +} + +func (opts *FileOptions) FullPath(path string) string { + if opts.BasePath != "" { + return filepath.Join(opts.BasePath, path) + } + + return path +} + +// FileSizeCheck verifies that documentation file is below the Terraform Registry storage limit. 
+func FileSizeCheck(fullpath string) error { + fi, err := os.Stat(fullpath) + + if err != nil { + return err + } + + log.Printf("[DEBUG] File %s size: %d (limit: %d)", fullpath, fi.Size(), RegistryMaximumSizeOfFile) + if fi.Size() >= int64(RegistryMaximumSizeOfFile) { + return fmt.Errorf("exceeded maximum (%d) size of documentation file for Terraform Registry: %d", RegistryMaximumSizeOfFile, fi.Size()) + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go new file mode 100644 index 00000000..dd5f37b6 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_extension.go @@ -0,0 +1,64 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "path/filepath" + "strings" +) + +const ( + FileExtensionHtmlMarkdown = `.html.markdown` + FileExtensionHtmlMd = `.html.md` + FileExtensionMarkdown = `.markdown` + FileExtensionMd = `.md` +) + +var ValidLegacyFileExtensions = []string{ + FileExtensionHtmlMarkdown, + FileExtensionHtmlMd, + FileExtensionMarkdown, + FileExtensionMd, +} + +var ValidRegistryFileExtensions = []string{ + FileExtensionMd, +} + +// FileExtensionCheck checks if the file extension of the given path is valid. +func FileExtensionCheck(path string, validExtensions []string) error { + if !FilePathEndsWithExtensionFrom(path, validExtensions) { + return fmt.Errorf("file does not end with a valid extension, valid extensions: %v", ValidLegacyFileExtensions) + } + + return nil +} + +func FilePathEndsWithExtensionFrom(path string, validExtensions []string) bool { + for _, validExtension := range validExtensions { + if strings.HasSuffix(path, validExtension) { + return true + } + } + + return false +} + +// TrimFileExtension removes file extensions including those with multiple periods. 
+func TrimFileExtension(path string) string { + filename := filepath.Base(path) + + if filename == "." { + return "" + } + + dotIndex := strings.IndexByte(filename, '.') + + if dotIndex > 0 { + return filename[:dotIndex] + } + + return filename +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go new file mode 100644 index 00000000..d65989fd --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/file_mismatch.go @@ -0,0 +1,284 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "errors" + "fmt" + "log" + "os" + "sort" + + tfjson "github.com/hashicorp/terraform-json" +) + +type FileMismatchOptions struct { + *FileOptions + + IgnoreFileMismatch []string + + IgnoreFileMissing []string + + ProviderShortName string + + DatasourceEntries []os.DirEntry + + ResourceEntries []os.DirEntry + + FunctionEntries []os.DirEntry + + Schema *tfjson.ProviderSchema +} + +type FileMismatchCheck struct { + Options *FileMismatchOptions +} + +func NewFileMismatchCheck(opts *FileMismatchOptions) *FileMismatchCheck { + check := &FileMismatchCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &FileMismatchOptions{} + } + + if check.Options.FileOptions == nil { + check.Options.FileOptions = &FileOptions{} + } + + return check +} + +func (check *FileMismatchCheck) Run() error { + var result error + + if check.Options.Schema == nil { + log.Printf("[DEBUG] Skipping file mismatch checks due to missing provider schema") + return nil + } + + if check.Options.ResourceEntries != nil { + err := check.ResourceFileMismatchCheck(check.Options.ResourceEntries, "resource", check.Options.Schema.ResourceSchemas) + result = errors.Join(result, err) + } + + if check.Options.DatasourceEntries != nil { + err := check.ResourceFileMismatchCheck(check.Options.DatasourceEntries, 
"datasource", check.Options.Schema.DataSourceSchemas) + result = errors.Join(result, err) + } + + if check.Options.FunctionEntries != nil { + err := check.FunctionFileMismatchCheck(check.Options.FunctionEntries, check.Options.Schema.Functions) + result = errors.Join(result, err) + } + + return result +} + +// ResourceFileMismatchCheck checks for mismatched files, either missing or extraneous, against the resource/datasouce schema +func (check *FileMismatchCheck) ResourceFileMismatchCheck(files []os.DirEntry, resourceType string, schemas map[string]*tfjson.Schema) error { + if len(files) == 0 { + log.Printf("[DEBUG] Skipping %s file mismatch checks due to missing file list", resourceType) + return nil + } + + if len(schemas) == 0 { + log.Printf("[DEBUG] Skipping %s file mismatch checks due to missing schemas", resourceType) + return nil + } + + var extraFiles []string + var missingFiles []string + + for _, file := range files { + log.Printf("[DEBUG] Found file %s", file.Name()) + if fileHasResource(schemas, check.Options.ProviderShortName, file.Name()) { + continue + } + + if check.IgnoreFileMismatch(file.Name()) { + continue + } + + log.Printf("[DEBUG] Found extraneous file %s", file.Name()) + extraFiles = append(extraFiles, file.Name()) + } + + for _, resourceName := range resourceNames(schemas) { + log.Printf("[DEBUG] Found %s %s", resourceType, resourceName) + if resourceHasFile(files, check.Options.ProviderShortName, resourceName) { + continue + } + + if check.IgnoreFileMissing(resourceName) { + continue + } + + log.Printf("[DEBUG] Missing file for %s %s", resourceType, resourceName) + missingFiles = append(missingFiles, resourceName) + } + + var result error + + for _, extraFile := range extraFiles { + err := fmt.Errorf("matching %s for documentation file (%s) not found, file is extraneous or incorrectly named", resourceType, extraFile) + result = errors.Join(result, err) + } + + for _, missingFile := range missingFiles { + err := fmt.Errorf("missing 
documentation file for %s: %s", resourceType, missingFile) + result = errors.Join(result, err) + } + + return result + +} + +// FunctionFileMismatchCheck checks for mismatched files, either missing or extraneous, against the function signature +func (check *FileMismatchCheck) FunctionFileMismatchCheck(files []os.DirEntry, functions map[string]*tfjson.FunctionSignature) error { + if len(files) == 0 { + log.Printf("[DEBUG] Skipping function file mismatch checks due to missing file list") + return nil + } + + if len(functions) == 0 { + log.Printf("[DEBUG] Skipping function file mismatch checks due to missing schemas") + return nil + } + + var extraFiles []string + var missingFiles []string + + for _, file := range files { + if fileHasFunction(functions, file.Name()) { + continue + } + + if check.IgnoreFileMismatch(file.Name()) { + continue + } + + extraFiles = append(extraFiles, file.Name()) + } + + for _, functionName := range functionNames(functions) { + if functionHasFile(files, functionName) { + continue + } + + if check.IgnoreFileMissing(functionName) { + continue + } + + missingFiles = append(missingFiles, functionName) + } + + var result error + + for _, extraFile := range extraFiles { + err := fmt.Errorf("matching function for documentation file (%s) not found, file is extraneous or incorrectly named", extraFile) + result = errors.Join(result, err) + } + + for _, missingFile := range missingFiles { + err := fmt.Errorf("missing documentation file for function: %s", missingFile) + result = errors.Join(result, err) + } + + return result + +} + +func (check *FileMismatchCheck) IgnoreFileMismatch(file string) bool { + for _, ignoreResourceName := range check.Options.IgnoreFileMismatch { + if ignoreResourceName == fileResourceName(check.Options.ProviderShortName, file) { + return true + } + } + + return false +} + +func (check *FileMismatchCheck) IgnoreFileMissing(resourceName string) bool { + for _, ignoreResourceName := range check.Options.IgnoreFileMissing { + if 
ignoreResourceName == resourceName { + return true + } + } + + return false +} + +func fileHasResource(schemaResources map[string]*tfjson.Schema, providerName, file string) bool { + if _, ok := schemaResources[fileResourceName(providerName, file)]; ok { + return true + } + + return false +} + +func fileHasFunction(functions map[string]*tfjson.FunctionSignature, file string) bool { + if _, ok := functions[TrimFileExtension(file)]; ok { + return true + } + + return false +} + +func fileResourceName(providerName, fileName string) string { + resourceSuffix := TrimFileExtension(fileName) + + return fmt.Sprintf("%s_%s", providerName, resourceSuffix) +} + +func resourceHasFile(files []os.DirEntry, providerName, resourceName string) bool { + var found bool + + for _, file := range files { + if fileResourceName(providerName, file.Name()) == resourceName { + found = true + break + } + } + + return found +} + +func functionHasFile(files []os.DirEntry, functionName string) bool { + var found bool + + for _, file := range files { + if TrimFileExtension(file.Name()) == functionName { + found = true + break + } + } + + return found +} + +func resourceNames(resources map[string]*tfjson.Schema) []string { + names := make([]string, 0, len(resources)) + + for name := range resources { + names = append(names, name) + } + + sort.Strings(names) + + return names +} + +func functionNames(functions map[string]*tfjson.FunctionSignature) []string { + names := make([]string, 0, len(functions)) + + for name := range functions { + names = append(names, name) + } + + sort.Strings(names) + + return names +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go new file mode 100644 index 00000000..65ac43aa --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/frontmatter.go @@ -0,0 +1,104 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "bytes" + "fmt" + + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/parser" + "go.abhg.dev/goldmark/frontmatter" +) + +type FrontMatterCheck struct { + Options *FrontMatterOptions +} + +// FrontMatterData represents the YAML frontmatter of Terraform Provider documentation. +type FrontMatterData struct { + Description *string `yaml:"description,omitempty"` + Layout *string `yaml:"layout,omitempty"` + PageTitle *string `yaml:"page_title,omitempty"` + SidebarCurrent *string `yaml:"sidebar_current,omitempty"` + Subcategory *string `yaml:"subcategory,omitempty"` +} + +// FrontMatterOptions represents configuration options for FrontMatter. +type FrontMatterOptions struct { + NoLayout bool + NoPageTitle bool + NoSidebarCurrent bool + NoSubcategory bool + RequireDescription bool + RequireLayout bool + RequirePageTitle bool +} + +func NewFrontMatterCheck(opts *FrontMatterOptions) *FrontMatterCheck { + check := &FrontMatterCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &FrontMatterOptions{} + } + + return check +} + +func (check *FrontMatterCheck) Run(src []byte) error { + frontMatter := FrontMatterData{} + + md := goldmark.New( + goldmark.WithExtensions(&frontmatter.Extender{}), + ) + + ctx := parser.NewContext() + var buff bytes.Buffer + + err := md.Convert(src, &buff, parser.WithContext(ctx)) + if err != nil { + return err + } + d := frontmatter.Get(ctx) + if d == nil { + return fmt.Errorf("no frontmatter found") + } + + err = d.Decode(&frontMatter) + if err != nil { + return fmt.Errorf("error parsing YAML frontmatter: %w", err) + } + + if check.Options.NoLayout && frontMatter.Layout != nil { + return fmt.Errorf("YAML frontmatter should not contain layout") + } + + if check.Options.NoPageTitle && frontMatter.PageTitle != nil { + return fmt.Errorf("YAML frontmatter should not contain page_title") + } + + if check.Options.NoSidebarCurrent && frontMatter.SidebarCurrent != 
nil { + return fmt.Errorf("YAML frontmatter should not contain sidebar_current") + } + + if check.Options.NoSubcategory && frontMatter.Subcategory != nil { + return fmt.Errorf("YAML frontmatter should not contain subcategory") + } + + if check.Options.RequireDescription && frontMatter.Description == nil { + return fmt.Errorf("YAML frontmatter missing required description") + } + + if check.Options.RequireLayout && frontMatter.Layout == nil { + return fmt.Errorf("YAML frontmatter missing required layout") + } + + if check.Options.RequirePageTitle && frontMatter.PageTitle == nil { + return fmt.Errorf("YAML frontmatter missing required page_title") + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go new file mode 100644 index 00000000..5358b669 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/check/provider_file.go @@ -0,0 +1,67 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package check + +import ( + "fmt" + "log" + "os" +) + +type ProviderFileOptions struct { + *FileOptions + + FrontMatter *FrontMatterOptions + ValidExtensions []string +} + +type ProviderFileCheck struct { + Options *ProviderFileOptions +} + +func NewProviderFileCheck(opts *ProviderFileOptions) *ProviderFileCheck { + check := &ProviderFileCheck{ + Options: opts, + } + + if check.Options == nil { + check.Options = &ProviderFileOptions{} + } + + if check.Options.FileOptions == nil { + check.Options.FileOptions = &FileOptions{} + } + + if check.Options.FrontMatter == nil { + check.Options.FrontMatter = &FrontMatterOptions{} + } + + return check +} + +func (check *ProviderFileCheck) Run(path string) error { + fullpath := check.Options.FullPath(path) + + log.Printf("[DEBUG] Checking file: %s", fullpath) + + if err := FileExtensionCheck(path, check.Options.ValidExtensions); err != nil { + return fmt.Errorf("%s: error checking file extension: %w", path, err) + } + + if err := FileSizeCheck(fullpath); err != nil { + return fmt.Errorf("%s: error checking file size: %w", path, err) + } + + content, err := os.ReadFile(fullpath) + + if err != nil { + return fmt.Errorf("%s: error reading file: %w", path, err) + } + + if err := NewFrontMatterCheck(check.Options.FrontMatter).Run(content); err != nil { + return fmt.Errorf("%s: error checking file frontmatter: %w", path, err) + } + + return nil +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go index 1a0c7f15..77dbc96f 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/generate.go @@ -72,7 +72,7 @@ func (cmd *generateCmd) Help() string { func (cmd *generateCmd) Flags() *flag.FlagSet { fs := flag.NewFlagSet("generate", flag.ExitOnError) - fs.StringVar(&cmd.flagProviderName, 
"provider-name", "", "provider name, as used in Terraform configurations") + fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)") fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory when running the command outside the root provider code directory") fs.StringVar(&cmd.flagProvidersSchema, "providers-schema", "", "path to the providers schema JSON file, which contains the output of the terraform providers schema -json command. Setting this flag will skip building the provider and calling Terraform CLI") fs.StringVar(&cmd.flagRenderedProviderName, "rendered-provider-name", "", "provider name, as generated in documentation (ex. page titles, ...)") diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go index 8248b9ec..14e39ec1 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/migrate.go @@ -17,6 +17,7 @@ type migrateCmd struct { flagProviderDir string flagTemplatesDir string flagExamplesDir string + flagProviderName string } func (cmd *migrateCmd) Synopsis() string { @@ -67,6 +68,8 @@ func (cmd *migrateCmd) Flags() *flag.FlagSet { fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory; this will default to the current working directory if not set") fs.StringVar(&cmd.flagTemplatesDir, "templates-dir", "templates", "new website templates directory based on provider-dir; files will be migrated to this directory") fs.StringVar(&cmd.flagExamplesDir, "examples-dir", "examples", "examples directory based on provider-dir; extracted code examples will be migrated to this directory") + 
fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)") + return fs } @@ -87,6 +90,7 @@ func (cmd *migrateCmd) runInternal() error { cmd.flagProviderDir, cmd.flagTemplatesDir, cmd.flagExamplesDir, + cmd.flagProviderName, ) if err != nil { return fmt.Errorf("unable to migrate website: %w", err) diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go index c4a406cf..55107f3c 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/cmd/validate.go @@ -4,6 +4,7 @@ package cmd import ( + "errors" "flag" "fmt" "strings" @@ -13,10 +14,15 @@ import ( type validateCmd struct { commonCmd + + flagProviderName string + flagProviderDir string + flagProvidersSchema string + tfVersion string } func (cmd *validateCmd) Synopsis() string { - return "validates a plugin website for the current directory" + return "validates a plugin website" } func (cmd *validateCmd) Help() string { @@ -59,6 +65,10 @@ func (cmd *validateCmd) Help() string { func (cmd *validateCmd) Flags() *flag.FlagSet { fs := flag.NewFlagSet("validate", flag.ExitOnError) + fs.StringVar(&cmd.flagProviderName, "provider-name", "", "provider name, as used in Terraform configurations; defaults to the --provider-dir short name (after removing `terraform-provider-` prefix)") + fs.StringVar(&cmd.flagProviderDir, "provider-dir", "", "relative or absolute path to the root provider code directory; this will default to the current working directory if not set") + fs.StringVar(&cmd.flagProvidersSchema, "providers-schema", "", "path to the providers schema JSON file, which contains the output of the terraform providers schema -json command. 
Setting this flag will skip building the provider and calling Terraform CLI") + fs.StringVar(&cmd.tfVersion, "tf-version", "", "terraform binary version to download. If not provided, will look for a terraform binary in the local environment. If not found in the environment, will download the latest version of Terraform") return fs } @@ -74,9 +84,14 @@ func (cmd *validateCmd) Run(args []string) int { } func (cmd *validateCmd) runInternal() error { - err := provider.Validate(cmd.ui) + err := provider.Validate(cmd.ui, + cmd.flagProviderDir, + cmd.flagProviderName, + cmd.flagProvidersSchema, + cmd.tfVersion, + ) if err != nil { - return fmt.Errorf("unable to validate website: %w", err) + return errors.Join(errors.New("validation errors found: "), err) } return nil diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/functionmd/render.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go similarity index 97% rename from vendor/github.com/hashicorp/terraform-plugin-docs/functionmd/render.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go index fdea3a82..7b4951b7 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/functionmd/render.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/functionmd/render.go @@ -10,7 +10,7 @@ import ( tfjson "github.com/hashicorp/terraform-json" - "github.com/hashicorp/terraform-plugin-docs/schemamd" + "github.com/hashicorp/terraform-plugin-docs/internal/schemamd" ) // RenderArguments returns a Markdown formatted string of the function arguments. 
diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go index ded53b65..60a0ede9 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/mdplain.go @@ -3,13 +3,25 @@ package mdplain -import "github.com/russross/blackfriday" +import ( + "bytes" -// Clean runs a VERY naive cleanup of markdown text to make it more palatable as plain text. -func PlainMarkdown(md string) (string, error) { - pt := &Text{} - - html := blackfriday.MarkdownOptions([]byte(md), pt, blackfriday.Options{}) + "github.com/yuin/goldmark" + "github.com/yuin/goldmark/extension" +) - return string(html), nil +// Clean runs a VERY naive cleanup of markdown text to make it more palatable as plain text. +func PlainMarkdown(markdown string) (string, error) { + var buf bytes.Buffer + extensions := []goldmark.Extender{ + extension.Linkify, + } + md := goldmark.New( + goldmark.WithExtensions(extensions...), + goldmark.WithRenderer(NewTextRenderer()), + ) + if err := md.Convert([]byte(markdown), &buf); err != nil { + return "", err + } + return buf.String(), nil } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go index 660cdae5..93bd5ec7 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/mdplain/renderer.go @@ -5,175 +5,101 @@ package mdplain import ( "bytes" + "io" - "github.com/russross/blackfriday" + "github.com/yuin/goldmark/ast" + extAST "github.com/yuin/goldmark/extension/ast" + "github.com/yuin/goldmark/renderer" ) -type Text struct{} - -func TextRenderer() blackfriday.Renderer { - return &Text{} -} - -func (options *Text) GetFlags() int { - return 0 -} - 
-func (options *Text) TitleBlock(out *bytes.Buffer, text []byte) { - text = bytes.TrimPrefix(text, []byte("% ")) - text = bytes.Replace(text, []byte("\n% "), []byte("\n"), -1) - out.Write(text) - out.WriteString("\n") -} - -func (options *Text) Header(out *bytes.Buffer, text func() bool, level int, id string) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return +type TextRender struct{} + +func NewTextRenderer() *TextRender { + return &TextRender{} +} + +func (r *TextRender) Render(w io.Writer, source []byte, n ast.Node) error { + out := bytes.NewBuffer([]byte{}) + err := ast.Walk(n, func(node ast.Node, entering bool) (ast.WalkStatus, error) { + if !entering || node.Type() == ast.TypeDocument { + return ast.WalkContinue, nil + } + + switch node := node.(type) { + case *ast.Blockquote, *ast.Heading: + doubleSpace(out) + out.Write(node.Text(source)) + return ast.WalkSkipChildren, nil + case *ast.ThematicBreak: + doubleSpace(out) + return ast.WalkSkipChildren, nil + case *ast.CodeBlock: + doubleSpace(out) + for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + case *ast.FencedCodeBlock: + doubleSpace(out) + doubleSpace(out) + for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + _, _ = out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + case *ast.List: + doubleSpace(out) + return ast.WalkContinue, nil + case *ast.Paragraph: + doubleSpace(out) + if node.Text(source)[0] == '|' { // Write tables as-is. 
+ for i := 0; i < node.Lines().Len(); i++ { + line := node.Lines().At(i) + out.Write(line.Value(source)) + } + return ast.WalkSkipChildren, nil + } + return ast.WalkContinue, nil + case *extAST.Strikethrough: + out.Write(node.Text(source)) + return ast.WalkContinue, nil + case *ast.AutoLink: + out.Write(node.URL(source)) + return ast.WalkSkipChildren, nil + case *ast.CodeSpan: + out.Write(node.Text(source)) + return ast.WalkSkipChildren, nil + case *ast.Link: + _, err := out.Write(node.Text(source)) + if !isRelativeLink(node.Destination) { + out.WriteString(" ") + out.Write(node.Destination) + } + return ast.WalkSkipChildren, err + case *ast.Text: + out.Write(node.Text(source)) + if node.SoftLineBreak() { + doubleSpace(out) + } + return ast.WalkContinue, nil + case *ast.Image: + return ast.WalkSkipChildren, nil + + } + return ast.WalkContinue, nil + }) + if err != nil { + return err } -} - -func (options *Text) BlockHtml(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) - out.WriteByte('\n') -} - -func (options *Text) HRule(out *bytes.Buffer) { - doubleSpace(out) -} - -func (options *Text) BlockCode(out *bytes.Buffer, text []byte, lang string) { - options.BlockCodeNormal(out, text, lang) -} - -func (options *Text) BlockCodeNormal(out *bytes.Buffer, text []byte, lang string) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) BlockQuote(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) Table(out *bytes.Buffer, header []byte, body []byte, columnData []int) { - doubleSpace(out) - out.Write(header) - out.Write(body) -} - -func (options *Text) TableRow(out *bytes.Buffer, text []byte) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) TableHeaderCell(out *bytes.Buffer, text []byte, align int) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) TableCell(out *bytes.Buffer, text []byte, align int) { - doubleSpace(out) - out.Write(text) -} - -func (options *Text) 
Footnotes(out *bytes.Buffer, text func() bool) { - options.HRule(out) - options.List(out, text, 0) -} - -func (options *Text) FootnoteItem(out *bytes.Buffer, name, text []byte, flags int) { - out.Write(text) -} - -func (options *Text) List(out *bytes.Buffer, text func() bool, flags int) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return + _, err = w.Write(out.Bytes()) + if err != nil { + return err } + return nil } -func (options *Text) ListItem(out *bytes.Buffer, text []byte, flags int) { - out.Write(text) -} - -func (options *Text) Paragraph(out *bytes.Buffer, text func() bool) { - marker := out.Len() - doubleSpace(out) - - if !text() { - out.Truncate(marker) - return - } -} - -func (options *Text) AutoLink(out *bytes.Buffer, link []byte, kind int) { - out.Write(link) -} - -func (options *Text) CodeSpan(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) DoubleEmphasis(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) Emphasis(out *bytes.Buffer, text []byte) { - if len(text) == 0 { - return - } - out.Write(text) -} - -func (options *Text) Image(out *bytes.Buffer, link []byte, title []byte, alt []byte) {} - -func (options *Text) LineBreak(out *bytes.Buffer) {} - -func (options *Text) Link(out *bytes.Buffer, link []byte, title []byte, content []byte) { - out.Write(content) - if !isRelativeLink(link) { - out.WriteString(" ") - out.Write(link) - } -} - -func (options *Text) RawHtmlTag(out *bytes.Buffer, text []byte) {} - -func (options *Text) TripleEmphasis(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) StrikeThrough(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options *Text) FootnoteRef(out *bytes.Buffer, ref []byte, id int) {} - -func (options *Text) Entity(out *bytes.Buffer, entity []byte) { - out.Write(entity) -} - -func (options *Text) NormalText(out *bytes.Buffer, text []byte) { - out.Write(text) -} - -func (options 
*Text) Smartypants(out *bytes.Buffer, text []byte) {} - -func (options *Text) DocumentHeader(out *bytes.Buffer) {} - -func (options *Text) DocumentFooter(out *bytes.Buffer) {} - -func (options *Text) TocHeader(text []byte, level int) {} - -func (options *Text) TocFinalize() {} +func (r *TextRender) AddOptions(...renderer.Option) {} func doubleSpace(out *bytes.Buffer) { if out.Len() > 0 { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go index d0c53a54..d0c3c965 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/generate.go @@ -8,7 +8,6 @@ import ( "fmt" "os" "os/exec" - "path" "path/filepath" "runtime" "strings" @@ -455,7 +454,7 @@ func (g *generator) renderStaticWebsite(providerSchema *tfjson.ProviderSchema) e // Remove subdirectories managed by tfplugindocs if file.IsDir() && slices.Contains(managedWebsiteSubDirectories, file.Name()) { g.infof("removing directory: %q", file.Name()) - err = os.RemoveAll(path.Join(g.ProviderDocsDir(), file.Name())) + err = os.RemoveAll(filepath.Join(g.ProviderDocsDir(), file.Name())) if err != nil { return fmt.Errorf("unable to remove directory %q from rendered website directory: %w", file.Name(), err) } @@ -465,7 +464,7 @@ func (g *generator) renderStaticWebsite(providerSchema *tfjson.ProviderSchema) e // Remove files managed by tfplugindocs if !file.IsDir() && slices.Contains(managedWebsiteFiles, file.Name()) { g.infof("removing file: %q", file.Name()) - err = os.RemoveAll(path.Join(g.ProviderDocsDir(), file.Name())) + err = os.RemoveAll(filepath.Join(g.ProviderDocsDir(), file.Name())) if err != nil { return fmt.Errorf("unable to remove file %q from rendered website directory: %w", file.Name(), err) } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go 
b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go new file mode 100644 index 00000000..366812bc --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/logger.go @@ -0,0 +1,27 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package provider + +import ( + "fmt" + + "github.com/hashicorp/cli" +) + +type Logger struct { + ui cli.Ui +} + +func NewLogger(ui cli.Ui) *Logger { + return &Logger{ui} +} + +func (l *Logger) infof(format string, args ...interface{}) { + l.ui.Info(fmt.Sprintf(format, args...)) +} + +//nolint:unused +func (l *Logger) warnf(format string, args ...interface{}) { + l.ui.Warn(fmt.Sprintf(format, args...)) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go index e8a77f46..babe2127 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/migrate.go @@ -27,6 +27,8 @@ type migrator struct { templatesDir string examplesDir string + providerName string + ui cli.Ui } @@ -38,7 +40,7 @@ func (m *migrator) warnf(format string, a ...interface{}) { m.ui.Warn(fmt.Sprintf(format, a...)) } -func Migrate(ui cli.Ui, providerDir string, templatesDir string, examplesDir string) error { +func Migrate(ui cli.Ui, providerDir string, templatesDir string, examplesDir string, providerName string) error { // Ensure provider directory is resolved absolute path if providerDir == "" { wd, err := os.Getwd() @@ -69,6 +71,11 @@ func Migrate(ui cli.Ui, providerDir string, templatesDir string, examplesDir str return fmt.Errorf("expected %q to be a directory", providerDir) } + // Default providerName to provider directory name + if providerName == "" { + providerName = filepath.Base(providerDir) + } + // Determine website directory websiteDir, err := 
determineWebsiteDir(providerDir) if err != nil { @@ -80,6 +87,7 @@ func Migrate(ui cli.Ui, providerDir string, templatesDir string, examplesDir str templatesDir: templatesDir, examplesDir: examplesDir, websiteDir: websiteDir, + providerName: providerName, ui: ui, } @@ -172,28 +180,43 @@ func (m *migrator) MigrateTemplate(relDir string) fs.WalkDirFunc { } baseName, _, _ := strings.Cut(d.Name(), ".") + shortName := providerShortName(m.providerName) + fileName := strings.TrimPrefix(baseName, shortName+"_") var exampleRelDir string - if baseName == "index" { + if fileName == "index" { exampleRelDir = relDir } else { - exampleRelDir = filepath.Join(relDir, baseName) + exampleRelDir = filepath.Join(relDir, fileName) } - templateFilePath := filepath.Join(m.ProviderTemplatesDir(), relDir, baseName+".md.tmpl") + templateFilePath := filepath.Join(m.ProviderTemplatesDir(), relDir, fileName+".md.tmpl") err = os.MkdirAll(filepath.Dir(templateFilePath), 0755) if err != nil { return fmt.Errorf("unable to create directory %q: %w", templateFilePath, err) } + templateFile, err := os.OpenFile(templateFilePath, os.O_WRONLY|os.O_CREATE, 0600) + + if err != nil { + return fmt.Errorf("unable to open file %q: %w", templateFilePath, err) + } + + defer func(f *os.File) { + err := f.Close() + if err != nil { + m.warnf("unable to close file %q: %q", f.Name(), err) + } + }(templateFile) + m.infof("extracting YAML frontmatter to %q", templateFilePath) - err = m.ExtractFrontMatter(data, relDir, templateFilePath) + err = m.ExtractFrontMatter(data, relDir, templateFile) if err != nil { return fmt.Errorf("unable to extract front matter to %q: %w", templateFilePath, err) } m.infof("extracting code examples from %q", d.Name()) - err = m.ExtractCodeExamples(data, exampleRelDir, templateFilePath) + err = m.ExtractCodeExamples(data, exampleRelDir, templateFile) if err != nil { return fmt.Errorf("unable to extract code examples from %q: %w", templateFilePath, err) } @@ -203,29 +226,18 @@ func (m 
*migrator) MigrateTemplate(relDir string) fs.WalkDirFunc { } -func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFilePath string) error { - templateFile, err := os.OpenFile(templateFilePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600) - if err != nil { - return fmt.Errorf("unable to open file %q: %w", templateFilePath, err) - } - defer func(f *os.File) { - err := f.Close() - if err != nil { - m.warnf("unable to close file %q: %q", templateFilePath, err) - } - }(templateFile) - +func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFile *os.File) error { fileScanner := bufio.NewScanner(bytes.NewReader(content)) fileScanner.Split(bufio.ScanLines) hasFirstLine := fileScanner.Scan() if !hasFirstLine || fileScanner.Text() != "---" { - m.warnf("no frontmatter found in %q", templateFilePath) + m.warnf("no frontmatter found in %q", templateFile.Name()) return nil } - _, err = templateFile.WriteString(fileScanner.Text() + "\n") + _, err := templateFile.WriteString(fileScanner.Text() + "\n") if err != nil { - return fmt.Errorf("unable to append frontmatter to %q: %w", templateFilePath, err) + return fmt.Errorf("unable to append frontmatter to %q: %w", templateFile.Name(), err) } exited := false for fileScanner.Scan() { @@ -235,7 +247,7 @@ func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFil } _, err = templateFile.WriteString(fileScanner.Text() + "\n") if err != nil { - return fmt.Errorf("unable to append frontmatter to %q: %w", templateFilePath, err) + return fmt.Errorf("unable to append frontmatter to %q: %w", templateFile.Name(), err) } if fileScanner.Text() == "---" { exited = true @@ -244,7 +256,7 @@ func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFil } if !exited { - return fmt.Errorf("cannot find ending of frontmatter block in %q", templateFilePath) + return fmt.Errorf("cannot find ending of frontmatter block in %q", templateFile.Name()) } // add comment to end of 
front matter briefly explaining template functionality @@ -254,24 +266,13 @@ func (m *migrator) ExtractFrontMatter(content []byte, relDir string, templateFil _, err = templateFile.WriteString(migrateProviderTemplateComment + "\n") } if err != nil { - return fmt.Errorf("unable to append template comment to %q: %w", templateFilePath, err) + return fmt.Errorf("unable to append template comment to %q: %w", templateFile.Name(), err) } return nil } -func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templateFilePath string) error { - templateFile, err := os.OpenFile(templateFilePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0600) - if err != nil { - return fmt.Errorf("unable to open file %q: %w", templateFilePath, err) - } - defer func(f *os.File) { - err := f.Close() - if err != nil { - m.warnf("unable to close file %q: %q", templateFilePath, err) - } - }(templateFile) - +func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templateFile *os.File) error { md := newMarkdownRenderer() p := md.Parser() root := p.Parse(text.NewReader(content)) @@ -279,7 +280,7 @@ func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templat exampleCount := 0 importCount := 0 - err = ast.Walk(root, func(node ast.Node, enter bool) (ast.WalkStatus, error) { + err := ast.Walk(root, func(node ast.Node, enter bool) (ast.WalkStatus, error) { // skip the root node if !enter || node.Type() == ast.TypeDocument { return ast.WalkContinue, nil @@ -294,20 +295,20 @@ func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templat exampleCount++ ext = ".tf" exampleName = "example_" + strconv.Itoa(exampleCount) + ext - examplePath = filepath.Join(m.ProviderExamplesDir(), newRelDir, exampleName) + examplePath = filepath.Join(m.examplesDir, newRelDir, exampleName) template = fmt.Sprintf("{{tffile \"%s\"}}", examplePath) - m.infof("creating example file %q", examplePath) + m.infof("creating example file %q", filepath.Join(m.providerDir, 
examplePath)) case "console": importCount++ ext = ".sh" exampleName = "import_" + strconv.Itoa(importCount) + ext - examplePath = filepath.Join(m.ProviderExamplesDir(), newRelDir, exampleName) + examplePath = filepath.Join(m.examplesDir, newRelDir, exampleName) template = fmt.Sprintf("{{codefile \"shell\" \"%s\"}}", examplePath) - m.infof("creating import file %q", examplePath) + m.infof("creating import file %q", filepath.Join(m.providerDir, examplePath)) default: // Render node as is m.infof("skipping code block with unknown language %q", lang) - err = md.Renderer().Render(templateFile, content, node) + err := md.Renderer().Render(templateFile, content, node) if err != nil { return ast.WalkStop, fmt.Errorf("unable to render node: %w", err) } @@ -322,7 +323,7 @@ func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templat } // create example file from code block - err = writeFile(examplePath, codeBuf.String()) + err := writeFile(examplePath, codeBuf.String()) if err != nil { return ast.WalkStop, fmt.Errorf("unable to write file %q: %w", examplePath, err) } @@ -337,7 +338,7 @@ func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templat } // Render non-code nodes as is - err = md.Renderer().Render(templateFile, content, node) + err := md.Renderer().Render(templateFile, content, node) if err != nil { return ast.WalkStop, fmt.Errorf("unable to render node: %w", err) } @@ -353,9 +354,9 @@ func (m *migrator) ExtractCodeExamples(content []byte, newRelDir string, templat _, err = templateFile.WriteString("\n") if err != nil { - return fmt.Errorf("unable to write to template %q: %w", templateFilePath, err) + return fmt.Errorf("unable to write to template %q: %w", templateFile.Name(), err) } - m.infof("finished creating template %q", templateFilePath) + m.infof("finished creating template %q", templateFile.Name()) return nil } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go 
b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go new file mode 100644 index 00000000..3338fe98 --- /dev/null +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/schema.go @@ -0,0 +1,136 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package provider + +import ( + "context" + "fmt" + "os" + "os/exec" + "path/filepath" + "runtime" + + "github.com/hashicorp/go-version" + install "github.com/hashicorp/hc-install" + "github.com/hashicorp/hc-install/checkpoint" + "github.com/hashicorp/hc-install/fs" + "github.com/hashicorp/hc-install/product" + "github.com/hashicorp/hc-install/releases" + "github.com/hashicorp/hc-install/src" + "github.com/hashicorp/terraform-exec/tfexec" + tfjson "github.com/hashicorp/terraform-json" +) + +func TerraformProviderSchemaFromTerraform(ctx context.Context, providerName, providerDir, tfVersion string, l *Logger) (*tfjson.ProviderSchema, error) { + var err error + + shortName := providerShortName(providerName) + + tmpDir, err := os.MkdirTemp("", "tfws") + if err != nil { + return nil, fmt.Errorf("unable to create temporary provider install directory %q: %w", tmpDir, err) + } + defer os.RemoveAll(tmpDir) + + l.infof("compiling provider %q", shortName) + providerPath := fmt.Sprintf("plugins/registry.terraform.io/hashicorp/%s/0.0.1/%s_%s", shortName, runtime.GOOS, runtime.GOARCH) + outFile := filepath.Join(tmpDir, providerPath, fmt.Sprintf("terraform-provider-%s", shortName)) + switch runtime.GOOS { + case "windows": + outFile = outFile + ".exe" + } + buildCmd := exec.Command("go", "build", "-o", outFile) + buildCmd.Dir = providerDir + // TODO: constrain env here to make it a little safer? 
+ _, err = runCmd(buildCmd) + if err != nil { + return nil, fmt.Errorf("unable to execute go build command: %w", err) + } + + err = writeFile(filepath.Join(tmpDir, "provider.tf"), fmt.Sprintf(` +provider %[1]q { +} +`, shortName)) + if err != nil { + return nil, fmt.Errorf("unable to write provider.tf file: %w", err) + } + + i := install.NewInstaller() + var sources []src.Source + if tfVersion != "" { + l.infof("downloading Terraform CLI binary version from releases.hashicorp.com: %s", tfVersion) + sources = []src.Source{ + &releases.ExactVersion{ + Product: product.Terraform, + Version: version.Must(version.NewVersion(tfVersion)), + InstallDir: tmpDir, + }, + } + } else { + l.infof("using Terraform CLI binary from PATH if available, otherwise downloading latest Terraform CLI binary") + sources = []src.Source{ + &fs.AnyVersion{ + Product: &product.Terraform, + }, + &checkpoint.LatestVersion{ + InstallDir: tmpDir, + Product: product.Terraform, + }, + } + } + + tfBin, err := i.Ensure(context.Background(), sources) + if err != nil { + return nil, fmt.Errorf("unable to download Terraform binary: %w", err) + } + + tf, err := tfexec.NewTerraform(tmpDir, tfBin) + if err != nil { + return nil, fmt.Errorf("unable to create new terraform exec instance: %w", err) + } + + l.infof("running terraform init") + err = tf.Init(ctx, tfexec.Get(false), tfexec.PluginDir("./plugins")) + if err != nil { + return nil, fmt.Errorf("unable to run terraform init on provider: %w", err) + } + + l.infof("getting provider schema") + schemas, err := tf.ProvidersSchema(ctx) + if err != nil { + return nil, fmt.Errorf("unable to retrieve provider schema from terraform exec: %w", err) + } + + if ps, ok := schemas.Schemas[shortName]; ok { + return ps, nil + } + + if ps, ok := schemas.Schemas["registry.terraform.io/hashicorp/"+shortName]; ok { + return ps, nil + } + + return nil, fmt.Errorf("unable to find schema in JSON for provider %q", shortName) +} + +func 
TerraformProviderSchemaFromFile(providerName, providersSchemaPath string, l *Logger) (*tfjson.ProviderSchema, error) { + var err error + + shortName := providerShortName(providerName) + + l.infof("getting provider schema") + schemas, err := extractSchemaFromFile(providersSchemaPath) + if err != nil { + return nil, fmt.Errorf("unable to retrieve provider schema from JSON file: %w", err) + } + + if ps, ok := schemas.Schemas[shortName]; ok { + return ps, nil + } + + if ps, ok := schemas.Schemas["registry.terraform.io/hashicorp/"+shortName]; ok { + return ps, nil + } + + return nil, fmt.Errorf("unable to find schema in JSON for provider %q", shortName) +} diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go index 10f2e655..58766489 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/template.go @@ -16,10 +16,11 @@ import ( tfjson "github.com/hashicorp/terraform-json" - "github.com/hashicorp/terraform-plugin-docs/functionmd" + "github.com/hashicorp/terraform-plugin-docs/internal/schemamd" + + "github.com/hashicorp/terraform-plugin-docs/internal/functionmd" "github.com/hashicorp/terraform-plugin-docs/internal/mdplain" "github.com/hashicorp/terraform-plugin-docs/internal/tmplfuncs" - "github.com/hashicorp/terraform-plugin-docs/schemamd" ) const ( diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go index 21063dc8..7a3ec336 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/util.go @@ -86,9 +86,13 @@ func resourceSchema(schemas map[string]*tfjson.Schema, providerShortName, templa func writeFile(path string, data 
string) error { dir, _ := filepath.Split(path) - err := os.MkdirAll(dir, 0755) - if err != nil { - return fmt.Errorf("unable to make dir %q: %w", dir, err) + + var err error + if dir != "" { + err = os.MkdirAll(dir, 0755) + if err != nil { + return fmt.Errorf("unable to make dir %q: %w", dir, err) + } } err = os.WriteFile(path, []byte(data), 0644) @@ -99,6 +103,7 @@ func writeFile(path string, data string) error { return nil } +//nolint:unparam func runCmd(cmd *exec.Cmd) ([]byte, error) { output, err := cmd.CombinedOutput() if err != nil { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go index c93ff3ec..a72be373 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/provider/validate.go @@ -4,269 +4,363 @@ package provider import ( + "context" + "errors" "fmt" + "log" "os" "path/filepath" - "strings" + "github.com/bmatcuk/doublestar/v4" "github.com/hashicorp/cli" + tfjson "github.com/hashicorp/terraform-json" + + "github.com/hashicorp/terraform-plugin-docs/internal/check" ) -func Validate(ui cli.Ui) error { - dirExists := func(name string) bool { - if _, err := os.Stat(name); err != nil { - return false - } +const ( + FileExtensionHtmlMarkdown = `.html.markdown` + FileExtensionHtmlMd = `.html.md` + FileExtensionMarkdown = `.markdown` + FileExtensionMd = `.md` - return true - } + DocumentationGlobPattern = `{docs/index.md,docs/{,cdktf/}{data-sources,guides,resources,functions}/**/*,website/docs/**/*}` + DocumentationDirGlobPattern = `{docs/{,cdktf/}{data-sources,guides,resources,functions}{,/*},website/docs/**/*}` +) - switch { - default: - ui.Warn("no website detected, exiting") - case dirExists("templates"): - ui.Info("detected templates directory, running checks...") - err := validateTemplates(ui, "templates") - if err != nil { - return 
err - } - if dirExists("examples") { - ui.Info("detected examples directory for templates, running checks...") - err = validateExamples(ui, "examples") - if err != nil { - return err - } - } - return err - case dirExists("docs"): - ui.Info("detected static docs directory, running checks") - return validateStaticDocs(ui, "docs") - case dirExists("website"): - ui.Info("detected legacy website directory, running checks") - return validateLegacyWebsite(ui, "website") - } +var ValidLegacyFileExtensions = []string{ + FileExtensionHtmlMarkdown, + FileExtensionHtmlMd, + FileExtensionMarkdown, + FileExtensionMd, +} - return nil +var ValidRegistryFileExtensions = []string{ + FileExtensionMd, } -func validateExamples(ui cli.Ui, dir string) error { - return nil +var LegacyFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, } -func validateTemplates(ui cli.Ui, dir string) error { - checks := []check{ - checkAllowedFiles( - "index.md", - "index.md.tmpl", - ), - checkAllowedDirs( - "data-sources", - "guides", - "functions", - "resources", - ), - checkBlockedExtensions( - ".html.md.tmpl", - ), - checkAllowedExtensions( - ".md", - ".md.tmpl", - ), - } - issues := []issue{} - for _, c := range checks { - checkIssues, err := c(dir) +var LegacyIndexFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + NoSubcategory: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, +} + +var LegacyGuideFrontMatterOptions = &check.FrontMatterOptions{ + NoSidebarCurrent: true, + RequireDescription: true, + RequireLayout: true, + RequirePageTitle: true, +} + +var RegistryFrontMatterOptions = &check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, +} + +var RegistryIndexFrontMatterOptions = &check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, + NoSubcategory: true, +} + +var RegistryGuideFrontMatterOptions = 
&check.FrontMatterOptions{ + NoLayout: true, + NoSidebarCurrent: true, + RequirePageTitle: true, +} + +type validator struct { + providerName string + providerDir string + providersSchemaPath string + + tfVersion string + providerSchema *tfjson.ProviderSchema + + logger *Logger +} + +func Validate(ui cli.Ui, providerDir, providerName, providersSchemaPath, tfversion string) error { + // Ensure provider directory is resolved absolute path + if providerDir == "" { + wd, err := os.Getwd() + if err != nil { - return err + return fmt.Errorf("error getting working directory: %w", err) + } + + providerDir = wd + } else { + absProviderDir, err := filepath.Abs(providerDir) + + if err != nil { + return fmt.Errorf("error getting absolute path with provider directory %q: %w", providerDir, err) } - issues = append(issues, checkIssues...) + + providerDir = absProviderDir } - for _, issue := range issues { - ui.Warn(fmt.Sprintf("%s: %s", issue.file, issue.message)) + + // Verify provider directory + providerDirFileInfo, err := os.Stat(providerDir) + + if err != nil { + return fmt.Errorf("error getting information for provider directory %q: %w", providerDir, err) + } + + if !providerDirFileInfo.IsDir() { + return fmt.Errorf("expected %q to be a directory", providerDir) } - if len(issues) > 0 { - return fmt.Errorf("invalid templates directory") + + v := &validator{ + providerName: providerName, + providerDir: providerDir, + providersSchemaPath: providersSchemaPath, + tfVersion: tfversion, + + logger: NewLogger(ui), } - return nil + + ctx := context.Background() + + return v.validate(ctx) } -func validateStaticDocs(ui cli.Ui, dir string) error { - checks := []check{ - checkAllowedFiles( - "index.md", - ), - checkAllowedDirs( - "data-sources", - "guides", - "functions", - "resources", - "cdktf", - ), - checkBlockedExtensions( - ".html.md.tmpl", - ".html.md", - ".md.tmpl", - ), - checkAllowedExtensions( - ".md", - ), +func (v *validator) validate(ctx context.Context) error { + var 
result error + + var err error + + if v.providerName == "" { + v.providerName = filepath.Base(v.providerDir) } - issues := []issue{} - for _, c := range checks { - checkIssues, err := c(dir) + + if v.providersSchemaPath == "" { + v.logger.infof("exporting schema from Terraform") + v.providerSchema, err = TerraformProviderSchemaFromTerraform(ctx, v.providerName, v.providerDir, v.tfVersion, v.logger) if err != nil { - return err + return fmt.Errorf("error exporting provider schema from Terraform: %w", err) + } + } else { + v.logger.infof("exporting schema from JSON file") + v.providerSchema, err = TerraformProviderSchemaFromFile(v.providerName, v.providersSchemaPath, v.logger) + if err != nil { + return fmt.Errorf("error exporting provider schema from JSON file: %w", err) } - issues = append(issues, checkIssues...) } - for _, issue := range issues { - ui.Warn(fmt.Sprintf("%s: %s", issue.file, issue.message)) + + providerFs := os.DirFS(v.providerDir) + + files, globErr := doublestar.Glob(providerFs, DocumentationGlobPattern) + if globErr != nil { + return fmt.Errorf("error finding documentation files: %w", err) } - if len(issues) > 0 { - return fmt.Errorf("invalid templates directory") + + log.Printf("[DEBUG] Found documentation files %v", files) + + v.logger.infof("running mixed directories check") + err = check.MixedDirectoriesCheck(files) + result = errors.Join(result, err) + + if dirExists(filepath.Join(v.providerDir, "docs")) { + v.logger.infof("detected static docs directory, running checks") + err = v.validateStaticDocs(filepath.Join(v.providerDir, "docs")) + result = errors.Join(result, err) + + } + if dirExists(filepath.Join(v.providerDir, filepath.Join("website", "docs"))) { + v.logger.infof("detected legacy website directory, running checks") + err = v.validateLegacyWebsite(filepath.Join(v.providerDir, "website/docs")) + result = errors.Join(result, err) } - return nil -} -func validateLegacyWebsite(ui cli.Ui, dir string) error { - panic("not implemented") 
+ return result } -type issue struct { - file string - message string -} +func (v *validator) validateStaticDocs(dir string) error { -type check func(dir string) ([]issue, error) + var result error -func checkBlockedExtensions(exts ...string) check { - return func(dir string) ([]issue, error) { - issues := []issue{} - err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if info.IsDir() { - return nil - } - for _, ext := range exts { - if strings.HasSuffix(path, ext) { - _, file := filepath.Split(path) - issues = append(issues, issue{ - file: path, - message: fmt.Sprintf("the extension for %q is not supported", file), - }) - break - } - } - return nil - }) + options := &check.ProviderFileOptions{ + FrontMatter: RegistryFrontMatterOptions, + ValidExtensions: ValidRegistryFileExtensions, + } + + var files []string + + err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error { if err != nil { - return nil, err + return fmt.Errorf("error walking directory %q: %w", dir, err) } - return issues, nil - } -} -func checkAllowedExtensions(exts ...string) check { - return func(dir string) ([]issue, error) { - issues := []issue{} - err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + rel, err := filepath.Rel(v.providerDir, path) + if err != nil { + return err + } + if d.IsDir() { + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationDirGlobPattern), rel) if err != nil { return err } - if info.IsDir() { - return nil - } - valid := false - for _, ext := range exts { - if strings.HasSuffix(path, ext) { - valid = true - break - } - } - if !valid { - _, file := filepath.Split(path) - issues = append(issues, issue{ - file: path, - message: fmt.Sprintf("the extension for %q is not expected", file), - }) + if !match { + return nil // skip valid non-documentation directories } + + v.logger.infof("running invalid directories check on %s", rel) + result = 
errors.Join(result, check.InvalidDirectoriesCheck(rel)) return nil - }) + } + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationGlobPattern), rel) if err != nil { - return nil, err + return err + } + if !match { + return nil // skip valid non-documentation files } - return issues, nil + + // Configure FrontMatterOptions based on file type + if d.Name() == "index.md" { + options.FrontMatter = RegistryIndexFrontMatterOptions + } else if _, relErr := filepath.Rel(rel, "guides"); relErr != nil { + options.FrontMatter = RegistryGuideFrontMatterOptions + } else { + options.FrontMatter = RegistryFrontMatterOptions + } + v.logger.infof("running file checks on %s", rel) + result = errors.Join(result, check.NewProviderFileCheck(options).Run(path)) + + files = append(files, path) + return nil + }) + if err != nil { + return fmt.Errorf("error walking directory %q: %w", dir, err) } -} -func checkAllowedDirs(dirs ...string) check { - allowedDirs := map[string]bool{} - for _, d := range dirs { - allowedDirs[d] = true + mismatchOpt := &check.FileMismatchOptions{ + ProviderShortName: providerShortName(v.providerName), + Schema: v.providerSchema, + } + + if dirExists(filepath.Join(dir, "data-sources")) { + dataSourceFiles, _ := os.ReadDir(filepath.Join(dir, "data-sources")) + mismatchOpt.DatasourceEntries = dataSourceFiles + } + if dirExists(filepath.Join(dir, "resources")) { + resourceFiles, _ := os.ReadDir(filepath.Join(dir, "resources")) + mismatchOpt.ResourceEntries = resourceFiles + } + if dirExists(filepath.Join(dir, "functions")) { + functionFiles, _ := os.ReadDir(filepath.Join(dir, "functions")) + mismatchOpt.FunctionEntries = functionFiles } - return func(dir string) ([]issue, error) { - issues := []issue{} + v.logger.infof("running file mismatch check") + if err := check.NewFileMismatchCheck(mismatchOpt).Run(); err != nil { + result = errors.Join(result, err) + } + + return result +} + +func (v *validator) validateLegacyWebsite(dir string) error { + + var 
result error + + options := &check.ProviderFileOptions{ + FrontMatter: LegacyFrontMatterOptions, + ValidExtensions: ValidLegacyFileExtensions, + } - f, err := os.Open(dir) + var files []string + err := filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error { if err != nil { - return nil, err + return fmt.Errorf("error walking directory %q: %w", dir, err) } - infos, err := f.Readdir(-1) + + rel, err := filepath.Rel(v.providerDir, path) if err != nil { - return nil, err + return err } - - for _, fi := range infos { - if !fi.IsDir() { - continue + if d.IsDir() { + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationDirGlobPattern), rel) + if err != nil { + return err } - - if !allowedDirs[fi.Name()] { - issues = append(issues, issue{ - file: filepath.Join(dir, fi.Name()), - message: fmt.Sprintf("directory %q is not allowed", fi.Name()), - }) + if !match { + return nil // skip valid non-documentation directories } + + v.logger.infof("running invalid directories check on %s", rel) + result = errors.Join(result, check.InvalidDirectoriesCheck(rel)) + return nil } - return issues, nil - } -} + match, err := doublestar.PathMatch(filepath.FromSlash(DocumentationGlobPattern), rel) + if err != nil { + return err + } + if !match { + return nil // skip non-documentation files + } + + // Configure FrontMatterOptions based on file type + if d.Name() == "index.md" { + options.FrontMatter = LegacyIndexFrontMatterOptions + } else if _, relErr := filepath.Rel(rel, "guides"); relErr != nil { + options.FrontMatter = LegacyGuideFrontMatterOptions + } else { + options.FrontMatter = LegacyFrontMatterOptions + } + v.logger.infof("running file checks on %s", rel) + result = errors.Join(result, check.NewProviderFileCheck(options).Run(path)) -func checkAllowedFiles(dirs ...string) check { - allowedFiles := map[string]bool{} - for _, d := range dirs { - allowedFiles[d] = true + files = append(files, path) + return nil + }) + if err != nil { + return 
fmt.Errorf("error walking directory %q: %w", dir, err) } - return func(dir string) ([]issue, error) { - issues := []issue{} + mismatchOpt := &check.FileMismatchOptions{ + ProviderShortName: providerShortName(v.providerName), + Schema: v.providerSchema, + } - f, err := os.Open(dir) - if err != nil { - return nil, err - } - infos, err := f.Readdir(-1) - if err != nil { - return nil, err - } + if dirExists(filepath.Join(dir, "d")) { + dataSourceFiles, _ := os.ReadDir(filepath.Join(dir, "d")) + mismatchOpt.DatasourceEntries = dataSourceFiles + } + if dirExists(filepath.Join(dir, "r")) { + resourceFiles, _ := os.ReadDir(filepath.Join(dir, "r")) + mismatchOpt.ResourceEntries = resourceFiles + } + if dirExists(filepath.Join(dir, "functions")) { + functionFiles, _ := os.ReadDir(filepath.Join(dir, "functions")) + mismatchOpt.FunctionEntries = functionFiles + } - for _, fi := range infos { - if fi.IsDir() { - continue - } + v.logger.infof("running file mismatch check") + if err := check.NewFileMismatchCheck(mismatchOpt).Run(); err != nil { + result = errors.Join(result, err) + } - if !allowedFiles[fi.Name()] { - issues = append(issues, issue{ - file: filepath.Join(dir, fi.Name()), - message: fmt.Sprintf("file %q is not allowed", fi.Name()), - }) - } - } + return result +} - return issues, nil +func dirExists(name string) bool { + if file, err := os.Stat(name); err != nil { + return false + } else if !file.IsDir() { + return false } + + return true } diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/behaviors.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/behaviors.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/behaviors.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/behaviors.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go 
similarity index 90% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go index 90617d86..a459d0ba 100644 --- a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/render.go +++ b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/render.go @@ -57,11 +57,12 @@ var ( ) type nestedType struct { - anchorID string - path []string - block *tfjson.SchemaBlock - object *cty.Type - attrs *tfjson.SchemaNestedAttributeType + anchorID string + pathTitle string + path []string + block *tfjson.SchemaBlock + object *cty.Type + attrs *tfjson.SchemaNestedAttributeType group groupFilter } @@ -87,6 +88,7 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro } anchorID := "nestedatt--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nestedTypes := []nestedType{} switch { case att.AttributeNestedType != nil: @@ -96,9 +98,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - attrs: att.AttributeNestedType, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + attrs: att.AttributeNestedType, group: group, }) @@ -109,9 +112,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &att.AttributeType, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &att.AttributeType, group: group, }) @@ -123,9 +127,10 @@ func writeAttribute(w io.Writer, path []string, att *tfjson.SchemaAttribute, gro nt := att.AttributeType.ElementType() nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &nt, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &nt, group: group, }) @@ -153,10 +158,12 @@ func 
writeBlockType(w io.Writer, path []string, block *tfjson.SchemaBlockType) ( } anchorID := "nestedblock--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nt := nestedType{ - anchorID: anchorID, - path: path, - block: block.Block, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + block: block.Block, } _, err = io.WriteString(w, " (see [below for nested schema](#"+anchorID+"))") @@ -231,7 +238,7 @@ nameLoop: // // If a `.Description` is provided instead, the behaviour will be the // same as for every other attribute. - if strings.ToLower(n) == "id" && childAtt.Description == "" { + if strings.ToLower(n) == "id" && len(parents) == 0 && childAtt.Description == "" { if strings.Contains(gf.topLevelTitle, "Read-Only") { childAtt.Description = "The ID of this resource." groups[i] = append(groups[i], n) @@ -344,7 +351,7 @@ func writeNestedTypes(w io.Writer, nestedTypes []nestedType) error { return err } - _, err = io.WriteString(w, "### Nested Schema for `"+strings.Join(nt.path, ".")+"`\n\n") + _, err = io.WriteString(w, "### Nested Schema for `"+nt.pathTitle+"`\n\n") if err != nil { return err } @@ -401,6 +408,7 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF } anchorID := "nestedobjatt--" + strings.Join(path, "--") + pathTitle := strings.Join(path, ".") nestedTypes := []nestedType{} switch { case att.IsObjectType(): @@ -410,9 +418,10 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF } nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &att, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &att, group: group, }) @@ -424,9 +433,10 @@ func writeObjectAttribute(w io.Writer, path []string, att cty.Type, group groupF nt := att.ElementType() nestedTypes = append(nestedTypes, nestedType{ - anchorID: anchorID, - path: path, - object: &nt, + anchorID: anchorID, + pathTitle: pathTitle, + path: path, + object: &nt, group: 
group, }) @@ -456,7 +466,9 @@ func writeObjectChildren(w io.Writer, parents []string, ty cty.Type, group group for _, name := range sortedNames { att := atts[name] - path := append(parents, name) + path := make([]string, len(parents), len(parents)+1) + copy(path, parents) + path = append(path, name) nt, err := writeObjectAttribute(w, path, att, group) if err != nil { @@ -512,7 +524,9 @@ func writeNestedAttributeChildren(w io.Writer, parents []string, nestedAttribute for _, name := range names { att := nestedAttributes.Attributes[name] - path := append(parents, name) + path := make([]string, len(parents), len(parents)+1) + copy(path, parents) + path = append(path, name) nt, err := writeAttribute(w, path, att, group) if err != nil { diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_attribute_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_attribute_description.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_attribute_description.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_attribute_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_block_type_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_block_type_description.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_block_type_description.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_block_type_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_nested_attribute_type_description.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_nested_attribute_type_description.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_nested_attribute_type_description.go rename to 
vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_nested_attribute_type_description.go diff --git a/vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_type.go b/vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_type.go similarity index 100% rename from vendor/github.com/hashicorp/terraform-plugin-docs/schemamd/write_type.go rename to vendor/github.com/hashicorp/terraform-plugin-docs/internal/schemamd/write_type.go diff --git a/vendor/github.com/invopop/yaml/.gitignore b/vendor/github.com/invopop/yaml/.gitignore deleted file mode 100644 index e256a31e..00000000 --- a/vendor/github.com/invopop/yaml/.gitignore +++ /dev/null @@ -1,20 +0,0 @@ -# OSX leaves these everywhere on SMB shares -._* - -# Eclipse files -.classpath -.project -.settings/** - -# Emacs save files -*~ - -# Vim-related files -[._]*.s[a-w][a-z] -[._]s[a-w][a-z] -*.un~ -Session.vim -.netrwhist - -# Go test binaries -*.test diff --git a/vendor/github.com/invopop/yaml/.golangci.toml b/vendor/github.com/invopop/yaml/.golangci.toml deleted file mode 100644 index 4a438ca2..00000000 --- a/vendor/github.com/invopop/yaml/.golangci.toml +++ /dev/null @@ -1,15 +0,0 @@ -[run] -timeout = "120s" - -[output] -format = "colored-line-number" - -[linters] -enable = [ - "gocyclo", "unconvert", "goimports", "unused", "varcheck", - "vetshadow", "misspell", "nakedret", "errcheck", "revive", "ineffassign", - "deadcode", "goconst", "vet", "unparam", "gofmt" -] - -[issues] -exclude-use-default = false diff --git a/vendor/github.com/invopop/yaml/LICENSE b/vendor/github.com/invopop/yaml/LICENSE deleted file mode 100644 index 7805d36d..00000000 --- a/vendor/github.com/invopop/yaml/LICENSE +++ /dev/null @@ -1,50 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2014 Sam Ghods - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, 
including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - - -Copyright (c) 2012 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/invopop/yaml/README.md b/vendor/github.com/invopop/yaml/README.md deleted file mode 100644 index 2c33dfe5..00000000 --- a/vendor/github.com/invopop/yaml/README.md +++ /dev/null @@ -1,128 +0,0 @@ -# YAML marshaling and unmarshaling support for Go - -[![Lint](https://github.com/invopop/yaml/actions/workflows/lint.yaml/badge.svg)](https://github.com/invopop/yaml/actions/workflows/lint.yaml) -[![Test Go](https://github.com/invopop/yaml/actions/workflows/test.yaml/badge.svg)](https://github.com/invopop/yaml/actions/workflows/test.yaml) -[![Go Report Card](https://goreportcard.com/badge/github.com/invopop/yaml)](https://goreportcard.com/report/github.com/invopop/yaml) -![Latest Tag](https://img.shields.io/github/v/tag/invopop/yaml) - -## Introduction - -A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs. - -This is a fork and split of the original [ghodss/yaml](https://github.com/ghodss/yaml) repository which no longer appears to be maintained. - -In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON` unlike go-yaml. 
For a detailed overview of the rationale behind this method, [see this blog post](https://web.archive.org/web/20150812020634/http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/). - -## Compatibility - -This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility). - -Tested against Go versions 1.14 and onwards. - -## Caveats - -**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example: - -``` -BAD: - exampleKey: !!binary gIGC - -GOOD: - exampleKey: gIGC -... and decode the base64 data in your code. -``` - -**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error since this is not supported by JSON. This error will occur in `Unmarshal` as well since you can't unmarshal map keys anyways since struct fields can't be keys. - -## Installation and usage - -To install, run: - -``` -$ go get github.com/invopop/yaml -``` - -And import using: - -``` -import "github.com/invopop/yaml" -``` - -Usage is very similar to the JSON library: - -```go -package main - -import ( - "fmt" - - "github.com/invopop/yaml" -) - -type Person struct { - Name string `json:"name"` // Affects YAML field names too. - Age int `json:"age"` -} - -func main() { - // Marshal a Person struct to YAML. 
- p := Person{"John", 30} - y, err := yaml.Marshal(p) - if err != nil { - fmt.Printf("err: %v\n", err) - return - } - fmt.Println(string(y)) - /* Output: - age: 30 - name: John - */ - - // Unmarshal the YAML back into a Person struct. - var p2 Person - err = yaml.Unmarshal(y, &p2) - if err != nil { - fmt.Printf("err: %v\n", err) - return - } - fmt.Println(p2) - /* Output: - {John 30} - */ -} -``` - -`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available: - -```go -package main - -import ( - "fmt" - - "github.com/invopop/yaml" -) - -func main() { - j := []byte(`{"name": "John", "age": 30}`) - y, err := yaml.JSONToYAML(j) - if err != nil { - fmt.Printf("err: %v\n", err) - return - } - fmt.Println(string(y)) - /* Output: - name: John - age: 30 - */ - j2, err := yaml.YAMLToJSON(y) - if err != nil { - fmt.Printf("err: %v\n", err) - return - } - fmt.Println(string(j2)) - /* Output: - {"age":30,"name":"John"} - */ -} -``` diff --git a/vendor/github.com/invopop/yaml/fields.go b/vendor/github.com/invopop/yaml/fields.go deleted file mode 100644 index 52b30c6b..00000000 --- a/vendor/github.com/invopop/yaml/fields.go +++ /dev/null @@ -1,498 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package yaml - -import ( - "bytes" - "encoding" - "encoding/json" - "reflect" - "sort" - "strings" - "sync" - "unicode" - "unicode/utf8" -) - -// indirect walks down v allocating pointers as needed, -// until it gets to a non-pointer. -// if it encounters an Unmarshaler, indirect stops and returns that. -// if decodingNull is true, indirect stops at the last pointer so it can be set to nil. -func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { - // If v is a named type and is addressable, - // start with its address, so that if the type has pointer methods, - // we find them. 
- if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { - v = v.Addr() - } - for { - // Load value from interface, but only if the result will be - // usefully addressable. - if v.Kind() == reflect.Interface && !v.IsNil() { - e := v.Elem() - if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { - v = e - continue - } - } - - if v.Kind() != reflect.Ptr { - break - } - - if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { - break - } - if v.IsNil() { - if v.CanSet() { - v.Set(reflect.New(v.Type().Elem())) - } else { - v = reflect.New(v.Type().Elem()) - } - } - if v.Type().NumMethod() > 0 { - if u, ok := v.Interface().(json.Unmarshaler); ok { - return u, nil, reflect.Value{} - } - if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { - return nil, u, reflect.Value{} - } - } - v = v.Elem() - } - return nil, nil, v -} - -// A field represents a single field found in a struct. -type field struct { - name string - nameBytes []byte // []byte(name) - equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent - - tag bool - index []int - typ reflect.Type - omitEmpty bool - quoted bool -} - -func fillField(f field) field { - f.nameBytes = []byte(f.name) - f.equalFold = foldFunc(f.nameBytes) - return f -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from json tag", then -// breaking ties with index sequence. -type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. 
-type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that JSON should recognize for the given type. -// The algorithm is breadth-first search over the set of structs to include - the top struct -// and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - var count, nextCount map[reflect.Type]int - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" { // unexported - continue - } - tag := sf.Tag.Get("json") - if tag == "-" { - continue - } - name, opts := parseTag(tag) - if !isValidTag(name) { - name = "" - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. 
- if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := name != "" - if name == "" { - name = sf.Name - } - fields = append(fields, fillField(field{ - name: name, - tag: tagged, - index: index, - typ: ft, - omitEmpty: opts.Contains("omitempty"), - quoted: opts.Contains("string"), - })) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft})) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with JSON tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. 
- fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// JSON tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. 
-func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. - f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -} - -func isValidTag(s string) bool { - if s == "" { - return false - } - for _, c := range s { - switch { - case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): - // Backslash and quote chars are reserved, but - // otherwise any punctuation chars are allowed - // in a tag name. - default: - if !unicode.IsLetter(c) && !unicode.IsDigit(c) { - return false - } - } - } - return true -} - -const ( - caseMask = ^byte(0x20) // Mask to ignore case in ASCII. - kelvin = '\u212a' - smallLongEss = '\u017f' -) - -// foldFunc returns one of four different case folding equivalence -// functions, from most general (and slow) to fastest: -// -// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8 -// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S') -// 3) asciiEqualFold, no special, but includes non-letters (including _) -// 4) simpleLetterEqualFold, no specials, no non-letters. -// -// The letters S and K are special because they map to 3 runes, not just 2: -// * S maps to s and to U+017F 'ſ' Latin small letter long s -// * k maps to K and to U+212A 'K' Kelvin sign -// See http://play.golang.org/p/tTxjOc0OGo -// -// The returned function is specialized for matching against s and -// should only be given s. It's not curried for performance reasons. 
-func foldFunc(s []byte) func(s, t []byte) bool { - nonLetter := false - special := false // special letter - for _, b := range s { - if b >= utf8.RuneSelf { - return bytes.EqualFold - } - upper := b & caseMask - if upper < 'A' || upper > 'Z' { - nonLetter = true - } else if upper == 'K' || upper == 'S' { - // See above for why these letters are special. - special = true - } - } - if special { - return equalFoldRight - } - if nonLetter { - return asciiEqualFold - } - return simpleLetterEqualFold -} - -// equalFoldRight is a specialization of bytes.EqualFold when s is -// known to be all ASCII (including punctuation), but contains an 's', -// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t. -// See comments on foldFunc. -func equalFoldRight(s, t []byte) bool { - for _, sb := range s { - if len(t) == 0 { - return false - } - tb := t[0] - if tb < utf8.RuneSelf { - if sb != tb { - sbUpper := sb & caseMask - if 'A' <= sbUpper && sbUpper <= 'Z' { - if sbUpper != tb&caseMask { - return false - } - } else { - return false - } - } - t = t[1:] - continue - } - // sb is ASCII and t is not. t must be either kelvin - // sign or long s; sb must be s, S, k, or K. - tr, size := utf8.DecodeRune(t) - switch sb { - case 's', 'S': - if tr != smallLongEss { - return false - } - case 'k', 'K': - if tr != kelvin { - return false - } - default: - return false - } - t = t[size:] - - } - return len(t) <= 0 -} - -// asciiEqualFold is a specialization of bytes.EqualFold for use when -// s is all ASCII (but may contain non-letters) and contains no -// special-folding letters. -// See comments on foldFunc. 
-func asciiEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, sb := range s { - tb := t[i] - if sb == tb { - continue - } - if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') { - if sb&caseMask != tb&caseMask { - return false - } - } else { - return false - } - } - return true -} - -// simpleLetterEqualFold is a specialization of bytes.EqualFold for -// use when s is all ASCII letters (no underscores, etc) and also -// doesn't contain 'k', 'K', 's', or 'S'. -// See comments on foldFunc. -func simpleLetterEqualFold(s, t []byte) bool { - if len(s) != len(t) { - return false - } - for i, b := range s { - if b&caseMask != t[i]&caseMask { - return false - } - } - return true -} - -// tagOptions is the string following a comma in a struct field's "json" -// tag, or the empty string. It does not include the leading comma. -type tagOptions string - -// parseTag splits a struct field's json tag into its name and -// comma-separated options. -func parseTag(tag string) (string, tagOptions) { - if idx := strings.Index(tag, ","); idx != -1 { - return tag[:idx], tagOptions(tag[idx+1:]) - } - return tag, tagOptions("") -} - -// Contains reports whether a comma-separated list of options -// contains a particular substr flag. substr must be surrounded by a -// string boundary or commas. -func (o tagOptions) Contains(optionName string) bool { - if len(o) == 0 { - return false - } - s := string(o) - for s != "" { - var next string - i := strings.Index(s, ",") - if i >= 0 { - s, next = s[:i], s[i+1:] - } - if s == optionName { - return true - } - s = next - } - return false -} diff --git a/vendor/github.com/invopop/yaml/yaml.go b/vendor/github.com/invopop/yaml/yaml.go deleted file mode 100644 index 805d515d..00000000 --- a/vendor/github.com/invopop/yaml/yaml.go +++ /dev/null @@ -1,314 +0,0 @@ -// Package yaml provides a wrapper around go-yaml designed to enable a better -// way of handling YAML when marshaling to and from structs. 
-// -// In short, this package first converts YAML to JSON using go-yaml and then -// uses json.Marshal and json.Unmarshal to convert to or from the struct. This -// means that it effectively reuses the JSON struct tags as well as the custom -// JSON methods MarshalJSON and UnmarshalJSON unlike go-yaml. -// -package yaml // import "github.com/invopop/yaml" - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "reflect" - "strconv" - - "gopkg.in/yaml.v3" -) - -// Marshal the object into JSON then converts JSON to YAML and returns the -// YAML. -func Marshal(o interface{}) ([]byte, error) { - j, err := json.Marshal(o) - if err != nil { - return nil, fmt.Errorf("error marshaling into JSON: %v", err) - } - - y, err := JSONToYAML(j) - if err != nil { - return nil, fmt.Errorf("error converting JSON to YAML: %v", err) - } - - return y, nil -} - -// JSONOpt is a decoding option for decoding from JSON format. -type JSONOpt func(*json.Decoder) *json.Decoder - -// Unmarshal converts YAML to JSON then uses JSON to unmarshal into an object, -// optionally configuring the behavior of the JSON unmarshal. -func Unmarshal(y []byte, o interface{}, opts ...JSONOpt) error { - dec := yaml.NewDecoder(bytes.NewReader(y)) - return unmarshal(dec, o, opts) -} - -func unmarshal(dec *yaml.Decoder, o interface{}, opts []JSONOpt) error { - vo := reflect.ValueOf(o) - j, err := yamlToJSON(dec, &vo) - if err != nil { - return fmt.Errorf("error converting YAML to JSON: %v", err) - } - - err = jsonUnmarshal(bytes.NewReader(j), o, opts...) - if err != nil { - return fmt.Errorf("error unmarshaling JSON: %v", err) - } - - return nil -} - -// jsonUnmarshal unmarshals the JSON byte stream from the given reader into the -// object, optionally applying decoder options prior to decoding. We are not -// using json.Unmarshal directly as we want the chance to pass in non-default -// options. 
-func jsonUnmarshal(r io.Reader, o interface{}, opts ...JSONOpt) error { - d := json.NewDecoder(r) - for _, opt := range opts { - d = opt(d) - } - if err := d.Decode(&o); err != nil { - return fmt.Errorf("while decoding JSON: %v", err) - } - return nil -} - -// JSONToYAML converts JSON to YAML. -func JSONToYAML(j []byte) ([]byte, error) { - // Convert the JSON to an object. - var jsonObj interface{} - // We are using yaml.Unmarshal here (instead of json.Unmarshal) because the - // Go JSON library doesn't try to pick the right number type (int, float, - // etc.) when unmarshalling to interface{}, it just picks float64 - // universally. go-yaml does go through the effort of picking the right - // number type, so we can preserve number type throughout this process. - err := yaml.Unmarshal(j, &jsonObj) - if err != nil { - return nil, err - } - - // Marshal this object into YAML. - return yaml.Marshal(jsonObj) -} - -// YAMLToJSON converts YAML to JSON. Since JSON is a subset of YAML, -// passing JSON through this method should be a no-op. -// -// Things YAML can do that are not supported by JSON: -// * In YAML you can have binary and null keys in your maps. These are invalid -// in JSON. (int and float keys are converted to strings.) -// * Binary data in YAML with the !!binary tag is not supported. If you want to -// use binary data with this library, encode the data as base64 as usual but do -// not use the !!binary tag in your YAML. This will ensure the original base64 -// encoded data makes it all the way through to the JSON. -// -func YAMLToJSON(y []byte) ([]byte, error) { //nolint:revive - dec := yaml.NewDecoder(bytes.NewReader(y)) - return yamlToJSON(dec, nil) -} - -func yamlToJSON(dec *yaml.Decoder, jsonTarget *reflect.Value) ([]byte, error) { - // Convert the YAML to an object. - var yamlObj interface{} - if err := dec.Decode(&yamlObj); err != nil { - // Functionality changed in v3 which means we need to ignore EOF error. 
- // See https://github.com/go-yaml/yaml/issues/639 - if !errors.Is(err, io.EOF) { - return nil, err - } - } - - // YAML objects are not completely compatible with JSON objects (e.g. you - // can have non-string keys in YAML). So, convert the YAML-compatible object - // to a JSON-compatible object, failing with an error if irrecoverable - // incompatibilities happen along the way. - jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget) - if err != nil { - return nil, err - } - - // Convert this object to JSON and return the data. - return json.Marshal(jsonObj) -} - -func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) { //nolint:gocyclo - var err error - - // Resolve jsonTarget to a concrete value (i.e. not a pointer or an - // interface). We pass decodingNull as false because we're not actually - // decoding into the value, we're just checking if the ultimate target is a - // string. - if jsonTarget != nil { - ju, tu, pv := indirect(*jsonTarget, false) - // We have a JSON or Text Umarshaler at this level, so we can't be trying - // to decode into a string. - if ju != nil || tu != nil { - jsonTarget = nil - } else { - jsonTarget = &pv - } - } - - // go-yaml v3 changed from v2 and now will provide map[string]interface{} by - // default and map[interface{}]interface{} when none of the keys strings. - // To get around this, we run a pre-loop to convert the map. - // JSON only supports strings as keys, so we must convert. - - switch typedYAMLObj := yamlObj.(type) { - case map[interface{}]interface{}: - // From my reading of go-yaml v2 (specifically the resolve function), - // keys can only have the types string, int, int64, float64, binary - // (unsupported), or null (unsupported). - strMap := make(map[string]interface{}) - for k, v := range typedYAMLObj { - // Resolve the key to a string first. 
- var keyString string - switch typedKey := k.(type) { - case string: - keyString = typedKey - case int: - keyString = strconv.Itoa(typedKey) - case int64: - // go-yaml will only return an int64 as a key if the system - // architecture is 32-bit and the key's value is between 32-bit - // and 64-bit. Otherwise the key type will simply be int. - keyString = strconv.FormatInt(typedKey, 10) - case float64: - // Float64 is now supported in keys - keyString = strconv.FormatFloat(typedKey, 'g', -1, 64) - case bool: - if typedKey { - keyString = "true" - } else { - keyString = "false" - } - default: - return nil, fmt.Errorf("unsupported map key of type: %s, key: %+#v, value: %+#v", - reflect.TypeOf(k), k, v) - } - strMap[keyString] = v - } - // replace yamlObj with our new string map - yamlObj = strMap - } - - // If yamlObj is a number or a boolean, check if jsonTarget is a string - - // if so, coerce. Else return normal. - // If yamlObj is a map or array, find the field that each key is - // unmarshaling to, and when you recurse pass the reflect.Value for that - // field back into this function. - switch typedYAMLObj := yamlObj.(type) { - case map[string]interface{}: - for k, v := range typedYAMLObj { - - // jsonTarget should be a struct or a map. If it's a struct, find - // the field it's going to map to and pass its reflect.Value. If - // it's a map, find the element type of the map and pass the - // reflect.Value created from that type. If it's neither, just pass - // nil - JSON conversion will error for us if it's a real issue. - if jsonTarget != nil { - t := *jsonTarget - if t.Kind() == reflect.Struct { - keyBytes := []byte(k) - // Find the field that the JSON library would use. - var f *field - fields := cachedTypeFields(t.Type()) - for i := range fields { - ff := &fields[i] - if bytes.Equal(ff.nameBytes, keyBytes) { - f = ff - break - } - // Do case-insensitive comparison. 
- if f == nil && ff.equalFold(ff.nameBytes, keyBytes) { - f = ff - } - } - if f != nil { - // Find the reflect.Value of the most preferential - // struct field. - jtf := t.Field(f.index[0]) - typedYAMLObj[k], err = convertToJSONableObject(v, &jtf) - if err != nil { - return nil, err - } - continue - } - } else if t.Kind() == reflect.Map { - // Create a zero value of the map's element type to use as - // the JSON target. - jtv := reflect.Zero(t.Type().Elem()) - typedYAMLObj[k], err = convertToJSONableObject(v, &jtv) - if err != nil { - return nil, err - } - continue - } - } - typedYAMLObj[k], err = convertToJSONableObject(v, nil) - if err != nil { - return nil, err - } - } - return typedYAMLObj, nil - case []interface{}: - // We need to recurse into arrays in case there are any - // map[interface{}]interface{}'s inside and to convert any - // numbers to strings. - - // If jsonTarget is a slice (which it really should be), find the - // thing it's going to map to. If it's not a slice, just pass nil - // - JSON conversion will error for us if it's a real issue. - var jsonSliceElemValue *reflect.Value - if jsonTarget != nil { - t := *jsonTarget - if t.Kind() == reflect.Slice { - // By default slices point to nil, but we need a reflect.Value - // pointing to a value of the slice type, so we create one here. - ev := reflect.Indirect(reflect.New(t.Type().Elem())) - jsonSliceElemValue = &ev - } - } - - // Make and use a new array. - arr := make([]interface{}, len(typedYAMLObj)) - for i, v := range typedYAMLObj { - arr[i], err = convertToJSONableObject(v, jsonSliceElemValue) - if err != nil { - return nil, err - } - } - return arr, nil - default: - // If the target type is a string and the YAML type is a number, - // convert the YAML type to a string. - if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String { - // Based on my reading of go-yaml, it may return int, int64, - // float64, or uint64. 
- var s string - switch typedVal := typedYAMLObj.(type) { - case int: - s = strconv.FormatInt(int64(typedVal), 10) - case int64: - s = strconv.FormatInt(typedVal, 10) - case float64: - s = strconv.FormatFloat(typedVal, 'g', -1, 64) - case uint64: - s = strconv.FormatUint(typedVal, 10) - case bool: - if typedVal { - s = "true" - } else { - s = "false" - } - } - if len(s) > 0 { - yamlObj = interface{}(s) - } - } - return yamlObj, nil - } -} diff --git a/vendor/github.com/mohae/deepcopy/.travis.yml b/vendor/github.com/mohae/deepcopy/.travis.yml deleted file mode 100644 index fd47a8cf..00000000 --- a/vendor/github.com/mohae/deepcopy/.travis.yml +++ /dev/null @@ -1,11 +0,0 @@ -language: go - -go: - - tip - -matrix: - allow_failures: - - go: tip - -script: - - go test ./... diff --git a/vendor/github.com/mohae/deepcopy/README.md b/vendor/github.com/mohae/deepcopy/README.md deleted file mode 100644 index f8184188..00000000 --- a/vendor/github.com/mohae/deepcopy/README.md +++ /dev/null @@ -1,8 +0,0 @@ -deepCopy -======== -[![GoDoc](https://godoc.org/github.com/mohae/deepcopy?status.svg)](https://godoc.org/github.com/mohae/deepcopy)[![Build Status](https://travis-ci.org/mohae/deepcopy.png)](https://travis-ci.org/mohae/deepcopy) - -DeepCopy makes deep copies of things: unexported field values are not copied. - -## Usage - cpy := deepcopy.Copy(orig) diff --git a/vendor/github.com/mohae/deepcopy/deepcopy.go b/vendor/github.com/mohae/deepcopy/deepcopy.go deleted file mode 100644 index ba763ad0..00000000 --- a/vendor/github.com/mohae/deepcopy/deepcopy.go +++ /dev/null @@ -1,125 +0,0 @@ -// deepcopy makes deep copies of things. A standard copy will copy the -// pointers: deep copy copies the values pointed to. Unexported field -// values are not copied. -// -// Copyright (c)2014-2016, Joel Scoble (github.com/mohae), all rights reserved. -// License: MIT, for more details check the included LICENSE file. 
-package deepcopy - -import ( - "reflect" - "time" -) - -// Interface for delegating copy process to type -type Interface interface { - DeepCopy() interface{} -} - -// Iface is an alias to Copy; this exists for backwards compatibility reasons. -func Iface(iface interface{}) interface{} { - return Copy(iface) -} - -// Copy creates a deep copy of whatever is passed to it and returns the copy -// in an interface{}. The returned value will need to be asserted to the -// correct type. -func Copy(src interface{}) interface{} { - if src == nil { - return nil - } - - // Make the interface a reflect.Value - original := reflect.ValueOf(src) - - // Make a copy of the same type as the original. - cpy := reflect.New(original.Type()).Elem() - - // Recursively copy the original. - copyRecursive(original, cpy) - - // Return the copy as an interface. - return cpy.Interface() -} - -// copyRecursive does the actual copying of the interface. It currently has -// limited support for what it can handle. Add as needed. -func copyRecursive(original, cpy reflect.Value) { - // check for implement deepcopy.Interface - if original.CanInterface() { - if copier, ok := original.Interface().(Interface); ok { - cpy.Set(reflect.ValueOf(copier.DeepCopy())) - return - } - } - - // handle according to original's Kind - switch original.Kind() { - case reflect.Ptr: - // Get the actual value being pointed to. - originalValue := original.Elem() - - // if it isn't valid, return. - if !originalValue.IsValid() { - return - } - cpy.Set(reflect.New(originalValue.Type())) - copyRecursive(originalValue, cpy.Elem()) - - case reflect.Interface: - // If this is a nil, don't do anything - if original.IsNil() { - return - } - // Get the value for the interface, not the pointer. - originalValue := original.Elem() - - // Get the value by calling Elem(). 
- copyValue := reflect.New(originalValue.Type()).Elem() - copyRecursive(originalValue, copyValue) - cpy.Set(copyValue) - - case reflect.Struct: - t, ok := original.Interface().(time.Time) - if ok { - cpy.Set(reflect.ValueOf(t)) - return - } - // Go through each field of the struct and copy it. - for i := 0; i < original.NumField(); i++ { - // The Type's StructField for a given field is checked to see if StructField.PkgPath - // is set to determine if the field is exported or not because CanSet() returns false - // for settable fields. I'm not sure why. -mohae - if original.Type().Field(i).PkgPath != "" { - continue - } - copyRecursive(original.Field(i), cpy.Field(i)) - } - - case reflect.Slice: - if original.IsNil() { - return - } - // Make a new slice and copy each element. - cpy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap())) - for i := 0; i < original.Len(); i++ { - copyRecursive(original.Index(i), cpy.Index(i)) - } - - case reflect.Map: - if original.IsNil() { - return - } - cpy.Set(reflect.MakeMap(original.Type())) - for _, key := range original.MapKeys() { - originalValue := original.MapIndex(key) - copyValue := reflect.New(originalValue.Type()).Elem() - copyRecursive(originalValue, copyValue) - copyKey := Copy(key.Interface()) - cpy.SetMapIndex(reflect.ValueOf(copyKey), copyValue) - } - - default: - cpy.Set(original) - } -} diff --git a/vendor/github.com/perimeterx/marshmallow/.gitignore b/vendor/github.com/perimeterx/marshmallow/.gitignore deleted file mode 100644 index cf53c0a1..00000000 --- a/vendor/github.com/perimeterx/marshmallow/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/.idea - -coverage.out -profile.out diff --git a/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md b/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md deleted file mode 100644 index 92937d05..00000000 --- a/vendor/github.com/perimeterx/marshmallow/CHANGELOG.md +++ /dev/null @@ -1,49 +0,0 @@ -# Changelog - -All notable changes to this project will be 
documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). - -## [[1.1.5](https://github.com/PerimeterX/marshmallow/compare/v1.1.4...v1.1.5)] - 2023-07-03 - -### Added - -- Support for reporting errors from `HandleJSONData` - [info](https://github.com/PerimeterX/marshmallow/issues/27). - -## [[1.1.4](https://github.com/PerimeterX/marshmallow/compare/v1.1.3...v1.1.4)] - 2022-11-10 - -### Fixed - -- Fixed problem with nested object implementing JSONDataHandler with skipPopulateStruct - [info](https://github.com/PerimeterX/marshmallow/issues/18). -- Fixed problem with nested object implementing JSONDataHandler with skipPopulateStruct in ModeFailOverToOriginalValue - [info](https://github.com/PerimeterX/marshmallow/issues/19). - -## [[1.1.3](https://github.com/PerimeterX/marshmallow/compare/v1.1.2...v1.1.3)] - 2022-08-31 - -### Added - -- Support for excluding known fields from the result map - [info](https://github.com/PerimeterX/marshmallow/issues/16). - -## [[1.1.2](https://github.com/PerimeterX/marshmallow/compare/v1.1.1...v1.1.2)] - 2022-08-23 - -### Added - -- Support capturing nested unknown fields - [info](https://github.com/PerimeterX/marshmallow/issues/15). - -## [[1.1.1](https://github.com/PerimeterX/marshmallow/compare/v1.1.0...v1.1.1)] - 2022-08-21 - -### Fixed - -- Fix parsing bug for unknown nested fields - [info](https://github.com/PerimeterX/marshmallow/issues/12). - -## [[1.1.0](https://github.com/PerimeterX/marshmallow/compare/v0.0.1...v1.1.0)] - 2022-07-10 - -### Fixed - -- Fixed an issue with embedded fields - [info](https://github.com/PerimeterX/marshmallow/issues/9). 
- -## [[0.0.1](https://github.com/PerimeterX/marshmallow/tree/v0.0.1)] - 2022-04-21 - -### Added - -- All functionality from our internal repository, after it has been stabilized on production for several months - [info](https://www.perimeterx.com/tech-blog/2022/boosting-up-json-performance-of-unstructured-structs-in-go/). diff --git a/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md b/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md deleted file mode 100644 index 0f6c45e7..00000000 --- a/vendor/github.com/perimeterx/marshmallow/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,133 +0,0 @@ - -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, caste, color, religion, or sexual -identity and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community. 
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the overall - community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or advances of - any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email address, - without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. 
- -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement at -[opensource-conduct@humansecurity.com](mailto:opensource-conduct@humansecurity.com). -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series of -actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or permanent -ban. - -### 3. Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. 
Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within the -community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.1, available at -[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. - -Community Impact Guidelines were inspired by -[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. - -For answers to common questions about this code of conduct, see the FAQ at -[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at -[https://www.contributor-covenant.org/translations][translations]. - -[homepage]: https://www.contributor-covenant.org -[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html -[Mozilla CoC]: https://github.com/mozilla/diversity -[FAQ]: https://www.contributor-covenant.org/faq -[translations]: https://www.contributor-covenant.org/translations diff --git a/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md b/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md deleted file mode 100644 index a265c9ab..00000000 --- a/vendor/github.com/perimeterx/marshmallow/CONTRIBUTING.md +++ /dev/null @@ -1,47 +0,0 @@ -# How To Contribute - -We'd love to accept your patches and contributions to this project. There are just a few guidelines you need to follow which are described in detail below. - -## 1. Fork this repo - -You should create a fork of this project in your account and work from there. You can create a fork by clicking the fork button in GitHub. - -## 2. One feature, one branch - -Work for each new feature/issue should occur in its own branch. 
To create a new branch from the command line: -```shell -git checkout -b my-new-feature -``` -where "my-new-feature" describes what you're working on. - -## 3. Add unit tests -If your contribution modifies existing or adds new code please add corresponding unit tests for this. - -## 4. Ensure that the build passes - -Run -```shell -go test -v -``` -and check that there are no errors. - -## 5. Add documentation for new or updated functionality - -Please review the [README.md](README.md) file in this project to see if they are impacted by your change and update them accordingly. - -## 6. Add to CHANGELOG.md - -Any notable changes should be recorded in the CHANGELOG.md following the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) conventions. - -## 7. Submit a pull request and describe the change - -Push your changes to your branch and open a pull request against the parent repo on GitHub. The project administrators will review your pull request and respond with feedback. - -# How your contribution gets merged - -Upon pull request submission, your code will be reviewed by the maintainers. They will confirm at least the following: - -- Tests run successfully (unit, coverage, style). -- Contribution policy has been followed. - -A (human) reviewer will need to sign off on your pull request before it can be merged. 
diff --git a/vendor/github.com/perimeterx/marshmallow/README.md b/vendor/github.com/perimeterx/marshmallow/README.md deleted file mode 100644 index bfa90363..00000000 --- a/vendor/github.com/perimeterx/marshmallow/README.md +++ /dev/null @@ -1,205 +0,0 @@ -# Marshmallow - -![Marshmallow Campfire](https://raw.githubusercontent.com/PerimeterX/marshmallow/assets/campfire.png) - -[![CodeQL Status](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/codeql.yml?branch=main&logo=github&label=CodeQL)](https://github.com/PerimeterX/marshmallow/actions/workflows/codeql.yml?query=branch%3Amain++) -[![Run Tests](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/go.yml?branch=main&logo=github&label=Run%20Tests)](https://github.com/PerimeterX/marshmallow/actions/workflows/go.yml?query=branch%3Amain) -[![Dependency Review](https://img.shields.io/github/actions/workflow/status/perimeterx/marshmallow/dependency-review.yml?logo=github&label=Dependency%20Review)](https://github.com/PerimeterX/marshmallow/actions/workflows/dependency-review.yml?query=branch%3Amain) -[![Go Report Card](https://goreportcard.com/badge/github.com/perimeterx/marshmallow)](https://goreportcard.com/report/github.com/perimeterx/marshmallow) -![Manual Code Coverage](https://img.shields.io/badge/coverage-92.6%25-green) -[![Go Reference](https://pkg.go.dev/badge/github.com/perimeterx/marshmallow.svg)](https://pkg.go.dev/github.com/perimeterx/marshmallow) -[![Licence](https://img.shields.io/github/license/perimeterx/marshmallow)](LICENSE) -[![Latest Release](https://img.shields.io/github/v/release/perimeterx/marshmallow)](https://github.com/PerimeterX/marshmallow/releases) -![Top Languages](https://img.shields.io/github/languages/top/perimeterx/marshmallow) -[![Issues](https://img.shields.io/github/issues-closed/perimeterx/marshmallow?color=%238250df&logo=github)](https://github.com/PerimeterX/marshmallow/issues) -[![Pull 
Requests](https://img.shields.io/github/issues-pr-closed-raw/perimeterx/marshmallow?color=%238250df&label=merged%20pull%20requests&logo=github)](https://github.com/PerimeterX/marshmallow/pulls) -[![Commits](https://img.shields.io/github/last-commit/perimeterx/marshmallow)](https://github.com/PerimeterX/marshmallow/commits/main) -[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md) - -marshmallow-gopher - -Marshmallow package provides a simple API to perform flexible and performant JSON unmarshalling in Go. - -Marshmallow specializes in dealing with **unstructured struct** - when some fields are known and some aren't, -with zero performance overhead nor extra coding needed. -While unmarshalling, marshmallow allows fully retaining the original data and access -it via a typed struct and a dynamic map. - -## Contents - -- [Install](#install) -- [Usage](#usage) -- [Performance Benchmark And Alternatives](#performance-benchmark-and-alternatives) -- [When Should I Use Marshmallow](#when-should-i-use-marshmallow) -- [API](#api) - -## Install - -```sh -go get -u github.com/perimeterx/marshmallow -``` - -## Usage - -```go -package main - -import ( - "fmt" - "github.com/perimeterx/marshmallow" -) - -func main() { - v := struct { - Foo string `json:"foo"` - Boo []int `json:"boo"` - }{} - result, err := marshmallow.Unmarshal([]byte(`{"foo":"bar","boo":[1,2,3],"goo":12.6}`), &v) - fmt.Printf("v=%+v, result=%+v, err=%v", v, result, err) - // Output: v={Foo:bar Boo:[1 2 3]}, result=map[boo:[1 2 3] foo:bar goo:12.6], err= -} -``` - -**Examples can be found [here](example_test.go)** - -## Performance Benchmark And Alternatives - -Marshmallow performs best when dealing with mixed data - when some fields are known and some are unknown. -More info [below](#when-should-i-use-marshmallow). -Other solutions are available for this kind of use case, each solution is explained and documented in the link below. 
-The full benchmark test can be found -[here](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go). - -|Benchmark|Iterations|Time/Iteration|Bytes Allocated|Allocations| -|--|--|--|--|--| -|[unmarshall twice](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L40)|228693|5164 ns/op|1640 B/op|51 allocs/op| -|[raw map](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L66)|232236|5116 ns/op|2296 B/op|53 allocs/op| -|[go codec](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L121)|388442|3077 ns/op|2512 B/op|37 allocs/op| -|[marshmallow](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L16)|626168|1853 ns/op|608 B/op|18 allocs/op| -|[marshmallow without populating struct](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L162)|678616|1751 ns/op|608 B/op|18 allocs/op| - -![marshmallow performance comparison](https://raw.githubusercontent.com/PerimeterX/marshmallow/e45088ca20d4ea5be4143d418d12da63a68d6dfd/performance-chart.svg) - -**Marshmallow provides the best performance (up to X3 faster) while not requiring any extra coding.** -In fact, marshmallow performs as fast as normal `json.Unmarshal` call, however, such a call causes loss of data for all -the fields that did not match the given struct. With marshmallow you never lose any data. 
- -|Benchmark|Iterations|Time/Iteration|Bytes Allocated|Allocations| -|--|--|--|--|--| -|[marshmallow](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L16)|626168|1853 ns/op|608 B/op|18 allocs/op| -|[native library](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L143)|652106|1845 ns/op|304 B/op|11 allocs/op| -|[marshmallow without populating struct](https://github.com/PerimeterX/marshmallow/blob/8c5bba9e6dc0033f4324eca554737089a99f6e5e/benchmark_test.go#L162)|678616|1751 ns/op|608 B/op|18 allocs/op| - -## When Should I Use Marshmallow - -Marshmallow is best suited for use cases where you are interested in all the input data, but you have predetermined -information only about a subset of it. For instance, if you plan to reference two specific fields from the data, then -iterate all the data and apply some generic logic. How does it look with the native library: - -```go -func isAllowedToDrive(data []byte) (bool, error) { - result := make(map[string]interface{}) - err := json.Unmarshal(data, &result) - if err != nil { - return false, err - } - - age, ok := result["age"] - if !ok { - return false, nil - } - a, ok := age.(float64) - if !ok { - return false, nil - } - if a < 17 { - return false, nil - } - - hasDriversLicense, ok := result["has_drivers_license"] - if !ok { - return false, nil - } - h, ok := hasDriversLicense.(bool) - if !ok { - return false, nil - } - if !h { - return false, nil - } - - for key := range result { - if strings.Contains(key, "prior_conviction") { - return false, nil - } - } - - return true, nil -} -``` - -And with marshmallow: - -```go -func isAllowedToDrive(data []byte) (bool, error) { - v := struct { - Age int `json:"age"` - HasDriversLicense bool `json:"has_drivers_license"` - }{} - result, err := marshmallow.Unmarshal(data, &v) - if err != nil { - return false, err - } - - if v.Age < 17 || !v.HasDriversLicense { - return 
false, nil - } - - for key := range result { - if strings.Contains(key, "prior_conviction") { - return false, nil - } - } - - return true, nil -} -``` - -## API - -Marshmallow exposes two main API functions - -[Unmarshal](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/unmarshal.go#L27) -and -[UnmarshalFromJSONMap](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/unmarshal_from_json_map.go#L37). -While unmarshalling, marshmallow supports the following optional options: - -* Setting the mode for handling invalid data using the [WithMode](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/options.go#L30) function. -* Excluding known fields from the result map using the [WithExcludeKnownFieldsFromMap](https://github.com/PerimeterX/marshmallow/blob/457669ae9973895584f2636eabfc104140d3b700/options.go#L50) function. -* Skipping struct population to boost performance using the [WithSkipPopulateStruct](https://github.com/PerimeterX/marshmallow/blob/0e0218ab860be8a4b5f57f5ff239f281c250c5da/options.go#L41) function. - -In order to capture unknown nested fields, structs must implement [JSONDataErrorHandler](https://github.com/PerimeterX/marshmallow/blob/195c994aa6e3e0852601ad9cf65bcddef0dd7479/options.go#L76). -More info [here](https://github.com/PerimeterX/marshmallow/issues/15). - -Marshmallow also supports caching of refection information using -[EnableCache](https://github.com/PerimeterX/marshmallow/blob/d3500aa5b0f330942b178b155da933c035dd3906/cache.go#L40) -and -[EnableCustomCache](https://github.com/PerimeterX/marshmallow/blob/d3500aa5b0f330942b178b155da933c035dd3906/cache.go#L35). - -## Contact and Contribute - -Reporting issues and requesting features may be done in our [GitHub issues page](https://github.com/PerimeterX/marshmallow/issues). 
-Discussions may be conducted in our [GitHub discussions page](https://github.com/PerimeterX/marshmallow/discussions). -For any further questions or comments you can reach us out at [open-source@humansecurity.com](mailto:open-source@humansecurity.com). - -Any type of contribution is warmly welcome and appreciated ❤️ -Please read our [contribution](CONTRIBUTING.md) guide for more info. - -If you're looking for something to get started with, tou can always follow our [issues page](https://github.com/PerimeterX/marshmallow/issues) and look for -[good first issue](https://github.com/PerimeterX/marshmallow/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) and -[help wanted](https://github.com/PerimeterX/marshmallow/issues?q=is%3Aissue+label%3A%22help+wanted%22+is%3Aopen) labels. - -## Marshmallow Logo - -Marshmallow logo and assets by [Adva Rom](https://www.linkedin.com/in/adva-rom-7a6738127/) are licensed under a Creative Commons Attribution 4.0 International License.
- -![Marshmallow Logo](https://raw.githubusercontent.com/PerimeterX/marshmallow/assets/marshmallow.png) diff --git a/vendor/github.com/perimeterx/marshmallow/cache.go b/vendor/github.com/perimeterx/marshmallow/cache.go deleted file mode 100644 index a67cea6d..00000000 --- a/vendor/github.com/perimeterx/marshmallow/cache.go +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. -// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. - -package marshmallow - -import ( - "reflect" - "sync" -) - -// Cache allows unmarshalling to use a cached version of refection information about types. -// Cache interface follows the implementation of sync.Map, but you may wrap any cache implementation -// to match it. This allows you to control max cache size, eviction policies and any other caching aspect. -type Cache interface { - // Load returns the value stored in the map for a key, or nil if no value is present. - // The ok result indicates whether value was found in the map. - Load(key interface{}) (interface{}, bool) - // Store sets the value for a key. - Store(key, value interface{}) -} - -// EnableCustomCache enables unmarshalling cache. It allows reuse of refection information about types needed -// to perform the unmarshalling. A use of such cache can boost up unmarshalling by x1.4. -// Check out benchmark_test.go for an example. -// -// EnableCustomCache is not thread safe! Do not use it while performing unmarshalling, or it will -// cause an unsafe race condition. Typically, EnableCustomCache should be called once when the process boots. -// -// Caching is disabled by default. The use of this function allows enabling it and controlling the -// behavior of the cache. Typically, the use of sync.Map should be good enough. The caching mechanism -// stores a single map per struct type. 
If you plan to unmarshal a huge amount of distinct -// struct it may get to consume a lot of resources, in which case you have the control to choose -// the caching implementation you like and its setup. -func EnableCustomCache(c Cache) { - cache = c -} - -// EnableCache enables unmarshalling cache with default implementation. More info at EnableCustomCache. -func EnableCache() { - EnableCustomCache(&sync.Map{}) -} - -var cache Cache - -func cacheLookup(t reflect.Type) map[string]reflectionInfo { - if cache == nil { - return nil - } - value, exists := cache.Load(t) - if !exists { - return nil - } - result, _ := value.(map[string]reflectionInfo) - return result -} - -func cacheStore(t reflect.Type, fields map[string]reflectionInfo) { - if cache == nil { - return - } - cache.Store(t, fields) -} diff --git a/vendor/github.com/perimeterx/marshmallow/doc.go b/vendor/github.com/perimeterx/marshmallow/doc.go deleted file mode 100644 index c179e657..00000000 --- a/vendor/github.com/perimeterx/marshmallow/doc.go +++ /dev/null @@ -1,10 +0,0 @@ -/* -Package marshmallow provides a simple API to perform flexible and performant JSON unmarshalling. -Unlike other packages, marshmallow supports unmarshalling of some known and some unknown fields -with zero performance overhead nor extra coding needed. While unmarshalling, -marshmallow allows fully retaining the original data and access it via a typed struct and a -dynamic map. - -https://github.com/perimeterx/marshmallow -*/ -package marshmallow diff --git a/vendor/github.com/perimeterx/marshmallow/errors.go b/vendor/github.com/perimeterx/marshmallow/errors.go deleted file mode 100644 index c4d341cc..00000000 --- a/vendor/github.com/perimeterx/marshmallow/errors.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. -// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. 
- -package marshmallow - -import ( - "errors" - "fmt" - "github.com/mailru/easyjson/jlexer" - "reflect" - "strings" -) - -var ( - // ErrInvalidInput indicates the input JSON is invalid - ErrInvalidInput = errors.New("invalid JSON input") - - // ErrInvalidValue indicates the target struct has invalid type - ErrInvalidValue = errors.New("unexpected non struct value") -) - -// MultipleLexerError indicates one or more unmarshalling errors during JSON bytes decode -type MultipleLexerError struct { - Errors []*jlexer.LexerError -} - -func (m *MultipleLexerError) Error() string { - errs := make([]string, len(m.Errors)) - for i, lexerError := range m.Errors { - errs[i] = lexerError.Error() - } - return strings.Join(errs, ", ") -} - -// MultipleError indicates one or more unmarshalling errors during JSON map decode -type MultipleError struct { - Errors []error -} - -func (m *MultipleError) Error() string { - errs := make([]string, len(m.Errors)) - for i, lexerError := range m.Errors { - errs[i] = lexerError.Error() - } - return strings.Join(errs, ", ") -} - -// ParseError indicates a JSON map decode error -type ParseError struct { - Reason string - Path string -} - -func (p *ParseError) Error() string { - return fmt.Sprintf("parse error: %s in %s", p.Reason, p.Path) -} - -func newUnexpectedTypeParseError(expectedType reflect.Type, path []string) *ParseError { - return &ParseError{ - Reason: fmt.Sprintf("expected type %s", externalTypeName(expectedType)), - Path: strings.Join(path, "."), - } -} - -func newUnsupportedTypeParseError(unsupportedType reflect.Type, path []string) *ParseError { - return &ParseError{ - Reason: fmt.Sprintf("unsupported type %s", externalTypeName(unsupportedType)), - Path: strings.Join(path, "."), - } -} - -func addUnexpectedTypeLexerError(lexer *jlexer.Lexer, expectedType reflect.Type) { - lexer.AddNonFatalError(fmt.Errorf("expected type %s", externalTypeName(expectedType))) -} - -func addUnsupportedTypeLexerError(lexer *jlexer.Lexer, 
unsupportedType reflect.Type) { - lexer.AddNonFatalError(fmt.Errorf("unsupported type %s", externalTypeName(unsupportedType))) -} - -func externalTypeName(t reflect.Type) string { - switch t.Kind() { - case reflect.String: - return "string" - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, - reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, - reflect.Float64, reflect.Complex64, reflect.Complex128: - return "number" - case reflect.Bool: - return "boolean" - case reflect.Array, reflect.Slice: - return "array" - case reflect.Interface: - return "any" - case reflect.Map, reflect.Struct: - return "object" - case reflect.Ptr: - return externalTypeName(t.Elem()) - } - return "invalid" -} diff --git a/vendor/github.com/perimeterx/marshmallow/options.go b/vendor/github.com/perimeterx/marshmallow/options.go deleted file mode 100644 index ff97d336..00000000 --- a/vendor/github.com/perimeterx/marshmallow/options.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. -// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. - -package marshmallow - -// Mode dictates the unmarshalling mode. -// Each mode is self documented below. -type Mode uint8 - -const ( - // ModeFailOnFirstError is the default mode. It makes unmarshalling terminate - // immediately on any kind of error. This error will then be returned. - ModeFailOnFirstError Mode = iota - - // ModeAllowMultipleErrors mode makes unmarshalling keep decoding even if - // errors are encountered. In case of such error, the erroneous value will be omitted from the result. - // Eventually, all errors will all be returned, alongside the partial result. - ModeAllowMultipleErrors - - // ModeFailOverToOriginalValue mode makes unmarshalling keep decoding even if - // errors are encountered. 
In case of such error, the original external value be placed in the - // result data, even though it does not meet the schematic requirements. - // Eventually, all errors will be returned, alongside the full result. Note that the result map - // will contain values that do not match the struct schema. - ModeFailOverToOriginalValue -) - -// WithMode is an UnmarshalOption function to set the unmarshalling mode. -func WithMode(mode Mode) UnmarshalOption { - return func(options *unmarshalOptions) { - options.mode = mode - } -} - -// WithSkipPopulateStruct is an UnmarshalOption function to set the skipPopulateStruct option. -// Skipping populate struct is set to false by default. -// If you do not intend to use the struct value once unmarshalling is finished, set this -// option to true to boost performance. This would mean the struct fields will not be set -// with values, but rather it will only be used as the target schema when populating the result map. -func WithSkipPopulateStruct(skipPopulateStruct bool) UnmarshalOption { - return func(options *unmarshalOptions) { - options.skipPopulateStruct = skipPopulateStruct - } -} - -// WithExcludeKnownFieldsFromMap is an UnmarshalOption function to set the excludeKnownFieldsFromMap option. -// Exclude known fields flag is set to false by default. 
-// When the flag is set to true, fields specified in the input struct (known fields) will be excluded from the result map -func WithExcludeKnownFieldsFromMap(excludeKnownFields bool) UnmarshalOption { - return func(options *unmarshalOptions) { - options.excludeKnownFieldsFromMap = excludeKnownFields - } -} - -type UnmarshalOption func(*unmarshalOptions) - -type unmarshalOptions struct { - mode Mode - skipPopulateStruct bool - excludeKnownFieldsFromMap bool -} - -func buildUnmarshalOptions(options []UnmarshalOption) *unmarshalOptions { - result := &unmarshalOptions{} - for _, option := range options { - option(result) - } - return result -} - -// JSONDataErrorHandler allow types to handle JSON data as maps. -// Types should implement this interface if they wish to act on the map representation of parsed JSON input. -// This is mainly used to allow nested objects to capture unknown fields and leverage marshmallow's abilities. -// If HandleJSONData returns an error, it will be propagated as an unmarshal error -type JSONDataErrorHandler interface { - HandleJSONData(data map[string]interface{}) error -} - -// Deprecated: use JSONDataErrorHandler instead -type JSONDataHandler interface { - HandleJSONData(data map[string]interface{}) -} - -func asJSONDataHandler(value interface{}) (func(map[string]interface{}) error, bool) { - if handler, ok := value.(JSONDataErrorHandler); ok { - return handler.HandleJSONData, true - } - if handler, ok := value.(JSONDataHandler); ok { - return func(m map[string]interface{}) error { - handler.HandleJSONData(m) - return nil - }, true - } - return nil, false -} diff --git a/vendor/github.com/perimeterx/marshmallow/reflection.go b/vendor/github.com/perimeterx/marshmallow/reflection.go deleted file mode 100644 index 9b7d88ce..00000000 --- a/vendor/github.com/perimeterx/marshmallow/reflection.go +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. 
-// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. - -package marshmallow - -import ( - "encoding/json" - "reflect" - "strings" -) - -var unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() - -type reflectionInfo struct { - path []int - t reflect.Type -} - -func (r reflectionInfo) field(target reflect.Value) reflect.Value { - current := target - for _, i := range r.path { - current = current.Field(i) - } - return current -} - -func mapStructFields(target interface{}) map[string]reflectionInfo { - t := reflectStructType(target) - result := cacheLookup(t) - if result != nil { - return result - } - result = make(map[string]reflectionInfo, t.NumField()) - mapTypeFields(t, result, nil) - cacheStore(t, result) - return result -} - -func mapTypeFields(t reflect.Type, result map[string]reflectionInfo, path []int) { - num := t.NumField() - for i := 0; i < num; i++ { - field := t.Field(i) - fieldPath := append(path, i) - if field.Anonymous && field.Type.Kind() == reflect.Struct { - mapTypeFields(field.Type, result, fieldPath) - continue - } - name := field.Tag.Get("json") - if name == "" || name == "-" { - continue - } - if index := strings.Index(name, ","); index > -1 { - name = name[:index] - } - result[name] = reflectionInfo{ - path: fieldPath, - t: field.Type, - } - } -} - -func reflectStructValue(target interface{}) reflect.Value { - v := reflect.ValueOf(target) - for v.Kind() == reflect.Ptr { - v = v.Elem() - } - return v -} - -func reflectStructType(target interface{}) reflect.Type { - t := reflect.TypeOf(target) - for t.Kind() == reflect.Ptr { - t = t.Elem() - } - return t -} - -var primitiveConverters = map[reflect.Kind]func(v interface{}) (interface{}, bool){ - reflect.Bool: func(v interface{}) (interface{}, bool) { - res, ok := v.(bool) - return res, ok - }, - reflect.Int: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return int(res), true - } - return v, false - 
}, - reflect.Int8: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return int8(res), true - } - return v, false - }, - reflect.Int16: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return int16(res), true - } - return v, false - }, - reflect.Int32: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return int32(res), true - } - return v, false - }, - reflect.Int64: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return int64(res), true - } - return v, false - }, - reflect.Uint: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return uint(res), true - } - return v, false - }, - reflect.Uint8: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return uint8(res), true - } - return v, false - }, - reflect.Uint16: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return uint16(res), true - } - return v, false - }, - reflect.Uint32: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return uint32(res), true - } - return v, false - }, - reflect.Uint64: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return uint64(res), true - } - return v, false - }, - reflect.Float32: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return float32(res), true - } - return v, false - }, - reflect.Float64: func(v interface{}) (interface{}, bool) { - res, ok := v.(float64) - if ok { - return res, true - } - return v, false - }, - reflect.Interface: func(v interface{}) (interface{}, bool) { - return v, true - }, - reflect.String: func(v interface{}) (interface{}, bool) { - res, ok := v.(string) - return res, ok - }, -} - -func assignValue(field reflect.Value, value interface{}) { - if value == nil { - return - } - reflectValue := reflect.ValueOf(value) - if 
reflectValue.Type().AssignableTo(field.Type()) { - field.Set(reflectValue) - } -} - -func isValidValue(v interface{}) bool { - value := reflect.ValueOf(v) - return value.Kind() == reflect.Ptr && value.Elem().Kind() == reflect.Struct && !value.IsNil() -} - -func safeReflectValue(t reflect.Type, v interface{}) reflect.Value { - if v == nil { - return reflect.Zero(t) - } - return reflect.ValueOf(v) -} diff --git a/vendor/github.com/perimeterx/marshmallow/unmarshal.go b/vendor/github.com/perimeterx/marshmallow/unmarshal.go deleted file mode 100644 index 160ea30c..00000000 --- a/vendor/github.com/perimeterx/marshmallow/unmarshal.go +++ /dev/null @@ -1,383 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. -// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. - -package marshmallow - -import ( - "encoding/json" - "github.com/mailru/easyjson/jlexer" - "reflect" -) - -// Unmarshal parses the JSON-encoded object in data and stores the values -// in the struct pointed to by v and in the returned map. -// If v is nil or not a pointer to a struct, Unmarshal returns an ErrInvalidValue. -// If data is not a valid JSON or not a JSON object Unmarshal returns an ErrInvalidInput. -// -// Unmarshal follows the rules of json.Unmarshal with the following exceptions: -// - All input fields are stored in the resulting map, including fields that do not exist in the -// struct pointed by v. -// - Unmarshal only operates on JSON object inputs. It will reject all other types of input -// by returning ErrInvalidInput. -// - Unmarshal only operates on struct values. It will reject all other types of v by -// returning ErrInvalidValue. -// - Unmarshal supports three types of Mode values. Each mode is self documented and affects -// how Unmarshal behaves. 
-func Unmarshal(data []byte, v interface{}, options ...UnmarshalOption) (map[string]interface{}, error) { - if !isValidValue(v) { - return nil, ErrInvalidValue - } - opts := buildUnmarshalOptions(options) - useMultipleErrors := opts.mode == ModeAllowMultipleErrors || opts.mode == ModeFailOverToOriginalValue - d := &decoder{options: opts, lexer: &jlexer.Lexer{Data: data, UseMultipleErrors: useMultipleErrors}} - result := make(map[string]interface{}) - if d.lexer.IsNull() { - d.lexer.Skip() - } else if !d.lexer.IsDelim('{') { - return nil, ErrInvalidInput - } else { - d.populateStruct(false, v, result) - } - d.lexer.Consumed() - if useMultipleErrors { - errors := d.lexer.GetNonFatalErrors() - if len(errors) == 0 { - return result, nil - } - return result, &MultipleLexerError{Errors: errors} - } - err := d.lexer.Error() - if err != nil { - return nil, err - } - return result, nil -} - -type decoder struct { - options *unmarshalOptions - lexer *jlexer.Lexer -} - -func (d *decoder) populateStruct(forcePopulate bool, structInstance interface{}, result map[string]interface{}) (interface{}, bool) { - doPopulate := !d.options.skipPopulateStruct || forcePopulate - var structValue reflect.Value - if doPopulate { - structValue = reflectStructValue(structInstance) - } - fields := mapStructFields(structInstance) - var clone map[string]interface{} - if d.options.mode == ModeFailOverToOriginalValue { - clone = make(map[string]interface{}, len(fields)) - } - d.lexer.Delim('{') - for !d.lexer.IsDelim('}') { - key := d.lexer.UnsafeFieldName(false) - d.lexer.WantColon() - refInfo, exists := fields[key] - if exists { - value, isValidType := d.valueByReflectType(refInfo.t) - if isValidType { - if value != nil && doPopulate { - field := refInfo.field(structValue) - assignValue(field, value) - } - if !d.options.excludeKnownFieldsFromMap { - if result != nil { - result[key] = value - } - if clone != nil { - clone[key] = value - } - } - } else { - switch d.options.mode { - case 
ModeFailOnFirstError: - return nil, false - case ModeFailOverToOriginalValue: - if !forcePopulate { - result[key] = value - } else { - clone[key] = value - d.lexer.WantComma() - d.drainLexerMap(clone) - return clone, false - } - } - } - } else { - value := d.lexer.Interface() - if result != nil { - result[key] = value - } - if clone != nil { - clone[key] = value - } - } - d.lexer.WantComma() - } - d.lexer.Delim('}') - return structInstance, true -} - -func (d *decoder) valueByReflectType(t reflect.Type) (interface{}, bool) { - if t.Implements(unmarshalerType) { - result := reflect.New(t.Elem()).Interface() - d.valueFromCustomUnmarshaler(result.(json.Unmarshaler)) - return result, true - } - if reflect.PtrTo(t).Implements(unmarshalerType) { - value := reflect.New(t) - d.valueFromCustomUnmarshaler(value.Interface().(json.Unmarshaler)) - return value.Elem().Interface(), true - } - kind := t.Kind() - if converter := primitiveConverters[kind]; converter != nil { - v := d.lexer.Interface() - if v == nil { - return nil, true - } - converted, ok := converter(v) - if !ok { - addUnexpectedTypeLexerError(d.lexer, t) - return v, false - } - return converted, true - } - switch kind { - case reflect.Slice: - return d.buildSlice(t) - case reflect.Array: - return d.buildArray(t) - case reflect.Map: - return d.buildMap(t) - case reflect.Struct: - value, valid := d.buildStruct(t) - if value == nil { - return nil, valid - } - if !valid { - return value, false - } - return reflect.ValueOf(value).Elem().Interface(), valid - case reflect.Ptr: - if t.Elem().Kind() == reflect.Struct { - return d.buildStruct(t.Elem()) - } - value, valid := d.valueByReflectType(t.Elem()) - if value == nil { - return nil, valid - } - if !valid { - return value, false - } - result := reflect.New(reflect.TypeOf(value)) - result.Elem().Set(reflect.ValueOf(value)) - return result.Interface(), valid - } - addUnsupportedTypeLexerError(d.lexer, t) - return nil, false -} - -func (d *decoder) buildSlice(sliceType 
reflect.Type) (interface{}, bool) { - if d.lexer.IsNull() { - d.lexer.Skip() - return nil, true - } - if !d.lexer.IsDelim('[') { - addUnexpectedTypeLexerError(d.lexer, sliceType) - return d.lexer.Interface(), false - } - elemType := sliceType.Elem() - d.lexer.Delim('[') - var sliceValue reflect.Value - if !d.lexer.IsDelim(']') { - sliceValue = reflect.MakeSlice(sliceType, 0, 4) - } else { - sliceValue = reflect.MakeSlice(sliceType, 0, 0) - } - for !d.lexer.IsDelim(']') { - current, valid := d.valueByReflectType(elemType) - if !valid { - if d.options.mode != ModeFailOverToOriginalValue { - d.drainLexerArray(nil) - return nil, true - } - result := d.cloneReflectArray(sliceValue, -1) - result = append(result, current) - return d.drainLexerArray(result), true - } - sliceValue = reflect.Append(sliceValue, safeReflectValue(elemType, current)) - d.lexer.WantComma() - } - d.lexer.Delim(']') - return sliceValue.Interface(), true -} - -func (d *decoder) buildArray(arrayType reflect.Type) (interface{}, bool) { - if d.lexer.IsNull() { - d.lexer.Skip() - return nil, true - } - if !d.lexer.IsDelim('[') { - addUnexpectedTypeLexerError(d.lexer, arrayType) - return d.lexer.Interface(), false - } - elemType := arrayType.Elem() - arrayValue := reflect.New(arrayType).Elem() - d.lexer.Delim('[') - for i := 0; !d.lexer.IsDelim(']'); i++ { - current, valid := d.valueByReflectType(elemType) - if !valid { - if d.options.mode != ModeFailOverToOriginalValue { - d.drainLexerArray(nil) - return nil, true - } - result := d.cloneReflectArray(arrayValue, i) - result = append(result, current) - return d.drainLexerArray(result), true - } - if current != nil { - arrayValue.Index(i).Set(reflect.ValueOf(current)) - } - d.lexer.WantComma() - } - d.lexer.Delim(']') - return arrayValue.Interface(), true -} - -func (d *decoder) buildMap(mapType reflect.Type) (interface{}, bool) { - if d.lexer.IsNull() { - d.lexer.Skip() - return nil, true - } - if !d.lexer.IsDelim('{') { - 
addUnexpectedTypeLexerError(d.lexer, mapType) - return d.lexer.Interface(), false - } - d.lexer.Delim('{') - keyType := mapType.Key() - valueType := mapType.Elem() - mapValue := reflect.MakeMap(mapType) - for !d.lexer.IsDelim('}') { - key, valid := d.valueByReflectType(keyType) - if !valid { - if d.options.mode != ModeFailOverToOriginalValue { - d.lexer.WantColon() - d.lexer.Interface() - d.lexer.WantComma() - d.drainLexerMap(make(map[string]interface{})) - return nil, true - } - strKey, _ := key.(string) - d.lexer.WantColon() - value := d.lexer.Interface() - result := d.cloneReflectMap(mapValue) - result[strKey] = value - d.lexer.WantComma() - d.drainLexerMap(result) - return result, true - } - d.lexer.WantColon() - value, valid := d.valueByReflectType(valueType) - if !valid { - if d.options.mode != ModeFailOverToOriginalValue { - d.lexer.WantComma() - d.drainLexerMap(make(map[string]interface{})) - return nil, true - } - strKey, _ := key.(string) - result := d.cloneReflectMap(mapValue) - result[strKey] = value - d.lexer.WantComma() - d.drainLexerMap(result) - return result, true - } - mapValue.SetMapIndex(safeReflectValue(keyType, key), safeReflectValue(valueType, value)) - d.lexer.WantComma() - } - d.lexer.Delim('}') - return mapValue.Interface(), true -} - -func (d *decoder) buildStruct(structType reflect.Type) (interface{}, bool) { - if d.lexer.IsNull() { - d.lexer.Skip() - return nil, true - } - if !d.lexer.IsDelim('{') { - addUnexpectedTypeLexerError(d.lexer, structType) - return d.lexer.Interface(), false - } - value := reflect.New(structType).Interface() - handler, ok := asJSONDataHandler(value) - if !ok { - return d.populateStruct(true, value, nil) - } - data := make(map[string]interface{}) - result, valid := d.populateStruct(true, value, data) - if !valid { - return result, false - } - err := handler(data) - if err != nil { - d.lexer.AddNonFatalError(err) - return result, false - } - return result, true -} - -func (d *decoder) 
valueFromCustomUnmarshaler(unmarshaler json.Unmarshaler) { - data := d.lexer.Raw() - if !d.lexer.Ok() { - return - } - err := unmarshaler.UnmarshalJSON(data) - if err != nil { - d.lexer.AddNonFatalError(err) - } -} - -func (d *decoder) cloneReflectArray(value reflect.Value, length int) []interface{} { - if length == -1 { - length = value.Len() - } - result := make([]interface{}, length) - for i := 0; i < length; i++ { - result[i] = value.Index(i).Interface() - } - return result -} - -func (d *decoder) cloneReflectMap(mapValue reflect.Value) map[string]interface{} { - l := mapValue.Len() - result := make(map[string]interface{}, l) - for _, key := range mapValue.MapKeys() { - value := mapValue.MapIndex(key) - strKey, _ := key.Interface().(string) - result[strKey] = value.Interface() - } - return result -} - -func (d *decoder) drainLexerArray(target []interface{}) interface{} { - d.lexer.WantComma() - for !d.lexer.IsDelim(']') { - current := d.lexer.Interface() - target = append(target, current) - d.lexer.WantComma() - } - d.lexer.Delim(']') - return target -} - -func (d *decoder) drainLexerMap(target map[string]interface{}) { - for !d.lexer.IsDelim('}') { - key := d.lexer.String() - d.lexer.WantColon() - value := d.lexer.Interface() - target[key] = value - d.lexer.WantComma() - } - d.lexer.Delim('}') -} diff --git a/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go b/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go deleted file mode 100644 index 0907f8f8..00000000 --- a/vendor/github.com/perimeterx/marshmallow/unmarshal_from_json_map.go +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright 2022 PerimeterX. All rights reserved. -// Use of this source code is governed by a MIT style -// license that can be found in the LICENSE file. - -package marshmallow - -import ( - "reflect" -) - -// UnmarshalerFromJSONMap is the interface implemented by types -// that can unmarshal a JSON description of themselves. 
-// In case you want to implement custom unmarshalling, json.Unmarshaler only supports -// receiving the data as []byte. However, while unmarshalling from JSON map, -// the data is not available as a raw []byte and converting to it will significantly -// hurt performance. Thus, if you wish to implement a custom unmarshalling on a type -// that is being unmarshalled from a JSON map, you need to implement -// UnmarshalerFromJSONMap interface. -type UnmarshalerFromJSONMap interface { - UnmarshalJSONFromMap(data interface{}) error -} - -// UnmarshalFromJSONMap parses the JSON map data and stores the values -// in the struct pointed to by v and in the returned map. -// If v is nil or not a pointer to a struct, UnmarshalFromJSONMap returns an ErrInvalidValue. -// -// UnmarshalFromJSONMap follows the rules of json.Unmarshal with the following exceptions: -// - All input fields are stored in the resulting map, including fields that do not exist in the -// struct pointed by v. -// - UnmarshalFromJSONMap receive a JSON map instead of raw bytes. The given input map is assumed -// to be a JSON map, meaning it should only contain the following types: bool, string, float64, -// []interface, and map[string]interface{}. Other types will cause decoding to return unexpected results. -// - UnmarshalFromJSONMap only operates on struct values. It will reject all other types of v by -// returning ErrInvalidValue. -// - UnmarshalFromJSONMap supports three types of Mode values. Each mode is self documented and affects -// how UnmarshalFromJSONMap behaves. 
-func UnmarshalFromJSONMap(data map[string]interface{}, v interface{}, options ...UnmarshalOption) (map[string]interface{}, error) { - if !isValidValue(v) { - return nil, ErrInvalidValue - } - opts := buildUnmarshalOptions(options) - d := &mapDecoder{options: opts} - result := make(map[string]interface{}) - if data != nil { - d.populateStruct(false, nil, data, v, result) - } - if opts.mode == ModeAllowMultipleErrors || opts.mode == ModeFailOverToOriginalValue { - if len(d.errs) == 0 { - return result, nil - } - return result, &MultipleError{Errors: d.errs} - } - if d.err != nil { - return nil, d.err - } - return result, nil -} - -var unmarshalerFromJSONMapType = reflect.TypeOf((*UnmarshalerFromJSONMap)(nil)).Elem() - -type mapDecoder struct { - options *unmarshalOptions - err error - errs []error -} - -func (m *mapDecoder) populateStruct(forcePopulate bool, path []string, data map[string]interface{}, structInstance interface{}, result map[string]interface{}) (interface{}, bool) { - doPopulate := !m.options.skipPopulateStruct || forcePopulate - var structValue reflect.Value - if doPopulate { - structValue = reflectStructValue(structInstance) - } - fields := mapStructFields(structInstance) - for key, inputValue := range data { - refInfo, exists := fields[key] - if exists { - value, isValidType := m.valueByReflectType(append(path, key), inputValue, refInfo.t) - if isValidType { - if value != nil && doPopulate { - field := refInfo.field(structValue) - assignValue(field, value) - } - if !m.options.excludeKnownFieldsFromMap { - if result != nil { - result[key] = value - } - } - } else { - switch m.options.mode { - case ModeFailOnFirstError: - return nil, false - case ModeFailOverToOriginalValue: - if !forcePopulate { - result[key] = value - } else { - return data, false - } - } - } - } else { - if result != nil { - result[key] = inputValue - } - } - } - return structInstance, true -} - -func (m *mapDecoder) valueByReflectType(path []string, v interface{}, t reflect.Type) 
(interface{}, bool) { - if t.Implements(unmarshalerFromJSONMapType) { - result := reflect.New(t.Elem()).Interface() - m.valueFromCustomUnmarshaler(v, result.(UnmarshalerFromJSONMap)) - return result, true - } - if reflect.PtrTo(t).Implements(unmarshalerFromJSONMapType) { - value := reflect.New(t) - m.valueFromCustomUnmarshaler(v, value.Interface().(UnmarshalerFromJSONMap)) - return value.Elem().Interface(), true - } - kind := t.Kind() - if converter := primitiveConverters[kind]; converter != nil { - if v == nil { - return nil, true - } - converted, ok := converter(v) - if !ok { - m.addError(newUnexpectedTypeParseError(t, path)) - return v, false - } - return converted, true - } - switch kind { - case reflect.Slice: - return m.buildSlice(path, v, t) - case reflect.Array: - return m.buildArray(path, v, t) - case reflect.Map: - return m.buildMap(path, v, t) - case reflect.Struct: - value, valid := m.buildStruct(path, v, t) - if value == nil { - return nil, valid - } - if !valid { - return value, false - } - return reflect.ValueOf(value).Elem().Interface(), valid - case reflect.Ptr: - if t.Elem().Kind() == reflect.Struct { - return m.buildStruct(path, v, t.Elem()) - } - value, valid := m.valueByReflectType(path, v, t.Elem()) - if value == nil { - return nil, valid - } - if !valid { - return value, false - } - result := reflect.New(reflect.TypeOf(value)) - result.Elem().Set(reflect.ValueOf(value)) - return result.Interface(), valid - } - m.addError(newUnsupportedTypeParseError(t, path)) - return nil, false -} - -func (m *mapDecoder) buildSlice(path []string, v interface{}, sliceType reflect.Type) (interface{}, bool) { - if v == nil { - return nil, true - } - arr, ok := v.([]interface{}) - if !ok { - m.addError(newUnexpectedTypeParseError(sliceType, path)) - return v, false - } - elemType := sliceType.Elem() - var sliceValue reflect.Value - if len(arr) > 0 { - sliceValue = reflect.MakeSlice(sliceType, 0, 4) - } else { - sliceValue = reflect.MakeSlice(sliceType, 0, 0) - } 
- for _, element := range arr { - current, valid := m.valueByReflectType(path, element, elemType) - if !valid { - if m.options.mode != ModeFailOverToOriginalValue { - return nil, true - } - return v, true - } - sliceValue = reflect.Append(sliceValue, safeReflectValue(elemType, current)) - } - return sliceValue.Interface(), true -} - -func (m *mapDecoder) buildArray(path []string, v interface{}, arrayType reflect.Type) (interface{}, bool) { - if v == nil { - return nil, true - } - arr, ok := v.([]interface{}) - if !ok { - m.addError(newUnexpectedTypeParseError(arrayType, path)) - return v, false - } - elemType := arrayType.Elem() - arrayValue := reflect.New(arrayType).Elem() - for i, element := range arr { - current, valid := m.valueByReflectType(path, element, elemType) - if !valid { - if m.options.mode != ModeFailOverToOriginalValue { - return nil, true - } - return v, true - } - if current != nil { - arrayValue.Index(i).Set(reflect.ValueOf(current)) - } - } - return arrayValue.Interface(), true -} - -func (m *mapDecoder) buildMap(path []string, v interface{}, mapType reflect.Type) (interface{}, bool) { - if v == nil { - return nil, true - } - mp, ok := v.(map[string]interface{}) - if !ok { - m.addError(newUnexpectedTypeParseError(mapType, path)) - return v, false - } - keyType := mapType.Key() - valueType := mapType.Elem() - mapValue := reflect.MakeMap(mapType) - for inputKey, inputValue := range mp { - keyPath := append(path, inputKey) - key, valid := m.valueByReflectType(keyPath, inputKey, keyType) - if !valid { - if m.options.mode != ModeFailOverToOriginalValue { - return nil, true - } - return v, true - } - value, valid := m.valueByReflectType(keyPath, inputValue, valueType) - if !valid { - if m.options.mode != ModeFailOverToOriginalValue { - return nil, true - } - return v, true - } - mapValue.SetMapIndex(safeReflectValue(keyType, key), safeReflectValue(valueType, value)) - } - return mapValue.Interface(), true -} - -func (m *mapDecoder) buildStruct(path 
[]string, v interface{}, structType reflect.Type) (interface{}, bool) { - if v == nil { - return nil, true - } - mp, ok := v.(map[string]interface{}) - if !ok { - m.addError(newUnexpectedTypeParseError(structType, path)) - return v, false - } - value := reflect.New(structType).Interface() - handler, ok := asJSONDataHandler(value) - if !ok { - return m.populateStruct(true, path, mp, value, nil) - } - data := make(map[string]interface{}) - result, valid := m.populateStruct(true, path, mp, value, data) - if !valid { - return result, false - } - err := handler(data) - if err != nil { - m.addError(err) - return result, false - } - return result, true -} - -func (m *mapDecoder) valueFromCustomUnmarshaler(data interface{}, unmarshaler UnmarshalerFromJSONMap) { - err := unmarshaler.UnmarshalJSONFromMap(data) - if err != nil { - m.addError(err) - } -} - -func (m *mapDecoder) addError(err error) { - if m.options.mode == ModeFailOnFirstError { - m.err = err - } else { - m.errs = append(m.errs, err) - } -} diff --git a/vendor/github.com/posener/complete/readme.md b/vendor/github.com/posener/complete/README.md similarity index 100% rename from vendor/github.com/posener/complete/readme.md rename to vendor/github.com/posener/complete/README.md diff --git a/vendor/github.com/russross/blackfriday/.gitignore b/vendor/github.com/russross/blackfriday/.gitignore deleted file mode 100644 index 75623dcc..00000000 --- a/vendor/github.com/russross/blackfriday/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -*.out -*.swp -*.8 -*.6 -_obj -_test* -markdown -tags diff --git a/vendor/github.com/russross/blackfriday/.travis.yml b/vendor/github.com/russross/blackfriday/.travis.yml deleted file mode 100644 index a49fff15..00000000 --- a/vendor/github.com/russross/blackfriday/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -sudo: false -language: go -go: - - "1.9.x" - - "1.10.x" - - "1.11.x" - - tip -matrix: - fast_finish: true - allow_failures: - - go: tip -install: - - # Do nothing. 
This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). -script: - - go get -t -v ./... - - diff -u <(echo -n) <(gofmt -d -s .) - - go tool vet . - - go test -v -race ./... diff --git a/vendor/github.com/russross/blackfriday/LICENSE.txt b/vendor/github.com/russross/blackfriday/LICENSE.txt deleted file mode 100644 index 7fbb253a..00000000 --- a/vendor/github.com/russross/blackfriday/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Blackfriday is distributed under the Simplified BSD License: - -Copyright © 2011 Russ Ross -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided with - the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS -FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE -COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, -INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN -ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/russross/blackfriday/README.md b/vendor/github.com/russross/blackfriday/README.md deleted file mode 100644 index 997ef5d4..00000000 --- a/vendor/github.com/russross/blackfriday/README.md +++ /dev/null @@ -1,364 +0,0 @@ -Blackfriday -[![Build Status][BuildV2SVG]][BuildV2URL] -[![PkgGoDev][PkgGoDevV2SVG]][PkgGoDevV2URL] -=========== - -Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It -is paranoid about its input (so you can safely feed it user-supplied -data), it is fast, it supports common extensions (tables, smart -punctuation substitutions, etc.), and it is safe for all utf-8 -(unicode) input. - -HTML output is currently supported, along with Smartypants -extensions. - -It started as a translation from C of [Sundown][3]. - - -Installation ------------- - -Blackfriday is compatible with modern Go releases in module mode. -With Go installed: - - go get github.com/russross/blackfriday - -will resolve and add the package to the current development module, -then build and install it. Alternatively, you can achieve the same -if you import it in a package: - - import "github.com/russross/blackfriday" - -and `go get` without parameters. - -Old versions of Go and legacy GOPATH mode might work, -but no effort is made to keep them working. - - -Versions --------- - -Currently maintained and recommended version of Blackfriday is `v2`. It's being -developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the -documentation is available at -https://pkg.go.dev/github.com/russross/blackfriday/v2. - -It is `go get`-able in module mode at `github.com/russross/blackfriday/v2`. - -Version 2 offers a number of improvements over v1: - -* Cleaned up API -* A separate call to [`Parse`][4], which produces an abstract syntax tree for - the document -* Latest bug fixes -* Flexibility to easily add your own rendering extensions - -Potential drawbacks: - -* Our benchmarks show v2 to be slightly slower than v1. 
Currently in the - ballpark of around 15%. -* API breakage. If you can't afford modifying your code to adhere to the new API - and don't care too much about the new features, v2 is probably not for you. -* Several bug fixes are trailing behind and still need to be forward-ported to - v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for - tracking. - -If you are still interested in the legacy `v1`, you can import it from -`github.com/russross/blackfriday`. Documentation for the legacy v1 can be found -here: https://pkg.go.dev/github.com/russross/blackfriday. - - -Usage ------ - -### v1 - -For basic usage, it is as simple as getting your input into a byte -slice and calling: - -```go -output := blackfriday.MarkdownBasic(input) -``` - -This renders it with no extensions enabled. To get a more useful -feature set, use this instead: - -```go -output := blackfriday.MarkdownCommon(input) -``` - -### v2 - -For the most sensible markdown processing, it is as simple as getting your input -into a byte slice and calling: - -```go -output := blackfriday.Run(input) -``` - -Your input will be parsed and the output rendered with a set of most popular -extensions enabled. If you want the most basic feature set, corresponding with -the bare Markdown specification, use: - -```go -output := blackfriday.Run(input, blackfriday.WithNoExtensions()) -``` - -### Sanitize untrusted content - -Blackfriday itself does nothing to protect against malicious content. If you are -dealing with user-supplied markdown, we recommend running Blackfriday's output -through HTML sanitizer such as [Bluemonday][5]. - -Here's an example of simple usage of Blackfriday together with Bluemonday: - -```go -import ( - "github.com/microcosm-cc/bluemonday" - "github.com/russross/blackfriday" -) - -// ... 
-unsafe := blackfriday.Run(input) -html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) -``` - -### Custom options, v1 - -If you want to customize the set of options, first get a renderer -(currently only the HTML output engine), then use it to -call the more general `Markdown` function. For examples, see the -implementations of `MarkdownBasic` and `MarkdownCommon` in -`markdown.go`. - -### Custom options, v2 - -If you want to customize the set of options, use `blackfriday.WithExtensions`, -`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. - -### `blackfriday-tool` - -You can also check out `blackfriday-tool` for a more complete example -of how to use it. Download and install it using: - - go get github.com/russross/blackfriday-tool - -This is a simple command-line tool that allows you to process a -markdown file using a standalone program. You can also browse the -source directly on github if you are just looking for some example -code: - -* - -Note that if you have not already done so, installing -`blackfriday-tool` will be sufficient to download and install -blackfriday in addition to the tool itself. The tool binary will be -installed in `$GOPATH/bin`. This is a statically-linked binary that -can be copied to wherever you need it without worrying about -dependencies and library versions. - -### Sanitized anchor names - -Blackfriday includes an algorithm for creating sanitized anchor names -corresponding to a given input text. This algorithm is used to create -anchors for headings when `EXTENSION_AUTO_HEADER_IDS` is enabled. The -algorithm has a specification, so that other packages can create -compatible anchor names and links to those anchors. - -The specification is located at https://pkg.go.dev/github.com/russross/blackfriday#hdr-Sanitized_Anchor_Names. 
- -[`SanitizedAnchorName`](https://pkg.go.dev/github.com/russross/blackfriday#SanitizedAnchorName) exposes this functionality, and can be used to -create compatible links to the anchor names generated by blackfriday. -This algorithm is also implemented in a small standalone package at -[`github.com/shurcooL/sanitized_anchor_name`](https://pkg.go.dev/github.com/shurcooL/sanitized_anchor_name). It can be useful for clients -that want a small package and don't need full functionality of blackfriday. - - -Features --------- - -All features of Sundown are supported, including: - -* **Compatibility**. The Markdown v1.0.3 test suite passes with - the `--tidy` option. Without `--tidy`, the differences are - mostly in whitespace and entity escaping, where blackfriday is - more consistent and cleaner. - -* **Common extensions**, including table support, fenced code - blocks, autolinks, strikethroughs, non-strict emphasis, etc. - -* **Safety**. Blackfriday is paranoid when parsing, making it safe - to feed untrusted user input without fear of bad things - happening. The test suite stress tests this and there are no - known inputs that make it crash. If you find one, please let me - know and send me the input that does it. - - NOTE: "safety" in this context means *runtime safety only*. In order to - protect yourself against JavaScript injection in untrusted content, see - [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). - -* **Fast processing**. It is fast enough to render on-demand in - most web applications without having to cache the output. - -* **Thread safety**. You can run multiple parsers in different - goroutines without ill effect. There is no dependence on global - shared state. - -* **Minimal dependencies**. Blackfriday only depends on standard - library packages in Go. The source code is pretty - self-contained, so it is easy to add to any project, including - Google App Engine projects. - -* **Standards compliant**. 
Output successfully validates using the - W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. - - -Extensions ----------- - -In addition to the standard markdown syntax, this package -implements the following extensions: - -* **Intra-word emphasis supression**. The `_` character is - commonly used inside words when discussing code, so having - markdown interpret it as an emphasis command is usually the - wrong thing. Blackfriday lets you treat all emphasis markers as - normal characters when they occur inside a word. - -* **Tables**. Tables can be created by drawing them in the input - using a simple syntax: - - ``` - Name | Age - --------|------ - Bob | 27 - Alice | 23 - ``` - -* **Fenced code blocks**. In addition to the normal 4-space - indentation to mark code blocks, you can explicitly mark them - and supply a language (to make syntax highlighting simple). Just - mark it like this: - - ```go - func getTrue() bool { - return true - } - ``` - - You can use 3 or more backticks to mark the beginning of the - block, and the same number to mark the end of the block. - - To preserve classes of fenced code blocks while using the bluemonday - HTML sanitizer, use the following policy: - - ```go - p := bluemonday.UGCPolicy() - p.AllowAttrs("class").Matching(regexp.MustCompile("^language-[a-zA-Z0-9]+$")).OnElements("code") - html := p.SanitizeBytes(unsafe) - ``` - -* **Definition lists**. A simple definition list is made of a single-line - term followed by a colon and the definition for that term. - - Cat - : Fluffy animal everyone likes - - Internet - : Vector of transmission for pictures of cats - - Terms must be separated from the previous definition by a blank line. - -* **Footnotes**. A marker in the text that will become a superscript number; - a footnote definition that will be placed in a list of footnotes at the - end of the document. A footnote looks like this: - - This is a footnote.[^1] - - [^1]: the footnote text. - -* **Autolinking**. 
Blackfriday can find URLs that have not been - explicitly marked as links and turn them into links. - -* **Strikethrough**. Use two tildes (`~~`) to mark text that - should be crossed out. - -* **Hard line breaks**. With this extension enabled (it is off by - default in the `MarkdownBasic` and `MarkdownCommon` convenience - functions), newlines in the input translate into line breaks in - the output. - -* **Smart quotes**. Smartypants-style punctuation substitution is - supported, turning normal double- and single-quote marks into - curly quotes, etc. - -* **LaTeX-style dash parsing** is an additional option, where `--` - is translated into `–`, and `---` is translated into - `—`. This differs from most smartypants processors, which - turn a single hyphen into an ndash and a double hyphen into an - mdash. - -* **Smart fractions**, where anything that looks like a fraction - is translated into suitable HTML (instead of just a few special - cases like most smartypant processors). For example, `4/5` - becomes `45`, which renders as - 45. - - -Other renderers ---------------- - -Blackfriday is structured to allow alternative rendering engines. Here -are a few of note: - -* [github_flavored_markdown](https://pkg.go.dev/github.com/shurcooL/github_flavored_markdown): - provides a GitHub Flavored Markdown renderer with fenced code block - highlighting, clickable heading anchor links. - - It's not customizable, and its goal is to produce HTML output - equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), - except the rendering is performed locally. - -* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, - but for markdown. - -* [LaTeX output](https://gitlab.com/ambrevar/blackfriday-latex): - renders output as LaTeX. 
- -* [bfchroma](https://github.com/Depado/bfchroma/): provides convenience - integration with the [Chroma](https://github.com/alecthomas/chroma) code - highlighting library. bfchroma is only compatible with v2 of Blackfriday and - provides a drop-in renderer ready to use with Blackfriday, as well as - options and means for further customization. - -* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. - -* [Blackfriday-Slack](https://github.com/karriereat/blackfriday-slack): converts markdown to slack message style - - -TODO ----- - -* More unit testing -* Improve Unicode support. It does not understand all Unicode - rules (about what constitutes a letter, a punctuation symbol, - etc.), so it may fail to detect word boundaries correctly in - some instances. It is safe on all UTF-8 input. - - -License -------- - -[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) - - - [1]: https://daringfireball.net/projects/markdown/ "Markdown" - [2]: https://golang.org/ "Go Language" - [3]: https://github.com/vmg/sundown "Sundown" - [4]: https://pkg.go.dev/github.com/russross/blackfriday/v2#Parse "Parse func" - [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" - - [BuildV2SVG]: https://travis-ci.org/russross/blackfriday.svg?branch=v2 - [BuildV2URL]: https://travis-ci.org/russross/blackfriday - [PkgGoDevV2SVG]: https://pkg.go.dev/badge/github.com/russross/blackfriday/v2 - [PkgGoDevV2URL]: https://pkg.go.dev/github.com/russross/blackfriday/v2 diff --git a/vendor/github.com/russross/blackfriday/block.go b/vendor/github.com/russross/blackfriday/block.go deleted file mode 100644 index 563cb290..00000000 --- a/vendor/github.com/russross/blackfriday/block.go +++ /dev/null @@ -1,1480 +0,0 @@ -// -// Blackfriday Markdown Processor -// Available at http://github.com/russross/blackfriday -// -// Copyright © 
2011 Russ Ross . -// Distributed under the Simplified BSD License. -// See README.md for details. -// - -// -// Functions to parse block-level elements. -// - -package blackfriday - -import ( - "bytes" - "strings" - "unicode" -) - -// Parse block-level data. -// Note: this function and many that it calls assume that -// the input buffer ends with a newline. -func (p *parser) block(out *bytes.Buffer, data []byte) { - if len(data) == 0 || data[len(data)-1] != '\n' { - panic("block input is missing terminating newline") - } - - // this is called recursively: enforce a maximum depth - if p.nesting >= p.maxNesting { - return - } - p.nesting++ - - // parse out one block-level construct at a time - for len(data) > 0 { - // prefixed header: - // - // # Header 1 - // ## Header 2 - // ... - // ###### Header 6 - if p.isPrefixHeader(data) { - data = data[p.prefixHeader(out, data):] - continue - } - - // block of preformatted HTML: - // - //
- // ... - //
- if data[0] == '<' { - if i := p.html(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // title block - // - // % stuff - // % more stuff - // % even more stuff - if p.flags&EXTENSION_TITLEBLOCK != 0 { - if data[0] == '%' { - if i := p.titleBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - } - - // blank lines. note: returns the # of bytes to skip - if i := p.isEmpty(data); i > 0 { - data = data[i:] - continue - } - - // indented code block: - // - // func max(a, b int) int { - // if a > b { - // return a - // } - // return b - // } - if p.codePrefix(data) > 0 { - data = data[p.code(out, data):] - continue - } - - // fenced code block: - // - // ``` go info string here - // func fact(n int) int { - // if n <= 1 { - // return n - // } - // return n * fact(n-1) - // } - // ``` - if p.flags&EXTENSION_FENCED_CODE != 0 { - if i := p.fencedCodeBlock(out, data, true); i > 0 { - data = data[i:] - continue - } - } - - // horizontal rule: - // - // ------ - // or - // ****** - // or - // ______ - if p.isHRule(data) { - p.r.HRule(out) - var i int - for i = 0; data[i] != '\n'; i++ { - } - data = data[i:] - continue - } - - // block quote: - // - // > A big quote I found somewhere - // > on the web - if p.quotePrefix(data) > 0 { - data = data[p.quote(out, data):] - continue - } - - // table: - // - // Name | Age | Phone - // ------|-----|--------- - // Bob | 31 | 555-1234 - // Alice | 27 | 555-4321 - if p.flags&EXTENSION_TABLES != 0 { - if i := p.table(out, data); i > 0 { - data = data[i:] - continue - } - } - - // an itemized/unordered list: - // - // * Item 1 - // * Item 2 - // - // also works with + or - - if p.uliPrefix(data) > 0 { - data = data[p.list(out, data, 0):] - continue - } - - // a numbered/ordered list: - // - // 1. Item 1 - // 2. 
Item 2 - if p.oliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_ORDERED):] - continue - } - - // definition lists: - // - // Term 1 - // : Definition a - // : Definition b - // - // Term 2 - // : Definition c - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if p.dliPrefix(data) > 0 { - data = data[p.list(out, data, LIST_TYPE_DEFINITION):] - continue - } - } - - // anything else must look like a normal paragraph - // note: this finds underlined headers, too - data = data[p.paragraph(out, data):] - } - - p.nesting-- -} - -func (p *parser) isPrefixHeader(data []byte) bool { - if data[0] != '#' { - return false - } - - if p.flags&EXTENSION_SPACE_HEADERS != 0 { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - if data[level] != ' ' { - return false - } - } - return true -} - -func (p *parser) prefixHeader(out *bytes.Buffer, data []byte) int { - level := 0 - for level < 6 && data[level] == '#' { - level++ - } - i := skipChar(data, level, ' ') - end := skipUntilChar(data, i, '\n') - skip := end - id := "" - if p.flags&EXTENSION_HEADER_IDS != 0 { - j, k := 0, 0 - // find start/end of header id - for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { - } - for k = j + 1; k < end && data[k] != '}'; k++ { - } - // extract header id iff found - if j < end && k < end { - id = string(data[j+2 : k]) - end = j - skip = k + 1 - for end > 0 && data[end-1] == ' ' { - end-- - } - } - } - for end > 0 && data[end-1] == '#' { - if isBackslashEscaped(data, end-1) { - break - } - end-- - } - for end > 0 && data[end-1] == ' ' { - end-- - } - if end > i { - if id == "" && p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { - id = SanitizedAnchorName(string(data[i:end])) - } - work := func() bool { - p.inline(out, data[i:end]) - return true - } - p.r.Header(out, work, level, id) - } - return skip -} - -func (p *parser) isUnderlinedHeader(data []byte) int { - // test of level 1 header - if data[0] == '=' { - i := skipChar(data, 1, '=') - i = skipChar(data, i, 
' ') - if data[i] == '\n' { - return 1 - } else { - return 0 - } - } - - // test of level 2 header - if data[0] == '-' { - i := skipChar(data, 1, '-') - i = skipChar(data, i, ' ') - if data[i] == '\n' { - return 2 - } else { - return 0 - } - } - - return 0 -} - -func (p *parser) titleBlock(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '%' { - return 0 - } - splitData := bytes.Split(data, []byte("\n")) - var i int - for idx, b := range splitData { - if !bytes.HasPrefix(b, []byte("%")) { - i = idx // - 1 - break - } - } - - data = bytes.Join(splitData[0:i], []byte("\n")) - p.r.TitleBlock(out, data) - - return len(data) -} - -func (p *parser) html(out *bytes.Buffer, data []byte, doRender bool) int { - var i, j int - - // identify the opening tag - if data[0] != '<' { - return 0 - } - curtag, tagfound := p.htmlFindTag(data[1:]) - - // handle special cases - if !tagfound { - // check for an HTML comment - if size := p.htmlComment(out, data, doRender); size > 0 { - return size - } - - // check for an
tag - if size := p.htmlHr(out, data, doRender); size > 0 { - return size - } - - // check for HTML CDATA - if size := p.htmlCDATA(out, data, doRender); size > 0 { - return size - } - - // no special case recognized - return 0 - } - - // look for an unindented matching closing tag - // followed by a blank line - found := false - /* - closetag := []byte("\n") - j = len(curtag) + 1 - for !found { - // scan for a closing tag at the beginning of a line - if skip := bytes.Index(data[j:], closetag); skip >= 0 { - j += skip + len(closetag) - } else { - break - } - - // see if it is the only thing on the line - if skip := p.isEmpty(data[j:]); skip > 0 { - // see if it is followed by a blank line/eof - j += skip - if j >= len(data) { - found = true - i = j - } else { - if skip := p.isEmpty(data[j:]); skip > 0 { - j += skip - found = true - i = j - } - } - } - } - */ - - // if not found, try a second pass looking for indented match - // but not if tag is "ins" or "del" (following original Markdown.pl) - if !found && curtag != "ins" && curtag != "del" { - i = 1 - for i < len(data) { - i++ - for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { - i++ - } - - if i+2+len(curtag) >= len(data) { - break - } - - j = p.htmlFindEnd(curtag, data[i-1:]) - - if j > 0 { - i += j - 1 - found = true - break - } - } - } - - if !found { - return 0 - } - - // the end of the block has been found - if doRender { - // trim newlines - end := i - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - - return i -} - -func (p *parser) renderHTMLBlock(out *bytes.Buffer, data []byte, start int, doRender bool) int { - // html block needs to end with a blank line - if i := p.isEmpty(data[start:]); i > 0 { - size := start + i - if doRender { - // trim trailing newlines - end := size - for end > 0 && data[end-1] == '\n' { - end-- - } - p.r.BlockHtml(out, data[:end]) - } - return size - } - return 0 -} - -// HTML comment, lax form -func (p *parser) htmlComment(out 
*bytes.Buffer, data []byte, doRender bool) int { - i := p.inlineHTMLComment(out, data) - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HTML CDATA section -func (p *parser) htmlCDATA(out *bytes.Buffer, data []byte, doRender bool) int { - const cdataTag = "') { - i++ - } - i++ - // no end-of-comment marker - if i >= len(data) { - return 0 - } - return p.renderHTMLBlock(out, data, i, doRender) -} - -// HR, which is the only self-closing block tag considered -func (p *parser) htmlHr(out *bytes.Buffer, data []byte, doRender bool) int { - if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { - return 0 - } - if data[3] != ' ' && data[3] != '/' && data[3] != '>' { - // not an
tag after all; at least not a valid one - return 0 - } - - i := 3 - for data[i] != '>' && data[i] != '\n' { - i++ - } - - if data[i] == '>' { - return p.renderHTMLBlock(out, data, i+1, doRender) - } - - return 0 -} - -func (p *parser) htmlFindTag(data []byte) (string, bool) { - i := 0 - for isalnum(data[i]) { - i++ - } - key := string(data[:i]) - if _, ok := blockTags[key]; ok { - return key, true - } - return "", false -} - -func (p *parser) htmlFindEnd(tag string, data []byte) int { - // assume data[0] == '<' && data[1] == '/' already tested - - // check if tag is a match - closetag := []byte("") - if !bytes.HasPrefix(data, closetag) { - return 0 - } - i := len(closetag) - - // check that the rest of the line is blank - skip := 0 - if skip = p.isEmpty(data[i:]); skip == 0 { - return 0 - } - i += skip - skip = 0 - - if i >= len(data) { - return i - } - - if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { - return i - } - if skip = p.isEmpty(data[i:]); skip == 0 { - // following line must be blank - return 0 - } - - return i + skip -} - -func (*parser) isEmpty(data []byte) int { - // it is okay to call isEmpty on an empty buffer - if len(data) == 0 { - return 0 - } - - var i int - for i = 0; i < len(data) && data[i] != '\n'; i++ { - if data[i] != ' ' && data[i] != '\t' { - return 0 - } - } - return i + 1 -} - -func (*parser) isHRule(data []byte) bool { - i := 0 - - // skip up to three spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // look at the hrule char - if data[i] != '*' && data[i] != '-' && data[i] != '_' { - return false - } - c := data[i] - - // the whole line must be the char or whitespace - n := 0 - for data[i] != '\n' { - switch { - case data[i] == c: - n++ - case data[i] != ' ': - return false - } - i++ - } - - return n >= 3 -} - -// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, -// and returns the end index if so, or 0 otherwise. It also returns the marker found. 
-// If syntax is not nil, it gets set to the syntax specified in the fence line. -// A final newline is mandatory to recognize the fence line, unless newlineOptional is true. -func isFenceLine(data []byte, info *string, oldmarker string, newlineOptional bool) (end int, marker string) { - i, size := 0, 0 - - // skip up to three spaces - for i < len(data) && i < 3 && data[i] == ' ' { - i++ - } - - // check for the marker characters: ~ or ` - if i >= len(data) { - return 0, "" - } - if data[i] != '~' && data[i] != '`' { - return 0, "" - } - - c := data[i] - - // the whole line must be the same char or whitespace - for i < len(data) && data[i] == c { - size++ - i++ - } - - // the marker char must occur at least 3 times - if size < 3 { - return 0, "" - } - marker = string(data[i-size : i]) - - // if this is the end marker, it must match the beginning marker - if oldmarker != "" && marker != oldmarker { - return 0, "" - } - - // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here - // into one, always get the info string, and discard it if the caller doesn't care. 
- if info != nil { - infoLength := 0 - i = skipChar(data, i, ' ') - - if i >= len(data) { - if newlineOptional && i == len(data) { - return i, marker - } - return 0, "" - } - - infoStart := i - - if data[i] == '{' { - i++ - infoStart++ - - for i < len(data) && data[i] != '}' && data[i] != '\n' { - infoLength++ - i++ - } - - if i >= len(data) || data[i] != '}' { - return 0, "" - } - - // strip all whitespace at the beginning and the end - // of the {} block - for infoLength > 0 && isspace(data[infoStart]) { - infoStart++ - infoLength-- - } - - for infoLength > 0 && isspace(data[infoStart+infoLength-1]) { - infoLength-- - } - - i++ - } else { - for i < len(data) && !isverticalspace(data[i]) { - infoLength++ - i++ - } - } - - *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength])) - } - - i = skipChar(data, i, ' ') - if i >= len(data) { - if newlineOptional { - return i, marker - } - return 0, "" - } - if data[i] == '\n' { - i++ // Take newline into account - } - - return i, marker -} - -// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, -// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. -// If doRender is true, a final newline is mandatory to recognize the fenced code block. -func (p *parser) fencedCodeBlock(out *bytes.Buffer, data []byte, doRender bool) int { - var infoString string - beg, marker := isFenceLine(data, &infoString, "", false) - if beg == 0 || beg >= len(data) { - return 0 - } - - var work bytes.Buffer - - for { - // safe to assume beg < len(data) - - // check for the end of the code block - newlineOptional := !doRender - fenceEnd, _ := isFenceLine(data[beg:], nil, marker, newlineOptional) - if fenceEnd != 0 { - beg += fenceEnd - break - } - - // copy the current line - end := skipUntilChar(data, beg, '\n') + 1 - - // did we reach the end of the buffer without a closing marker? 
- if end >= len(data) { - return 0 - } - - // verbatim copy to the working buffer - if doRender { - work.Write(data[beg:end]) - } - beg = end - } - - if doRender { - p.r.BlockCode(out, work.Bytes(), infoString) - } - - return beg -} - -func (p *parser) table(out *bytes.Buffer, data []byte) int { - var header bytes.Buffer - i, columns := p.tableHeader(&header, data) - if i == 0 { - return 0 - } - - var body bytes.Buffer - - for i < len(data) { - pipes, rowStart := 0, i - for ; data[i] != '\n'; i++ { - if data[i] == '|' { - pipes++ - } - } - - if pipes == 0 { - i = rowStart - break - } - - // include the newline in data sent to tableRow - i++ - p.tableRow(&body, data[rowStart:i], columns, false) - } - - p.r.Table(out, header.Bytes(), body.Bytes(), columns) - - return i -} - -// check if the specified position is preceded by an odd number of backslashes -func isBackslashEscaped(data []byte, i int) bool { - backslashes := 0 - for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { - backslashes++ - } - return backslashes&1 == 1 -} - -func (p *parser) tableHeader(out *bytes.Buffer, data []byte) (size int, columns []int) { - i := 0 - colCount := 1 - for i = 0; data[i] != '\n'; i++ { - if data[i] == '|' && !isBackslashEscaped(data, i) { - colCount++ - } - } - - // doesn't look like a table header - if colCount == 1 { - return - } - - // include the newline in the data sent to tableRow - header := data[:i+1] - - // column count ignores pipes at beginning or end of line - if data[0] == '|' { - colCount-- - } - if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { - colCount-- - } - - columns = make([]int, colCount) - - // move on to the header underline - i++ - if i >= len(data) { - return - } - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - i = skipChar(data, i, ' ') - - // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 - // and trailing | optional on last column - col := 0 - for data[i] != '\n' { - dashes := 0 - - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_LEFT - dashes++ - } - for data[i] == '-' { - i++ - dashes++ - } - if data[i] == ':' { - i++ - columns[col] |= TABLE_ALIGNMENT_RIGHT - dashes++ - } - for data[i] == ' ' { - i++ - } - - // end of column test is messy - switch { - case dashes < 3: - // not a valid column - return - - case data[i] == '|' && !isBackslashEscaped(data, i): - // marker found, now skip past trailing whitespace - col++ - i++ - for data[i] == ' ' { - i++ - } - - // trailing junk found after last column - if col >= colCount && data[i] != '\n' { - return - } - - case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: - // something else found where marker was required - return - - case data[i] == '\n': - // marker is optional for the last column - col++ - - default: - // trailing junk found after last column - return - } - } - if col != colCount { - return - } - - p.tableRow(out, header, columns, true) - size = i + 1 - return -} - -func (p *parser) tableRow(out *bytes.Buffer, data []byte, columns []int, header bool) { - i, col := 0, 0 - var rowWork bytes.Buffer - - if data[i] == '|' && !isBackslashEscaped(data, i) { - i++ - } - - for col = 0; col < len(columns) && i < len(data); col++ { - for data[i] == ' ' { - i++ - } - - cellStart := i - - for (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { - i++ - } - - cellEnd := i - - // skip the end-of-cell marker, possibly taking us past end of buffer - i++ - - for cellEnd > cellStart && data[cellEnd-1] == ' ' { - cellEnd-- - } - - var cellWork bytes.Buffer - p.inline(&cellWork, data[cellStart:cellEnd]) - - if header { - p.r.TableHeaderCell(&rowWork, cellWork.Bytes(), columns[col]) - } else { - p.r.TableCell(&rowWork, cellWork.Bytes(), columns[col]) - } - } - - // pad it out with empty columns to get the right number - for ; col 
< len(columns); col++ { - if header { - p.r.TableHeaderCell(&rowWork, nil, columns[col]) - } else { - p.r.TableCell(&rowWork, nil, columns[col]) - } - } - - // silently ignore rows with too many cells - - p.r.TableRow(out, rowWork.Bytes()) -} - -// returns blockquote prefix length -func (p *parser) quotePrefix(data []byte) int { - i := 0 - for i < 3 && data[i] == ' ' { - i++ - } - if data[i] == '>' { - if data[i+1] == ' ' { - return i + 2 - } - return i + 1 - } - return 0 -} - -// blockquote ends with at least one blank line -// followed by something without a blockquote prefix -func (p *parser) terminateBlockquote(data []byte, beg, end int) bool { - if p.isEmpty(data[beg:]) <= 0 { - return false - } - if end >= len(data) { - return true - } - return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 -} - -// parse a blockquote fragment -func (p *parser) quote(out *bytes.Buffer, data []byte) int { - var raw bytes.Buffer - beg, end := 0, 0 - for beg < len(data) { - end = beg - // Step over whole lines, collecting them. 
While doing that, check for - // fenced code and if one's found, incorporate it altogether, - // irregardless of any contents inside it - for data[end] != '\n' { - if p.flags&EXTENSION_FENCED_CODE != 0 { - if i := p.fencedCodeBlock(out, data[end:], false); i > 0 { - // -1 to compensate for the extra end++ after the loop: - end += i - 1 - break - } - } - end++ - } - end++ - - if pre := p.quotePrefix(data[beg:]); pre > 0 { - // skip the prefix - beg += pre - } else if p.terminateBlockquote(data, beg, end) { - break - } - - // this line is part of the blockquote - raw.Write(data[beg:end]) - beg = end - } - - var cooked bytes.Buffer - p.block(&cooked, raw.Bytes()) - p.r.BlockQuote(out, cooked.Bytes()) - return end -} - -// returns prefix length for block code -func (p *parser) codePrefix(data []byte) int { - if data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' { - return 4 - } - return 0 -} - -func (p *parser) code(out *bytes.Buffer, data []byte) int { - var work bytes.Buffer - - i := 0 - for i < len(data) { - beg := i - for data[i] != '\n' { - i++ - } - i++ - - blankline := p.isEmpty(data[beg:i]) > 0 - if pre := p.codePrefix(data[beg:i]); pre > 0 { - beg += pre - } else if !blankline { - // non-empty, non-prefixed line breaks the pre - i = beg - break - } - - // verbatim copy to the working buffeu - if blankline { - work.WriteByte('\n') - } else { - work.Write(data[beg:i]) - } - } - - // trim all the \n off the end of work - workbytes := work.Bytes() - eol := len(workbytes) - for eol > 0 && workbytes[eol-1] == '\n' { - eol-- - } - if eol != len(workbytes) { - work.Truncate(eol) - } - - work.WriteByte('\n') - - p.r.BlockCode(out, work.Bytes(), "") - - return i -} - -// returns unordered list item prefix -func (p *parser) uliPrefix(data []byte) int { - i := 0 - - // start with up to 3 spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // need a *, +, or - followed by a space - if (data[i] != '*' && data[i] != '+' && data[i] != '-') || - data[i+1] != 
' ' { - return 0 - } - return i + 2 -} - -// returns ordered list item prefix -func (p *parser) oliPrefix(data []byte) int { - i := 0 - - // start with up to 3 spaces - for i < 3 && data[i] == ' ' { - i++ - } - - // count the digits - start := i - for data[i] >= '0' && data[i] <= '9' { - i++ - } - - // we need >= 1 digits followed by a dot and a space - if start == i || data[i] != '.' || data[i+1] != ' ' { - return 0 - } - return i + 2 -} - -// returns definition list item prefix -func (p *parser) dliPrefix(data []byte) int { - i := 0 - - // need a : followed by a spaces - if data[i] != ':' || data[i+1] != ' ' { - return 0 - } - for data[i] == ' ' { - i++ - } - return i + 2 -} - -// parse ordered or unordered list block -func (p *parser) list(out *bytes.Buffer, data []byte, flags int) int { - i := 0 - flags |= LIST_ITEM_BEGINNING_OF_LIST - work := func() bool { - for i < len(data) { - skip := p.listItem(out, data[i:], &flags) - i += skip - - if skip == 0 || flags&LIST_ITEM_END_OF_LIST != 0 { - break - } - flags &= ^LIST_ITEM_BEGINNING_OF_LIST - } - return true - } - - p.r.List(out, work, flags) - return i -} - -// Parse a single list item. -// Assumes initial prefix is already removed if this is a sublist. 
-func (p *parser) listItem(out *bytes.Buffer, data []byte, flags *int) int { - // keep track of the indentation of the first line - itemIndent := 0 - for itemIndent < 3 && data[itemIndent] == ' ' { - itemIndent++ - } - - i := p.uliPrefix(data) - if i == 0 { - i = p.oliPrefix(data) - } - if i == 0 { - i = p.dliPrefix(data) - // reset definition term flag - if i > 0 { - *flags &= ^LIST_TYPE_TERM - } - } - if i == 0 { - // if in defnition list, set term flag and continue - if *flags&LIST_TYPE_DEFINITION != 0 { - *flags |= LIST_TYPE_TERM - } else { - return 0 - } - } - - // skip leading whitespace on first line - for data[i] == ' ' { - i++ - } - - // find the end of the line - line := i - for i > 0 && data[i-1] != '\n' { - i++ - } - - // process the following lines - containsBlankLine := false - sublist := 0 - codeBlockMarker := "" - if p.flags&EXTENSION_FENCED_CODE != 0 && i > line { - // determine if codeblock starts on the first line - _, codeBlockMarker = isFenceLine(data[line:i], nil, "", false) - } - - // get working buffer - var raw bytes.Buffer - - // put the first line into the working buffer - raw.Write(data[line:i]) - line = i - -gatherlines: - for line < len(data) { - i++ - - // find the end of this line - for data[i-1] != '\n' { - i++ - } - // if it is an empty line, guess that it is part of this item - // and move on to the next line - if p.isEmpty(data[line:i]) > 0 { - containsBlankLine = true - raw.Write(data[line:i]) - line = i - continue - } - - // calculate the indentation - indent := 0 - for indent < 4 && line+indent < i && data[line+indent] == ' ' { - indent++ - } - - chunk := data[line+indent : i] - - if p.flags&EXTENSION_FENCED_CODE != 0 { - // determine if in or out of codeblock - // if in codeblock, ignore normal list processing - _, marker := isFenceLine(chunk, nil, codeBlockMarker, false) - if marker != "" { - if codeBlockMarker == "" { - // start of codeblock - codeBlockMarker = marker - } else { - // end of codeblock. 
- *flags |= LIST_ITEM_CONTAINS_BLOCK - codeBlockMarker = "" - } - } - // we are in a codeblock, write line, and continue - if codeBlockMarker != "" || marker != "" { - raw.Write(data[line+indent : i]) - line = i - continue gatherlines - } - } - - // evaluate how this line fits in - switch { - // is this a nested list item? - case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) || - p.oliPrefix(chunk) > 0 || - p.dliPrefix(chunk) > 0: - - if containsBlankLine { - // end the list if the type changed after a blank line - if indent <= itemIndent && - ((*flags&LIST_TYPE_ORDERED != 0 && p.uliPrefix(chunk) > 0) || - (*flags&LIST_TYPE_ORDERED == 0 && p.oliPrefix(chunk) > 0)) { - - *flags |= LIST_ITEM_END_OF_LIST - break gatherlines - } - *flags |= LIST_ITEM_CONTAINS_BLOCK - } - - // to be a nested list, it must be indented more - // if not, it is the next item in the same list - if indent <= itemIndent { - break gatherlines - } - - // is this the first item in the nested list? - if sublist == 0 { - sublist = raw.Len() - } - - // is this a nested prefix header? - case p.isPrefixHeader(chunk): - // if the header is not indented, it is not nested in the list - // and thus ends the list - if containsBlankLine && indent < 4 { - *flags |= LIST_ITEM_END_OF_LIST - break gatherlines - } - *flags |= LIST_ITEM_CONTAINS_BLOCK - - // anything following an empty line is only part - // of this item if it is indented 4 spaces - // (regardless of the indentation of the beginning of the item) - case containsBlankLine && indent < 4: - if *flags&LIST_TYPE_DEFINITION != 0 && i < len(data)-1 { - // is the next item still a part of this list? 
- next := i - for data[next] != '\n' { - next++ - } - for next < len(data)-1 && data[next] == '\n' { - next++ - } - if i < len(data)-1 && data[i] != ':' && data[next] != ':' { - *flags |= LIST_ITEM_END_OF_LIST - } - } else { - *flags |= LIST_ITEM_END_OF_LIST - } - break gatherlines - - // a blank line means this should be parsed as a block - case containsBlankLine: - *flags |= LIST_ITEM_CONTAINS_BLOCK - } - - containsBlankLine = false - - // add the line into the working buffer without prefix - raw.Write(data[line+indent : i]) - - line = i - } - - // If reached end of data, the Renderer.ListItem call we're going to make below - // is definitely the last in the list. - if line >= len(data) { - *flags |= LIST_ITEM_END_OF_LIST - } - - rawBytes := raw.Bytes() - - // render the contents of the list item - var cooked bytes.Buffer - if *flags&LIST_ITEM_CONTAINS_BLOCK != 0 && *flags&LIST_TYPE_TERM == 0 { - // intermediate render of block item, except for definition term - if sublist > 0 { - p.block(&cooked, rawBytes[:sublist]) - p.block(&cooked, rawBytes[sublist:]) - } else { - p.block(&cooked, rawBytes) - } - } else { - // intermediate render of inline item - if sublist > 0 { - p.inline(&cooked, rawBytes[:sublist]) - p.block(&cooked, rawBytes[sublist:]) - } else { - p.inline(&cooked, rawBytes) - } - } - - // render the actual list item - cookedBytes := cooked.Bytes() - parsedEnd := len(cookedBytes) - - // strip trailing newlines - for parsedEnd > 0 && cookedBytes[parsedEnd-1] == '\n' { - parsedEnd-- - } - p.r.ListItem(out, cookedBytes[:parsedEnd], *flags) - - return line -} - -// render a single paragraph that has already been parsed out -func (p *parser) renderParagraph(out *bytes.Buffer, data []byte) { - if len(data) == 0 { - return - } - - // trim leading spaces - beg := 0 - for data[beg] == ' ' { - beg++ - } - - // trim trailing newline - end := len(data) - 1 - - // trim trailing spaces - for end > beg && data[end-1] == ' ' { - end-- - } - - work := func() bool { - 
p.inline(out, data[beg:end]) - return true - } - p.r.Paragraph(out, work) -} - -func (p *parser) paragraph(out *bytes.Buffer, data []byte) int { - // prev: index of 1st char of previous line - // line: index of 1st char of current line - // i: index of cursor/end of current line - var prev, line, i int - - // keep going until we find something to mark the end of the paragraph - for i < len(data) { - // mark the beginning of the current line - prev = line - current := data[i:] - line = i - - // did we find a blank line marking the end of the paragraph? - if n := p.isEmpty(current); n > 0 { - // did this blank line followed by a definition list item? - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if i < len(data)-1 && data[i+1] == ':' { - return p.list(out, data[prev:], LIST_TYPE_DEFINITION) - } - } - - p.renderParagraph(out, data[:i]) - return i + n - } - - // an underline under some text marks a header, so our paragraph ended on prev line - if i > 0 { - if level := p.isUnderlinedHeader(current); level > 0 { - // render the paragraph - p.renderParagraph(out, data[:prev]) - - // ignore leading and trailing whitespace - eol := i - 1 - for prev < eol && data[prev] == ' ' { - prev++ - } - for eol > prev && data[eol-1] == ' ' { - eol-- - } - - // render the header - // this ugly double closure avoids forcing variables onto the heap - work := func(o *bytes.Buffer, pp *parser, d []byte) func() bool { - return func() bool { - pp.inline(o, d) - return true - } - }(out, p, data[prev:eol]) - - id := "" - if p.flags&EXTENSION_AUTO_HEADER_IDS != 0 { - id = SanitizedAnchorName(string(data[prev:eol])) - } - - p.r.Header(out, work, level, id) - - // find the end of the underline - for data[i] != '\n' { - i++ - } - return i - } - } - - // if the next line starts a block of HTML, then the paragraph ends here - if p.flags&EXTENSION_LAX_HTML_BLOCKS != 0 { - if data[i] == '<' && p.html(out, current, false) > 0 { - // rewind to before the HTML block - p.renderParagraph(out, data[:i]) - 
return i - } - } - - // if there's a prefixed header or a horizontal rule after this, paragraph is over - if p.isPrefixHeader(current) || p.isHRule(current) { - p.renderParagraph(out, data[:i]) - return i - } - - // if there's a fenced code block, paragraph is over - if p.flags&EXTENSION_FENCED_CODE != 0 { - if p.fencedCodeBlock(out, current, false) > 0 { - p.renderParagraph(out, data[:i]) - return i - } - } - - // if there's a definition list item, prev line is a definition term - if p.flags&EXTENSION_DEFINITION_LISTS != 0 { - if p.dliPrefix(current) != 0 { - return p.list(out, data[prev:], LIST_TYPE_DEFINITION) - } - } - - // if there's a list after this, paragraph is over - if p.flags&EXTENSION_NO_EMPTY_LINE_BEFORE_BLOCK != 0 { - if p.uliPrefix(current) != 0 || - p.oliPrefix(current) != 0 || - p.quotePrefix(current) != 0 || - p.codePrefix(current) != 0 { - p.renderParagraph(out, data[:i]) - return i - } - } - - // otherwise, scan to the beginning of the next line - for data[i] != '\n' { - i++ - } - i++ - } - - p.renderParagraph(out, data[:i]) - return i -} - -// SanitizedAnchorName returns a sanitized anchor name for the given text. -// -// It implements the algorithm specified in the package comment. -func SanitizedAnchorName(text string) string { - var anchorName []rune - futureDash := false - for _, r := range text { - switch { - case unicode.IsLetter(r) || unicode.IsNumber(r): - if futureDash && len(anchorName) > 0 { - anchorName = append(anchorName, '-') - } - futureDash = false - anchorName = append(anchorName, unicode.ToLower(r)) - default: - futureDash = true - } - } - return string(anchorName) -} diff --git a/vendor/github.com/russross/blackfriday/doc.go b/vendor/github.com/russross/blackfriday/doc.go deleted file mode 100644 index 9656c42a..00000000 --- a/vendor/github.com/russross/blackfriday/doc.go +++ /dev/null @@ -1,32 +0,0 @@ -// Package blackfriday is a Markdown processor. 
-// -// It translates plain text with simple formatting rules into HTML or LaTeX. -// -// Sanitized Anchor Names -// -// Blackfriday includes an algorithm for creating sanitized anchor names -// corresponding to a given input text. This algorithm is used to create -// anchors for headings when EXTENSION_AUTO_HEADER_IDS is enabled. The -// algorithm is specified below, so that other packages can create -// compatible anchor names and links to those anchors. -// -// The algorithm iterates over the input text, interpreted as UTF-8, -// one Unicode code point (rune) at a time. All runes that are letters (category L) -// or numbers (category N) are considered valid characters. They are mapped to -// lower case, and included in the output. All other runes are considered -// invalid characters. Invalid characters that preceed the first valid character, -// as well as invalid character that follow the last valid character -// are dropped completely. All other sequences of invalid characters -// between two valid characters are replaced with a single dash character '-'. -// -// SanitizedAnchorName exposes this functionality, and can be used to -// create compatible links to the anchor names generated by blackfriday. -// This algorithm is also implemented in a small standalone package at -// github.com/shurcooL/sanitized_anchor_name. It can be useful for clients -// that want a small package and don't need full functionality of blackfriday. -package blackfriday - -// NOTE: Keep Sanitized Anchor Name algorithm in sync with package -// github.com/shurcooL/sanitized_anchor_name. -// Otherwise, users of sanitized_anchor_name will get anchor names -// that are incompatible with those generated by blackfriday. 
diff --git a/vendor/github.com/russross/blackfriday/html.go b/vendor/github.com/russross/blackfriday/html.go deleted file mode 100644 index fa044ca2..00000000 --- a/vendor/github.com/russross/blackfriday/html.go +++ /dev/null @@ -1,945 +0,0 @@ -// -// Blackfriday Markdown Processor -// Available at http://github.com/russross/blackfriday -// -// Copyright © 2011 Russ Ross . -// Distributed under the Simplified BSD License. -// See README.md for details. -// - -// -// -// HTML rendering backend -// -// - -package blackfriday - -import ( - "bytes" - "fmt" - "regexp" - "strconv" - "strings" -) - -// Html renderer configuration options. -const ( - HTML_SKIP_HTML = 1 << iota // skip preformatted HTML blocks - HTML_SKIP_STYLE // skip embedded