diff --git a/README.md b/README.md index 2106d1e..bbbfc8e 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ > * JS/TS SimpleWebAuthn client – https://github.com/MasterKale/SimpleWebAuthn Used in project: -![Static Badge](https://img.shields.io/badge/Go_WebAuthn-v0.10.2-green) +![Static Badge](https://img.shields.io/badge/Go_WebAuthn-v0.11.0-green) ![Static Badge](https://img.shields.io/badge/TS%5CJS%20SimpleWebAuthn-v10.0.0-green) Actual versions: diff --git a/go.mod b/go.mod index 8ffb133..3a19eab 100644 --- a/go.mod +++ b/go.mod @@ -3,16 +3,16 @@ module github.com/egregors/passkey go 1.22 require ( - github.com/go-webauthn/webauthn v0.10.2 + github.com/go-webauthn/webauthn v0.11.0 github.com/stretchr/testify v1.9.0 ) require ( github.com/davecgh/go-spew v1.1.1 // indirect - github.com/fxamacker/cbor/v2 v2.6.0 // indirect - github.com/go-webauthn/x v0.1.9 // indirect + github.com/fxamacker/cbor/v2 v2.7.0 // indirect + github.com/go-webauthn/x v0.1.12 // indirect github.com/golang-jwt/jwt/v5 v5.2.1 // indirect - github.com/google/go-tpm v0.9.0 // indirect + github.com/google/go-tpm v0.9.1 // indirect github.com/google/uuid v1.6.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect diff --git a/go.sum b/go.sum index f1287fa..c174b85 100644 --- a/go.sum +++ b/go.sum @@ -1,15 +1,15 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/fxamacker/cbor/v2 v2.6.0 h1:sU6J2usfADwWlYDAFhZBQ6TnLFBHxgesMrQfQgk1tWA= -github.com/fxamacker/cbor/v2 v2.6.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= -github.com/go-webauthn/webauthn v0.10.2 h1:OG7B+DyuTytrEPFmTX503K77fqs3HDK/0Iv+z8UYbq4= -github.com/go-webauthn/webauthn v0.10.2/go.mod h1:Gd1IDsGAybuvK1NkwUTLbGmeksxuRJjVN2PE/xsPxHs= -github.com/go-webauthn/x v0.1.9 h1:v1oeLmoaa+gPOaZqUdDentu6Rl7HkSSsmOT6gxEQHhE= -github.com/go-webauthn/x v0.1.9/go.mod h1:pJNMlIMP1SU7cN8HNlKJpLEnFHCygLCvaLZ8a1xeoQA= +github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E= +github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ= +github.com/go-webauthn/webauthn v0.11.0 h1:2U0jWuGeoiI+XSZkHPFRtwaYtqmMUsqABtlfSq1rODo= +github.com/go-webauthn/webauthn v0.11.0/go.mod h1:57ZrqsZzD/eboQDVtBkvTdfqFYAh/7IwzdPT+sPWqB0= +github.com/go-webauthn/x v0.1.12 h1:RjQ5cvApzyU/xLCiP+rub0PE4HBZsLggbxGR5ZpUf/A= +github.com/go-webauthn/x v0.1.12/go.mod h1:XlRcGkNH8PT45TfeJYc6gqpOtiOendHhVmnOxh+5yHs= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/google/go-tpm v0.9.0 h1:sQF6YqWMi+SCXpsmS3fd21oPy/vSddwZry4JnmltHVk= -github.com/google/go-tpm v0.9.0/go.mod h1:FkNVkc6C+IsvDI9Jw1OveJmxGZUUaKxtrpOS47QWKfU= +github.com/google/go-tpm v0.9.1 h1:0pGc4X//bAlmZzMKf8iz6IsDo1nYTbYJ6FZN/rg4zdM= +github.com/google/go-tpm v0.9.1/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= diff --git a/handlers_test.go b/handlers_test.go index 66b747b..245b970 100644 --- a/handlers_test.go +++ b/handlers_test.go @@ -88,10 +88,6 @@ func TestPasskey_beginRegistration(t 
*testing.T) { WebAuthnName(). Return("Berik the Cat") - user.EXPECT(). - WebAuthnIcon(). - Return("") - tests := []struct { name string w *httptest.ResponseRecorder diff --git a/mock_User.go b/mock_User.go index 94c2350..3b9f188 100644 --- a/mock_User.go +++ b/mock_User.go @@ -192,51 +192,6 @@ func (_c *MockUser_WebAuthnID_Call) RunAndReturn(run func() []byte) *MockUser_We return _c } -// WebAuthnIcon provides a mock function with given fields: -func (_m *MockUser) WebAuthnIcon() string { - ret := _m.Called() - - if len(ret) == 0 { - panic("no return value specified for WebAuthnIcon") - } - - var r0 string - if rf, ok := ret.Get(0).(func() string); ok { - r0 = rf() - } else { - r0 = ret.Get(0).(string) - } - - return r0 -} - -// MockUser_WebAuthnIcon_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WebAuthnIcon' -type MockUser_WebAuthnIcon_Call struct { - *mock.Call -} - -// WebAuthnIcon is a helper method to define mock.On call -func (_e *MockUser_Expecter) WebAuthnIcon() *MockUser_WebAuthnIcon_Call { - return &MockUser_WebAuthnIcon_Call{Call: _e.mock.On("WebAuthnIcon")} -} - -func (_c *MockUser_WebAuthnIcon_Call) Run(run func()) *MockUser_WebAuthnIcon_Call { - _c.Call.Run(func(args mock.Arguments) { - run() - }) - return _c -} - -func (_c *MockUser_WebAuthnIcon_Call) Return(_a0 string) *MockUser_WebAuthnIcon_Call { - _c.Call.Return(_a0) - return _c -} - -func (_c *MockUser_WebAuthnIcon_Call) RunAndReturn(run func() string) *MockUser_WebAuthnIcon_Call { - _c.Call.Return(run) - return _c -} - // WebAuthnName provides a mock function with given fields: func (_m *MockUser) WebAuthnName() string { ret := _m.Called() diff --git a/vendor/github.com/fxamacker/cbor/v2/.golangci.yml b/vendor/github.com/fxamacker/cbor/v2/.golangci.yml index e65c053..38cb9ae 100644 --- a/vendor/github.com/fxamacker/cbor/v2/.golangci.yml +++ b/vendor/github.com/fxamacker/cbor/v2/.golangci.yml @@ -1,12 +1,26 @@ # Do not delete linter settings. Linters like gocritic can be enabled on the command line. linters-settings: + depguard: + rules: + prevent_unmaintained_packages: + list-mode: strict + files: + - $all + - "!$test" + allow: + - $gostd + - github.com/x448/float16 + deny: + - pkg: io/ioutil + desc: "replaced by io and os packages since Go 1.16: https://tip.golang.org/doc/go1.16#ioutil" dupl: threshold: 100 funlen: lines: 100 statements: 50 goconst: + ignore-tests: true min-len: 2 min-occurrences: 3 gocritic: @@ -17,12 +31,12 @@ linters-settings: - performance - style disabled-checks: + - commentedOutCode - dupImport # https://github.com/go-critic/go-critic/issues/845 - ifElseChain - octalLiteral - paramTypeCombine - whyNoLint - - wrapperFunc gofmt: simplify: false goimports: @@ -37,22 +51,32 @@ linters-settings: suggest-new: true misspell: locale: US + staticcheck: + checks: ["all"] linters: disable-all: true enable: + - asciicheck - bidichk + - depguard - errcheck + - exportloopref - goconst + - gocritic - gocyclo - gofmt - goimports + - goprintffuncname - gosec + - gosimple - govet - ineffassign - misspell + - nilerr - revive - staticcheck + - stylecheck - typecheck - unconvert - unused @@ -62,16 +86,19 @@ issues: max-issues-per-linter: 0 # max-same-issues default is 3. Set to 0 to disable limit. 
max-same-issues: 0 - # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: - - path: _test\.go - linters: - - goconst - - dupl - - gomnd - - lll - - path: doc\.go - linters: - - goimports - - gomnd - - lll + - path: decode.go + text: "string ` overflows ` has (\\d+) occurrences, make it a constant" + - path: decode.go + text: "string ` \\(range is \\[` has (\\d+) occurrences, make it a constant" + - path: decode.go + text: "string `, ` has (\\d+) occurrences, make it a constant" + - path: decode.go + text: "string ` overflows Go's int64` has (\\d+) occurrences, make it a constant" + - path: decode.go + text: "string `\\]\\)` has (\\d+) occurrences, make it a constant" + - path: valid.go + text: "string ` for type ` has (\\d+) occurrences, make it a constant" + - path: valid.go + text: "string `cbor: ` has (\\d+) occurrences, make it a constant" diff --git a/vendor/github.com/fxamacker/cbor/v2/README.md b/vendor/github.com/fxamacker/cbor/v2/README.md index 2bed2e6..af0a795 100644 --- a/vendor/github.com/fxamacker/cbor/v2/README.md +++ b/vendor/github.com/fxamacker/cbor/v2/README.md @@ -6,9 +6,9 @@ CBOR is a [trusted alternative](https://www.rfc-editor.org/rfc/rfc8949.html#name-comparison-of-other-binary-) to JSON, MessagePack, Protocol Buffers, etc.  CBOR is an Internet Standard defined by [IETF STD 94 (RFC 8949)](https://www.rfc-editor.org/info/std94) and is designed to be relevant for decades. -`fxamacker/cbor` is used in projects by Arm Ltd., Cisco, Dapper Labs, EdgeX Foundry, Fraunhofer‑AISEC, Let's Encrypt (ISRG), Linux Foundation, Microsoft, Mozilla, Oasis Protocol, Tailscale, Teleport, [and others](https://github.com/fxamacker/cbor#who-uses-fxamackercbor). +`fxamacker/cbor` is used in projects by Arm Ltd., Cisco, EdgeX Foundry, Flow Foundation, Fraunhofer‑AISEC, Kubernetes, Let's Encrypt (ISRG), Linux Foundation, Microsoft, Mozilla, Oasis Protocol, Tailscale, Teleport, [etc](https://github.com/fxamacker/cbor#who-uses-fxamackercbor). -See [Quick Start](#quick-start) and [Releases](https://github.com/fxamacker/cbor/releases/). 🆕 `UnmarshalFirst` and `DiagnoseFirst` can decode CBOR Sequences. +See [Quick Start](#quick-start) and [Releases](https://github.com/fxamacker/cbor/releases/). 🆕 `UnmarshalFirst` and `DiagnoseFirst` can decode CBOR Sequences. `cbor.MarshalToBuffer()` and `UserBufferEncMode` accepts user-specified buffer. ## fxamacker/cbor @@ -17,7 +17,6 @@ See [Quick Start](#quick-start) and [Releases](https://github.com/fxamacker [![CodeQL](https://github.com/fxamacker/cbor/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/fxamacker/cbor/actions/workflows/codeql-analysis.yml) [![](https://img.shields.io/badge/fuzzing-passing-44c010)](#fuzzing-and-code-coverage) [![Go Report Card](https://goreportcard.com/badge/github.com/fxamacker/cbor)](https://goreportcard.com/report/github.com/fxamacker/cbor) -[![](https://img.shields.io/ossf-scorecard/github.com/fxamacker/cbor?label=openssf%20scorecard)](https://github.com/fxamacker/cbor#fuzzing-and-code-coverage) `fxamacker/cbor` is a CBOR codec in full conformance with [IETF STD 94 (RFC 8949)](https://www.rfc-editor.org/info/std94). It also supports CBOR Sequences ([RFC 8742](https://www.rfc-editor.org/rfc/rfc8742.html)) and Extended Diagnostic Notation ([Appendix G of RFC 8610](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G)). 
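Not part of the upstream diff — a minimal, hedged sketch of the CBOR Sequence support mentioned in this README hunk: `UnmarshalFirst` decodes the first data item and returns the remaining bytes, so a sequence (RFC 8742) can be consumed in a loop. The byte values below are illustrative.

```Go
package main

import (
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	// Two concatenated CBOR data items: the integer 1 (0x01) and the text string "hi" (0x62 'h' 'i').
	seq := []byte{0x01, 0x62, 0x68, 0x69}
	for len(seq) > 0 {
		var v interface{}
		rest, err := cbor.UnmarshalFirst(seq, &v) // decode first item, keep the rest
		if err != nil {
			panic(err)
		}
		fmt.Println(v)
		seq = rest
	}
}
```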
@@ -221,7 +220,7 @@ __Install__: `go get github.com/fxamacker/cbor/v2` and `import "github.com/fxama This library can encode and decode CBOR (RFC 8949) and CBOR Sequences (RFC 8742). -- __CBOR data item__ is a single piece of CBOR data and its structure may contain zero, one, or more nested data items. +- __CBOR data item__ is a single piece of CBOR data and its structure may contain 0 or more nested data items. - __CBOR sequence__ is a concatenation of 0 or more encoded CBOR data items. Configurable limits and options can be used to balance trade-offs. @@ -242,6 +241,9 @@ err = cbor.Unmarshal(b, &v) // decode []byte b to v decoder = cbor.NewDecoder(r) // create decoder with io.Reader r err = decoder.Decode(&v) // decode a CBOR data item to v +// v2.7.0 added MarshalToBuffer() and UserBufferEncMode interface. +err = cbor.MarshalToBuffer(v, b) // encode v to b instead of using built-in buf pool. + // v2.5.0 added new functions that return remaining bytes. // UnmarshalFirst decodes first CBOR data item and returns remaining bytes. @@ -297,6 +299,17 @@ err := encoder.Encode(v) // encode v to io.Writer w Default mode and custom modes automatically apply struct tags. +### User Specified Buffer for Encoding (v2.7.0) + +`UserBufferEncMode` interface extends `EncMode` interface to add `MarshalToBuffer()`. It accepts a user-specified buffer instead of using built-in buffer pool. + +```Go +em, err := myEncOptions.UserBufferEncMode() // create UserBufferEncMode mode + +var buf bytes.Buffer +err = em.MarshalToBuffer(v, &buf) // encode v to provided buf +``` + ### Struct Tags Struct tags (`toarray`, `keyasint`, `omitempty`) reduce encoded size of structs. @@ -459,12 +472,14 @@ Default limits may need to be increased for systems handling very large data (e. ## Status -v2.6.0 (February 2024) adds important new features, optimizations, and bug fixes. It is especially useful to systems that need to convert data between CBOR and JSON. New options and optimizations improve handling of bignum, integers, maps, and strings. +v2.7.0 (June 23, 2024) adds features and improvements that help large projects (e.g. Kubernetes) use CBOR as an alternative to JSON and Protocol Buffers. Other improvements include speedups, improved memory use, bug fixes, new serialization options, etc. It passed fuzz tests (5+ billion executions) and is production quality. For more details, see [release notes](https://github.com/fxamacker/cbor/releases). ### Prior Release +[v2.6.0](https://github.com/fxamacker/cbor/releases/tag/v2.6.0) (February 2024) adds important new features, optimizations, and bug fixes. It is especially useful to systems that need to convert data between CBOR and JSON. New options and optimizations improve handling of bignum, integers, maps, and strings. + v2.5.0 was released on Sunday, August 13, 2023 with new features and important bug fixes. It is fuzz tested and production quality after extended beta [v2.5.0-beta](https://github.com/fxamacker/cbor/releases/tag/v2.5.0-beta) (Dec 2022) -> [v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) (Aug 2023). __IMPORTANT__: 👉 Before upgrading from v2.4 or older release, please read the notable changes highlighted in the release notes. v2.5.0 is a large release with bug fixes to error handling for extraneous data in `Unmarshal`, etc. that should be reviewed before upgrading. 
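Not part of the upstream diff — a short, self-contained sketch expanding the `UserBufferEncMode` snippet shown earlier in this hunk, assuming default `EncOptions`; it encodes into a caller-owned `bytes.Buffer` instead of the codec's built-in buffer pool.

```Go
package main

import (
	"bytes"
	"fmt"

	"github.com/fxamacker/cbor/v2"
)

func main() {
	// UserBufferEncMode extends EncMode with MarshalToBuffer (added in v2.7.0).
	em, err := cbor.EncOptions{}.UserBufferEncMode()
	if err != nil {
		panic(err)
	}

	var buf bytes.Buffer // caller-owned buffer, reusable across encodes
	if err := em.MarshalToBuffer(map[string]int{"a": 1}, &buf); err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", buf.Bytes())
}
```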
@@ -534,7 +549,7 @@ geomean 2.782 ## Who uses fxamacker/cbor -`fxamacker/cbor` is used in projects by Arm Ltd., Berlin Institute of Health at Charité, Chainlink, Cisco, Confidential Computing Consortium, ConsenSys, Dapper Labs, EdgeX Foundry, F5, FIDO Alliance, Fraunhofer‑AISEC, Let's Encrypt (ISRG), Linux Foundation, Matrix.org, Microsoft, Mozilla, National Cybersecurity Agency of France (govt), Netherlands (govt), Oasis Protocol, Smallstep, Tailscale, Taurus SA, Teleport, TIBCO, and others. +`fxamacker/cbor` is used in projects by Arm Ltd., Berlin Institute of Health at Charité, Chainlink, Cisco, Confidential Computing Consortium, ConsenSys, Dapper Labs, EdgeX Foundry, F5, FIDO Alliance, Fraunhofer‑AISEC, Kubernetes, Let's Encrypt (ISRG), Linux Foundation, Matrix.org, Microsoft, Mozilla, National Cybersecurity Agency of France (govt), Netherlands (govt), Oasis Protocol, Smallstep, Tailscale, Taurus SA, Teleport, TIBCO, and others. `fxamacker/cbor` passed multiple confidential security assessments. A [nonconfidential security assessment](https://github.com/veraison/go-cose/blob/v1.0.0-rc.1/reports/NCC_Microsoft-go-cose-Report_2022-05-26_v1.0.pdf) (prepared by NCC Group for Microsoft Corporation) includes a subset of fxamacker/cbor v2.4.0 in its scope. @@ -657,6 +672,8 @@ I'm especially grateful to Bastian Müller and Dieter Shirley for suggesting and I'm very grateful to Stefan Tatschner, Yawning Angel, Jernej Kos, x448, ZenGround0, and Jakob Borg for their contributions or support in the very early days. +Big thanks to Ben Luddy for his contributions in v2.6.0 and v2.7.0. + This library clearly wouldn't be possible without Carsten Bormann authoring CBOR RFCs. Special thanks to Laurence Lundblade and Jeffrey Yasskin for their help on IETF mailing list or at [7049bis](https://github.com/cbor-wg/CBORbis). diff --git a/vendor/github.com/fxamacker/cbor/v2/bytestring.go b/vendor/github.com/fxamacker/cbor/v2/bytestring.go index 52a28ed..823bff1 100644 --- a/vendor/github.com/fxamacker/cbor/v2/bytestring.go +++ b/vendor/github.com/fxamacker/cbor/v2/bytestring.go @@ -22,8 +22,8 @@ func (bs ByteString) Bytes() []byte { // MarshalCBOR encodes ByteString as CBOR byte string (major type 2). func (bs ByteString) MarshalCBOR() ([]byte, error) { - e := getEncoderBuffer() - defer putEncoderBuffer(e) + e := getEncodeBuffer() + defer putEncodeBuffer(e) // Encode length encodeHead(e, byte(cborTypeByteString), uint64(len(bs))) diff --git a/vendor/github.com/fxamacker/cbor/v2/cache.go b/vendor/github.com/fxamacker/cbor/v2/cache.go index 8a4a5c8..ea0f39e 100644 --- a/vendor/github.com/fxamacker/cbor/v2/cache.go +++ b/vendor/github.com/fxamacker/cbor/v2/cache.go @@ -6,6 +6,7 @@ package cbor import ( "bytes" "errors" + "fmt" "reflect" "sort" "strconv" @@ -84,9 +85,25 @@ func newTypeInfo(t reflect.Type) *typeInfo { } type decodingStructType struct { - fields fields - err error - toArray bool + fields fields + fieldIndicesByName map[string]int + err error + toArray bool +} + +// The stdlib errors.Join was introduced in Go 1.20, and we still support Go 1.17, so instead, +// here's a very basic implementation of an aggregated error. 
+type multierror []error + +func (m multierror) Error() string { + var sb strings.Builder + for i, err := range m { + sb.WriteString(err.Error()) + if i < len(m)-1 { + sb.WriteString(", ") + } + } + return sb.String() } func getDecodingStructType(t reflect.Type) *decodingStructType { @@ -98,12 +115,12 @@ func getDecodingStructType(t reflect.Type) *decodingStructType { toArray := hasToArrayOption(structOptions) - var err error + var errs []error for i := 0; i < len(flds); i++ { if flds[i].keyAsInt { nameAsInt, numErr := strconv.Atoi(flds[i].name) if numErr != nil { - err = errors.New("cbor: failed to parse field name \"" + flds[i].name + "\" to int (" + numErr.Error() + ")") + errs = append(errs, errors.New("cbor: failed to parse field name \""+flds[i].name+"\" to int ("+numErr.Error()+")")) break } flds[i].nameAsInt = int64(nameAsInt) @@ -112,7 +129,36 @@ func getDecodingStructType(t reflect.Type) *decodingStructType { flds[i].typInfo = getTypeInfo(flds[i].typ) } - structType := &decodingStructType{fields: flds, err: err, toArray: toArray} + fieldIndicesByName := make(map[string]int, len(flds)) + for i, fld := range flds { + if _, ok := fieldIndicesByName[fld.name]; ok { + errs = append(errs, fmt.Errorf("cbor: two or more fields of %v have the same name %q", t, fld.name)) + continue + } + fieldIndicesByName[fld.name] = i + } + + var err error + { + var multi multierror + for _, each := range errs { + if each != nil { + multi = append(multi, each) + } + } + if len(multi) == 1 { + err = multi[0] + } else if len(multi) > 1 { + err = multi + } + } + + structType := &decodingStructType{ + fields: flds, + fieldIndicesByName: fieldIndicesByName, + err: err, + toArray: toArray, + } decodingStructTypeCache.Store(t, structType) return structType } @@ -124,17 +170,17 @@ type encodingStructType struct { omitEmptyFieldsIdx []int err error toArray bool - fixedLength bool // Struct type doesn't have any omitempty or anonymous fields. 
} func (st *encodingStructType) getFields(em *encMode) fields { - if em.sort == SortNone { + switch em.sort { + case SortNone, SortFastShuffle: return st.fields - } - if em.sort == SortLengthFirst { + case SortLengthFirst: return st.lengthFirstFields + default: + return st.bytewiseFields } - return st.bytewiseFields } type bytewiseFieldSorter struct { @@ -188,8 +234,7 @@ func getEncodingStructType(t reflect.Type) (*encodingStructType, error) { var hasKeyAsInt bool var hasKeyAsStr bool var omitEmptyIdx []int - fixedLength := true - e := getEncoderBuffer() + e := getEncodeBuffer() for i := 0; i < len(flds); i++ { // Get field's encodeFunc flds[i].ef, flds[i].ief = getEncodeFunc(flds[i].typ) @@ -231,23 +276,18 @@ func getEncodingStructType(t reflect.Type) (*encodingStructType, error) { copy(flds[i].cborNameByteString, flds[i].cborName) // Reset encoded CBOR type to byte string, preserving the "additional // information" bits: - flds[i].cborNameByteString[0] = byte(cborTypeByteString) | (flds[i].cborNameByteString[0] & 0x1f) + flds[i].cborNameByteString[0] = byte(cborTypeByteString) | + getAdditionalInformation(flds[i].cborNameByteString[0]) hasKeyAsStr = true } - // Check if field is from embedded struct - if len(flds[i].idx) > 1 { - fixedLength = false - } - // Check if field can be omitted when empty if flds[i].omitEmpty { - fixedLength = false omitEmptyIdx = append(omitEmptyIdx, i) } } - putEncoderBuffer(e) + putEncodeBuffer(e) if err != nil { structType := &encodingStructType{err: err} @@ -272,8 +312,8 @@ func getEncodingStructType(t reflect.Type) (*encodingStructType, error) { bytewiseFields: bytewiseFields, lengthFirstFields: lengthFirstFields, omitEmptyFieldsIdx: omitEmptyIdx, - fixedLength: fixedLength, } + encodingStructTypeCache.Store(t, structType) return structType, structType.err } @@ -290,9 +330,8 @@ func getEncodingStructToArrayType(t reflect.Type, flds fields) (*encodingStructT } structType := &encodingStructType{ - fields: flds, - toArray: true, - fixedLength: true, + fields: flds, + toArray: true, } encodingStructTypeCache.Store(t, structType) return structType, structType.err diff --git a/vendor/github.com/fxamacker/cbor/v2/common.go b/vendor/github.com/fxamacker/cbor/v2/common.go new file mode 100644 index 0000000..ec038a4 --- /dev/null +++ b/vendor/github.com/fxamacker/cbor/v2/common.go @@ -0,0 +1,182 @@ +// Copyright (c) Faye Amacker. All rights reserved. +// Licensed under the MIT License. See LICENSE in the project root for license information. 
+ +package cbor + +import ( + "fmt" + "strconv" +) + +type cborType uint8 + +const ( + cborTypePositiveInt cborType = 0x00 + cborTypeNegativeInt cborType = 0x20 + cborTypeByteString cborType = 0x40 + cborTypeTextString cborType = 0x60 + cborTypeArray cborType = 0x80 + cborTypeMap cborType = 0xa0 + cborTypeTag cborType = 0xc0 + cborTypePrimitives cborType = 0xe0 +) + +func (t cborType) String() string { + switch t { + case cborTypePositiveInt: + return "positive integer" + case cborTypeNegativeInt: + return "negative integer" + case cborTypeByteString: + return "byte string" + case cborTypeTextString: + return "UTF-8 text string" + case cborTypeArray: + return "array" + case cborTypeMap: + return "map" + case cborTypeTag: + return "tag" + case cborTypePrimitives: + return "primitives" + default: + return "Invalid type " + strconv.Itoa(int(t)) + } +} + +type additionalInformation uint8 + +const ( + maxAdditionalInformationWithoutArgument = 23 + additionalInformationWith1ByteArgument = 24 + additionalInformationWith2ByteArgument = 25 + additionalInformationWith4ByteArgument = 26 + additionalInformationWith8ByteArgument = 27 + + // For major type 7. + additionalInformationAsFalse = 20 + additionalInformationAsTrue = 21 + additionalInformationAsNull = 22 + additionalInformationAsUndefined = 23 + additionalInformationAsFloat16 = 25 + additionalInformationAsFloat32 = 26 + additionalInformationAsFloat64 = 27 + + // For major type 2, 3, 4, 5. + additionalInformationAsIndefiniteLengthFlag = 31 +) + +const ( + maxSimpleValueInAdditionalInformation = 23 + minSimpleValueIn1ByteArgument = 32 +) + +func (ai additionalInformation) isIndefiniteLength() bool { + return ai == additionalInformationAsIndefiniteLengthFlag +} + +const ( + // From RFC 8949 Section 3: + // "The initial byte of each encoded data item contains both information about the major type + // (the high-order 3 bits, described in Section 3.1) and additional information + // (the low-order 5 bits)." + + // typeMask is used to extract major type in initial byte of encoded data item. + typeMask = 0xe0 + + // additionalInformationMask is used to extract additional information in initial byte of encoded data item. + additionalInformationMask = 0x1f +) + +func getType(raw byte) cborType { + return cborType(raw & typeMask) +} + +func getAdditionalInformation(raw byte) byte { + return raw & additionalInformationMask +} + +func isBreakFlag(raw byte) bool { + return raw == cborBreakFlag +} + +func parseInitialByte(b byte) (t cborType, ai byte) { + return getType(b), getAdditionalInformation(b) +} + +const ( + tagNumRFC3339Time = 0 + tagNumEpochTime = 1 + tagNumUnsignedBignum = 2 + tagNumNegativeBignum = 3 + tagNumExpectedLaterEncodingBase64URL = 21 + tagNumExpectedLaterEncodingBase64 = 22 + tagNumExpectedLaterEncodingBase16 = 23 + tagNumSelfDescribedCBOR = 55799 +) + +const ( + cborBreakFlag = byte(0xff) + cborByteStringWithIndefiniteLengthHead = byte(0x5f) + cborTextStringWithIndefiniteLengthHead = byte(0x7f) + cborArrayWithIndefiniteLengthHead = byte(0x9f) + cborMapWithIndefiniteLengthHead = byte(0xbf) +) + +var ( + cborFalse = []byte{0xf4} + cborTrue = []byte{0xf5} + cborNil = []byte{0xf6} + cborNaN = []byte{0xf9, 0x7e, 0x00} + cborPositiveInfinity = []byte{0xf9, 0x7c, 0x00} + cborNegativeInfinity = []byte{0xf9, 0xfc, 0x00} +) + +// validBuiltinTag checks that supported built-in tag numbers are followed by expected content types. 
+func validBuiltinTag(tagNum uint64, contentHead byte) error { + t := getType(contentHead) + switch tagNum { + case tagNumRFC3339Time: + // Tag content (date/time text string in RFC 3339 format) must be string type. + if t != cborTypeTextString { + return newInadmissibleTagContentTypeError( + tagNumRFC3339Time, + "text string", + t.String()) + } + return nil + + case tagNumEpochTime: + // Tag content (epoch date/time) must be uint, int, or float type. + if t != cborTypePositiveInt && t != cborTypeNegativeInt && (contentHead < 0xf9 || contentHead > 0xfb) { + return newInadmissibleTagContentTypeError( + tagNumEpochTime, + "integer or floating-point number", + t.String()) + } + return nil + + case tagNumUnsignedBignum, tagNumNegativeBignum: + // Tag content (bignum) must be byte type. + if t != cborTypeByteString { + return newInadmissibleTagContentTypeErrorf( + fmt.Sprintf( + "tag number %d or %d must be followed by byte string, got %s", + tagNumUnsignedBignum, + tagNumNegativeBignum, + t.String(), + )) + } + return nil + + case tagNumExpectedLaterEncodingBase64URL, tagNumExpectedLaterEncodingBase64, tagNumExpectedLaterEncodingBase16: + // From RFC 8949 3.4.5.2: + // The data item tagged can be a byte string or any other data item. In the latter + // case, the tag applies to all of the byte string data items contained in the data + // item, except for those contained in a nested data item tagged with an expected + // conversion. + return nil + } + + return nil +} diff --git a/vendor/github.com/fxamacker/cbor/v2/decode.go b/vendor/github.com/fxamacker/cbor/v2/decode.go index 0b44124..85842ac 100644 --- a/vendor/github.com/fxamacker/cbor/v2/decode.go +++ b/vendor/github.com/fxamacker/cbor/v2/decode.go @@ -5,7 +5,9 @@ package cbor import ( "encoding" + "encoding/base64" "encoding/binary" + "encoding/hex" "errors" "fmt" "io" @@ -87,7 +89,8 @@ import ( // To unmarshal a CBOR text string into a time.Time value, Unmarshal parses text // string formatted in RFC3339. To unmarshal a CBOR integer/float into a // time.Time value, Unmarshal creates an unix time with integer/float as seconds -// and fractional seconds since January 1, 1970 UTC. +// and fractional seconds since January 1, 1970 UTC. As a special case, Infinite +// and NaN float values decode to time.Time's zero value. // // To unmarshal CBOR null (0xf6) and undefined (0xf7) values into a // slice/map/pointer, Unmarshal sets Go value to nil. Because null is often @@ -207,7 +210,95 @@ func (e *UnknownFieldError) Error() string { return fmt.Sprintf("cbor: found unknown field at map element index %d", e.Index) } -// DupMapKeyMode specifies how to enforce duplicate map key. +// UnacceptableDataItemError is returned when unmarshaling a CBOR input that contains a data item +// that is not acceptable to a specific CBOR-based application protocol ("invalid or unexpected" as +// described in RFC 8949 Section 5 Paragraph 3). +type UnacceptableDataItemError struct { + CBORType string + Message string +} + +func (e UnacceptableDataItemError) Error() string { + return fmt.Sprintf("cbor: data item of cbor type %s is not accepted by protocol: %s", e.CBORType, e.Message) +} + +// ByteStringExpectedFormatError is returned when unmarshaling CBOR byte string fails when +// using non-default ByteStringExpectedFormat decoding option that makes decoder expect +// a specified format such as base64, hex, etc. 
+type ByteStringExpectedFormatError struct { + expectedFormatOption ByteStringExpectedFormatMode + err error +} + +func newByteStringExpectedFormatError(expectedFormatOption ByteStringExpectedFormatMode, err error) *ByteStringExpectedFormatError { + return &ByteStringExpectedFormatError{expectedFormatOption, err} +} + +func (e *ByteStringExpectedFormatError) Error() string { + switch e.expectedFormatOption { + case ByteStringExpectedBase64URL: + return fmt.Sprintf("cbor: failed to decode base64url from byte string: %s", e.err) + + case ByteStringExpectedBase64: + return fmt.Sprintf("cbor: failed to decode base64 from byte string: %s", e.err) + + case ByteStringExpectedBase16: + return fmt.Sprintf("cbor: failed to decode hex from byte string: %s", e.err) + + default: + return fmt.Sprintf("cbor: failed to decode byte string in expected format %d: %s", e.expectedFormatOption, e.err) + } +} + +func (e *ByteStringExpectedFormatError) Unwrap() error { + return e.err +} + +// InadmissibleTagContentTypeError is returned when unmarshaling built-in CBOR tags +// fails because of inadmissible type for tag content. Currently, the built-in +// CBOR tags in this codec are tags 0-3 and 21-23. +// See "Tag validity" in RFC 8949 Section 5.3.2. +type InadmissibleTagContentTypeError struct { + s string + tagNum int + expectedTagContentType string + gotTagContentType string +} + +func newInadmissibleTagContentTypeError( + tagNum int, + expectedTagContentType string, + gotTagContentType string, +) *InadmissibleTagContentTypeError { + return &InadmissibleTagContentTypeError{ + tagNum: tagNum, + expectedTagContentType: expectedTagContentType, + gotTagContentType: gotTagContentType, + } +} + +func newInadmissibleTagContentTypeErrorf(s string) *InadmissibleTagContentTypeError { + return &InadmissibleTagContentTypeError{s: "cbor: " + s} //nolint:goconst // ignore "cbor" +} + +func (e *InadmissibleTagContentTypeError) Error() string { + if e.s == "" { + return fmt.Sprintf( + "cbor: tag number %d must be followed by %s, got %s", + e.tagNum, + e.expectedTagContentType, + e.gotTagContentType, + ) + } + return e.s +} + +// DupMapKeyMode specifies how to enforce duplicate map key. Two map keys are considered duplicates if: +// 1. When decoding into a struct, both keys match the same struct field. The keys are also +// considered duplicates if neither matches any field and decoding to interface{} would produce +// equal (==) values for both keys. +// 2. When decoding into a map, both keys are equal (==) when decoded into values of the +// destination map's key type. type DupMapKeyMode int const ( @@ -422,6 +513,13 @@ const ( // ByteStringToStringAllowed permits decoding a CBOR byte string into a Go string. ByteStringToStringAllowed + // ByteStringToStringAllowedWithExpectedLaterEncoding permits decoding a CBOR byte string + // into a Go string. Also, if the byte string is enclosed (directly or indirectly) by one of + // the "expected later encoding" tags (numbers 21 through 23), the destination string will + // be populated by applying the designated text encoding to the contents of the input byte + // string. + ByteStringToStringAllowedWithExpectedLaterEncoding + maxByteStringToStringMode ) @@ -466,13 +564,214 @@ func (uttam UnrecognizedTagToAnyMode) valid() bool { return uttam >= 0 && uttam < maxUnrecognizedTagToAny } +// TimeTagToAnyMode specifies how to decode CBOR tag 0 and 1 into an empty interface (any). +// Based on the specified mode, Unmarshal can return a time.Time value or a time string in a specific format. 
+type TimeTagToAnyMode int + +const ( + // TimeTagToTime decodes CBOR tag 0 and 1 into a time.Time value + // when decoding tag 0 or 1 into an empty interface. + TimeTagToTime TimeTagToAnyMode = iota + + // TimeTagToRFC3339 decodes CBOR tag 0 and 1 into a time string in RFC3339 format + // when decoding tag 0 or 1 into an empty interface. + TimeTagToRFC3339 + + // TimeTagToRFC3339Nano decodes CBOR tag 0 and 1 into a time string in RFC3339Nano format + // when decoding tag 0 or 1 into an empty interface. + TimeTagToRFC3339Nano + + maxTimeTagToAnyMode +) + +func (tttam TimeTagToAnyMode) valid() bool { + return tttam >= 0 && tttam < maxTimeTagToAnyMode +} + +// SimpleValueRegistry is a registry of unmarshaling behaviors for each possible CBOR simple value +// number (0...23 and 32...255). +type SimpleValueRegistry struct { + rejected [256]bool +} + +// WithRejectedSimpleValue registers the given simple value as rejected. If the simple value is +// encountered in a CBOR input during unmarshaling, an UnacceptableDataItemError is returned. +func WithRejectedSimpleValue(sv SimpleValue) func(*SimpleValueRegistry) error { + return func(r *SimpleValueRegistry) error { + if sv >= 24 && sv <= 31 { + return fmt.Errorf("cbor: cannot set analog for reserved simple value %d", sv) + } + r.rejected[sv] = true + return nil + } +} + +// Creates a new SimpleValueRegistry. The registry state is initialized by executing the provided +// functions in order against a registry that is pre-populated with the defaults for all well-formed +// simple value numbers. +func NewSimpleValueRegistryFromDefaults(fns ...func(*SimpleValueRegistry) error) (*SimpleValueRegistry, error) { + var r SimpleValueRegistry + for _, fn := range fns { + if err := fn(&r); err != nil { + return nil, err + } + } + return &r, nil +} + +// NaNMode specifies how to decode floating-point values (major type 7, additional information 25 +// through 27) representing NaN (not-a-number). +type NaNMode int + +const ( + // NaNDecodeAllowed will decode NaN values to Go float32 or float64. + NaNDecodeAllowed NaNMode = iota + + // NaNDecodeForbidden will return an UnacceptableDataItemError on an attempt to decode a NaN value. + NaNDecodeForbidden + + maxNaNDecode +) + +func (ndm NaNMode) valid() bool { + return ndm >= 0 && ndm < maxNaNDecode +} + +// InfMode specifies how to decode floating-point values (major type 7, additional information 25 +// through 27) representing positive or negative infinity. +type InfMode int + +const ( + // InfDecodeAllowed will decode infinite values to Go float32 or float64. + InfDecodeAllowed InfMode = iota + + // InfDecodeForbidden will return an UnacceptableDataItemError on an attempt to decode an + // infinite value. + InfDecodeForbidden + + maxInfDecode +) + +func (idm InfMode) valid() bool { + return idm >= 0 && idm < maxInfDecode +} + +// ByteStringToTimeMode specifies the behavior when decoding a CBOR byte string into a Go time.Time. +type ByteStringToTimeMode int + +const ( + // ByteStringToTimeForbidden generates an error on an attempt to decode a CBOR byte string into a Go time.Time. + ByteStringToTimeForbidden ByteStringToTimeMode = iota + + // ByteStringToTimeAllowed permits decoding a CBOR byte string into a Go time.Time. 
+ ByteStringToTimeAllowed + + maxByteStringToTimeMode +) + +func (bttm ByteStringToTimeMode) valid() bool { + return bttm >= 0 && bttm < maxByteStringToTimeMode +} + +// ByteStringExpectedFormatMode specifies how to decode CBOR byte string into Go byte slice +// when the byte string is NOT enclosed in CBOR tag 21, 22, or 23. An error is returned if +// the CBOR byte string does not contain the expected format (e.g. base64) specified. +// For tags 21-23, see "Expected Later Encoding for CBOR-to-JSON Converters" +// in RFC 8949 Section 3.4.5.2. +type ByteStringExpectedFormatMode int + +const ( + // ByteStringExpectedFormatNone copies the unmodified CBOR byte string into Go byte slice + // if the byte string is not tagged by CBOR tag 21-23. + ByteStringExpectedFormatNone ByteStringExpectedFormatMode = iota + + // ByteStringExpectedBase64URL expects CBOR byte strings to contain base64url-encoded bytes + // if the byte string is not tagged by CBOR tag 21-23. The decoder will attempt to decode + // the base64url-encoded bytes into Go slice. + ByteStringExpectedBase64URL + + // ByteStringExpectedBase64 expects CBOR byte strings to contain base64-encoded bytes + // if the byte string is not tagged by CBOR tag 21-23. The decoder will attempt to decode + // the base64-encoded bytes into Go slice. + ByteStringExpectedBase64 + + // ByteStringExpectedBase16 expects CBOR byte strings to contain base16-encoded bytes + // if the byte string is not tagged by CBOR tag 21-23. The decoder will attempt to decode + // the base16-encoded bytes into Go slice. + ByteStringExpectedBase16 + + maxByteStringExpectedFormatMode +) + +func (bsefm ByteStringExpectedFormatMode) valid() bool { + return bsefm >= 0 && bsefm < maxByteStringExpectedFormatMode +} + +// BignumTagMode specifies whether or not the "bignum" tags 2 and 3 (RFC 8949 Section 3.4.3) can be +// decoded. +type BignumTagMode int + +const ( + // BignumTagAllowed allows bignum tags to be decoded. + BignumTagAllowed BignumTagMode = iota + + // BignumTagForbidden produces an UnacceptableDataItemError during Unmarshal if a bignum tag + // is encountered in the input. + BignumTagForbidden + + maxBignumTag +) + +func (btm BignumTagMode) valid() bool { + return btm >= 0 && btm < maxBignumTag +} + +// BinaryUnmarshalerMode specifies how to decode into types that implement +// encoding.BinaryUnmarshaler. +type BinaryUnmarshalerMode int + +const ( + // BinaryUnmarshalerByteString will invoke UnmarshalBinary on the contents of a CBOR byte + // string when decoding into a value that implements BinaryUnmarshaler. + BinaryUnmarshalerByteString BinaryUnmarshalerMode = iota + + // BinaryUnmarshalerNone does not recognize BinaryUnmarshaler implementations during decode. + BinaryUnmarshalerNone + + maxBinaryUnmarshalerMode +) + +func (bum BinaryUnmarshalerMode) valid() bool { + return bum >= 0 && bum < maxBinaryUnmarshalerMode +} + // DecOptions specifies decoding options. type DecOptions struct { // DupMapKey specifies whether to enforce duplicate map key. DupMapKey DupMapKeyMode - // TimeTag specifies whether to check validity of time.Time (e.g. valid tag number and tag content type). - // For now, valid tag number means 0 or 1 as specified in RFC 7049 if the Go type is time.Time. + // TimeTag specifies whether or not untagged data items, or tags other + // than tag 0 and tag 1, can be decoded to time.Time. If tag 0 or tag 1 + // appears in an input, the type of its content is always validated as + // specified in RFC 8949. 
That behavior is not controlled by this + // option. The behavior of the supported modes are: + // + // DecTagIgnored (default): Untagged text strings and text strings + // enclosed in tags other than 0 and 1 are decoded as though enclosed + // in tag 0. Untagged unsigned integers, negative integers, and + // floating-point numbers (or those enclosed in tags other than 0 and + // 1) are decoded as though enclosed in tag 1. Decoding a tag other + // than 0 or 1 enclosing simple values null or undefined into a + // time.Time does not modify the destination value. + // + // DecTagOptional: Untagged text strings are decoded as though + // enclosed in tag 0. Untagged unsigned integers, negative integers, + // and floating-point numbers are decoded as though enclosed in tag + // 1. Tags other than 0 and 1 will produce an error on attempts to + // decode them into a time.Time. + // + // DecTagRequired: Only tags 0 and 1 can be decoded to time.Time. Any + // other input will produce an error. TimeTag DecTagMode // MaxNestedLevels specifies the max nested levels allowed for any combination of CBOR array, maps, and tags. @@ -538,22 +837,85 @@ type DecOptions struct { // UnrecognizedTagToAny specifies how to decode unrecognized CBOR tag into an empty interface. // Currently, recognized CBOR tag numbers are 0, 1, 2, 3, or registered by TagSet. UnrecognizedTagToAny UnrecognizedTagToAnyMode + + // TimeTagToAny specifies how to decode CBOR tag 0 and 1 into an empty interface (any). + // Based on the specified mode, Unmarshal can return a time.Time value or a time string in a specific format. + TimeTagToAny TimeTagToAnyMode + + // SimpleValues is an immutable mapping from each CBOR simple value to a corresponding + // unmarshal behavior. If nil, the simple values false, true, null, and undefined are mapped + // to the Go analog values false, true, nil, and nil, respectively, and all other simple + // values N (except the reserved simple values 24 through 31) are mapped to + // cbor.SimpleValue(N). In other words, all well-formed simple values can be decoded. + // + // Users may provide a custom SimpleValueRegistry constructed via + // NewSimpleValueRegistryFromDefaults. + SimpleValues *SimpleValueRegistry + + // NaN specifies how to decode floating-point values (major type 7, additional information + // 25 through 27) representing NaN (not-a-number). + NaN NaNMode + + // Inf specifies how to decode floating-point values (major type 7, additional information + // 25 through 27) representing positive or negative infinity. + Inf InfMode + + // ByteStringToTime specifies how to decode CBOR byte string into Go time.Time. + ByteStringToTime ByteStringToTimeMode + + // ByteStringExpectedFormat specifies how to decode CBOR byte string into Go byte slice + // when the byte string is NOT enclosed in CBOR tag 21, 22, or 23. An error is returned if + // the CBOR byte string does not contain the expected format (e.g. base64) specified. + // For tags 21-23, see "Expected Later Encoding for CBOR-to-JSON Converters" + // in RFC 8949 Section 3.4.5.2. + ByteStringExpectedFormat ByteStringExpectedFormatMode + + // BignumTag specifies whether or not the "bignum" tags 2 and 3 (RFC 8949 Section 3.4.3) can + // be decoded. Unlike BigIntDec, this option applies to all bignum tags encountered in a + // CBOR input, independent of the type of the destination value of a particular Unmarshal + // operation. 
+ BignumTag BignumTagMode + + // BinaryUnmarshaler specifies how to decode into types that implement + // encoding.BinaryUnmarshaler. + BinaryUnmarshaler BinaryUnmarshalerMode } // DecMode returns DecMode with immutable options and no tags (safe for concurrency). -func (opts DecOptions) DecMode() (DecMode, error) { +func (opts DecOptions) DecMode() (DecMode, error) { //nolint:gocritic // ignore hugeParam return opts.decMode() } -// DecModeWithTags returns DecMode with options and tags that are both immutable (safe for concurrency). -func (opts DecOptions) DecModeWithTags(tags TagSet) (DecMode, error) { +// validForTags checks that the provided tag set is compatible with these options and returns a +// non-nil error if and only if the provided tag set is incompatible. +func (opts DecOptions) validForTags(tags TagSet) error { //nolint:gocritic // ignore hugeParam if opts.TagsMd == TagsForbidden { - return nil, errors.New("cbor: cannot create DecMode with TagSet when TagsMd is TagsForbidden") + return errors.New("cbor: cannot create DecMode with TagSet when TagsMd is TagsForbidden") } if tags == nil { - return nil, errors.New("cbor: cannot create DecMode with nil value as TagSet") + return errors.New("cbor: cannot create DecMode with nil value as TagSet") } + if opts.ByteStringToString == ByteStringToStringAllowedWithExpectedLaterEncoding || + opts.ByteStringExpectedFormat != ByteStringExpectedFormatNone { + for _, tagNum := range []uint64{ + tagNumExpectedLaterEncodingBase64URL, + tagNumExpectedLaterEncodingBase64, + tagNumExpectedLaterEncodingBase16, + } { + if rt := tags.getTypeFromTagNum([]uint64{tagNum}); rt != nil { + return fmt.Errorf("cbor: DecMode with non-default StringExpectedEncoding or ByteSliceExpectedEncoding treats tag %d as built-in and conflicts with the provided TagSet's registration of %v", tagNum, rt) + } + } + } + return nil +} + +// DecModeWithTags returns DecMode with options and tags that are both immutable (safe for concurrency). +func (opts DecOptions) DecModeWithTags(tags TagSet) (DecMode, error) { //nolint:gocritic // ignore hugeParam + if err := opts.validForTags(tags); err != nil { + return nil, err + } dm, err := opts.decMode() if err != nil { return nil, err @@ -578,12 +940,9 @@ func (opts DecOptions) DecModeWithTags(tags TagSet) (DecMode, error) { } // DecModeWithSharedTags returns DecMode with immutable options and mutable shared tags (safe for concurrency). 
-func (opts DecOptions) DecModeWithSharedTags(tags TagSet) (DecMode, error) { - if opts.TagsMd == TagsForbidden { - return nil, errors.New("cbor: cannot create DecMode with TagSet when TagsMd is TagsForbidden") - } - if tags == nil { - return nil, errors.New("cbor: cannot create DecMode with nil value as TagSet") +func (opts DecOptions) DecModeWithSharedTags(tags TagSet) (DecMode, error) { //nolint:gocritic // ignore hugeParam + if err := opts.validForTags(tags); err != nil { + return nil, err } dm, err := opts.decMode() if err != nil { @@ -601,89 +960,166 @@ const ( defaultMaxMapPairs = 131072 minMaxMapPairs = 16 maxMaxMapPairs = 2147483647 + + defaultMaxNestedLevels = 32 + minMaxNestedLevels = 4 + maxMaxNestedLevels = 65535 ) -func (opts DecOptions) decMode() (*decMode, error) { +var defaultSimpleValues = func() *SimpleValueRegistry { + registry, err := NewSimpleValueRegistryFromDefaults() + if err != nil { + panic(err) + } + return registry +}() + +//nolint:gocyclo // Each option comes with some manageable boilerplate +func (opts DecOptions) decMode() (*decMode, error) { //nolint:gocritic // ignore hugeParam if !opts.DupMapKey.valid() { return nil, errors.New("cbor: invalid DupMapKey " + strconv.Itoa(int(opts.DupMapKey))) } + if !opts.TimeTag.valid() { return nil, errors.New("cbor: invalid TimeTag " + strconv.Itoa(int(opts.TimeTag))) } + if !opts.IndefLength.valid() { return nil, errors.New("cbor: invalid IndefLength " + strconv.Itoa(int(opts.IndefLength))) } + if !opts.TagsMd.valid() { return nil, errors.New("cbor: invalid TagsMd " + strconv.Itoa(int(opts.TagsMd))) } + if !opts.IntDec.valid() { return nil, errors.New("cbor: invalid IntDec " + strconv.Itoa(int(opts.IntDec))) } + if !opts.MapKeyByteString.valid() { return nil, errors.New("cbor: invalid MapKeyByteString " + strconv.Itoa(int(opts.MapKeyByteString))) } + if opts.MaxNestedLevels == 0 { - opts.MaxNestedLevels = 32 - } else if opts.MaxNestedLevels < 4 || opts.MaxNestedLevels > 65535 { - return nil, errors.New("cbor: invalid MaxNestedLevels " + strconv.Itoa(opts.MaxNestedLevels) + " (range is [4, 65535])") + opts.MaxNestedLevels = defaultMaxNestedLevels + } else if opts.MaxNestedLevels < minMaxNestedLevels || opts.MaxNestedLevels > maxMaxNestedLevels { + return nil, errors.New("cbor: invalid MaxNestedLevels " + strconv.Itoa(opts.MaxNestedLevels) + + " (range is [" + strconv.Itoa(minMaxNestedLevels) + ", " + strconv.Itoa(maxMaxNestedLevels) + "])") } + if opts.MaxArrayElements == 0 { opts.MaxArrayElements = defaultMaxArrayElements } else if opts.MaxArrayElements < minMaxArrayElements || opts.MaxArrayElements > maxMaxArrayElements { - return nil, errors.New("cbor: invalid MaxArrayElements " + strconv.Itoa(opts.MaxArrayElements) + " (range is [" + strconv.Itoa(minMaxArrayElements) + ", " + strconv.Itoa(maxMaxArrayElements) + "])") + return nil, errors.New("cbor: invalid MaxArrayElements " + strconv.Itoa(opts.MaxArrayElements) + + " (range is [" + strconv.Itoa(minMaxArrayElements) + ", " + strconv.Itoa(maxMaxArrayElements) + "])") } + if opts.MaxMapPairs == 0 { opts.MaxMapPairs = defaultMaxMapPairs } else if opts.MaxMapPairs < minMaxMapPairs || opts.MaxMapPairs > maxMaxMapPairs { - return nil, errors.New("cbor: invalid MaxMapPairs " + strconv.Itoa(opts.MaxMapPairs) + " (range is [" + strconv.Itoa(minMaxMapPairs) + ", " + strconv.Itoa(maxMaxMapPairs) + "])") + return nil, errors.New("cbor: invalid MaxMapPairs " + strconv.Itoa(opts.MaxMapPairs) + + " (range is [" + strconv.Itoa(minMaxMapPairs) + ", " + strconv.Itoa(maxMaxMapPairs) + 
"])") } + if !opts.ExtraReturnErrors.valid() { return nil, errors.New("cbor: invalid ExtraReturnErrors " + strconv.Itoa(int(opts.ExtraReturnErrors))) } + if opts.DefaultMapType != nil && opts.DefaultMapType.Kind() != reflect.Map { return nil, fmt.Errorf("cbor: invalid DefaultMapType %s", opts.DefaultMapType) } + if !opts.UTF8.valid() { return nil, errors.New("cbor: invalid UTF8 " + strconv.Itoa(int(opts.UTF8))) } + if !opts.FieldNameMatching.valid() { return nil, errors.New("cbor: invalid FieldNameMatching " + strconv.Itoa(int(opts.FieldNameMatching))) } + if !opts.BigIntDec.valid() { return nil, errors.New("cbor: invalid BigIntDec " + strconv.Itoa(int(opts.BigIntDec))) } - if opts.DefaultByteStringType != nil && opts.DefaultByteStringType.Kind() != reflect.String && (opts.DefaultByteStringType.Kind() != reflect.Slice || opts.DefaultByteStringType.Elem().Kind() != reflect.Uint8) { + + if opts.DefaultByteStringType != nil && + opts.DefaultByteStringType.Kind() != reflect.String && + (opts.DefaultByteStringType.Kind() != reflect.Slice || opts.DefaultByteStringType.Elem().Kind() != reflect.Uint8) { return nil, fmt.Errorf("cbor: invalid DefaultByteStringType: %s is not of kind string or []uint8", opts.DefaultByteStringType) } + if !opts.ByteStringToString.valid() { return nil, errors.New("cbor: invalid ByteStringToString " + strconv.Itoa(int(opts.ByteStringToString))) } + if !opts.FieldNameByteString.valid() { return nil, errors.New("cbor: invalid FieldNameByteString " + strconv.Itoa(int(opts.FieldNameByteString))) } + if !opts.UnrecognizedTagToAny.valid() { return nil, errors.New("cbor: invalid UnrecognizedTagToAnyMode " + strconv.Itoa(int(opts.UnrecognizedTagToAny))) } + simpleValues := opts.SimpleValues + if simpleValues == nil { + simpleValues = defaultSimpleValues + } + + if !opts.TimeTagToAny.valid() { + return nil, errors.New("cbor: invalid TimeTagToAny " + strconv.Itoa(int(opts.TimeTagToAny))) + } + + if !opts.NaN.valid() { + return nil, errors.New("cbor: invalid NaNDec " + strconv.Itoa(int(opts.NaN))) + } + + if !opts.Inf.valid() { + return nil, errors.New("cbor: invalid InfDec " + strconv.Itoa(int(opts.Inf))) + } + + if !opts.ByteStringToTime.valid() { + return nil, errors.New("cbor: invalid ByteStringToTime " + strconv.Itoa(int(opts.ByteStringToTime))) + } + + if !opts.ByteStringExpectedFormat.valid() { + return nil, errors.New("cbor: invalid ByteStringExpectedFormat " + strconv.Itoa(int(opts.ByteStringExpectedFormat))) + } + + if !opts.BignumTag.valid() { + return nil, errors.New("cbor: invalid BignumTag " + strconv.Itoa(int(opts.BignumTag))) + } + + if !opts.BinaryUnmarshaler.valid() { + return nil, errors.New("cbor: invalid BinaryUnmarshaler " + strconv.Itoa(int(opts.BinaryUnmarshaler))) + } + dm := decMode{ - dupMapKey: opts.DupMapKey, - timeTag: opts.TimeTag, - maxNestedLevels: opts.MaxNestedLevels, - maxArrayElements: opts.MaxArrayElements, - maxMapPairs: opts.MaxMapPairs, - indefLength: opts.IndefLength, - tagsMd: opts.TagsMd, - intDec: opts.IntDec, - mapKeyByteString: opts.MapKeyByteString, - extraReturnErrors: opts.ExtraReturnErrors, - defaultMapType: opts.DefaultMapType, - utf8: opts.UTF8, - fieldNameMatching: opts.FieldNameMatching, - bigIntDec: opts.BigIntDec, - defaultByteStringType: opts.DefaultByteStringType, - byteStringToString: opts.ByteStringToString, - fieldNameByteString: opts.FieldNameByteString, - unrecognizedTagToAny: opts.UnrecognizedTagToAny, + dupMapKey: opts.DupMapKey, + timeTag: opts.TimeTag, + maxNestedLevels: opts.MaxNestedLevels, + maxArrayElements: 
opts.MaxArrayElements, + maxMapPairs: opts.MaxMapPairs, + indefLength: opts.IndefLength, + tagsMd: opts.TagsMd, + intDec: opts.IntDec, + mapKeyByteString: opts.MapKeyByteString, + extraReturnErrors: opts.ExtraReturnErrors, + defaultMapType: opts.DefaultMapType, + utf8: opts.UTF8, + fieldNameMatching: opts.FieldNameMatching, + bigIntDec: opts.BigIntDec, + defaultByteStringType: opts.DefaultByteStringType, + byteStringToString: opts.ByteStringToString, + fieldNameByteString: opts.FieldNameByteString, + unrecognizedTagToAny: opts.UnrecognizedTagToAny, + timeTagToAny: opts.TimeTagToAny, + simpleValues: simpleValues, + nanDec: opts.NaN, + infDec: opts.Inf, + byteStringToTime: opts.ByteStringToTime, + byteStringExpectedFormat: opts.ByteStringExpectedFormat, + bignumTag: opts.BignumTag, + binaryUnmarshaler: opts.BinaryUnmarshaler, } + return &dm, nil } @@ -734,50 +1170,73 @@ type DecMode interface { } type decMode struct { - tags tagProvider - dupMapKey DupMapKeyMode - timeTag DecTagMode - maxNestedLevels int - maxArrayElements int - maxMapPairs int - indefLength IndefLengthMode - tagsMd TagsMode - intDec IntDecMode - mapKeyByteString MapKeyByteStringMode - extraReturnErrors ExtraDecErrorCond - defaultMapType reflect.Type - utf8 UTF8Mode - fieldNameMatching FieldNameMatchingMode - bigIntDec BigIntDecMode - defaultByteStringType reflect.Type - byteStringToString ByteStringToStringMode - fieldNameByteString FieldNameByteStringMode - unrecognizedTagToAny UnrecognizedTagToAnyMode + tags tagProvider + dupMapKey DupMapKeyMode + timeTag DecTagMode + maxNestedLevels int + maxArrayElements int + maxMapPairs int + indefLength IndefLengthMode + tagsMd TagsMode + intDec IntDecMode + mapKeyByteString MapKeyByteStringMode + extraReturnErrors ExtraDecErrorCond + defaultMapType reflect.Type + utf8 UTF8Mode + fieldNameMatching FieldNameMatchingMode + bigIntDec BigIntDecMode + defaultByteStringType reflect.Type + byteStringToString ByteStringToStringMode + fieldNameByteString FieldNameByteStringMode + unrecognizedTagToAny UnrecognizedTagToAnyMode + timeTagToAny TimeTagToAnyMode + simpleValues *SimpleValueRegistry + nanDec NaNMode + infDec InfMode + byteStringToTime ByteStringToTimeMode + byteStringExpectedFormat ByteStringExpectedFormatMode + bignumTag BignumTagMode + binaryUnmarshaler BinaryUnmarshalerMode } var defaultDecMode, _ = DecOptions{}.decMode() // DecOptions returns user specified options used to create this DecMode. func (dm *decMode) DecOptions() DecOptions { + simpleValues := dm.simpleValues + if simpleValues == defaultSimpleValues { + // Users can't explicitly set this to defaultSimpleValues. It must have been nil in + // the original DecOptions. 
+ simpleValues = nil + } + return DecOptions{ - DupMapKey: dm.dupMapKey, - TimeTag: dm.timeTag, - MaxNestedLevels: dm.maxNestedLevels, - MaxArrayElements: dm.maxArrayElements, - MaxMapPairs: dm.maxMapPairs, - IndefLength: dm.indefLength, - TagsMd: dm.tagsMd, - IntDec: dm.intDec, - MapKeyByteString: dm.mapKeyByteString, - ExtraReturnErrors: dm.extraReturnErrors, - DefaultMapType: dm.defaultMapType, - UTF8: dm.utf8, - FieldNameMatching: dm.fieldNameMatching, - BigIntDec: dm.bigIntDec, - DefaultByteStringType: dm.defaultByteStringType, - ByteStringToString: dm.byteStringToString, - FieldNameByteString: dm.fieldNameByteString, - UnrecognizedTagToAny: dm.unrecognizedTagToAny, + DupMapKey: dm.dupMapKey, + TimeTag: dm.timeTag, + MaxNestedLevels: dm.maxNestedLevels, + MaxArrayElements: dm.maxArrayElements, + MaxMapPairs: dm.maxMapPairs, + IndefLength: dm.indefLength, + TagsMd: dm.tagsMd, + IntDec: dm.intDec, + MapKeyByteString: dm.mapKeyByteString, + ExtraReturnErrors: dm.extraReturnErrors, + DefaultMapType: dm.defaultMapType, + UTF8: dm.utf8, + FieldNameMatching: dm.fieldNameMatching, + BigIntDec: dm.bigIntDec, + DefaultByteStringType: dm.defaultByteStringType, + ByteStringToString: dm.byteStringToString, + FieldNameByteString: dm.fieldNameByteString, + UnrecognizedTagToAny: dm.unrecognizedTagToAny, + TimeTagToAny: dm.timeTagToAny, + SimpleValues: simpleValues, + NaN: dm.nanDec, + Inf: dm.infDec, + ByteStringToTime: dm.byteStringToTime, + ByteStringExpectedFormat: dm.byteStringExpectedFormat, + BignumTag: dm.bignumTag, + BinaryUnmarshaler: dm.binaryUnmarshaler, } } @@ -790,9 +1249,9 @@ func (dm *decMode) Unmarshal(data []byte, v interface{}) error { d := decoder{data: data, dm: dm} // Check well-formedness. - off := d.off // Save offset before data validation - err := d.wellformed(false) // don't allow any extra data after valid data item. - d.off = off // Restore offset + off := d.off // Save offset before data validation + err := d.wellformed(false, false) // don't allow any extra data after valid data item. + d.off = off // Restore offset if err != nil { return err } @@ -810,9 +1269,9 @@ func (dm *decMode) UnmarshalFirst(data []byte, v interface{}) (rest []byte, err d := decoder{data: data, dm: dm} // check well-formedness. - off := d.off // Save offset before data validation - err = d.wellformed(true) // allow extra data after well-formed data item - d.off = off // Restore offset + off := d.off // Save offset before data validation + err = d.wellformed(true, false) // allow extra data after well-formed data item + d.off = off // Restore offset // If it is well-formed, parse the value. This is structured like this to allow // better test coverage @@ -853,7 +1312,7 @@ func (dm *decMode) Valid(data []byte) error { // an ExtraneousDataError is returned. func (dm *decMode) Wellformed(data []byte) error { d := decoder{data: data, dm: dm} - return d.wellformed(false) + return d.wellformed(false, false) } // NewDecoder returns a new decoder that reads from r using dm DecMode. @@ -865,6 +1324,17 @@ type decoder struct { data []byte off int // next read offset in data dm *decMode + + // expectedLaterEncodingTags stores a stack of encountered "Expected Later Encoding" tags, + // if any. + // + // The "Expected Later Encoding" tags (21 to 23) are valid for any data item. When decoding + // byte strings, the effective encoding comes from the tag nearest to the byte string being + // decoded. 
For example, the effective encoding of the byte string 21(22(h'41')) would be + // controlled by tag 22,and in the data item 23(h'42', 22([21(h'43')])]) the effective + // encoding of the byte strings h'42' and h'43' would be controlled by tag 23 and 21, + // respectively. + expectedLaterEncodingTags []uint64 } // value decodes CBOR data item into the value pointed to by v. @@ -886,46 +1356,6 @@ func (d *decoder) value(v interface{}) error { return d.parseToValue(rv, getTypeInfo(rv.Type())) } -type cborType uint8 - -const ( - cborTypePositiveInt cborType = 0x00 - cborTypeNegativeInt cborType = 0x20 - cborTypeByteString cborType = 0x40 - cborTypeTextString cborType = 0x60 - cborTypeArray cborType = 0x80 - cborTypeMap cborType = 0xa0 - cborTypeTag cborType = 0xc0 - cborTypePrimitives cborType = 0xe0 -) - -func (t cborType) String() string { - switch t { - case cborTypePositiveInt: - return "positive integer" - case cborTypeNegativeInt: - return "negative integer" - case cborTypeByteString: - return "byte string" - case cborTypeTextString: - return "UTF-8 text string" - case cborTypeArray: - return "array" - case cborTypeMap: - return "map" - case cborTypeTag: - return "tag" - case cborTypePrimitives: - return "primitives" - default: - return "Invalid type " + strconv.Itoa(int(t)) - } -} - -const ( - selfDescribedCBORTagNum = 55799 -) - // parseToValue decodes CBOR data to value. It assumes data is well-formed, // and does not perform bounds checking. func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolint:gocyclo @@ -942,7 +1372,7 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin // Use value type v = v.Elem() tInfo = getTypeInfo(v.Type()) - } else { + } else { //nolint:gocritic // Create and use registered type if CBOR data is registered tag if d.dm.tags != nil && d.nextCBORType() == cborTypeTag { @@ -984,22 +1414,22 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin for d.nextCBORType() == cborTypeTag { off := d.off _, _, tagNum := d.getHead() - if tagNum != selfDescribedCBORTagNum { + if tagNum != tagNumSelfDescribedCBOR { d.off = off break } } // Check validity of supported built-in tags. - if d.nextCBORType() == cborTypeTag { - off := d.off + off := d.off + for d.nextCBORType() == cborTypeTag { _, _, tagNum := d.getHead() if err := validBuiltinTag(tagNum, d.data[d.off]); err != nil { d.skip() return err } - d.off = off } + d.off = off if tInfo.spclType != specialTypeNone { switch tInfo.spclType { @@ -1009,20 +1439,25 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin v.Set(reflect.ValueOf(iv)) } return err + case specialTypeTag: return d.parseToTag(v) + case specialTypeTime: if d.nextCBORNil() { // Decoding CBOR null and undefined to time.Time is no-op. 
d.skip() return nil } - tm, err := d.parseToTime() + tm, ok, err := d.parseToTime() if err != nil { return err } - v.Set(reflect.ValueOf(tm)) + if ok { + v.Set(reflect.ValueOf(tm)) + } return nil + case specialTypeUnmarshalerIface: return d.parseToUnmarshaler(v) } @@ -1076,7 +1511,12 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin case cborTypeByteString: b, copied := d.parseByteString() - return fillByteString(t, b, !copied, v, d.dm.byteStringToString) + b, converted, err := d.applyByteStringTextConversion(b, v.Type()) + if err != nil { + return err + } + copied = copied || converted + return fillByteString(t, b, !copied, v, d.dm.byteStringToString, d.dm.binaryUnmarshaler) case cborTypeTextString: b, err := d.parseTextString() @@ -1088,21 +1528,35 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin case cborTypePrimitives: _, ai, val := d.getHead() switch ai { - case 25: + case additionalInformationAsFloat16: f := float64(float16.Frombits(uint16(val)).Float32()) return fillFloat(t, f, v) - case 26: + + case additionalInformationAsFloat32: f := float64(math.Float32frombits(uint32(val))) return fillFloat(t, f, v) - case 27: + + case additionalInformationAsFloat64: f := math.Float64frombits(val) return fillFloat(t, f, v) + default: // ai <= 24 + if d.dm.simpleValues.rejected[SimpleValue(val)] { + return &UnacceptableDataItemError{ + CBORType: t.String(), + Message: "simple value " + strconv.FormatInt(int64(val), 10) + " is not recognized", + } + } + switch ai { - case 20, 21: - return fillBool(t, ai == 21, v) - case 22, 23: + case additionalInformationAsFalse, + additionalInformationAsTrue: + return fillBool(t, ai == additionalInformationAsTrue, v) + + case additionalInformationAsNull, + additionalInformationAsUndefined: return fillNil(t, v) + default: return fillPositiveInt(t, val, v) } @@ -1111,7 +1565,7 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin case cborTypeTag: _, _, tagNum := d.getHead() switch tagNum { - case 2: + case tagNumUnsignedBignum: // Bignum (tag 2) can be decoded to uint, int, float, slice, array, or big.Int. b, copied := d.parseByteString() bi := new(big.Int).SetBytes(b) @@ -1121,7 +1575,7 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin return nil } if tInfo.nonPtrKind == reflect.Slice || tInfo.nonPtrKind == reflect.Array { - return fillByteString(t, b, !copied, v, ByteStringToStringForbidden) + return fillByteString(t, b, !copied, v, ByteStringToStringForbidden, d.dm.binaryUnmarshaler) } if bi.IsUint64() { return fillPositiveInt(t, bi.Uint64(), v) @@ -1131,7 +1585,8 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin GoType: tInfo.nonPtrType.String(), errorMsg: bi.String() + " overflows " + v.Type().String(), } - case 3: + + case tagNumNegativeBignum: // Bignum (tag 3) can be decoded to int, float, slice, array, or big.Int. 
b, copied := d.parseByteString() bi := new(big.Int).SetBytes(b) @@ -1143,7 +1598,7 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin return nil } if tInfo.nonPtrKind == reflect.Slice || tInfo.nonPtrKind == reflect.Array { - return fillByteString(t, b, !copied, v, ByteStringToStringForbidden) + return fillByteString(t, b, !copied, v, ByteStringToStringForbidden, d.dm.binaryUnmarshaler) } if bi.IsInt64() { return fillNegativeInt(t, bi.Int64(), v) @@ -1153,7 +1608,18 @@ func (d *decoder) parseToValue(v reflect.Value, tInfo *typeInfo) error { //nolin GoType: tInfo.nonPtrType.String(), errorMsg: bi.String() + " overflows " + v.Type().String(), } + + case tagNumExpectedLaterEncodingBase64URL, tagNumExpectedLaterEncodingBase64, tagNumExpectedLaterEncodingBase16: + // If conversion for interoperability with text encodings is not configured, + // treat tags 21-23 as unregistered tags. + if d.dm.byteStringToString == ByteStringToStringAllowedWithExpectedLaterEncoding || d.dm.byteStringExpectedFormat != ByteStringExpectedFormatNone { + d.expectedLaterEncodingTags = append(d.expectedLaterEncodingTags, tagNum) + defer func() { + d.expectedLaterEncodingTags = d.expectedLaterEncodingTags[:len(d.expectedLaterEncodingTags)-1] + }() + } } + return d.parseToValue(v, tInfo) case cborTypeArray: @@ -1206,64 +1672,116 @@ func (d *decoder) parseToTag(v reflect.Value) error { return nil } -func (d *decoder) parseToTime() (tm time.Time, err error) { - t := d.nextCBORType() - +// parseToTime decodes the current data item as a time.Time. The bool return value is false if and +// only if the destination value should remain unmodified. +func (d *decoder) parseToTime() (time.Time, bool, error) { // Verify that tag number or absence of tag number is acceptable to specified timeTag. 
- if t == cborTypeTag { + if t := d.nextCBORType(); t == cborTypeTag { if d.dm.timeTag == DecTagIgnored { - // Skip tag number + // Skip all enclosing tags for t == cborTypeTag { d.getHead() t = d.nextCBORType() } + if d.nextCBORNil() { + d.skip() + return time.Time{}, false, nil + } } else { // Read tag number _, _, tagNum := d.getHead() if tagNum != 0 && tagNum != 1 { - d.skip() - err = errors.New("cbor: wrong tag number for time.Time, got " + strconv.Itoa(int(tagNum)) + ", expect 0 or 1") - return + d.skip() // skip tag content + return time.Time{}, false, errors.New("cbor: wrong tag number for time.Time, got " + strconv.Itoa(int(tagNum)) + ", expect 0 or 1") } } } else { if d.dm.timeTag == DecTagRequired { d.skip() - err = &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String(), errorMsg: "expect CBOR tag value"} - return + return time.Time{}, false, &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String(), errorMsg: "expect CBOR tag value"} } } - var content interface{} - content, err = d.parse(false) - if err != nil { - return - } + switch t := d.nextCBORType(); t { + case cborTypeByteString: + if d.dm.byteStringToTime == ByteStringToTimeAllowed { + b, _ := d.parseByteString() + t, err := time.Parse(time.RFC3339, string(b)) + if err != nil { + return time.Time{}, false, fmt.Errorf("cbor: cannot set %q for time.Time: %w", string(b), err) + } + return t, true, nil + } + return time.Time{}, false, &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String()} - switch c := content.(type) { - case nil: - return - case uint64: - return time.Unix(int64(c), 0), nil - case int64: - return time.Unix(c, 0), nil - case float64: - if math.IsNaN(c) || math.IsInf(c, 0) { - return - } - f1, f2 := math.Modf(c) - return time.Unix(int64(f1), int64(f2*1e9)), nil - case string: - tm, err = time.Parse(time.RFC3339, c) + case cborTypeTextString: + s, err := d.parseTextString() if err != nil { - tm = time.Time{} - err = errors.New("cbor: cannot set " + c + " for time.Time: " + err.Error()) - return + return time.Time{}, false, err } - return + t, err := time.Parse(time.RFC3339, string(s)) + if err != nil { + return time.Time{}, false, errors.New("cbor: cannot set " + string(s) + " for time.Time: " + err.Error()) + } + return t, true, nil + + case cborTypePositiveInt: + _, _, val := d.getHead() + if val > math.MaxInt64 { + return time.Time{}, false, &UnmarshalTypeError{ + CBORType: t.String(), + GoType: typeTime.String(), + errorMsg: fmt.Sprintf("%d overflows Go's int64", val), + } + } + return time.Unix(int64(val), 0), true, nil + + case cborTypeNegativeInt: + _, _, val := d.getHead() + if val > math.MaxInt64 { + if val == math.MaxUint64 { + // Maximum absolute value representable by negative integer is 2^64, + // not 2^64-1, so it overflows uint64. 
+ return time.Time{}, false, &UnmarshalTypeError{ + CBORType: t.String(), + GoType: typeTime.String(), + errorMsg: "-18446744073709551616 overflows Go's int64", + } + } + return time.Time{}, false, &UnmarshalTypeError{ + CBORType: t.String(), + GoType: typeTime.String(), + errorMsg: fmt.Sprintf("-%d overflows Go's int64", val+1), + } + } + return time.Unix(int64(-1)^int64(val), 0), true, nil + + case cborTypePrimitives: + _, ai, val := d.getHead() + var f float64 + switch ai { + case additionalInformationAsFloat16: + f = float64(float16.Frombits(uint16(val)).Float32()) + + case additionalInformationAsFloat32: + f = float64(math.Float32frombits(uint32(val))) + + case additionalInformationAsFloat64: + f = math.Float64frombits(val) + + default: + return time.Time{}, false, &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String()} + } + + if math.IsNaN(f) || math.IsInf(f, 0) { + // https://www.rfc-editor.org/rfc/rfc8949.html#section-3.4.2-6 + return time.Time{}, true, nil + } + seconds, fractional := math.Modf(f) + return time.Unix(int64(seconds), int64(fractional*1e9)), true, nil + default: - err = &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String()} - return + return time.Time{}, false, &UnmarshalTypeError{CBORType: t.String(), GoType: typeTime.String()} } } @@ -1295,7 +1813,7 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli for d.nextCBORType() == cborTypeTag { off := d.off _, _, tagNum := d.getHead() - if tagNum != selfDescribedCBORTagNum { + if tagNum != tagNumSelfDescribedCBOR { d.off = off break } @@ -1303,15 +1821,15 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli } // Check validity of supported built-in tags. - if d.nextCBORType() == cborTypeTag { - off := d.off + off := d.off + for d.nextCBORType() == cborTypeTag { _, _, tagNum := d.getHead() if err := validBuiltinTag(tagNum, d.data[d.off]); err != nil { d.skip() return nil, err } - d.off = off } + d.off = off t := d.nextCBORType() switch t { @@ -1375,20 +1893,30 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli return nValue, nil case cborTypeByteString: - switch d.dm.defaultByteStringType { - case nil, typeByteSlice: - b, copied := d.parseByteString() + b, copied := d.parseByteString() + var effectiveByteStringType = d.dm.defaultByteStringType + if effectiveByteStringType == nil { + effectiveByteStringType = typeByteSlice + } + b, converted, err := d.applyByteStringTextConversion(b, effectiveByteStringType) + if err != nil { + return nil, err + } + copied = copied || converted + + switch effectiveByteStringType { + case typeByteSlice: if copied { return b, nil } clone := make([]byte, len(b)) copy(clone, b) return clone, nil + case typeString: - b, _ := d.parseByteString() return string(b), nil + default: - b, copied := d.parseByteString() if copied || d.dm.defaultByteStringType.Kind() == reflect.String { // Avoid an unnecessary copy since the conversion to string must // copy the underlying bytes. 
@@ -1398,22 +1926,62 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli copy(clone, b) return reflect.ValueOf(clone).Convert(d.dm.defaultByteStringType).Interface(), nil } + case cborTypeTextString: b, err := d.parseTextString() if err != nil { return nil, err } return string(b), nil + case cborTypeTag: tagOff := d.off _, _, tagNum := d.getHead() contentOff := d.off switch tagNum { - case 0, 1: + case tagNumRFC3339Time, tagNumEpochTime: d.off = tagOff - return d.parseToTime() - case 2: + tm, _, err := d.parseToTime() + if err != nil { + return nil, err + } + + switch d.dm.timeTagToAny { + case TimeTagToTime: + return tm, nil + + case TimeTagToRFC3339: + if tagNum == 1 { + tm = tm.UTC() + } + // Call time.MarshalText() to format decoded time to RFC3339 format, + // and return error on time value that cannot be represented in + // RFC3339 format. E.g. year cannot exceed 9999, etc. + text, err := tm.Truncate(time.Second).MarshalText() + if err != nil { + return nil, fmt.Errorf("cbor: decoded time cannot be represented in RFC3339 format: %v", err) + } + return string(text), nil + + case TimeTagToRFC3339Nano: + if tagNum == 1 { + tm = tm.UTC() + } + // Call time.MarshalText() to format decoded time to RFC3339 format, + // and return error on time value that cannot be represented in + // RFC3339 format with sub-second precision. + text, err := tm.MarshalText() + if err != nil { + return nil, fmt.Errorf("cbor: decoded time cannot be represented in RFC3339 format with sub-second precision: %v", err) + } + return string(text), nil + + default: + // not reachable + } + + case tagNumUnsignedBignum: b, _ := d.parseByteString() bi := new(big.Int).SetBytes(b) @@ -1421,7 +1989,8 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli return bi, nil } return *bi, nil - case 3: + + case tagNumNegativeBignum: b, _ := d.parseByteString() bi := new(big.Int).SetBytes(b) bi.Add(bi, big.NewInt(1)) @@ -1431,6 +2000,18 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli return bi, nil } return *bi, nil + + case tagNumExpectedLaterEncodingBase64URL, tagNumExpectedLaterEncodingBase64, tagNumExpectedLaterEncodingBase16: + // If conversion for interoperability with text encodings is not configured, + // treat tags 21-23 as unregistered tags. 
+ if d.dm.byteStringToString == ByteStringToStringAllowedWithExpectedLaterEncoding || + d.dm.byteStringExpectedFormat != ByteStringExpectedFormatNone { + d.expectedLaterEncodingTags = append(d.expectedLaterEncodingTags, tagNum) + defer func() { + d.expectedLaterEncodingTags = d.expectedLaterEncodingTags[:len(d.expectedLaterEncodingTags)-1] + }() + return d.parse(false) + } } if d.dm.tags != nil { @@ -1461,28 +2042,44 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli return content, nil } return Tag{tagNum, content}, nil + case cborTypePrimitives: _, ai, val := d.getHead() + if ai <= 24 && d.dm.simpleValues.rejected[SimpleValue(val)] { + return nil, &UnacceptableDataItemError{ + CBORType: t.String(), + Message: "simple value " + strconv.FormatInt(int64(val), 10) + " is not recognized", + } + } if ai < 20 || ai == 24 { return SimpleValue(val), nil } + switch ai { - case 20, 21: - return (ai == 21), nil - case 22, 23: + case additionalInformationAsFalse, + additionalInformationAsTrue: + return (ai == additionalInformationAsTrue), nil + + case additionalInformationAsNull, + additionalInformationAsUndefined: return nil, nil - case 25: + + case additionalInformationAsFloat16: f := float64(float16.Frombits(uint16(val)).Float32()) return f, nil - case 26: + + case additionalInformationAsFloat32: f := float64(math.Float32frombits(uint32(val))) return f, nil - case 27: + + case additionalInformationAsFloat64: f := math.Float64frombits(val) return f, nil } + case cborTypeArray: return d.parseArray() + case cborTypeMap: if d.dm.defaultMapType != nil { m := reflect.New(d.dm.defaultMapType) @@ -1494,6 +2091,7 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli } return d.parseMap() } + return nil, nil } @@ -1502,8 +2100,8 @@ func (d *decoder) parse(skipSelfDescribedTag bool) (interface{}, error) { //noli // and only if the slice is backed by a copy of the input. Callers are // responsible for making a copy if necessary. func (d *decoder) parseByteString() ([]byte, bool) { - _, ai, val := d.getHead() - if ai != 31 { + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + if !indefiniteLength { b := d.data[d.off : d.off+int(val)] d.off += int(val) return b, false @@ -1518,12 +2116,90 @@ func (d *decoder) parseByteString() ([]byte, bool) { return b, true } +// applyByteStringTextConversion converts bytes read from a byte string to or from a configured text +// encoding. If no transformation was performed (because it was not required), the original byte +// slice is returned and the bool return value is false. Otherwise, a new slice containing the +// converted bytes is returned along with the bool value true. 
+func (d *decoder) applyByteStringTextConversion( + src []byte, + dstType reflect.Type, +) ( + dst []byte, + transformed bool, + err error, +) { + switch dstType.Kind() { + case reflect.String: + if d.dm.byteStringToString != ByteStringToStringAllowedWithExpectedLaterEncoding || len(d.expectedLaterEncodingTags) == 0 { + return src, false, nil + } + + switch d.expectedLaterEncodingTags[len(d.expectedLaterEncodingTags)-1] { + case tagNumExpectedLaterEncodingBase64URL: + encoded := make([]byte, base64.RawURLEncoding.EncodedLen(len(src))) + base64.RawURLEncoding.Encode(encoded, src) + return encoded, true, nil + + case tagNumExpectedLaterEncodingBase64: + encoded := make([]byte, base64.StdEncoding.EncodedLen(len(src))) + base64.StdEncoding.Encode(encoded, src) + return encoded, true, nil + + case tagNumExpectedLaterEncodingBase16: + encoded := make([]byte, hex.EncodedLen(len(src))) + hex.Encode(encoded, src) + return encoded, true, nil + + default: + // If this happens, there is a bug: the decoder has pushed an invalid + // "expected later encoding" tag to the stack. + panic(fmt.Sprintf("unrecognized expected later encoding tag: %d", d.expectedLaterEncodingTags)) + } + + case reflect.Slice: + if dstType.Elem().Kind() != reflect.Uint8 || len(d.expectedLaterEncodingTags) > 0 { + // Either the destination is not a slice of bytes, or the encoder that + // produced the input indicated an expected text encoding tag and therefore + // the content of the byte string has NOT been text encoded. + return src, false, nil + } + + switch d.dm.byteStringExpectedFormat { + case ByteStringExpectedBase64URL: + decoded := make([]byte, base64.RawURLEncoding.DecodedLen(len(src))) + n, err := base64.RawURLEncoding.Decode(decoded, src) + if err != nil { + return nil, false, newByteStringExpectedFormatError(ByteStringExpectedBase64URL, err) + } + return decoded[:n], true, nil + + case ByteStringExpectedBase64: + decoded := make([]byte, base64.StdEncoding.DecodedLen(len(src))) + n, err := base64.StdEncoding.Decode(decoded, src) + if err != nil { + return nil, false, newByteStringExpectedFormatError(ByteStringExpectedBase64, err) + } + return decoded[:n], true, nil + + case ByteStringExpectedBase16: + decoded := make([]byte, hex.DecodedLen(len(src))) + n, err := hex.Decode(decoded, src) + if err != nil { + return nil, false, newByteStringExpectedFormatError(ByteStringExpectedBase16, err) + } + return decoded[:n], true, nil + } + } + + return src, false, nil +} + // parseTextString parses CBOR encoded text string. It returns a byte slice // to prevent creating an extra copy of string. Caller should wrap returned // byte slice as string when needed. 
func (d *decoder) parseTextString() ([]byte, error) { - _, ai, val := d.getHead() - if ai != 31 { + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + if !indefiniteLength { b := d.data[d.off : d.off+int(val)] d.off += int(val) if d.dm.utf8 == UTF8RejectInvalid && !utf8.Valid(b) { @@ -1549,8 +2225,8 @@ func (d *decoder) parseTextString() ([]byte, error) { } func (d *decoder) parseArray() ([]interface{}, error) { - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) if !hasSize { count = d.numOfItemsUntilBreak() // peek ahead to get array size to preallocate slice for better performance @@ -1571,8 +2247,8 @@ func (d *decoder) parseArray() ([]interface{}, error) { } func (d *decoder) parseArrayToSlice(v reflect.Value, tInfo *typeInfo) error { - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) if !hasSize { count = d.numOfItemsUntilBreak() // peek ahead to get array size to preallocate slice for better performance @@ -1593,8 +2269,8 @@ func (d *decoder) parseArrayToSlice(v reflect.Value, tInfo *typeInfo) error { } func (d *decoder) parseArrayToArray(v reflect.Value, tInfo *typeInfo) error { - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) gi := 0 vLen := v.Len() @@ -1623,8 +2299,8 @@ func (d *decoder) parseArrayToArray(v reflect.Value, tInfo *typeInfo) error { } func (d *decoder) parseMap() (interface{}, error) { - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) m := make(map[interface{}]interface{}) var k, e interface{} @@ -1688,8 +2364,8 @@ func (d *decoder) parseMap() (interface{}, error) { } func (d *decoder) parseMapToMap(v reflect.Value, tInfo *typeInfo) error { //nolint:gocyclo - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) if v.IsNil() { mapsize := count @@ -1813,8 +2489,8 @@ func (d *decoder) parseArrayToStruct(v reflect.Value, tInfo *typeInfo) error { } start := d.off - t, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) if !hasSize { count = d.numOfItemsUntilBreak() // peek ahead to get array size @@ -1823,7 +2499,7 @@ func (d *decoder) parseArrayToStruct(v reflect.Value, tInfo *typeInfo) error { d.off = start d.skip() return &UnmarshalTypeError{ - CBORType: t.String(), + CBORType: cborTypeArray.String(), GoType: tInfo.typ.String(), errorMsg: "cannot decode CBOR array to struct with different number of elements", } @@ -1888,25 +2564,37 @@ func (d *decoder) parseMapToStruct(v reflect.Value, tInfo *typeInfo) error { //n var err, lastErr error // Get CBOR map size - _, ai, val := d.getHead() - hasSize := (ai != 31) + _, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() + hasSize := !indefiniteLength count := int(val) // Keeps track of matched struct fields - foundFldIdx := make([]bool, len(structType.fields)) + var foundFldIdx []bool + { + const maxStackFields = 128 + if nfields := len(structType.fields); nfields <= maxStackFields { + // For structs 
with typical field counts, expect that this can be + // stack-allocated. + var a [maxStackFields]bool + foundFldIdx = a[:nfields] + } else { + foundFldIdx = make([]bool, len(structType.fields)) + } + } // Keeps track of CBOR map keys to detect duplicate map key keyCount := 0 var mapKeys map[interface{}]struct{} - if d.dm.dupMapKey == DupMapKeyEnforcedAPF { - mapKeys = make(map[interface{}]struct{}, len(structType.fields)) - } errOnUnknownField := (d.dm.extraReturnErrors & ExtraDecErrorUnknownField) > 0 +MapEntryLoop: for j := 0; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { var f *field - var k interface{} // Used by duplicate map key detection + + // If duplicate field detection is enabled and the key at index j did not match any + // field, k will hold the map key. + var k interface{} t := d.nextCBORType() if t == cborTypeTextString || (t == cborTypeByteString && d.dm.fieldNameByteString == FieldNameByteStringAllowed) { @@ -1924,30 +2612,61 @@ func (d *decoder) parseMapToStruct(v reflect.Value, tInfo *typeInfo) error { //n keyBytes, _ = d.parseByteString() } - keyLen := len(keyBytes) - // Find field with exact match - for i := 0; i < len(structType.fields); i++ { + // Check for exact match on field name. + if i, ok := structType.fieldIndicesByName[string(keyBytes)]; ok { fld := structType.fields[i] - if !foundFldIdx[i] && len(fld.name) == keyLen && fld.name == string(keyBytes) { + + if !foundFldIdx[i] { f = fld foundFldIdx[i] = true - break + } else if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + err = &DupMapKeyError{fld.name, j} + d.skip() // skip value + j++ + // skip the rest of the map + for ; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { + d.skip() + d.skip() + } + return err + } else { + // discard repeated match + d.skip() + continue MapEntryLoop } } + // Find field with case-insensitive match if f == nil && d.dm.fieldNameMatching == FieldNameMatchingPreferCaseSensitive { + keyLen := len(keyBytes) keyString := string(keyBytes) for i := 0; i < len(structType.fields); i++ { fld := structType.fields[i] - if !foundFldIdx[i] && len(fld.name) == keyLen && strings.EqualFold(fld.name, keyString) { - f = fld - foundFldIdx[i] = true + if len(fld.name) == keyLen && strings.EqualFold(fld.name, keyString) { + if !foundFldIdx[i] { + f = fld + foundFldIdx[i] = true + } else if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + err = &DupMapKeyError{keyString, j} + d.skip() // skip value + j++ + // skip the rest of the map + for ; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { + d.skip() + d.skip() + } + return err + } else { + // discard repeated match + d.skip() + continue MapEntryLoop + } break } } } - if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + if d.dm.dupMapKey == DupMapKeyEnforcedAPF && f == nil { k = string(keyBytes) } } else if t <= cborTypeNegativeInt { // uint/int @@ -1975,14 +2694,30 @@ func (d *decoder) parseMapToStruct(v reflect.Value, tInfo *typeInfo) error { //n // Find field for i := 0; i < len(structType.fields); i++ { fld := structType.fields[i] - if !foundFldIdx[i] && fld.keyAsInt && fld.nameAsInt == nameAsInt { - f = fld - foundFldIdx[i] = true + if fld.keyAsInt && fld.nameAsInt == nameAsInt { + if !foundFldIdx[i] { + f = fld + foundFldIdx[i] = true + } else if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + err = &DupMapKeyError{nameAsInt, j} + d.skip() // skip value + j++ + // skip the rest of the map + for ; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { + d.skip() + d.skip() + } + return err + } else { + // discard 
repeated match + d.skip() + continue MapEntryLoop + } break } } - if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + if d.dm.dupMapKey == DupMapKeyEnforcedAPF && f == nil { k = nameAsInt } } else { @@ -2010,23 +2745,6 @@ func (d *decoder) parseMapToStruct(v reflect.Value, tInfo *typeInfo) error { //n } } - if d.dm.dupMapKey == DupMapKeyEnforcedAPF { - mapKeys[k] = struct{}{} - newKeyCount := len(mapKeys) - if newKeyCount == keyCount { - err = &DupMapKeyError{k, j} - d.skip() // skip value - j++ - // skip the rest of the map - for ; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { - d.skip() - d.skip() - } - return err - } - keyCount = newKeyCount - } - if f == nil { if errOnUnknownField { err = &UnknownFieldError{j} @@ -2039,6 +2757,31 @@ func (d *decoder) parseMapToStruct(v reflect.Value, tInfo *typeInfo) error { //n } return err } + + // Two map keys that match the same struct field are immediately considered + // duplicates. This check detects duplicates between two map keys that do + // not match a struct field. If unknown field errors are enabled, then this + // check is never reached. + if d.dm.dupMapKey == DupMapKeyEnforcedAPF { + if mapKeys == nil { + mapKeys = make(map[interface{}]struct{}, 1) + } + mapKeys[k] = struct{}{} + newKeyCount := len(mapKeys) + if newKeyCount == keyCount { + err = &DupMapKeyError{k, j} + d.skip() // skip value + j++ + // skip the rest of the map + for ; (hasSize && j < count) || (!hasSize && !d.foundBreak()); j++ { + d.skip() + d.skip() + } + return err + } + keyCount = newKeyCount + } + d.skip() // Skip value continue } @@ -2105,13 +2848,13 @@ func (d *decoder) getRegisteredTagItem(vt reflect.Type) *tagItem { // skip moves data offset to the next item. skip assumes data is well-formed, // and does not perform bounds checking. func (d *decoder) skip() { - t, ai, val := d.getHead() + t, _, val, indefiniteLength := d.getHeadWithIndefiniteLengthFlag() - if ai == 31 { + if indefiniteLength { switch t { case cborTypeByteString, cborTypeTextString, cborTypeArray, cborTypeMap: for { - if d.data[d.off] == 0xff { + if isBreakFlag(d.data[d.off]) { d.off++ return } @@ -2123,47 +2866,67 @@ func (d *decoder) skip() { switch t { case cborTypeByteString, cborTypeTextString: d.off += int(val) + case cborTypeArray: for i := 0; i < int(val); i++ { d.skip() } + case cborTypeMap: for i := 0; i < int(val)*2; i++ { d.skip() } + case cborTypeTag: d.skip() } } +func (d *decoder) getHeadWithIndefiniteLengthFlag() ( + t cborType, + ai byte, + val uint64, + indefiniteLength bool, +) { + t, ai, val = d.getHead() + indefiniteLength = additionalInformation(ai).isIndefiniteLength() + return +} + // getHead assumes data is well-formed, and does not perform bounds checking. 
func (d *decoder) getHead() (t cborType, ai byte, val uint64) { - t = cborType(d.data[d.off] & 0xe0) - ai = d.data[d.off] & 0x1f + t, ai = parseInitialByte(d.data[d.off]) val = uint64(ai) d.off++ - if ai < 24 { + if ai <= maxAdditionalInformationWithoutArgument { return } - if ai == 24 { + + if ai == additionalInformationWith1ByteArgument { val = uint64(d.data[d.off]) d.off++ return } - if ai == 25 { - val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+2])) - d.off += 2 + + if ai == additionalInformationWith2ByteArgument { + const argumentSize = 2 + val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+argumentSize])) + d.off += argumentSize return } - if ai == 26 { - val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+4])) - d.off += 4 + + if ai == additionalInformationWith4ByteArgument { + const argumentSize = 4 + val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+argumentSize])) + d.off += argumentSize return } - if ai == 27 { - val = binary.BigEndian.Uint64(d.data[d.off : d.off+8]) - d.off += 8 + + if ai == additionalInformationWith8ByteArgument { + const argumentSize = 8 + val = binary.BigEndian.Uint64(d.data[d.off : d.off+argumentSize]) + d.off += argumentSize return } return @@ -2180,9 +2943,11 @@ func (d *decoder) numOfItemsUntilBreak() int { return i } +// foundBreak returns true if next byte is CBOR break code and moves cursor by 1, +// otherwise it returns false. // foundBreak assumes data is well-formed, and does not perform bounds checking. func (d *decoder) foundBreak() bool { - if d.data[d.off] == 0xff { + if isBreakFlag(d.data[d.off]) { d.off++ return true } @@ -2192,10 +2957,11 @@ func (d *decoder) foundBreak() bool { func (d *decoder) reset(data []byte) { d.data = data d.off = 0 + d.expectedLaterEncodingTags = d.expectedLaterEncodingTags[:0] } func (d *decoder) nextCBORType() cborType { - return cborType(d.data[d.off] & 0xe0) + return getType(d.data[d.off]) } func (d *decoder) nextCBORNil() bool { @@ -2240,6 +3006,7 @@ func fillPositiveInt(t cborType, val uint64, v reflect.Value) error { } v.SetInt(int64(val)) return nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: if v.OverflowUint(val) { return &UnmarshalTypeError{ @@ -2250,11 +3017,13 @@ func fillPositiveInt(t cborType, val uint64, v reflect.Value) error { } v.SetUint(val) return nil + case reflect.Float32, reflect.Float64: f := float64(val) v.SetFloat(f) return nil } + if v.Type() == typeBigInt { i := new(big.Int).SetUint64(val) v.Set(reflect.ValueOf(*i)) @@ -2275,6 +3044,7 @@ func fillNegativeInt(t cborType, val int64, v reflect.Value) error { } v.SetInt(val) return nil + case reflect.Float32, reflect.Float64: f := float64(val) v.SetFloat(f) @@ -2312,8 +3082,8 @@ func fillFloat(t cborType, val float64, v reflect.Value) error { return &UnmarshalTypeError{CBORType: t.String(), GoType: v.Type().String()} } -func fillByteString(t cborType, val []byte, shared bool, v reflect.Value, bsts ByteStringToStringMode) error { - if reflect.PtrTo(v.Type()).Implements(typeBinaryUnmarshaler) { +func fillByteString(t cborType, val []byte, shared bool, v reflect.Value, bsts ByteStringToStringMode, bum BinaryUnmarshalerMode) error { + if bum == BinaryUnmarshalerByteString && reflect.PtrTo(v.Type()).Implements(typeBinaryUnmarshaler) { if v.CanAddr() { v = v.Addr() if u, ok := v.Interface().(encoding.BinaryUnmarshaler); ok { @@ -2325,7 +3095,7 @@ func fillByteString(t cborType, val []byte, shared bool, v reflect.Value, bsts B } return errors.New("cbor: cannot set new value for 
" + v.Type().String()) } - if bsts == ByteStringToStringAllowed && v.Kind() == reflect.String { + if bsts != ByteStringToStringForbidden && v.Kind() == reflect.String { v.SetString(string(val)) return nil } @@ -2373,6 +3143,7 @@ func isImmutableKind(k reflect.Kind) bool { reflect.Float32, reflect.Float64, reflect.String: return true + default: return false } @@ -2382,6 +3153,7 @@ func isHashableValue(rv reflect.Value) bool { switch rv.Kind() { case reflect.Slice, reflect.Map, reflect.Func: return false + case reflect.Struct: switch rv.Type() { case typeTag: @@ -2404,6 +3176,7 @@ func convertByteSliceToByteString(v interface{}) (interface{}, bool) { switch v := v.(type) { case []byte: return ByteString(v), true + case Tag: content, converted := convertByteSliceToByteString(v.Content) if converted { @@ -2412,29 +3185,3 @@ func convertByteSliceToByteString(v interface{}) (interface{}, bool) { } return v, false } - -// validBuiltinTag checks that supported built-in tag numbers are followed by expected content types. -func validBuiltinTag(tagNum uint64, contentHead byte) error { - t := cborType(contentHead & 0xe0) - switch tagNum { - case 0: - // Tag content (date/time text string in RFC 3339 format) must be string type. - if t != cborTypeTextString { - return errors.New("cbor: tag number 0 must be followed by text string, got " + t.String()) - } - return nil - case 1: - // Tag content (epoch date/time) must be uint, int, or float type. - if t != cborTypePositiveInt && t != cborTypeNegativeInt && (contentHead < 0xf9 || contentHead > 0xfb) { - return errors.New("cbor: tag number 1 must be followed by integer or floating-point number, got " + t.String()) - } - return nil - case 2, 3: - // Tag content (bignum) must be byte type. - if t != cborTypeByteString { - return errors.New("cbor: tag number 2 or 3 must be followed by byte string, got " + t.String()) - } - return nil - } - return nil -} diff --git a/vendor/github.com/fxamacker/cbor/v2/diagnose.go b/vendor/github.com/fxamacker/cbor/v2/diagnose.go index 43e6a14..44afb86 100644 --- a/vendor/github.com/fxamacker/cbor/v2/diagnose.go +++ b/vendor/github.com/fxamacker/cbor/v2/diagnose.go @@ -9,6 +9,7 @@ import ( "encoding/base64" "encoding/hex" "errors" + "fmt" "io" "math" "math/big" @@ -158,7 +159,7 @@ func (dm *diagMode) Diagnose(data []byte) (string, error) { } // DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest. -func (dm *diagMode) DiagnoseFirst(data []byte) (string, []byte, error) { +func (dm *diagMode) DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) { return newDiagnose(data, dm.decMode, dm).diagFirst() } @@ -173,7 +174,7 @@ func Diagnose(data []byte) (string, error) { } // Diagnose returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest. 
-func DiagnoseFirst(data []byte) (string, []byte, error) { +func DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) { return defaultDiagMode.DiagnoseFirst(data) } @@ -198,13 +199,11 @@ func (di *diagnose) diag(cborSequence bool) (string, error) { switch err := di.wellformed(cborSequence); err { case nil: if !firstItem { - if err = di.writeString(", "); err != nil { - return di.w.String(), err - } + di.w.WriteString(", ") } firstItem = false - if err = di.item(); err != nil { - return di.w.String(), err + if itemErr := di.item(); itemErr != nil { + return di.w.String(), itemErr } case io.EOF: @@ -219,8 +218,8 @@ func (di *diagnose) diag(cborSequence bool) (string, error) { } } -func (di *diagnose) diagFirst() (string, []byte, error) { - err := di.wellformed(true) +func (di *diagnose) diagFirst() (diagNotation string, rest []byte, err error) { + err = di.wellformed(true) if err == nil { err = di.item() } @@ -235,7 +234,7 @@ func (di *diagnose) diagFirst() (string, []byte, error) { func (di *diagnose) wellformed(allowExtraData bool) error { off := di.d.off - err := di.d.wellformed(allowExtraData) + err := di.d.wellformed(allowExtraData, false) di.d.off = off return err } @@ -243,30 +242,29 @@ func (di *diagnose) wellformed(allowExtraData bool) error { func (di *diagnose) item() error { //nolint:gocyclo initialByte := di.d.data[di.d.off] switch initialByte { - case 0x5f, 0x7f: // indefinite-length byte/text string + case cborByteStringWithIndefiniteLengthHead, + cborTextStringWithIndefiniteLengthHead: // indefinite-length byte/text string di.d.off++ - if di.d.data[di.d.off] == 0xff { + if isBreakFlag(di.d.data[di.d.off]) { di.d.off++ switch initialByte { - case 0x5f: + case cborByteStringWithIndefiniteLengthHead: // indefinite-length bytes with no chunks. - return di.writeString(`''_`) - case 0x7f: + di.w.WriteString(`''_`) + return nil + case cborTextStringWithIndefiniteLengthHead: // indefinite-length text with no chunks. 
- return di.writeString(`""_`) + di.w.WriteString(`""_`) + return nil } } - if err := di.writeString("(_ "); err != nil { - return err - } + di.w.WriteString("(_ ") i := 0 for !di.d.foundBreak() { if i > 0 { - if err := di.writeString(", "); err != nil { - return err - } + di.w.WriteString(", ") } i++ @@ -276,20 +274,17 @@ func (di *diagnose) item() error { //nolint:gocyclo } } - return di.writeByte(')') + di.w.WriteByte(')') + return nil - case 0x9f: // indefinite-length array + case cborArrayWithIndefiniteLengthHead: // indefinite-length array di.d.off++ - if err := di.writeString("[_ "); err != nil { - return err - } + di.w.WriteString("[_ ") i := 0 for !di.d.foundBreak() { if i > 0 { - if err := di.writeString(", "); err != nil { - return err - } + di.w.WriteString(", ") } i++ @@ -298,20 +293,17 @@ func (di *diagnose) item() error { //nolint:gocyclo } } - return di.writeByte(']') + di.w.WriteByte(']') + return nil - case 0xbf: // indefinite-length map + case cborMapWithIndefiniteLengthHead: // indefinite-length map di.d.off++ - if err := di.writeString("{_ "); err != nil { - return err - } + di.w.WriteString("{_ ") i := 0 for !di.d.foundBreak() { if i > 0 { - if err := di.writeString(", "); err != nil { - return err - } + di.w.WriteString(", ") } i++ @@ -320,9 +312,7 @@ func (di *diagnose) item() error { //nolint:gocyclo return err } - if err := di.writeString(": "); err != nil { - return err - } + di.w.WriteString(": ") // value if err := di.item(); err != nil { @@ -330,14 +320,16 @@ func (di *diagnose) item() error { //nolint:gocyclo } } - return di.writeByte('}') + di.w.WriteByte('}') + return nil } t := di.d.nextCBORType() switch t { case cborTypePositiveInt: _, _, val := di.d.getHead() - return di.writeString(strconv.FormatUint(val, 10)) + di.w.WriteString(strconv.FormatUint(val, 10)) + return nil case cborTypeNegativeInt: _, _, val := di.d.getHead() @@ -347,11 +339,13 @@ func (di *diagnose) item() error { //nolint:gocyclo bi.SetUint64(val) bi.Add(bi, big.NewInt(1)) bi.Neg(bi) - return di.writeString(bi.String()) + di.w.WriteString(bi.String()) + return nil } nValue := int64(-1) ^ int64(val) - return di.writeString(strconv.FormatInt(nValue, 10)) + di.w.WriteString(strconv.FormatInt(nValue, 10)) + return nil case cborTypeByteString: b, _ := di.d.parseByteString() @@ -367,135 +361,129 @@ func (di *diagnose) item() error { //nolint:gocyclo case cborTypeArray: _, _, val := di.d.getHead() count := int(val) - if err := di.writeByte('['); err != nil { - return err - } + di.w.WriteByte('[') for i := 0; i < count; i++ { if i > 0 { - if err := di.writeString(", "); err != nil { - return err - } + di.w.WriteString(", ") } if err := di.item(); err != nil { return err } } - return di.writeByte(']') + di.w.WriteByte(']') + return nil case cborTypeMap: _, _, val := di.d.getHead() count := int(val) - if err := di.writeByte('{'); err != nil { - return err - } + di.w.WriteByte('{') for i := 0; i < count; i++ { if i > 0 { - if err := di.writeString(", "); err != nil { - return err - } + di.w.WriteString(", ") } // key if err := di.item(); err != nil { return err } - if err := di.writeString(": "); err != nil { - return err - } + di.w.WriteString(": ") // value if err := di.item(); err != nil { return err } } - return di.writeByte('}') + di.w.WriteByte('}') + return nil case cborTypeTag: _, _, tagNum := di.d.getHead() switch tagNum { - case 2: + case tagNumUnsignedBignum: if nt := di.d.nextCBORType(); nt != cborTypeByteString { - return errors.New("cbor: tag number 2 must be followed by byte string, 
got " + nt.String()) + return newInadmissibleTagContentTypeError( + tagNumUnsignedBignum, + "byte string", + nt.String()) } b, _ := di.d.parseByteString() bi := new(big.Int).SetBytes(b) - return di.writeString(bi.String()) + di.w.WriteString(bi.String()) + return nil - case 3: + case tagNumNegativeBignum: if nt := di.d.nextCBORType(); nt != cborTypeByteString { - return errors.New("cbor: tag number 3 must be followed by byte string, got " + nt.String()) + return newInadmissibleTagContentTypeError( + tagNumNegativeBignum, + "byte string", + nt.String(), + ) } b, _ := di.d.parseByteString() bi := new(big.Int).SetBytes(b) bi.Add(bi, big.NewInt(1)) bi.Neg(bi) - return di.writeString(bi.String()) + di.w.WriteString(bi.String()) + return nil default: - if err := di.writeString(strconv.FormatUint(tagNum, 10)); err != nil { - return err - } - if err := di.writeByte('('); err != nil { - return err - } + di.w.WriteString(strconv.FormatUint(tagNum, 10)) + di.w.WriteByte('(') if err := di.item(); err != nil { return err } - return di.writeByte(')') + di.w.WriteByte(')') + return nil } case cborTypePrimitives: _, ai, val := di.d.getHead() switch ai { - case 20: - return di.writeString("false") + case additionalInformationAsFalse: + di.w.WriteString("false") + return nil - case 21: - return di.writeString("true") + case additionalInformationAsTrue: + di.w.WriteString("true") + return nil - case 22: - return di.writeString("null") + case additionalInformationAsNull: + di.w.WriteString("null") + return nil - case 23: - return di.writeString("undefined") + case additionalInformationAsUndefined: + di.w.WriteString("undefined") + return nil - case 25, 26, 27: + case additionalInformationAsFloat16, + additionalInformationAsFloat32, + additionalInformationAsFloat64: return di.encodeFloat(ai, val) default: - if err := di.writeString("simple("); err != nil { - return err - } - if err := di.writeString(strconv.FormatUint(val, 10)); err != nil { - return err - } - return di.writeByte(')') + di.w.WriteString("simple(") + di.w.WriteString(strconv.FormatUint(val, 10)) + di.w.WriteByte(')') + return nil } } return nil } -func (di *diagnose) writeByte(val byte) error { - return di.w.WriteByte(val) -} - -func (di *diagnose) writeString(val string) error { - _, err := di.w.WriteString(val) - return err -} - // writeU16 format a rune as "\uxxxx" -func (di *diagnose) writeU16(val rune) error { - if err := di.writeString("\\u"); err != nil { - return err - } - b := make([]byte, 2) - b[0] = byte(val >> 8) - b[1] = byte(val) - return di.writeString(hex.EncodeToString(b)) +func (di *diagnose) writeU16(val rune) { + di.w.WriteString("\\u") + var in [2]byte + in[0] = byte(val >> 8) + in[1] = byte(val) + sz := hex.EncodedLen(len(in)) + di.w.Grow(sz) + dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz] + hex.Encode(dst, in[:]) + di.w.Write(dst) } var rawBase32Encoding = base32.StdEncoding.WithPadding(base32.NoPadding) @@ -511,95 +499,91 @@ func (di *diagnose) encodeByteString(val []byte) error { di2 := newDiagnose(val, di.dm.decMode, di.dm) // should always notating embedded CBOR sequence. 
if str, err := di2.diag(true); err == nil { - if err := di.writeString("<<"); err != nil { - return err - } - if err := di.writeString(str); err != nil { - return err - } - return di.writeString(">>") + di.w.WriteString("<<") + di.w.WriteString(str) + di.w.WriteString(">>") + return nil } } } switch di.dm.byteStringEncoding { case ByteStringBase16Encoding: - if err := di.writeString("h'"); err != nil { - return err - } - - encoder := hex.NewEncoder(di.w) + di.w.WriteString("h'") if di.dm.byteStringHexWhitespace { - for i, b := range val { + sz := hex.EncodedLen(len(val)) + if len(val) > 0 { + sz += len(val) - 1 + } + di.w.Grow(sz) + + dst := di.w.Bytes()[di.w.Len():] + for i := range val { if i > 0 { - if err := di.writeByte(' '); err != nil { - return err - } - } - if _, err := encoder.Write([]byte{b}); err != nil { - return err + dst = append(dst, ' ') } + hex.Encode(dst[len(dst):len(dst)+2], val[i:i+1]) + dst = dst[:len(dst)+2] } + di.w.Write(dst) } else { - if _, err := encoder.Write(val); err != nil { - return err - } + sz := hex.EncodedLen(len(val)) + di.w.Grow(sz) + dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz] + hex.Encode(dst, val) + di.w.Write(dst) } - return di.writeByte('\'') + di.w.WriteByte('\'') + return nil case ByteStringBase32Encoding: - if err := di.writeString("b32'"); err != nil { - return err - } - encoder := base32.NewEncoder(rawBase32Encoding, di.w) - if _, err := encoder.Write(val); err != nil { - return err - } - encoder.Close() - return di.writeByte('\'') + di.w.WriteString("b32'") + sz := rawBase32Encoding.EncodedLen(len(val)) + di.w.Grow(sz) + dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz] + rawBase32Encoding.Encode(dst, val) + di.w.Write(dst) + di.w.WriteByte('\'') + return nil case ByteStringBase32HexEncoding: - if err := di.writeString("h32'"); err != nil { - return err - } - encoder := base32.NewEncoder(rawBase32HexEncoding, di.w) - if _, err := encoder.Write(val); err != nil { - return err - } - encoder.Close() - return di.writeByte('\'') + di.w.WriteString("h32'") + sz := rawBase32HexEncoding.EncodedLen(len(val)) + di.w.Grow(sz) + dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz] + rawBase32HexEncoding.Encode(dst, val) + di.w.Write(dst) + di.w.WriteByte('\'') + return nil case ByteStringBase64Encoding: - if err := di.writeString("b64'"); err != nil { - return err - } - encoder := base64.NewEncoder(base64.RawURLEncoding, di.w) - if _, err := encoder.Write(val); err != nil { - return err - } - encoder.Close() - return di.writeByte('\'') + di.w.WriteString("b64'") + sz := base64.RawURLEncoding.EncodedLen(len(val)) + di.w.Grow(sz) + dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz] + base64.RawURLEncoding.Encode(dst, val) + di.w.Write(dst) + di.w.WriteByte('\'') + return nil default: - return di.dm.byteStringEncoding.valid() + // It should not be possible for users to construct a *diagMode with an invalid byte + // string encoding. 
+ panic(fmt.Sprintf("diagmode has invalid ByteStringEncoding %v", di.dm.byteStringEncoding)) } } -var utf16SurrSelf = rune(0x10000) +const utf16SurrSelf = rune(0x10000) // quote should be either `'` or `"` func (di *diagnose) encodeTextString(val string, quote byte) error { - if err := di.writeByte(quote); err != nil { - return err - } + di.w.WriteByte(quote) for i := 0; i < len(val); { if b := val[i]; b < utf8.RuneSelf { switch { case b == '\t', b == '\n', b == '\r', b == '\\', b == quote: - if err := di.writeByte('\\'); err != nil { - return err - } + di.w.WriteByte('\\') switch b { case '\t': @@ -609,19 +593,13 @@ func (di *diagnose) encodeTextString(val string, quote byte) error { case '\r': b = 'r' } - if err := di.writeByte(b); err != nil { - return err - } + di.w.WriteByte(b) case b >= ' ' && b <= '~': - if err := di.writeByte(b); err != nil { - return err - } + di.w.WriteByte(b) default: - if err := di.writeU16(rune(b)); err != nil { - return err - } + di.writeU16(rune(b)) } i++ @@ -631,84 +609,86 @@ func (di *diagnose) encodeTextString(val string, quote byte) error { c, size := utf8.DecodeRuneInString(val[i:]) switch { case c == utf8.RuneError: - // if err := di.writeU16(rune(val[i])); err != nil { - // return err - // } return &SemanticError{"cbor: invalid UTF-8 string"} case c < utf16SurrSelf: - if err := di.writeU16(c); err != nil { - return err - } + di.writeU16(c) default: c1, c2 := utf16.EncodeRune(c) - if err := di.writeU16(c1); err != nil { - return err - } - if err := di.writeU16(c2); err != nil { - return err - } + di.writeU16(c1) + di.writeU16(c2) } i += size } - return di.writeByte(quote) + di.w.WriteByte(quote) + return nil } func (di *diagnose) encodeFloat(ai byte, val uint64) error { f64 := float64(0) switch ai { - case 25: + case additionalInformationAsFloat16: f16 := float16.Frombits(uint16(val)) switch { case f16.IsNaN(): - return di.writeString("NaN") + di.w.WriteString("NaN") + return nil case f16.IsInf(1): - return di.writeString("Infinity") + di.w.WriteString("Infinity") + return nil case f16.IsInf(-1): - return di.writeString("-Infinity") + di.w.WriteString("-Infinity") + return nil default: f64 = float64(f16.Float32()) } - case 26: + case additionalInformationAsFloat32: f32 := math.Float32frombits(uint32(val)) switch { case f32 != f32: - return di.writeString("NaN") + di.w.WriteString("NaN") + return nil case f32 > math.MaxFloat32: - return di.writeString("Infinity") + di.w.WriteString("Infinity") + return nil case f32 < -math.MaxFloat32: - return di.writeString("-Infinity") + di.w.WriteString("-Infinity") + return nil default: f64 = float64(f32) } - case 27: + case additionalInformationAsFloat64: f64 = math.Float64frombits(val) switch { case f64 != f64: - return di.writeString("NaN") + di.w.WriteString("NaN") + return nil case f64 > math.MaxFloat64: - return di.writeString("Infinity") + di.w.WriteString("Infinity") + return nil case f64 < -math.MaxFloat64: - return di.writeString("-Infinity") + di.w.WriteString("-Infinity") + return nil } } // Use ES6 number to string conversion which should match most JSON generators. 
// Inspired by https://github.com/golang/go/blob/4df10fba1687a6d4f51d7238a403f8f2298f6a16/src/encoding/json/encode.go#L585 + const bitSize = 64 b := make([]byte, 0, 32) if abs := math.Abs(f64); abs != 0 && (abs < 1e-6 || abs >= 1e21) { - b = strconv.AppendFloat(b, f64, 'e', -1, 64) + b = strconv.AppendFloat(b, f64, 'e', -1, bitSize) // clean up e-09 to e-9 n := len(b) if n >= 4 && string(b[n-4:n-1]) == "e-0" { b = append(b[:n-2], b[n-1]) } } else { - b = strconv.AppendFloat(b, f64, 'f', -1, 64) + b = strconv.AppendFloat(b, f64, 'f', -1, bitSize) } // add decimal point and trailing zero if needed @@ -722,18 +702,21 @@ func (di *diagnose) encodeFloat(ai byte, val uint64) error { } } - if err := di.writeString(string(b)); err != nil { - return err - } + di.w.WriteString(string(b)) if di.dm.floatPrecisionIndicator { switch ai { - case 25: - return di.writeString("_1") - case 26: - return di.writeString("_2") - case 27: - return di.writeString("_3") + case additionalInformationAsFloat16: + di.w.WriteString("_1") + return nil + + case additionalInformationAsFloat32: + di.w.WriteString("_2") + return nil + + case additionalInformationAsFloat64: + di.w.WriteString("_3") + return nil } } diff --git a/vendor/github.com/fxamacker/cbor/v2/encode.go b/vendor/github.com/fxamacker/cbor/v2/encode.go index 86cc47a..6508e29 100644 --- a/vendor/github.com/fxamacker/cbor/v2/encode.go +++ b/vendor/github.com/fxamacker/cbor/v2/encode.go @@ -8,9 +8,11 @@ import ( "encoding" "encoding/binary" "errors" + "fmt" "io" "math" "math/big" + "math/rand" "reflect" "sort" "strconv" @@ -94,12 +96,40 @@ func Marshal(v interface{}) ([]byte, error) { return defaultEncMode.Marshal(v) } +// MarshalToBuffer encodes v into provided buffer (instead of using built-in buffer pool) +// and uses default encoding options. +// +// NOTE: Unlike Marshal, the buffer provided to MarshalToBuffer can contain +// partially encoded data if error is returned. +// +// See Marshal for more details. +func MarshalToBuffer(v interface{}, buf *bytes.Buffer) error { + return defaultEncMode.MarshalToBuffer(v, buf) +} + // Marshaler is the interface implemented by types that can marshal themselves // into valid CBOR. type Marshaler interface { MarshalCBOR() ([]byte, error) } +// MarshalerError represents error from checking encoded CBOR data item +// returned from MarshalCBOR for well-formedness and some very limited tag validation. +type MarshalerError struct { + typ reflect.Type + err error +} + +func (e *MarshalerError) Error() string { + return "cbor: error calling MarshalCBOR for type " + + e.typ.String() + + ": " + e.err.Error() +} + +func (e *MarshalerError) Unwrap() error { + return e.err +} + // UnsupportedTypeError is returned by Marshal when attempting to encode value // of an unsupported type. type UnsupportedTypeError struct { @@ -124,7 +154,7 @@ func (e *UnsupportedValueError) Error() string { type SortMode int const ( - // SortNone means no sorting. + // SortNone encodes map pairs and struct fields in an arbitrary order. SortNone SortMode = 0 // SortLengthFirst causes map keys or struct fields to be sorted such that: @@ -140,6 +170,12 @@ const ( // in RFC 7049bis. SortBytewiseLexical SortMode = 2 + // SortShuffle encodes map pairs and struct fields in a shuffled + // order. This mode does not guarantee an unbiased permutation, but it + // does guarantee that the runtime of the shuffle algorithm used will be + // constant. + SortFastShuffle SortMode = 3 + // SortCanonical is used in "Canonical CBOR" encoding in RFC 7049 3.9. 
SortCanonical SortMode = SortLengthFirst @@ -149,7 +185,7 @@ const ( // SortCoreDeterministic is used in "Core Deterministic Encoding" in RFC 7049bis. SortCoreDeterministic SortMode = SortBytewiseLexical - maxSortMode SortMode = 3 + maxSortMode SortMode = 4 ) func (sm SortMode) valid() bool { @@ -171,6 +207,7 @@ func (st StringMode) cborType() (cborType, error) { switch st { case StringToTextString: return cborTypeTextString, nil + case StringToByteString: return cborTypeByteString, nil } @@ -223,6 +260,9 @@ const ( // NaN payload. NaNConvertQuiet + // NaNConvertReject returns UnsupportedValueError on attempts to encode a NaN value. + NaNConvertReject + maxNaNConvert ) @@ -241,6 +281,9 @@ const ( // InfConvertNone never converts (used by CTAP2 Canonical CBOR). InfConvertNone + // InfConvertReject returns UnsupportedValueError on attempts to encode an infinite value. + InfConvertReject + maxInfConvert ) @@ -288,6 +331,9 @@ const ( // converting it to another CBOR type. BigIntConvertNone + // BigIntConvertReject returns an UnsupportedTypeError instead of marshaling a big.Int. + BigIntConvertReject + maxBigIntConvert ) @@ -357,6 +403,84 @@ func (fnm FieldNameMode) valid() bool { return fnm >= 0 && fnm < maxFieldNameMode } +// ByteSliceLaterFormatMode specifies which later format conversion hint (CBOR tag 21-23) +// to include (if any) when encoding Go byte slice to CBOR byte string. The encoder will +// always encode unmodified bytes from the byte slice and just wrap it within +// CBOR tag 21, 22, or 23 if specified. +// See "Expected Later Encoding for CBOR-to-JSON Converters" in RFC 8949 Section 3.4.5.2. +type ByteSliceLaterFormatMode int + +const ( + // ByteSliceLaterFormatNone encodes unmodified bytes from Go byte slice to CBOR byte string (major type 2) + // without adding CBOR tag 21, 22, or 23. + ByteSliceLaterFormatNone ByteSliceLaterFormatMode = iota + + // ByteSliceLaterFormatBase64URL encodes unmodified bytes from Go byte slice to CBOR byte string (major type 2) + // inside CBOR tag 21 (expected later conversion to base64url encoding, see RFC 8949 Section 3.4.5.2). + ByteSliceLaterFormatBase64URL + + // ByteSliceLaterFormatBase64 encodes unmodified bytes from Go byte slice to CBOR byte string (major type 2) + // inside CBOR tag 22 (expected later conversion to base64 encoding, see RFC 8949 Section 3.4.5.2). + ByteSliceLaterFormatBase64 + + // ByteSliceLaterFormatBase16 encodes unmodified bytes from Go byte slice to CBOR byte string (major type 2) + // inside CBOR tag 23 (expected later conversion to base16 encoding, see RFC 8949 Section 3.4.5.2). + ByteSliceLaterFormatBase16 +) + +func (bsefm ByteSliceLaterFormatMode) encodingTag() (uint64, error) { + switch bsefm { + case ByteSliceLaterFormatNone: + return 0, nil + + case ByteSliceLaterFormatBase64URL: + return tagNumExpectedLaterEncodingBase64URL, nil + + case ByteSliceLaterFormatBase64: + return tagNumExpectedLaterEncodingBase64, nil + + case ByteSliceLaterFormatBase16: + return tagNumExpectedLaterEncodingBase16, nil + } + return 0, errors.New("cbor: invalid ByteSliceLaterFormat " + strconv.Itoa(int(bsefm))) +} + +// ByteArrayMode specifies how to encode byte arrays. +type ByteArrayMode int + +const ( + // ByteArrayToByteSlice encodes byte arrays the same way that a byte slice with identical + // length and contents is encoded. + ByteArrayToByteSlice ByteArrayMode = iota + + // ByteArrayToArray encodes byte arrays to the CBOR array type with one unsigned integer + // item for each byte in the array. 
+ ByteArrayToArray + + maxByteArrayMode +) + +func (bam ByteArrayMode) valid() bool { + return bam >= 0 && bam < maxByteArrayMode +} + +// BinaryMarshalerMode specifies how to encode types that implement encoding.BinaryMarshaler. +type BinaryMarshalerMode int + +const ( + // BinaryMarshalerByteString encodes the output of MarshalBinary to a CBOR byte string. + BinaryMarshalerByteString BinaryMarshalerMode = iota + + // BinaryMarshalerNone does not recognize BinaryMarshaler implementations during encode. + BinaryMarshalerNone + + maxBinaryMarshalerMode +) + +func (bmm BinaryMarshalerMode) valid() bool { + return bmm >= 0 && bmm < maxBinaryMarshalerMode +} + // EncOptions specifies encoding options. type EncOptions struct { // Sort specifies sorting order. @@ -401,6 +525,19 @@ type EncOptions struct { // FieldName specifies the CBOR type to use when encoding struct field names. FieldName FieldNameMode + + // ByteSliceLaterFormat specifies which later format conversion hint (CBOR tag 21-23) + // to include (if any) when encoding Go byte slice to CBOR byte string. The encoder will + // always encode unmodified bytes from the byte slice and just wrap it within + // CBOR tag 21, 22, or 23 if specified. + // See "Expected Later Encoding for CBOR-to-JSON Converters" in RFC 8949 Section 3.4.5.2. + ByteSliceLaterFormat ByteSliceLaterFormatMode + + // ByteArray specifies how to encode byte arrays. + ByteArray ByteArrayMode + + // BinaryMarshaler specifies how to encode types that implement encoding.BinaryMarshaler. + BinaryMarshaler BinaryMarshalerMode } // CanonicalEncOptions returns EncOptions for "Canonical CBOR" encoding, @@ -494,12 +631,22 @@ func PreferredUnsortedEncOptions() EncOptions { } // EncMode returns EncMode with immutable options and no tags (safe for concurrency). -func (opts EncOptions) EncMode() (EncMode, error) { +func (opts EncOptions) EncMode() (EncMode, error) { //nolint:gocritic // ignore hugeParam + return opts.encMode() +} + +// UserBufferEncMode returns UserBufferEncMode with immutable options and no tags (safe for concurrency). +func (opts EncOptions) UserBufferEncMode() (UserBufferEncMode, error) { //nolint:gocritic // ignore hugeParam return opts.encMode() } // EncModeWithTags returns EncMode with options and tags that are both immutable (safe for concurrency). -func (opts EncOptions) EncModeWithTags(tags TagSet) (EncMode, error) { +func (opts EncOptions) EncModeWithTags(tags TagSet) (EncMode, error) { //nolint:gocritic // ignore hugeParam + return opts.UserBufferEncModeWithTags(tags) +} + +// UserBufferEncModeWithTags returns UserBufferEncMode with options and tags that are both immutable (safe for concurrency). +func (opts EncOptions) UserBufferEncModeWithTags(tags TagSet) (UserBufferEncMode, error) { //nolint:gocritic // ignore hugeParam if opts.TagsMd == TagsForbidden { return nil, errors.New("cbor: cannot create EncMode with TagSet when TagsMd is TagsForbidden") } @@ -527,7 +674,12 @@ func (opts EncOptions) EncModeWithTags(tags TagSet) (EncMode, error) { } // EncModeWithSharedTags returns EncMode with immutable options and mutable shared tags (safe for concurrency). -func (opts EncOptions) EncModeWithSharedTags(tags TagSet) (EncMode, error) { +func (opts EncOptions) EncModeWithSharedTags(tags TagSet) (EncMode, error) { //nolint:gocritic // ignore hugeParam + return opts.UserBufferEncModeWithSharedTags(tags) +} + +// UserBufferEncModeWithSharedTags returns UserBufferEncMode with immutable options and mutable shared tags (safe for concurrency). 
+func (opts EncOptions) UserBufferEncModeWithSharedTags(tags TagSet) (UserBufferEncMode, error) { //nolint:gocritic // ignore hugeParam if opts.TagsMd == TagsForbidden { return nil, errors.New("cbor: cannot create EncMode with TagSet when TagsMd is TagsForbidden") } @@ -542,7 +694,7 @@ func (opts EncOptions) EncModeWithSharedTags(tags TagSet) (EncMode, error) { return em, nil } -func (opts EncOptions) encMode() (*encMode, error) { +func (opts EncOptions) encMode() (*encMode, error) { //nolint:gocritic // ignore hugeParam if !opts.Sort.valid() { return nil, errors.New("cbor: invalid SortMode " + strconv.Itoa(int(opts.Sort))) } @@ -586,21 +738,35 @@ func (opts EncOptions) encMode() (*encMode, error) { if !opts.FieldName.valid() { return nil, errors.New("cbor: invalid FieldName " + strconv.Itoa(int(opts.FieldName))) } + byteSliceLaterEncodingTag, err := opts.ByteSliceLaterFormat.encodingTag() + if err != nil { + return nil, err + } + if !opts.ByteArray.valid() { + return nil, errors.New("cbor: invalid ByteArray " + strconv.Itoa(int(opts.ByteArray))) + } + if !opts.BinaryMarshaler.valid() { + return nil, errors.New("cbor: invalid BinaryMarshaler " + strconv.Itoa(int(opts.BinaryMarshaler))) + } em := encMode{ - sort: opts.Sort, - shortestFloat: opts.ShortestFloat, - nanConvert: opts.NaNConvert, - infConvert: opts.InfConvert, - bigIntConvert: opts.BigIntConvert, - time: opts.Time, - timeTag: opts.TimeTag, - indefLength: opts.IndefLength, - nilContainers: opts.NilContainers, - tagsMd: opts.TagsMd, - omitEmpty: opts.OmitEmpty, - stringType: opts.String, - stringMajorType: stringMajorType, - fieldName: opts.FieldName, + sort: opts.Sort, + shortestFloat: opts.ShortestFloat, + nanConvert: opts.NaNConvert, + infConvert: opts.InfConvert, + bigIntConvert: opts.BigIntConvert, + time: opts.Time, + timeTag: opts.TimeTag, + indefLength: opts.IndefLength, + nilContainers: opts.NilContainers, + tagsMd: opts.TagsMd, + omitEmpty: opts.OmitEmpty, + stringType: opts.String, + stringMajorType: stringMajorType, + fieldName: opts.FieldName, + byteSliceLaterFormat: opts.ByteSliceLaterFormat, + byteSliceLaterEncodingTag: byteSliceLaterEncodingTag, + byteArray: opts.ByteArray, + binaryMarshaler: opts.BinaryMarshaler, } return &em, nil } @@ -612,44 +778,136 @@ type EncMode interface { EncOptions() EncOptions } +// UserBufferEncMode is an interface for CBOR encoding, which extends EncMode by +// adding MarshalToBuffer to support user specified buffer rather than encoding +// into the built-in buffer pool. +type UserBufferEncMode interface { + EncMode + MarshalToBuffer(v interface{}, buf *bytes.Buffer) error + + // This private method is to prevent users implementing + // this interface and so future additions to it will + // not be breaking changes. 
+ // See https://go.dev/blog/module-compatibility + unexport() +} + type encMode struct { - tags tagProvider - sort SortMode - shortestFloat ShortestFloatMode - nanConvert NaNConvertMode - infConvert InfConvertMode - bigIntConvert BigIntConvertMode - time TimeMode - timeTag EncTagMode - indefLength IndefLengthMode - nilContainers NilContainersMode - tagsMd TagsMode - omitEmpty OmitEmptyMode - stringType StringMode - stringMajorType cborType - fieldName FieldNameMode + tags tagProvider + sort SortMode + shortestFloat ShortestFloatMode + nanConvert NaNConvertMode + infConvert InfConvertMode + bigIntConvert BigIntConvertMode + time TimeMode + timeTag EncTagMode + indefLength IndefLengthMode + nilContainers NilContainersMode + tagsMd TagsMode + omitEmpty OmitEmptyMode + stringType StringMode + stringMajorType cborType + fieldName FieldNameMode + byteSliceLaterFormat ByteSliceLaterFormatMode + byteSliceLaterEncodingTag uint64 + byteArray ByteArrayMode + binaryMarshaler BinaryMarshalerMode } var defaultEncMode, _ = EncOptions{}.encMode() +// These four decoding modes are used by getMarshalerDecMode. +// maxNestedLevels, maxArrayElements, and maxMapPairs are +// set to max allowed limits to avoid rejecting Marshaler +// output that would have been the allowable output of a +// non-Marshaler object that exceeds default limits. +var ( + marshalerForbidIndefLengthForbidTagsDecMode = decMode{ + maxNestedLevels: maxMaxNestedLevels, + maxArrayElements: maxMaxArrayElements, + maxMapPairs: maxMaxMapPairs, + indefLength: IndefLengthForbidden, + tagsMd: TagsForbidden, + } + + marshalerAllowIndefLengthForbidTagsDecMode = decMode{ + maxNestedLevels: maxMaxNestedLevels, + maxArrayElements: maxMaxArrayElements, + maxMapPairs: maxMaxMapPairs, + indefLength: IndefLengthAllowed, + tagsMd: TagsForbidden, + } + + marshalerForbidIndefLengthAllowTagsDecMode = decMode{ + maxNestedLevels: maxMaxNestedLevels, + maxArrayElements: maxMaxArrayElements, + maxMapPairs: maxMaxMapPairs, + indefLength: IndefLengthForbidden, + tagsMd: TagsAllowed, + } + + marshalerAllowIndefLengthAllowTagsDecMode = decMode{ + maxNestedLevels: maxMaxNestedLevels, + maxArrayElements: maxMaxArrayElements, + maxMapPairs: maxMaxMapPairs, + indefLength: IndefLengthAllowed, + tagsMd: TagsAllowed, + } +) + +// getMarshalerDecMode returns one of four existing decoding modes +// which can be reused (safe for parallel use) for the purpose of +// checking if data returned by Marshaler is well-formed. +func getMarshalerDecMode(indefLength IndefLengthMode, tagsMd TagsMode) *decMode { + switch { + case indefLength == IndefLengthAllowed && tagsMd == TagsAllowed: + return &marshalerAllowIndefLengthAllowTagsDecMode + + case indefLength == IndefLengthAllowed && tagsMd == TagsForbidden: + return &marshalerAllowIndefLengthForbidTagsDecMode + + case indefLength == IndefLengthForbidden && tagsMd == TagsAllowed: + return &marshalerForbidIndefLengthAllowTagsDecMode + + case indefLength == IndefLengthForbidden && tagsMd == TagsForbidden: + return &marshalerForbidIndefLengthForbidTagsDecMode + + default: + // This should never happen, unless we add new options to + // IndefLengthMode or TagsMode without updating this function. + return &decMode{ + maxNestedLevels: maxMaxNestedLevels, + maxArrayElements: maxMaxArrayElements, + maxMapPairs: maxMaxMapPairs, + indefLength: indefLength, + tagsMd: tagsMd, + } + } +} + // EncOptions returns user specified options used to create this EncMode. 
func (em *encMode) EncOptions() EncOptions { return EncOptions{ - Sort: em.sort, - ShortestFloat: em.shortestFloat, - NaNConvert: em.nanConvert, - InfConvert: em.infConvert, - BigIntConvert: em.bigIntConvert, - Time: em.time, - TimeTag: em.timeTag, - IndefLength: em.indefLength, - NilContainers: em.nilContainers, - TagsMd: em.tagsMd, - OmitEmpty: em.omitEmpty, - String: em.stringType, - FieldName: em.fieldName, - } -} + Sort: em.sort, + ShortestFloat: em.shortestFloat, + NaNConvert: em.nanConvert, + InfConvert: em.infConvert, + BigIntConvert: em.bigIntConvert, + Time: em.time, + TimeTag: em.timeTag, + IndefLength: em.indefLength, + NilContainers: em.nilContainers, + TagsMd: em.tagsMd, + OmitEmpty: em.omitEmpty, + String: em.stringType, + FieldName: em.fieldName, + ByteSliceLaterFormat: em.byteSliceLaterFormat, + ByteArray: em.byteArray, + BinaryMarshaler: em.binaryMarshaler, + } +} + +func (em *encMode) unexport() {} func (em *encMode) encTagBytes(t reflect.Type) []byte { if em.tags != nil { @@ -664,61 +922,61 @@ func (em *encMode) encTagBytes(t reflect.Type) []byte { // // See the documentation for Marshal for details. func (em *encMode) Marshal(v interface{}) ([]byte, error) { - e := getEncoderBuffer() + e := getEncodeBuffer() if err := encode(e, em, reflect.ValueOf(v)); err != nil { - putEncoderBuffer(e) + putEncodeBuffer(e) return nil, err } buf := make([]byte, e.Len()) copy(buf, e.Bytes()) - putEncoderBuffer(e) + putEncodeBuffer(e) return buf, nil } +// MarshalToBuffer encodes v into provided buffer (instead of using built-in buffer pool) +// and uses em encoding mode. +// +// NOTE: Unlike Marshal, the buffer provided to MarshalToBuffer can contain +// partially encoded data if error is returned. +// +// See Marshal for more details. +func (em *encMode) MarshalToBuffer(v interface{}, buf *bytes.Buffer) error { + if buf == nil { + return fmt.Errorf("cbor: encoding buffer provided by user is nil") + } + return encode(buf, em, reflect.ValueOf(v)) +} + // NewEncoder returns a new encoder that writes to w using em EncMode. func (em *encMode) NewEncoder(w io.Writer) *Encoder { return &Encoder{w: w, em: em} } -type encoderBuffer struct { - bytes.Buffer - scratch [16]byte -} - -// encoderBufferPool caches unused encoderBuffer objects for later reuse. -var encoderBufferPool = sync.Pool{ +// encodeBufferPool caches unused bytes.Buffer objects for later reuse. 
+var encodeBufferPool = sync.Pool{ New: func() interface{} { - e := new(encoderBuffer) + e := new(bytes.Buffer) e.Grow(32) // TODO: make this configurable return e }, } -func getEncoderBuffer() *encoderBuffer { - return encoderBufferPool.Get().(*encoderBuffer) +func getEncodeBuffer() *bytes.Buffer { + return encodeBufferPool.Get().(*bytes.Buffer) } -func putEncoderBuffer(e *encoderBuffer) { +func putEncodeBuffer(e *bytes.Buffer) { e.Reset() - encoderBufferPool.Put(e) + encodeBufferPool.Put(e) } -type encodeFunc func(e *encoderBuffer, em *encMode, v reflect.Value) error +type encodeFunc func(e *bytes.Buffer, em *encMode, v reflect.Value) error type isEmptyFunc func(em *encMode, v reflect.Value) (empty bool, err error) -var ( - cborFalse = []byte{0xf4} - cborTrue = []byte{0xf5} - cborNil = []byte{0xf6} - cborNaN = []byte{0xf9, 0x7e, 0x00} - cborPositiveInfinity = []byte{0xf9, 0x7c, 0x00} - cborNegativeInfinity = []byte{0xf9, 0xfc, 0x00} -) - -func encode(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encode(e *bytes.Buffer, em *encMode, v reflect.Value) error { if !v.IsValid() { // v is zero value e.Write(cborNil) @@ -733,7 +991,7 @@ func encode(e *encoderBuffer, em *encMode, v reflect.Value) error { return f(e, em, v) } -func encodeBool(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeBool(e *bytes.Buffer, em *encMode, v reflect.Value) error { if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -745,7 +1003,7 @@ func encodeBool(e *encoderBuffer, em *encMode, v reflect.Value) error { return nil } -func encodeInt(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeInt(e *bytes.Buffer, em *encMode, v reflect.Value) error { if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -759,7 +1017,7 @@ func encodeInt(e *encoderBuffer, em *encMode, v reflect.Value) error { return nil } -func encodeUint(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeUint(e *bytes.Buffer, em *encMode, v reflect.Value) error { if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -767,7 +1025,7 @@ func encodeUint(e *encoderBuffer, em *encMode, v reflect.Value) error { return nil } -func encodeFloat(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeFloat(e *bytes.Buffer, em *encMode, v reflect.Value) error { if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -782,9 +1040,12 @@ func encodeFloat(e *encoderBuffer, em *encMode, v reflect.Value) error { if v.Kind() == reflect.Float64 && (fopt == ShortestFloatNone || cannotFitFloat32(f64)) { // Encode float64 // Don't use encodeFloat64() because it cannot be inlined. - e.scratch[0] = byte(cborTypePrimitives) | byte(27) - binary.BigEndian.PutUint64(e.scratch[1:], math.Float64bits(f64)) - e.Write(e.scratch[:9]) + const argumentSize = 8 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | byte(additionalInformationAsFloat64) + binary.BigEndian.PutUint64(scratch[1:], math.Float64bits(f64)) + e.Write(scratch[:]) return nil } @@ -805,24 +1066,34 @@ func encodeFloat(e *encoderBuffer, em *encMode, v reflect.Value) error { if p == float16.PrecisionExact { // Encode float16 // Don't use encodeFloat16() because it cannot be inlined. 
- e.scratch[0] = byte(cborTypePrimitives) | byte(25) - binary.BigEndian.PutUint16(e.scratch[1:], uint16(f16)) - e.Write(e.scratch[:3]) + const argumentSize = 2 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | additionalInformationAsFloat16 + binary.BigEndian.PutUint16(scratch[1:], uint16(f16)) + e.Write(scratch[:]) return nil } } // Encode float32 // Don't use encodeFloat32() because it cannot be inlined. - e.scratch[0] = byte(cborTypePrimitives) | byte(26) - binary.BigEndian.PutUint32(e.scratch[1:], math.Float32bits(f32)) - e.Write(e.scratch[:5]) + const argumentSize = 4 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | additionalInformationAsFloat32 + binary.BigEndian.PutUint32(scratch[1:], math.Float32bits(f32)) + e.Write(scratch[:]) return nil } -func encodeInf(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeInf(e *bytes.Buffer, em *encMode, v reflect.Value) error { f64 := v.Float() - if em.infConvert == InfConvertFloat16 { + switch em.infConvert { + case InfConvertReject: + return &UnsupportedValueError{msg: "floating-point infinity"} + + case InfConvertFloat16: if f64 > 0 { e.Write(cborPositiveInfinity) } else { @@ -836,7 +1107,7 @@ func encodeInf(e *encoderBuffer, em *encMode, v reflect.Value) error { return encodeFloat32(e, float32(f64)) } -func encodeNaN(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeNaN(e *bytes.Buffer, em *encMode, v reflect.Value) error { switch em.nanConvert { case NaNConvert7e00: e.Write(cborNaN) @@ -849,6 +1120,9 @@ func encodeNaN(e *encoderBuffer, em *encMode, v reflect.Value) error { f32 := float32NaNFromReflectValue(v) return encodeFloat32(e, f32) + case NaNConvertReject: + return &UnsupportedValueError{msg: "floating-point NaN"} + default: // NaNConvertPreserveSignal, NaNConvertQuiet if v.Kind() == reflect.Float64 { f64 := v.Float() @@ -894,33 +1168,45 @@ func encodeNaN(e *encoderBuffer, em *encMode, v reflect.Value) error { } } -func encodeFloat16(e *encoderBuffer, f16 float16.Float16) error { - e.scratch[0] = byte(cborTypePrimitives) | byte(25) - binary.BigEndian.PutUint16(e.scratch[1:], uint16(f16)) - e.Write(e.scratch[:3]) +func encodeFloat16(e *bytes.Buffer, f16 float16.Float16) error { + const argumentSize = 2 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | additionalInformationAsFloat16 + binary.BigEndian.PutUint16(scratch[1:], uint16(f16)) + e.Write(scratch[:]) return nil } -func encodeFloat32(e *encoderBuffer, f32 float32) error { - e.scratch[0] = byte(cborTypePrimitives) | byte(26) - binary.BigEndian.PutUint32(e.scratch[1:], math.Float32bits(f32)) - e.Write(e.scratch[:5]) +func encodeFloat32(e *bytes.Buffer, f32 float32) error { + const argumentSize = 4 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | additionalInformationAsFloat32 + binary.BigEndian.PutUint32(scratch[1:], math.Float32bits(f32)) + e.Write(scratch[:]) return nil } -func encodeFloat64(e *encoderBuffer, f64 float64) error { - e.scratch[0] = byte(cborTypePrimitives) | byte(27) - binary.BigEndian.PutUint64(e.scratch[1:], math.Float64bits(f64)) - e.Write(e.scratch[:9]) +func encodeFloat64(e *bytes.Buffer, f64 float64) error { + const argumentSize = 8 + const headSize = 1 + argumentSize + var scratch [headSize]byte + scratch[0] = byte(cborTypePrimitives) | additionalInformationAsFloat64 + 
binary.BigEndian.PutUint64(scratch[1:], math.Float64bits(f64)) + e.Write(scratch[:]) return nil } -func encodeByteString(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeByteString(e *bytes.Buffer, em *encMode, v reflect.Value) error { vk := v.Kind() if vk == reflect.Slice && v.IsNil() && em.nilContainers == NilContainerAsNull { e.Write(cborNil) return nil } + if vk == reflect.Slice && v.Type().Elem().Kind() == reflect.Uint8 && em.byteSliceLaterEncodingTag != 0 { + encodeHead(e, byte(cborTypeTag), em.byteSliceLaterEncodingTag) + } if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -939,7 +1225,7 @@ func encodeByteString(e *encoderBuffer, em *encMode, v reflect.Value) error { return nil } -func encodeString(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeString(e *bytes.Buffer, em *encMode, v reflect.Value) error { if b := em.encTagBytes(v.Type()); b != nil { e.Write(b) } @@ -953,7 +1239,10 @@ type arrayEncodeFunc struct { f encodeFunc } -func (ae arrayEncodeFunc) encode(e *encoderBuffer, em *encMode, v reflect.Value) error { +func (ae arrayEncodeFunc) encode(e *bytes.Buffer, em *encMode, v reflect.Value) error { + if em.byteArray == ByteArrayToByteSlice && v.Type().Elem().Kind() == reflect.Uint8 { + return encodeByteString(e, em, v) + } if v.Kind() == reflect.Slice && v.IsNil() && em.nilContainers == NilContainerAsNull { e.Write(cborNil) return nil @@ -977,13 +1266,13 @@ func (ae arrayEncodeFunc) encode(e *encoderBuffer, em *encMode, v reflect.Value) // encodeKeyValueFunc encodes key/value pairs in map (v). // If kvs is provided (having the same length as v), length of encoded key and value are stored in kvs. // kvs is used for canonical encoding of map. -type encodeKeyValueFunc func(e *encoderBuffer, em *encMode, v reflect.Value, kvs []keyValue) error +type encodeKeyValueFunc func(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error type mapEncodeFunc struct { e encodeKeyValueFunc } -func (me mapEncodeFunc) encode(e *encoderBuffer, em *encMode, v reflect.Value) error { +func (me mapEncodeFunc) encode(e *bytes.Buffer, em *encMode, v reflect.Value) error { if v.IsNil() && em.nilContainers == NilContainerAsNull { e.Write(cborNil) return nil @@ -995,21 +1284,58 @@ func (me mapEncodeFunc) encode(e *encoderBuffer, em *encMode, v reflect.Value) e if mlen == 0 { return e.WriteByte(byte(cborTypeMap)) } - if em.sort != SortNone { - return me.encodeCanonical(e, em, v) - } + encodeHead(e, byte(cborTypeMap), uint64(mlen)) + if em.sort == SortNone || em.sort == SortFastShuffle || mlen <= 1 { + return me.e(e, em, v, nil) + } + + kvsp := getKeyValues(v.Len()) // for sorting keys + defer putKeyValues(kvsp) + kvs := *kvsp + + kvBeginOffset := e.Len() + if err := me.e(e, em, v, kvs); err != nil { + return err + } + kvTotalLen := e.Len() - kvBeginOffset + + // Use the capacity at the tail of the encode buffer as a staging area to rearrange the + // encoded pairs into sorted order. + e.Grow(kvTotalLen) + tmp := e.Bytes()[e.Len() : e.Len()+kvTotalLen] // Can use e.AvailableBuffer() in Go 1.21+. + dst := e.Bytes()[kvBeginOffset:] + + if em.sort == SortBytewiseLexical { + sort.Sort(&bytewiseKeyValueSorter{kvs: kvs, data: dst}) + } else { + sort.Sort(&lengthFirstKeyValueSorter{kvs: kvs, data: dst}) + } + + // This is where the encoded bytes are actually rearranged in the output buffer to reflect + // the desired order. 
+ sortedOffset := 0 + for _, kv := range kvs { + copy(tmp[sortedOffset:], dst[kv.offset:kv.nextOffset]) + sortedOffset += kv.nextOffset - kv.offset + } + copy(dst, tmp[:kvTotalLen]) + + return nil - return me.e(e, em, v, nil) } +// keyValue is the position of an encoded pair in a buffer. All offsets are zero-based and relative +// to the first byte of the first encoded pair. type keyValue struct { - keyCBORData, keyValueCBORData []byte - keyLen, keyValueLen int + offset int + valueOffset int + nextOffset int } type bytewiseKeyValueSorter struct { - kvs []keyValue + kvs []keyValue + data []byte } func (x *bytewiseKeyValueSorter) Len() int { @@ -1021,11 +1347,13 @@ func (x *bytewiseKeyValueSorter) Swap(i, j int) { } func (x *bytewiseKeyValueSorter) Less(i, j int) bool { - return bytes.Compare(x.kvs[i].keyCBORData, x.kvs[j].keyCBORData) <= 0 + kvi, kvj := x.kvs[i], x.kvs[j] + return bytes.Compare(x.data[kvi.offset:kvi.valueOffset], x.data[kvj.offset:kvj.valueOffset]) <= 0 } type lengthFirstKeyValueSorter struct { - kvs []keyValue + kvs []keyValue + data []byte } func (x *lengthFirstKeyValueSorter) Len() int { @@ -1037,10 +1365,11 @@ func (x *lengthFirstKeyValueSorter) Swap(i, j int) { } func (x *lengthFirstKeyValueSorter) Less(i, j int) bool { - if len(x.kvs[i].keyCBORData) != len(x.kvs[j].keyCBORData) { - return len(x.kvs[i].keyCBORData) < len(x.kvs[j].keyCBORData) + kvi, kvj := x.kvs[i], x.kvs[j] + if keyLengthDifference := (kvi.valueOffset - kvi.offset) - (kvj.valueOffset - kvj.offset); keyLengthDifference != 0 { + return keyLengthDifference < 0 } - return bytes.Compare(x.kvs[i].keyCBORData, x.kvs[j].keyCBORData) <= 0 + return bytes.Compare(x.data[kvi.offset:kvi.valueOffset], x.data[kvj.offset:kvj.valueOffset]) <= 0 } var keyValuePool = sync.Pool{} @@ -1068,42 +1397,7 @@ func putKeyValues(x *[]keyValue) { keyValuePool.Put(x) } -func (me mapEncodeFunc) encodeCanonical(e *encoderBuffer, em *encMode, v reflect.Value) error { - kve := getEncoderBuffer() // accumulated cbor encoded key-values - defer putEncoderBuffer(kve) - - kvsp := getKeyValues(v.Len()) // for sorting keys - defer putKeyValues(kvsp) - - kvs := *kvsp - - err := me.e(kve, em, v, kvs) - if err != nil { - return err - } - - b := kve.Bytes() - for i, off := 0, 0; i < len(kvs); i++ { - kvs[i].keyCBORData = b[off : off+kvs[i].keyLen] - kvs[i].keyValueCBORData = b[off : off+kvs[i].keyValueLen] - off += kvs[i].keyValueLen - } - - if em.sort == SortBytewiseLexical { - sort.Sort(&bytewiseKeyValueSorter{kvs}) - } else { - sort.Sort(&lengthFirstKeyValueSorter{kvs}) - } - - encodeHead(e, byte(cborTypeMap), uint64(len(kvs))) - for i := 0; i < len(kvs); i++ { - e.Write(kvs[i].keyValueCBORData) - } - - return nil -} - -func encodeStructToArray(e *encoderBuffer, em *encMode, v reflect.Value) (err error) { +func encodeStructToArray(e *bytes.Buffer, em *encMode, v reflect.Value) (err error) { structType, err := getEncodingStructType(v.Type()) if err != nil { return err @@ -1124,7 +1418,7 @@ func encodeStructToArray(e *encoderBuffer, em *encMode, v reflect.Value) (err er fv = v.Field(f.idx[0]) } else { // Get embedded field value. No error is expected. 
- fv, _ = getFieldValue(v, f.idx, func(v reflect.Value) (reflect.Value, error) { + fv, _ = getFieldValue(v, f.idx, func(reflect.Value) (reflect.Value, error) { // Write CBOR nil for null pointer to embedded struct e.Write(cborNil) return reflect.Value{}, nil @@ -1141,31 +1435,7 @@ func encodeStructToArray(e *encoderBuffer, em *encMode, v reflect.Value) (err er return nil } -func encodeFixedLengthStruct(e *encoderBuffer, em *encMode, v reflect.Value, flds fields) error { - if b := em.encTagBytes(v.Type()); b != nil { - e.Write(b) - } - - encodeHead(e, byte(cborTypeMap), uint64(len(flds))) - - for i := 0; i < len(flds); i++ { - f := flds[i] - if !f.keyAsInt && em.fieldName == FieldNameToByteString { - e.Write(f.cborNameByteString) - } else { // int or text string - e.Write(f.cborName) - } - - fv := v.Field(f.idx[0]) - if err := f.ef(e, em, fv); err != nil { - return err - } - } - - return nil -} - -func encodeStruct(e *encoderBuffer, em *encMode, v reflect.Value) (err error) { +func encodeStruct(e *bytes.Buffer, em *encMode, v reflect.Value) (err error) { structType, err := getEncodingStructType(v.Type()) if err != nil { return err @@ -1173,21 +1443,30 @@ func encodeStruct(e *encoderBuffer, em *encMode, v reflect.Value) (err error) { flds := structType.getFields(em) - if structType.fixedLength { - return encodeFixedLengthStruct(e, em, v, flds) + start := 0 + if em.sort == SortFastShuffle && len(flds) > 0 { + start = rand.Intn(len(flds)) //nolint:gosec // Don't need a CSPRNG for deck cutting. + } + + if b := em.encTagBytes(v.Type()); b != nil { + e.Write(b) } - kve := getEncoderBuffer() // encode key-value pairs based on struct field tag options + // Encode head with struct field count. + // Head is rewritten later if actual encoded field count is different from struct field count. + encodedHeadLen := encodeHead(e, byte(cborTypeMap), uint64(len(flds))) + + kvbegin := e.Len() kvcount := 0 - for i := 0; i < len(flds); i++ { - f := flds[i] + for offset := 0; offset < len(flds); offset++ { + f := flds[(start+offset)%len(flds)] var fv reflect.Value if len(f.idx) == 1 { fv = v.Field(f.idx[0]) } else { // Get embedded field value. No error is expected. - fv, _ = getFieldValue(v, f.idx, func(v reflect.Value) (reflect.Value, error) { + fv, _ = getFieldValue(v, f.idx, func(reflect.Value) (reflect.Value, error) { // Skip null pointer to embedded struct return reflect.Value{}, nil }) @@ -1198,7 +1477,6 @@ func encodeStruct(e *encoderBuffer, em *encMode, v reflect.Value) (err error) { if f.omitEmpty { empty, err := f.ief(em, fv) if err != nil { - putEncoderBuffer(kve) return err } if empty { @@ -1207,30 +1485,51 @@ func encodeStruct(e *encoderBuffer, em *encMode, v reflect.Value) (err error) { } if !f.keyAsInt && em.fieldName == FieldNameToByteString { - kve.Write(f.cborNameByteString) + e.Write(f.cborNameByteString) } else { // int or text string - kve.Write(f.cborName) + e.Write(f.cborName) } - if err := f.ef(kve, em, fv); err != nil { - putEncoderBuffer(kve) + if err := f.ef(e, em, fv); err != nil { return err } + kvcount++ } - if b := em.encTagBytes(v.Type()); b != nil { - e.Write(b) + if len(flds) == kvcount { + // Encoded element count in head is the same as actual element count. + return nil + } + + // Overwrite the bytes that were reserved for the head before encoding the map entries. 
+ var actualHeadLen int + { + headbuf := *bytes.NewBuffer(e.Bytes()[kvbegin-encodedHeadLen : kvbegin-encodedHeadLen : kvbegin]) + actualHeadLen = encodeHead(&headbuf, byte(cborTypeMap), uint64(kvcount)) + } + + if actualHeadLen == encodedHeadLen { + // The bytes reserved for the encoded head were exactly the right size, so the + // encoded entries are already in their final positions. + return nil } - encodeHead(e, byte(cborTypeMap), uint64(kvcount)) - e.Write(kve.Bytes()) + // We reserved more bytes than needed for the encoded head, based on the number of fields + // encoded. The encoded entries are offset to the right by the number of excess reserved + // bytes. Shift the entries left to remove the gap. + excessReservedBytes := encodedHeadLen - actualHeadLen + dst := e.Bytes()[kvbegin-excessReservedBytes : e.Len()-excessReservedBytes] + src := e.Bytes()[kvbegin:e.Len()] + copy(dst, src) - putEncoderBuffer(kve) + // After shifting, the excess bytes are at the end of the output buffer and they are + // garbage. + e.Truncate(e.Len() - excessReservedBytes) return nil } -func encodeIntf(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeIntf(e *bytes.Buffer, em *encMode, v reflect.Value) error { if v.IsNil() { e.Write(cborNil) return nil @@ -1238,7 +1537,7 @@ func encodeIntf(e *encoderBuffer, em *encMode, v reflect.Value) error { return encode(e, em, v.Elem()) } -func encodeTime(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeTime(e *bytes.Buffer, em *encMode, v reflect.Value) error { t := v.Interface().(time.Time) if t.IsZero() { e.Write(cborNil) // Even if tag is required, encode as CBOR null. @@ -1255,10 +1554,12 @@ func encodeTime(e *encoderBuffer, em *encMode, v reflect.Value) error { case TimeUnix: secs := t.Unix() return encodeInt(e, em, reflect.ValueOf(secs)) + case TimeUnixMicro: t = t.UTC().Round(time.Microsecond) f := float64(t.UnixNano()) / 1e9 return encodeFloat(e, em, reflect.ValueOf(f)) + case TimeUnixDynamic: t = t.UTC().Round(time.Microsecond) secs, nsecs := t.Unix(), uint64(t.Nanosecond()) @@ -1267,16 +1568,22 @@ func encodeTime(e *encoderBuffer, em *encMode, v reflect.Value) error { } f := float64(secs) + float64(nsecs)/1e9 return encodeFloat(e, em, reflect.ValueOf(f)) + case TimeRFC3339: s := t.Format(time.RFC3339) return encodeString(e, em, reflect.ValueOf(s)) + default: // TimeRFC3339Nano s := t.Format(time.RFC3339Nano) return encodeString(e, em, reflect.ValueOf(s)) } } -func encodeBigInt(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeBigInt(e *bytes.Buffer, em *encMode, v reflect.Value) error { + if em.bigIntConvert == BigIntConvertReject { + return &UnsupportedTypeError{Type: typeBigInt} + } + vbi := v.Interface().(big.Int) sign := vbi.Sign() bi := new(big.Int).SetBytes(vbi.Bytes()) // bi is absolute value of v @@ -1311,7 +1618,16 @@ func encodeBigInt(e *encoderBuffer, em *encMode, v reflect.Value) error { return nil } -func encodeBinaryMarshalerType(e *encoderBuffer, em *encMode, v reflect.Value) error { +type binaryMarshalerEncoder struct { + alternateEncode encodeFunc + alternateIsEmpty isEmptyFunc +} + +func (bme binaryMarshalerEncoder) encode(e *bytes.Buffer, em *encMode, v reflect.Value) error { + if em.binaryMarshaler != BinaryMarshalerByteString { + return bme.alternateEncode(e, em, v) + } + vt := v.Type() m, ok := v.Interface().(encoding.BinaryMarshaler) if !ok { @@ -1331,7 +1647,25 @@ func encodeBinaryMarshalerType(e *encoderBuffer, em *encMode, v reflect.Value) e return nil } -func 
encodeMarshalerType(e *encoderBuffer, em *encMode, v reflect.Value) error { +func (bme binaryMarshalerEncoder) isEmpty(em *encMode, v reflect.Value) (bool, error) { + if em.binaryMarshaler != BinaryMarshalerByteString { + return bme.alternateIsEmpty(em, v) + } + + m, ok := v.Interface().(encoding.BinaryMarshaler) + if !ok { + pv := reflect.New(v.Type()) + pv.Elem().Set(v) + m = pv.Interface().(encoding.BinaryMarshaler) + } + data, err := m.MarshalBinary() + if err != nil { + return false, err + } + return len(data) == 0, nil +} + +func encodeMarshalerType(e *bytes.Buffer, em *encMode, v reflect.Value) error { if em.tagsMd == TagsForbidden && v.Type() == typeRawTag { return errors.New("cbor: cannot encode cbor.RawTag when TagsMd is TagsForbidden") } @@ -1345,11 +1679,19 @@ func encodeMarshalerType(e *encoderBuffer, em *encMode, v reflect.Value) error { if err != nil { return err } + + // Verify returned CBOR data item from MarshalCBOR() is well-formed and passes tag validity for builtin tags 0-3. + d := decoder{data: data, dm: getMarshalerDecMode(em.indefLength, em.tagsMd)} + err = d.wellformed(false, true) + if err != nil { + return &MarshalerError{typ: v.Type(), err: err} + } + e.Write(data) return nil } -func encodeTag(e *encoderBuffer, em *encMode, v reflect.Value) error { +func encodeTag(e *bytes.Buffer, em *encMode, v reflect.Value) error { if em.tagsMd == TagsForbidden { return errors.New("cbor: cannot encode cbor.Tag when TagsMd is TagsForbidden") } @@ -1365,36 +1707,65 @@ func encodeTag(e *encoderBuffer, em *encMode, v reflect.Value) error { // Marshal tag number encodeHead(e, byte(cborTypeTag), t.Number) + vem := *em // shallow copy + + // For built-in tags, disable settings that may introduce tag validity errors when + // marshaling certain Content values. + switch t.Number { + case tagNumRFC3339Time: + vem.stringType = StringToTextString + vem.stringMajorType = cborTypeTextString + case tagNumUnsignedBignum, tagNumNegativeBignum: + vem.byteSliceLaterFormat = ByteSliceLaterFormatNone + vem.byteSliceLaterEncodingTag = 0 + } + // Marshal tag content - return encode(e, em, reflect.ValueOf(t.Content)) + return encode(e, &vem, reflect.ValueOf(t.Content)) } -func encodeHead(e *encoderBuffer, t byte, n uint64) { - if n <= 23 { +// encodeHead writes CBOR head of specified type t and returns number of bytes written. 
+func encodeHead(e *bytes.Buffer, t byte, n uint64) int { + if n <= maxAdditionalInformationWithoutArgument { + const headSize = 1 e.WriteByte(t | byte(n)) - return + return headSize } + if n <= math.MaxUint8 { - e.scratch[0] = t | byte(24) - e.scratch[1] = byte(n) - e.Write(e.scratch[:2]) - return + const headSize = 2 + scratch := [headSize]byte{ + t | byte(additionalInformationWith1ByteArgument), + byte(n), + } + e.Write(scratch[:]) + return headSize } + if n <= math.MaxUint16 { - e.scratch[0] = t | byte(25) - binary.BigEndian.PutUint16(e.scratch[1:], uint16(n)) - e.Write(e.scratch[:3]) - return + const headSize = 3 + var scratch [headSize]byte + scratch[0] = t | byte(additionalInformationWith2ByteArgument) + binary.BigEndian.PutUint16(scratch[1:], uint16(n)) + e.Write(scratch[:]) + return headSize } + if n <= math.MaxUint32 { - e.scratch[0] = t | byte(26) - binary.BigEndian.PutUint32(e.scratch[1:], uint32(n)) - e.Write(e.scratch[:5]) - return + const headSize = 5 + var scratch [headSize]byte + scratch[0] = t | byte(additionalInformationWith4ByteArgument) + binary.BigEndian.PutUint32(scratch[1:], uint32(n)) + e.Write(scratch[:]) + return headSize } - e.scratch[0] = t | byte(27) - binary.BigEndian.PutUint64(e.scratch[1:], n) - e.Write(e.scratch[:9]) + + const headSize = 9 + var scratch [headSize]byte + scratch[0] = t | byte(additionalInformationWith8ByteArgument) + binary.BigEndian.PutUint64(scratch[1:], n) + e.Write(scratch[:]) + return headSize } var ( @@ -1404,7 +1775,7 @@ var ( typeByteString = reflect.TypeOf(ByteString("")) ) -func getEncodeFuncInternal(t reflect.Type) (encodeFunc, isEmptyFunc) { +func getEncodeFuncInternal(t reflect.Type) (ef encodeFunc, ief isEmptyFunc) { k := t.Kind() if k == reflect.Ptr { return getEncodeIndirectValueFunc(t), isEmptyPtr @@ -1412,14 +1783,19 @@ func getEncodeFuncInternal(t reflect.Type) (encodeFunc, isEmptyFunc) { switch t { case typeSimpleValue: return encodeMarshalerType, isEmptyUint + case typeTag: return encodeTag, alwaysNotEmpty + case typeTime: return encodeTime, alwaysNotEmpty + case typeBigInt: return encodeBigInt, alwaysNotEmpty + case typeRawMessage: return encodeMarshalerType, isEmptySlice + case typeByteString: return encodeMarshalerType, isEmptyString } @@ -1427,34 +1803,52 @@ func getEncodeFuncInternal(t reflect.Type) (encodeFunc, isEmptyFunc) { return encodeMarshalerType, alwaysNotEmpty } if reflect.PtrTo(t).Implements(typeBinaryMarshaler) { - return encodeBinaryMarshalerType, isEmptyBinaryMarshaler + defer func() { + // capture encoding method used for modes that disable BinaryMarshaler + bme := binaryMarshalerEncoder{ + alternateEncode: ef, + alternateIsEmpty: ief, + } + ef = bme.encode + ief = bme.isEmpty + }() } switch k { case reflect.Bool: return encodeBool, isEmptyBool + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: return encodeInt, isEmptyInt + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: return encodeUint, isEmptyUint + case reflect.Float32, reflect.Float64: return encodeFloat, isEmptyFloat + case reflect.String: return encodeString, isEmptyString - case reflect.Slice, reflect.Array: + + case reflect.Slice: if t.Elem().Kind() == reflect.Uint8 { return encodeByteString, isEmptySlice } + fallthrough + + case reflect.Array: f, _ := getEncodeFunc(t.Elem()) if f == nil { return nil, nil } return arrayEncodeFunc{f: f}.encode, isEmptySlice + case reflect.Map: f := getEncodeMapFunc(t) if f == nil { return nil, nil } return f, isEmptyMap + case reflect.Struct: // 
Get struct's special field "_" tag options if f, ok := t.FieldByName("_"); ok { @@ -1466,6 +1860,7 @@ func getEncodeFuncInternal(t reflect.Type) (encodeFunc, isEmptyFunc) { } } return encodeStruct, isEmptyStruct + case reflect.Interface: return encodeIntf, isEmptyIntf } @@ -1480,7 +1875,7 @@ func getEncodeIndirectValueFunc(t reflect.Type) encodeFunc { if f == nil { return nil } - return func(e *encoderBuffer, em *encMode, v reflect.Value) error { + return func(e *bytes.Buffer, em *encMode, v reflect.Value) error { for v.Kind() == reflect.Ptr && !v.IsNil() { v = v.Elem() } @@ -1559,7 +1954,7 @@ func isEmptyStruct(em *encMode, v reflect.Value) (bool, error) { fv = v.Field(f.idx[0]) } else { // Get embedded field value. No error is expected. - fv, _ = getFieldValue(v, f.idx, func(v reflect.Value) (reflect.Value, error) { + fv, _ = getFieldValue(v, f.idx, func(reflect.Value) (reflect.Value, error) { // Skip null pointer to embedded struct return reflect.Value{}, nil }) @@ -1579,20 +1974,6 @@ func isEmptyStruct(em *encMode, v reflect.Value) (bool, error) { return true, nil } -func isEmptyBinaryMarshaler(_ *encMode, v reflect.Value) (bool, error) { - m, ok := v.Interface().(encoding.BinaryMarshaler) - if !ok { - pv := reflect.New(v.Type()) - pv.Elem().Set(v) - m = pv.Interface().(encoding.BinaryMarshaler) - } - data, err := m.MarshalBinary() - if err != nil { - return false, err - } - return len(data) == 0, nil -} - func cannotFitFloat32(f64 float64) bool { f32 := float32(f64) return float64(f32) != f64 diff --git a/vendor/github.com/fxamacker/cbor/v2/encode_map.go b/vendor/github.com/fxamacker/cbor/v2/encode_map.go index 9850dc0..8b4b4bb 100644 --- a/vendor/github.com/fxamacker/cbor/v2/encode_map.go +++ b/vendor/github.com/fxamacker/cbor/v2/encode_map.go @@ -6,6 +6,7 @@ package cbor import ( + "bytes" "reflect" "sync" ) @@ -15,8 +16,7 @@ type mapKeyValueEncodeFunc struct { kpool, vpool sync.Pool } -func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *encoderBuffer, em *encMode, v reflect.Value, kvs []keyValue) error { - trackKeyValueLength := len(kvs) == v.Len() +func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error { iterk := me.kpool.Get().(*reflect.Value) defer func() { iterk.SetZero() @@ -27,24 +27,39 @@ func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *encoderBuffer, em *encMode, iterv.SetZero() me.vpool.Put(iterv) }() - iter := v.MapRange() - for i := 0; iter.Next(); i++ { - off := e.Len() + + if kvs == nil { + for i, iter := 0, v.MapRange(); iter.Next(); i++ { + iterk.SetIterKey(iter) + iterv.SetIterValue(iter) + + if err := me.kf(e, em, *iterk); err != nil { + return err + } + if err := me.ef(e, em, *iterv); err != nil { + return err + } + } + return nil + } + + initial := e.Len() + for i, iter := 0, v.MapRange(); iter.Next(); i++ { iterk.SetIterKey(iter) iterv.SetIterValue(iter) + offset := e.Len() if err := me.kf(e, em, *iterk); err != nil { return err } - if trackKeyValueLength { - kvs[i].keyLen = e.Len() - off - } - + valueOffset := e.Len() if err := me.ef(e, em, *iterv); err != nil { return err } - if trackKeyValueLength { - kvs[i].keyValueLen = e.Len() - off + kvs[i] = keyValue{ + offset: offset - initial, + valueOffset: valueOffset - initial, + nextOffset: e.Len() - initial, } } diff --git a/vendor/github.com/fxamacker/cbor/v2/encode_map_go117.go b/vendor/github.com/fxamacker/cbor/v2/encode_map_go117.go index f942147..31c3933 100644 --- a/vendor/github.com/fxamacker/cbor/v2/encode_map_go117.go +++ 
b/vendor/github.com/fxamacker/cbor/v2/encode_map_go117.go @@ -6,6 +6,7 @@ package cbor import ( + "bytes" "reflect" ) @@ -13,25 +14,33 @@ type mapKeyValueEncodeFunc struct { kf, ef encodeFunc } -func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *encoderBuffer, em *encMode, v reflect.Value, kvs []keyValue) error { - trackKeyValueLength := len(kvs) == v.Len() - - iter := v.MapRange() - for i := 0; iter.Next(); i++ { - off := e.Len() +func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error { + if kvs == nil { + for i, iter := 0, v.MapRange(); iter.Next(); i++ { + if err := me.kf(e, em, iter.Key()); err != nil { + return err + } + if err := me.ef(e, em, iter.Value()); err != nil { + return err + } + } + return nil + } + initial := e.Len() + for i, iter := 0, v.MapRange(); iter.Next(); i++ { + offset := e.Len() if err := me.kf(e, em, iter.Key()); err != nil { return err } - if trackKeyValueLength { - kvs[i].keyLen = e.Len() - off - } - + valueOffset := e.Len() if err := me.ef(e, em, iter.Value()); err != nil { return err } - if trackKeyValueLength { - kvs[i].keyValueLen = e.Len() - off + kvs[i] = keyValue{ + offset: offset - initial, + valueOffset: valueOffset - initial, + nextOffset: e.Len() - initial, } } diff --git a/vendor/github.com/fxamacker/cbor/v2/simplevalue.go b/vendor/github.com/fxamacker/cbor/v2/simplevalue.go index 6f93f67..de175ce 100644 --- a/vendor/github.com/fxamacker/cbor/v2/simplevalue.go +++ b/vendor/github.com/fxamacker/cbor/v2/simplevalue.go @@ -33,11 +33,11 @@ func (sv SimpleValue) MarshalCBOR() ([]byte, error) { // only has a single representation variant)." switch { - case sv <= 23: + case sv <= maxSimpleValueInAdditionalInformation: return []byte{byte(cborTypePrimitives) | byte(sv)}, nil - case sv >= 32: - return []byte{byte(cborTypePrimitives) | byte(24), byte(sv)}, nil + case sv >= minSimpleValueIn1ByteArgument: + return []byte{byte(cborTypePrimitives) | additionalInformationWith1ByteArgument, byte(sv)}, nil default: return nil, &UnsupportedValueError{msg: fmt.Sprintf("SimpleValue(%d)", sv)} @@ -57,7 +57,7 @@ func (sv *SimpleValue) UnmarshalCBOR(data []byte) error { if typ != cborTypePrimitives { return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue"} } - if ai > 24 { + if ai > additionalInformationWith1ByteArgument { return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue", errorMsg: "not simple values"} } diff --git a/vendor/github.com/fxamacker/cbor/v2/stream.go b/vendor/github.com/fxamacker/cbor/v2/stream.go index 02fea43..507ab6c 100644 --- a/vendor/github.com/fxamacker/cbor/v2/stream.go +++ b/vendor/github.com/fxamacker/cbor/v2/stream.go @@ -84,7 +84,7 @@ func (dec *Decoder) readNext() (int, error) { if dec.off < len(dec.buf) { dec.d.reset(dec.buf[dec.off:]) off := dec.off // Save offset before data validation - validErr = dec.d.wellformed(true) + validErr = dec.d.wellformed(true, false) dec.off = off // Restore offset if validErr == nil { @@ -187,14 +187,14 @@ func (enc *Encoder) Encode(v interface{}) error { } } - buf := getEncoderBuffer() + buf := getEncodeBuffer() err := encode(buf, enc.em, reflect.ValueOf(v)) if err == nil { _, err = enc.w.Write(buf.Bytes()) } - putEncoderBuffer(buf) + putEncodeBuffer(buf) return err } @@ -231,7 +231,7 @@ func (enc *Encoder) EndIndefinite() error { if len(enc.indefTypes) == 0 { return errors.New("cbor: cannot encode \"break\" code outside indefinite length values") } - _, err := enc.w.Write([]byte{0xff}) + _, err := 
enc.w.Write([]byte{cborBreakFlag}) if err == nil { enc.indefTypes = enc.indefTypes[:len(enc.indefTypes)-1] } @@ -239,10 +239,10 @@ func (enc *Encoder) EndIndefinite() error { } var cborIndefHeader = map[cborType][]byte{ - cborTypeByteString: {0x5f}, - cborTypeTextString: {0x7f}, - cborTypeArray: {0x9f}, - cborTypeMap: {0xbf}, + cborTypeByteString: {cborByteStringWithIndefiniteLengthHead}, + cborTypeTextString: {cborTextStringWithIndefiniteLengthHead}, + cborTypeArray: {cborArrayWithIndefiniteLengthHead}, + cborTypeMap: {cborMapWithIndefiniteLengthHead}, } func (enc *Encoder) startIndefinite(typ cborType) error { diff --git a/vendor/github.com/fxamacker/cbor/v2/structfields.go b/vendor/github.com/fxamacker/cbor/v2/structfields.go index 23a12be..81228ac 100644 --- a/vendor/github.com/fxamacker/cbor/v2/structfields.go +++ b/vendor/github.com/fxamacker/cbor/v2/structfields.go @@ -144,7 +144,15 @@ func getFields(t reflect.Type) (flds fields, structOptions string) { } // appendFields appends type t's exportable fields to flds and anonymous struct fields to nTypes . -func appendFields(t reflect.Type, idx []int, flds fields, nTypes map[reflect.Type][][]int) (fields, map[reflect.Type][][]int) { +func appendFields( + t reflect.Type, + idx []int, + flds fields, + nTypes map[reflect.Type][][]int, +) ( + _flds fields, + _nTypes map[reflect.Type][][]int, +) { for i := 0; i < t.NumField(); i++ { f := t.Field(i) @@ -165,12 +173,12 @@ func appendFields(t reflect.Type, idx []int, flds fields, nTypes map[reflect.Typ continue } - tagged := len(tag) > 0 + tagged := tag != "" // Parse field tag options var tagFieldName string var omitempty, keyasint bool - for j := 0; len(tag) > 0; j++ { + for j := 0; tag != ""; j++ { var token string idx := strings.IndexByte(tag, ',') if idx == -1 { @@ -199,7 +207,7 @@ func appendFields(t reflect.Type, idx []int, flds fields, nTypes map[reflect.Typ copy(fIdx, idx) fIdx[len(fIdx)-1] = i - if !f.Anonymous || ft.Kind() != reflect.Struct || len(tagFieldName) > 0 { + if !f.Anonymous || ft.Kind() != reflect.Struct || tagFieldName != "" { flds = append(flds, &field{ name: fieldName, idx: fIdx, @@ -221,7 +229,7 @@ func appendFields(t reflect.Type, idx []int, flds fields, nTypes map[reflect.Typ // isFieldExportable returns true if f is an exportable (regular or anonymous) field or // a nonexportable anonymous field of struct type. // Nonexportable anonymous field of struct type can contain exportable fields. -func isFieldExportable(f reflect.StructField, fk reflect.Kind) bool { +func isFieldExportable(f reflect.StructField, fk reflect.Kind) bool { //nolint:gocritic // ignore hugeParam exportable := f.PkgPath == "" return exportable || (f.Anonymous && fk == reflect.Struct) } diff --git a/vendor/github.com/fxamacker/cbor/v2/tag.go b/vendor/github.com/fxamacker/cbor/v2/tag.go index aefb4d3..5c4d2b7 100644 --- a/vendor/github.com/fxamacker/cbor/v2/tag.go +++ b/vendor/github.com/fxamacker/cbor/v2/tag.go @@ -7,7 +7,9 @@ import ( "sync" ) -// Tag represents CBOR tag data, including tag number and unmarshaled tag content. +// Tag represents CBOR tag data, including tag number and unmarshaled tag content. Marshaling and +// unmarshaling of tag content is subject to any encode and decode options that would apply to +// enclosed data item if it were to appear outside of a tag. 
type Tag struct { Number uint64 Content interface{} @@ -56,7 +58,7 @@ func (t RawTag) MarshalCBOR() ([]byte, error) { return b, nil } - e := getEncoderBuffer() + e := getEncodeBuffer() encodeHead(e, byte(cborTypeTag), t.Number) @@ -69,7 +71,7 @@ func (t RawTag) MarshalCBOR() ([]byte, error) { n := copy(buf, e.Bytes()) copy(buf[n:], content) - putEncoderBuffer(e) + putEncodeBuffer(e) return buf, nil } @@ -261,7 +263,7 @@ func newTagItem(opts TagOptions, contentType reflect.Type, num uint64, nestedNum if num == 2 || num == 3 { return nil, errors.New("cbor: cannot add tag number 2 or 3 to TagSet, it's built-in and supported automatically") } - if num == selfDescribedCBORTagNum { + if num == tagNumSelfDescribedCBOR { return nil, errors.New("cbor: cannot add tag number 55799 to TagSet, it's built-in and ignored automatically") } @@ -269,13 +271,13 @@ func newTagItem(opts TagOptions, contentType reflect.Type, num uint64, nestedNum te.num = append(te.num, nestedNum...) // Cache encoded tag numbers - e := getEncoderBuffer() + e := getEncodeBuffer() for _, n := range te.num { encodeHead(e, byte(cborTypeTag), n) } te.cborTagNum = make([]byte, e.Len()) copy(te.cborTagNum, e.Bytes()) - putEncoderBuffer(e) + putEncodeBuffer(e) return &te, nil } diff --git a/vendor/github.com/fxamacker/cbor/v2/valid.go b/vendor/github.com/fxamacker/cbor/v2/valid.go index a5213d0..b40793b 100644 --- a/vendor/github.com/fxamacker/cbor/v2/valid.go +++ b/vendor/github.com/fxamacker/cbor/v2/valid.go @@ -7,7 +7,10 @@ import ( "encoding/binary" "errors" "io" + "math" "strconv" + + "github.com/x448/float16" ) // SyntaxError is a description of a CBOR syntax error. @@ -82,11 +85,11 @@ func (e *ExtraneousDataError) Error() string { // allowExtraData indicates if extraneous data is allowed after the CBOR data item. // - use allowExtraData = true when using Decoder.Decode() // - use allowExtraData = false when using Unmarshal() -func (d *decoder) wellformed(allowExtraData bool) error { +func (d *decoder) wellformed(allowExtraData bool, checkBuiltinTags bool) error { if len(d.data) == d.off { return io.EOF } - _, err := d.wellformedInternal(0) + _, err := d.wellformedInternal(0, checkBuiltinTags) if err == nil { if !allowExtraData && d.off != len(d.data) { err = &ExtraneousDataError{len(d.data) - d.off, d.off} @@ -96,19 +99,19 @@ func (d *decoder) wellformed(allowExtraData bool) error { } // wellformedInternal checks data's well-formedness and returns max depth and error. 
-func (d *decoder) wellformedInternal(depth int) (int, error) { - t, ai, val, err := d.wellformedHead() +func (d *decoder) wellformedInternal(depth int, checkBuiltinTags bool) (int, error) { //nolint:gocyclo + t, _, val, indefiniteLength, err := d.wellformedHeadWithIndefiniteLengthFlag() if err != nil { return 0, err } switch t { case cborTypeByteString, cborTypeTextString: - if ai == 31 { + if indefiniteLength { if d.dm.indefLength == IndefLengthForbidden { return 0, &IndefiniteLengthError{t} } - return d.wellformedIndefiniteString(t, depth) + return d.wellformedIndefiniteString(t, depth, checkBuiltinTags) } valInt := int(val) if valInt < 0 { @@ -119,17 +122,18 @@ func (d *decoder) wellformedInternal(depth int) (int, error) { return 0, io.ErrUnexpectedEOF } d.off += valInt + case cborTypeArray, cborTypeMap: depth++ if depth > d.dm.maxNestedLevels { return 0, &MaxNestedLevelError{d.dm.maxNestedLevels} } - if ai == 31 { + if indefiniteLength { if d.dm.indefLength == IndefLengthForbidden { return 0, &IndefiniteLengthError{t} } - return d.wellformedIndefiniteArrayOrMap(t, depth) + return d.wellformedIndefiniteArrayOrMap(t, depth, checkBuiltinTags) } valInt := int(val) @@ -156,7 +160,7 @@ func (d *decoder) wellformedInternal(depth int) (int, error) { for j := 0; j < count; j++ { for i := 0; i < valInt; i++ { var dpt int - if dpt, err = d.wellformedInternal(depth); err != nil { + if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil { return 0, err } if dpt > maxDepth { @@ -165,20 +169,35 @@ func (d *decoder) wellformedInternal(depth int) (int, error) { } } depth = maxDepth + case cborTypeTag: if d.dm.tagsMd == TagsForbidden { return 0, &TagsMdError{} } + tagNum := val + // Scan nested tag numbers to avoid recursion. for { if len(d.data) == d.off { // Tag number must be followed by tag content. return 0, io.ErrUnexpectedEOF } - if cborType(d.data[d.off]&0xe0) != cborTypeTag { + if checkBuiltinTags { + err = validBuiltinTag(tagNum, d.data[d.off]) + if err != nil { + return 0, err + } + } + if d.dm.bignumTag == BignumTagForbidden && (tagNum == 2 || tagNum == 3) { + return 0, &UnacceptableDataItemError{ + CBORType: cborTypeTag.String(), + Message: "bignum", + } + } + if getType(d.data[d.off]) != cborTypeTag { break } - if _, _, _, err = d.wellformedHead(); err != nil { + if _, _, tagNum, err = d.wellformedHead(); err != nil { return 0, err } depth++ @@ -187,31 +206,32 @@ func (d *decoder) wellformedInternal(depth int) (int, error) { } } // Check tag content. - return d.wellformedInternal(depth) + return d.wellformedInternal(depth, checkBuiltinTags) } + return depth, nil } // wellformedIndefiniteString checks indefinite length byte/text string's well-formedness and returns max depth and error. -func (d *decoder) wellformedIndefiniteString(t cborType, depth int) (int, error) { +func (d *decoder) wellformedIndefiniteString(t cborType, depth int, checkBuiltinTags bool) (int, error) { var err error for { if len(d.data) == d.off { return 0, io.ErrUnexpectedEOF } - if d.data[d.off] == 0xff { + if isBreakFlag(d.data[d.off]) { d.off++ break } // Peek ahead to get next type and indefinite length status. 
- nt := cborType(d.data[d.off] & 0xe0) + nt, ai := parseInitialByte(d.data[d.off]) if t != nt { return 0, &SyntaxError{"cbor: wrong element type " + nt.String() + " for indefinite-length " + t.String()} } - if (d.data[d.off] & 0x1f) == 31 { + if additionalInformation(ai).isIndefiniteLength() { return 0, &SyntaxError{"cbor: indefinite-length " + t.String() + " chunk is not definite-length"} } - if depth, err = d.wellformedInternal(depth); err != nil { + if depth, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil { return 0, err } } @@ -219,7 +239,7 @@ func (d *decoder) wellformedIndefiniteString(t cborType, depth int) (int, error) } // wellformedIndefiniteArrayOrMap checks indefinite length array/map's well-formedness and returns max depth and error. -func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int) (int, error) { +func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int, checkBuiltinTags bool) (int, error) { var err error maxDepth := depth i := 0 @@ -227,12 +247,12 @@ func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int) (int, er if len(d.data) == d.off { return 0, io.ErrUnexpectedEOF } - if d.data[d.off] == 0xff { + if isBreakFlag(d.data[d.off]) { d.off++ break } var dpt int - if dpt, err = d.wellformedInternal(depth); err != nil { + if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil { return 0, err } if dpt > maxDepth { @@ -255,22 +275,39 @@ func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int) (int, er return maxDepth, nil } +func (d *decoder) wellformedHeadWithIndefiniteLengthFlag() ( + t cborType, + ai byte, + val uint64, + indefiniteLength bool, + err error, +) { + t, ai, val, err = d.wellformedHead() + if err != nil { + return + } + indefiniteLength = additionalInformation(ai).isIndefiniteLength() + return +} + func (d *decoder) wellformedHead() (t cborType, ai byte, val uint64, err error) { dataLen := len(d.data) - d.off if dataLen == 0 { return 0, 0, 0, io.ErrUnexpectedEOF } - t = cborType(d.data[d.off] & 0xe0) - ai = d.data[d.off] & 0x1f + t, ai = parseInitialByte(d.data[d.off]) val = uint64(ai) d.off++ + dataLen-- - if ai < 24 { + if ai <= maxAdditionalInformationWithoutArgument { return t, ai, val, nil } - if ai == 24 { - if dataLen < 2 { + + if ai == additionalInformationWith1ByteArgument { + const argumentSize = 1 + if dataLen < argumentSize { return 0, 0, 0, io.ErrUnexpectedEOF } val = uint64(d.data[d.off]) @@ -280,31 +317,53 @@ func (d *decoder) wellformedHead() (t cborType, ai byte, val uint64, err error) } return t, ai, val, nil } - if ai == 25 { - if dataLen < 3 { + + if ai == additionalInformationWith2ByteArgument { + const argumentSize = 2 + if dataLen < argumentSize { return 0, 0, 0, io.ErrUnexpectedEOF } - val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+2])) - d.off += 2 + val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+argumentSize])) + d.off += argumentSize + if t == cborTypePrimitives { + if err := d.acceptableFloat(float64(float16.Frombits(uint16(val)).Float32())); err != nil { + return 0, 0, 0, err + } + } return t, ai, val, nil } - if ai == 26 { - if dataLen < 5 { + + if ai == additionalInformationWith4ByteArgument { + const argumentSize = 4 + if dataLen < argumentSize { return 0, 0, 0, io.ErrUnexpectedEOF } - val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+4])) - d.off += 4 + val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+argumentSize])) + d.off += argumentSize + if t == cborTypePrimitives { + if err := 
d.acceptableFloat(float64(math.Float32frombits(uint32(val)))); err != nil { + return 0, 0, 0, err + } + } return t, ai, val, nil } - if ai == 27 { - if dataLen < 9 { + + if ai == additionalInformationWith8ByteArgument { + const argumentSize = 8 + if dataLen < argumentSize { return 0, 0, 0, io.ErrUnexpectedEOF } - val = binary.BigEndian.Uint64(d.data[d.off : d.off+8]) - d.off += 8 + val = binary.BigEndian.Uint64(d.data[d.off : d.off+argumentSize]) + d.off += argumentSize + if t == cborTypePrimitives { + if err := d.acceptableFloat(math.Float64frombits(val)); err != nil { + return 0, 0, 0, err + } + } return t, ai, val, nil } - if ai == 31 { + + if additionalInformation(ai).isIndefiniteLength() { switch t { case cborTypePositiveInt, cborTypeNegativeInt, cborTypeTag: return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()} @@ -313,6 +372,23 @@ func (d *decoder) wellformedHead() (t cborType, ai byte, val uint64, err error) } return t, ai, val, nil } + // ai == 28, 29, 30 return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()} } + +func (d *decoder) acceptableFloat(f float64) error { + switch { + case d.dm.nanDec == NaNDecodeForbidden && math.IsNaN(f): + return &UnacceptableDataItemError{ + CBORType: cborTypePrimitives.String(), + Message: "floating-point NaN", + } + case d.dm.infDec == InfDecodeForbidden && math.IsInf(f, 0): + return &UnacceptableDataItemError{ + CBORType: cborTypePrimitives.String(), + Message: "floating-point infinity", + } + } + return nil +} diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/const.go b/vendor/github.com/go-webauthn/webauthn/metadata/const.go new file mode 100644 index 0000000..97a5ad4 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/const.go @@ -0,0 +1,35 @@ +package metadata + +const ( + // https://secure.globalsign.com/cacert/root-r3.crt + ProductionMDSRoot = "MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpHWD9f" + + // Production MDS URL + ProductionMDSURL = "https://mds.fidoalliance.org" + + // https://mds3.fido.tools/pki/MDS3ROOT.crt + ConformanceMDSRoot = 
"MIICaDCCAe6gAwIBAgIPBCqih0DiJLW7+UHXx/o1MAoGCCqGSM49BAMDMGcxCzAJBgNVBAYTAlVTMRYwFAYDVQQKDA1GSURPIEFsbGlhbmNlMScwJQYDVQQLDB5GQUtFIE1ldGFkYXRhIDMgQkxPQiBST09UIEZBS0UxFzAVBgNVBAMMDkZBS0UgUm9vdCBGQUtFMB4XDTE3MDIwMTAwMDAwMFoXDTQ1MDEzMTIzNTk1OVowZzELMAkGA1UEBhMCVVMxFjAUBgNVBAoMDUZJRE8gQWxsaWFuY2UxJzAlBgNVBAsMHkZBS0UgTWV0YWRhdGEgMyBCTE9CIFJPT1QgRkFLRTEXMBUGA1UEAwwORkFLRSBSb290IEZBS0UwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASKYiz3YltC6+lmxhPKwA1WFZlIqnX8yL5RybSLTKFAPEQeTD9O6mOz+tg8wcSdnVxHzwnXiQKJwhrav70rKc2ierQi/4QUrdsPes8TEirZOkCVJurpDFbXZOgs++pa4XmjYDBeMAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAfBgNVHSMEGDAWgBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAKBggqhkjOPQQDAwNoADBlAjEA/xFsgri0xubSa3y3v5ormpPqCwfqn9s0MLBAtzCIgxQ/zkzPKctkiwoPtDzI51KnAjAmeMygX2S5Ht8+e+EQnezLJBJXtnkRWY+Zt491wgt/AwSs5PHHMv5QgjELOuMxQBc=" + + // Example from https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html + ExampleMDSRoot = "MIIGGTCCBAGgAwIBAgIUdT9qLX0sVMRe8l0sLmHd3mZovQ0wDQYJKoZIhvcNAQELBQAwgZsxHzAdBgNVBAMMFkVYQU1QTEUgTURTMyBURVNUIFJPT1QxIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVAZXhhbXBsZS5jb20xFDASBgNVBAoMC0V4YW1wbGUgT1JHMRAwDgYDVQQLDAdFeGFtcGxlMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTVkxEjAQBgNVBAcMCVdha2VmaWVsZDAeFw0yMTA0MTkxMTM1MDdaFw00ODA5MDQxMTM1MDdaMIGbMR8wHQYDVQQDDBZFWEFNUExFIE1EUzMgVEVTVCBST09UMSIwIAYJKoZIhvcNAQkBFhNleGFtcGxlQGV4YW1wbGUuY29tMRQwEgYDVQQKDAtFeGFtcGxlIE9SRzEQMA4GA1UECwwHRXhhbXBsZTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk1ZMRIwEAYDVQQHDAlXYWtlZmllbGQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDDjF5wyEWuhwDHsZosGdGFTCcI677rW881vV+UfW38J+K2ioFFNeGVsxbcebK6AVOiCDPFj0974IpeD9SFOhwAHoDu/LCfXdQWp8ZgQ91ULYWoW8o7NNSp01nbN9zmaO6/xKNCa0bzjmXoGqglqnP1AtRcWYvXOSKZy1rcPeDv4Dhcpdp6W72fBw0eWIqOhsrItuY2/N8ItBPiG03EX72nACq4nZJ/nAIcUbER8STSFPPzvE97TvShsi1FD8aO6l1WkR/QkreAGjMI++GbB2Qc1nN9Y/VEDbMDhQtxXQRdpFwubTjejkN9hKOtF3B71YrwIrng3V9RoPMFdapWMzSlI+WWHog0oTj1PqwJDDg7+z1I6vSDeVWAMKr9mq1w1OGNzgBopIjd9lRWkRtt2kQSPX9XxqS4E1gDDr8MKbpM3JuubQtNCg9D7Ljvbz6vwvUrbPHH+oREvucsp0PZ5PpizloepGIcLFxDQqCulGY2n7Ahl0JOFXJqOFCaK3TWHwBvZsaY5DgBuUvdUrwtgZNg2eg2omWXEepiVFQn3Fvj43Wh2npPMgIe5P0rwncXvROxaczd4rtajKS1ucoB9b9iKqM2+M1y/FDIgVf1fWEHwK7YdzxMlgOeLdeV/kqRU5PEUlLU9a2EwdOErrPbPKZmIfbs/L4B3k4zejMDH3Y+ZwIDAQABo1MwUTAdBgNVHQ4EFgQU8sWwq1TrurK7xMTwO1dKfeJBbCMwHwYDVR0jBBgwFoAU8sWwq1TrurK7xMTwO1dKfeJBbCMwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAFw6M1PiIfCPIBQ5EBUPNmRvRFuDpolOmDofnf/+mv63LqwQZAdo/W8tzZ9kOFhq24SiLw0H7fsdG/jeREXiIZMNoW/rA6Uac8sU+FYF7Q+qp6CQLlSQbDcpVMifTQjcBk2xh+aLK9SrrXBqnTAhwS+offGtAW8DpoLuH4tAcQmIjlgMlN65jnELCuqNR/wpA+zch8LZW8saQ2cwRCwdr8mAzZoLbsDSVCHxQF3/kQjPT7Nao1q2iWcY3OYcRmKrieHDP67yeLUbVmetfZis2d6ZlkqHLB4ZW1xX4otsEFkuTJA3HWDRsNyhTwx1YoCLsYut5Zp0myqPNBq28w6qGMyyoJN0Z4RzMEO3R6i/MQNfhK55/8O2HciM6xb5t/aBSuHPKlBDrFWhpRnKYkaNtlUo35qV5IbKGKau3SdZdSRciaXUd/p81YmoF01UlhhMz/Rqr1k2gyA0a9tF8+awCeanYt5izl8YO0FlrOU1SQ5UQw4szqqZqbrf4e8fRuU2TXNx4zk+ImE7WRB44f6mSD746ZCBRogZ/SA5jUBu+OPe4/sEtERWRcQD+fXgce9ZEN0+peyJIKAsl5Rm2Bmgyg5IoyWwSG5W+WekGyEokpslou2Yc6EjUj5ndZWz5EiHAiQ74hNfDoCZIxVVLU3Qbp8a0S1bmsoT2JOsspIbtZUg=" +) + +const ( + HeaderX509URI = "x5u" + HeaderX509Certificate = "x5c" +) + +var ( + errIntermediateCertRevoked = &MetadataError{ + Type: "intermediate_revoked", + Details: "Intermediate certificate is on issuers revocation list", + } + errLeafCertRevoked = &MetadataError{ + Type: "leaf_revoked", + Details: "Leaf certificate is on issuers revocation list", + } + errCRLUnavailable = &MetadataError{ + Type: "crl_unavailable", + Details: "Certificate revocation list is unavailable", + } +) diff --git 
a/vendor/github.com/go-webauthn/webauthn/metadata/decode.go b/vendor/github.com/go-webauthn/webauthn/metadata/decode.go new file mode 100644 index 0000000..f3c3de7 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/decode.go @@ -0,0 +1,276 @@ +package metadata + +import ( + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/go-webauthn/x/revoke" + "github.com/golang-jwt/jwt/v5" + "github.com/mitchellh/mapstructure" +) + +// NewDecoder returns a new metadata decoder. +func NewDecoder(opts ...DecoderOption) (decoder *Decoder, err error) { + decoder = &Decoder{ + client: &http.Client{}, + parser: jwt.NewParser(), + hook: mapstructure.ComposeDecodeHookFunc(), + } + + for _, opt := range opts { + if err = opt(decoder); err != nil { + return nil, fmt.Errorf("failed to apply decoder option: %w", err) + } + } + + if decoder.root == "" { + decoder.root = ProductionMDSRoot + } + + return decoder, nil +} + +// Decoder handles decoding and specialized parsing of the metadata blob. +type Decoder struct { + client *http.Client + parser *jwt.Parser + hook mapstructure.DecodeHookFunc + root string + ignoreEntryParsingErrors bool +} + +// Parse handles parsing of the raw JSON values of the metadata blob. Should be used after using Decode or DecodeBytes. +func (d *Decoder) Parse(payload *PayloadJSON) (metadata *Metadata, err error) { + metadata = &Metadata{ + Parsed: Parsed{ + LegalHeader: payload.LegalHeader, + Number: payload.Number, + }, + } + + if metadata.Parsed.NextUpdate, err = time.Parse(time.DateOnly, payload.NextUpdate); err != nil { + return nil, fmt.Errorf("error occurred parsing next update value '%s': %w", payload.NextUpdate, err) + } + + var parsed Entry + + for _, entry := range payload.Entries { + if parsed, err = entry.Parse(); err != nil { + metadata.Unparsed = append(metadata.Unparsed, EntryError{ + Error: err, + EntryJSON: entry, + }) + + continue + } + + metadata.Parsed.Entries = append(metadata.Parsed.Entries, parsed) + } + + if n := len(metadata.Unparsed); n != 0 && !d.ignoreEntryParsingErrors { + return metadata, fmt.Errorf("error occurred parsing metadata: %d entries had errors during parsing", n) + } + + return metadata, nil +} + +// Decode the blob from an io.ReadCloser. This function will close the io.ReadCloser after completing. +func (d *Decoder) Decode(r io.ReadCloser) (payload *PayloadJSON, err error) { + defer r.Close() + + bytes, err := io.ReadAll(r) + if err != nil { + return nil, err + } + + return d.DecodeBytes(bytes) +} + +// DecodeBytes handles decoding raw bytes. If you have a read closer it's suggested to use Decode. +func (d *Decoder) DecodeBytes(bytes []byte) (payload *PayloadJSON, err error) { + var token *jwt.Token + + if token, err = d.parser.Parse(string(bytes), func(token *jwt.Token) (any, error) { + // 2. If the x5u attribute is present in the JWT Header, then + if _, ok := token.Header[HeaderX509URI].([]any); ok { + // never seen an x5u here, although it is in the spec + return nil, errors.New("x5u encountered in header of metadata TOC payload") + } + + // 3. If the x5u attribute is missing, the chain should be retrieved from the x5c attribute. + var ( + x5c, chain []any + ok, valid bool + ) + + if x5c, ok = token.Header[HeaderX509Certificate].([]any); !ok { + // If that attribute is missing as well, Metadata TOC signing trust anchor is considered the TOC signing certificate chain. 
+ chain[0] = d.root + } else { + chain = x5c + } + + // The certificate chain MUST be verified to properly chain to the metadata TOC signing trust anchor. + if valid, err = validateChain(d.root, chain); !valid || err != nil { + return nil, err + } + + // Chain validated, extract the TOC signing certificate from the chain. Create a buffer large enough to hold the + // certificate bytes. + o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string)))) + + var ( + n int + cert *x509.Certificate + ) + + // Decode the base64 certificate into the buffer. + if n, err = base64.StdEncoding.Decode(o, []byte(chain[0].(string))); err != nil { + return nil, err + } + + // Parse the certificate from the buffer. + if cert, err = x509.ParseCertificate(o[:n]); err != nil { + return nil, err + } + + // 4. Verify the signature of the Metadata TOC object using the TOC signing certificate chain + // jwt.Parse() uses the TOC signing certificate public key internally to verify the signature. + return cert.PublicKey, err + }); err != nil { + return nil, err + } + + var decoder *mapstructure.Decoder + + payload = &PayloadJSON{} + + if decoder, err = mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Metadata: nil, + Result: payload, + DecodeHook: d.hook, + TagName: "json", + }); err != nil { + return nil, err + } + + if err = decoder.Decode(token.Claims); err != nil { + return payload, err + } + + return payload, nil +} + +// DecoderOption is a representation of a function that can set options within a decoder. +type DecoderOption func(decoder *Decoder) (err error) + +// WithIgnoreEntryParsingErrors is a DecoderOption which ignores errors when parsing individual entries. The values for +// these entries will exist as an unparsed entry. +func WithIgnoreEntryParsingErrors() DecoderOption { + return func(decoder *Decoder) (err error) { + decoder.ignoreEntryParsingErrors = true + + return nil + } +} + +// WithRootCertificate overrides the root certificate used to validate the authenticity of the metadata payload. 
+func WithRootCertificate(value string) DecoderOption { + return func(decoder *Decoder) (err error) { + decoder.root = value + + return nil + } +} + +func validateChain(root string, chain []any) (bool, error) { + oRoot := make([]byte, base64.StdEncoding.DecodedLen(len(root))) + + nRoot, err := base64.StdEncoding.Decode(oRoot, []byte(root)) + if err != nil { + return false, err + } + + rootcert, err := x509.ParseCertificate(oRoot[:nRoot]) + if err != nil { + return false, err + } + + roots := x509.NewCertPool() + + roots.AddCert(rootcert) + + o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[1].(string)))) + + n, err := base64.StdEncoding.Decode(o, []byte(chain[1].(string))) + if err != nil { + return false, err + } + + intcert, err := x509.ParseCertificate(o[:n]) + if err != nil { + return false, err + } + + if revoked, ok := revoke.VerifyCertificate(intcert); !ok { + issuer := intcert.IssuingCertificateURL + + if issuer != nil { + return false, errCRLUnavailable + } + } else if revoked { + return false, errIntermediateCertRevoked + } + + ints := x509.NewCertPool() + ints.AddCert(intcert) + + l := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string)))) + + n, err = base64.StdEncoding.Decode(l, []byte(chain[0].(string))) + if err != nil { + return false, err + } + + leafcert, err := x509.ParseCertificate(l[:n]) + if err != nil { + return false, err + } + + if revoked, ok := revoke.VerifyCertificate(leafcert); !ok { + return false, errCRLUnavailable + } else if revoked { + return false, errLeafCertRevoked + } + + opts := x509.VerifyOptions{ + Roots: roots, + Intermediates: ints, + } + + _, err = leafcert.Verify(opts) + + return err == nil, err +} + +func mdsParseX509Certificate(value string) (certificate *x509.Certificate, err error) { + var n int + + raw := make([]byte, base64.StdEncoding.DecodedLen(len(value))) + + if n, err = base64.StdEncoding.Decode(raw, []byte(strings.TrimSpace(value))); err != nil { + return nil, fmt.Errorf("error occurred parsing *x509.certificate: error occurred decoding base64 data: %w", err) + } + + if certificate, err = x509.ParseCertificate(raw[:n]); err != nil { + return nil, err + } + + return certificate, nil +} diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/doc.go b/vendor/github.com/go-webauthn/webauthn/metadata/doc.go new file mode 100644 index 0000000..7db8c71 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/doc.go @@ -0,0 +1,2 @@ +// Package metadata handles metadata validation instrumentation. +package metadata diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/metadata.go b/vendor/github.com/go-webauthn/webauthn/metadata/metadata.go index af0a495..0a43e68 100644 --- a/vendor/github.com/go-webauthn/webauthn/metadata/metadata.go +++ b/vendor/github.com/go-webauthn/webauthn/metadata/metadata.go @@ -2,533 +2,891 @@ package metadata import ( "crypto/x509" - "encoding/base64" - "errors" - "io" + "fmt" "net/http" - "reflect" + "net/url" + "strings" "time" - "github.com/golang-jwt/jwt/v5" "github.com/google/uuid" - "github.com/mitchellh/mapstructure" +) - "github.com/go-webauthn/x/revoke" +// Fetch creates a new HTTP client and gets the production metadata, decodes it, and parses it. This is an +// instrumentation simplification that makes it easier to either just grab the latest metadata or for implementers to +// see the rough process of retrieving it to implement any of their own logic. 
+func Fetch() (metadata *Metadata, err error) { + var ( + decoder *Decoder + payload *PayloadJSON + res *http.Response + ) + + if decoder, err = NewDecoder(WithIgnoreEntryParsingErrors()); err != nil { + return nil, err + } - "github.com/go-webauthn/webauthn/protocol/webauthncose" -) + client := &http.Client{} + + if res, err = client.Get(ProductionMDSURL); err != nil { + return nil, err + } + + if payload, err = decoder.Decode(res.Body); err != nil { + return nil, err + } -type PublicKeyCredentialParameters struct { - Type string `json:"type"` - Alg webauthncose.COSEAlgorithmIdentifier `json:"alg"` + return decoder.Parse(payload) } -const ( - // https://secure.globalsign.com/cacert/root-r3.crt - ProductionMDSRoot = "MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpHWD9f" - // Production MDS URL - ProductionMDSURL = "https://mds.fidoalliance.org" - // https://mds3.fido.tools/pki/MDS3ROOT.crt - ConformanceMDSRoot = "MIICaDCCAe6gAwIBAgIPBCqih0DiJLW7+UHXx/o1MAoGCCqGSM49BAMDMGcxCzAJBgNVBAYTAlVTMRYwFAYDVQQKDA1GSURPIEFsbGlhbmNlMScwJQYDVQQLDB5GQUtFIE1ldGFkYXRhIDMgQkxPQiBST09UIEZBS0UxFzAVBgNVBAMMDkZBS0UgUm9vdCBGQUtFMB4XDTE3MDIwMTAwMDAwMFoXDTQ1MDEzMTIzNTk1OVowZzELMAkGA1UEBhMCVVMxFjAUBgNVBAoMDUZJRE8gQWxsaWFuY2UxJzAlBgNVBAsMHkZBS0UgTWV0YWRhdGEgMyBCTE9CIFJPT1QgRkFLRTEXMBUGA1UEAwwORkFLRSBSb290IEZBS0UwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASKYiz3YltC6+lmxhPKwA1WFZlIqnX8yL5RybSLTKFAPEQeTD9O6mOz+tg8wcSdnVxHzwnXiQKJwhrav70rKc2ierQi/4QUrdsPes8TEirZOkCVJurpDFbXZOgs++pa4XmjYDBeMAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAfBgNVHSMEGDAWgBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAKBggqhkjOPQQDAwNoADBlAjEA/xFsgri0xubSa3y3v5ormpPqCwfqn9s0MLBAtzCIgxQ/zkzPKctkiwoPtDzI51KnAjAmeMygX2S5Ht8+e+EQnezLJBJXtnkRWY+Zt491wgt/AwSs5PHHMv5QgjELOuMxQBc=" - // Example from https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html - ExampleMDSRoot = 
"MIIGGTCCBAGgAwIBAgIUdT9qLX0sVMRe8l0sLmHd3mZovQ0wDQYJKoZIhvcNAQELBQAwgZsxHzAdBgNVBAMMFkVYQU1QTEUgTURTMyBURVNUIFJPT1QxIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVAZXhhbXBsZS5jb20xFDASBgNVBAoMC0V4YW1wbGUgT1JHMRAwDgYDVQQLDAdFeGFtcGxlMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTVkxEjAQBgNVBAcMCVdha2VmaWVsZDAeFw0yMTA0MTkxMTM1MDdaFw00ODA5MDQxMTM1MDdaMIGbMR8wHQYDVQQDDBZFWEFNUExFIE1EUzMgVEVTVCBST09UMSIwIAYJKoZIhvcNAQkBFhNleGFtcGxlQGV4YW1wbGUuY29tMRQwEgYDVQQKDAtFeGFtcGxlIE9SRzEQMA4GA1UECwwHRXhhbXBsZTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk1ZMRIwEAYDVQQHDAlXYWtlZmllbGQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDDjF5wyEWuhwDHsZosGdGFTCcI677rW881vV+UfW38J+K2ioFFNeGVsxbcebK6AVOiCDPFj0974IpeD9SFOhwAHoDu/LCfXdQWp8ZgQ91ULYWoW8o7NNSp01nbN9zmaO6/xKNCa0bzjmXoGqglqnP1AtRcWYvXOSKZy1rcPeDv4Dhcpdp6W72fBw0eWIqOhsrItuY2/N8ItBPiG03EX72nACq4nZJ/nAIcUbER8STSFPPzvE97TvShsi1FD8aO6l1WkR/QkreAGjMI++GbB2Qc1nN9Y/VEDbMDhQtxXQRdpFwubTjejkN9hKOtF3B71YrwIrng3V9RoPMFdapWMzSlI+WWHog0oTj1PqwJDDg7+z1I6vSDeVWAMKr9mq1w1OGNzgBopIjd9lRWkRtt2kQSPX9XxqS4E1gDDr8MKbpM3JuubQtNCg9D7Ljvbz6vwvUrbPHH+oREvucsp0PZ5PpizloepGIcLFxDQqCulGY2n7Ahl0JOFXJqOFCaK3TWHwBvZsaY5DgBuUvdUrwtgZNg2eg2omWXEepiVFQn3Fvj43Wh2npPMgIe5P0rwncXvROxaczd4rtajKS1ucoB9b9iKqM2+M1y/FDIgVf1fWEHwK7YdzxMlgOeLdeV/kqRU5PEUlLU9a2EwdOErrPbPKZmIfbs/L4B3k4zejMDH3Y+ZwIDAQABo1MwUTAdBgNVHQ4EFgQU8sWwq1TrurK7xMTwO1dKfeJBbCMwHwYDVR0jBBgwFoAU8sWwq1TrurK7xMTwO1dKfeJBbCMwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAFw6M1PiIfCPIBQ5EBUPNmRvRFuDpolOmDofnf/+mv63LqwQZAdo/W8tzZ9kOFhq24SiLw0H7fsdG/jeREXiIZMNoW/rA6Uac8sU+FYF7Q+qp6CQLlSQbDcpVMifTQjcBk2xh+aLK9SrrXBqnTAhwS+offGtAW8DpoLuH4tAcQmIjlgMlN65jnELCuqNR/wpA+zch8LZW8saQ2cwRCwdr8mAzZoLbsDSVCHxQF3/kQjPT7Nao1q2iWcY3OYcRmKrieHDP67yeLUbVmetfZis2d6ZlkqHLB4ZW1xX4otsEFkuTJA3HWDRsNyhTwx1YoCLsYut5Zp0myqPNBq28w6qGMyyoJN0Z4RzMEO3R6i/MQNfhK55/8O2HciM6xb5t/aBSuHPKlBDrFWhpRnKYkaNtlUo35qV5IbKGKau3SdZdSRciaXUd/p81YmoF01UlhhMz/Rqr1k2gyA0a9tF8+awCeanYt5izl8YO0FlrOU1SQ5UQw4szqqZqbrf4e8fRuU2TXNx4zk+ImE7WRB44f6mSD746ZCBRogZ/SA5jUBu+OPe4/sEtERWRcQD+fXgce9ZEN0+peyJIKAsl5Rm2Bmgyg5IoyWwSG5W+WekGyEokpslou2Yc6EjUj5ndZWz5EiHAiQ74hNfDoCZIxVVLU3Qbp8a0S1bmsoT2JOsspIbtZUg=" -) +type Metadata struct { + Parsed Parsed + Unparsed []EntryError +} + +func (m *Metadata) ToMap() (metadata map[uuid.UUID]*Entry) { + metadata = make(map[uuid.UUID]*Entry) + + for _, entry := range m.Parsed.Entries { + if entry.AaGUID != uuid.Nil { + metadata[entry.AaGUID] = &entry + } + } + + return metadata +} + +// Parsed is a structure representing the Parsed MDS3 dictionary. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#metadata-blob-payload-entry-dictionary +type Parsed struct { + // The legalHeader, if present, contains a legal guide for accessing and using metadata, which itself MAY contain URL(s) pointing to further information, such as a full Terms and Conditions statement. + LegalHeader string + + // The serial number of this UAF Metadata TOC Payload. Serial numbers MUST be consecutive and strictly monotonic, i.e. the successor TOC will have a no value exactly incremented by one. + Number int + + // ISO-8601 formatted date when the next update will be provided at latest. + NextUpdate time.Time + + // List of zero or more MetadataTOCPayloadEntry objects. + Entries []Entry +} -// Metadata is a map of authenticator AAGUIDs to corresponding metadata statements -var Metadata = make(map[uuid.UUID]MetadataBLOBPayloadEntry) +// PayloadJSON is an intermediary JSON/JWT representation of the Parsed. 
+type PayloadJSON struct { + LegalHeader string `json:"legalHeader"` + Number int `json:"no"` + NextUpdate string `json:"nextUpdate"` + + Entries []EntryJSON `json:"entries"` +} + +func (j PayloadJSON) Parse() (payload Parsed, err error) { + var update time.Time + + if update, err = time.Parse(time.DateOnly, j.NextUpdate); err != nil { + return payload, fmt.Errorf("error occurred parsing next update value '%s': %w", j.NextUpdate, err) + } -// Conformance indicates if test metadata is currently being used -var Conformance = false + n := len(j.Entries) -var MDSRoot = ProductionMDSRoot + entries := make([]Entry, n) -// MetadataBLOBPayloadEntry - Represents the MetadataBLOBPayloadEntry -// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#metadata-blob-payload-entry-dictionary -type MetadataBLOBPayloadEntry struct { + for i := 0; i < n; i++ { + if entries[i], err = j.Entries[i].Parse(); err != nil { + return payload, fmt.Errorf("error occurred parsing entry %d: %w", i, err) + } + } + + return Parsed{ + LegalHeader: j.LegalHeader, + Number: j.Number, + NextUpdate: update, + Entries: entries, + }, nil +} + +// Entry is a structure representing the Entry MDS3 dictionary. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#metadata-blob-payload-entry-dictionary +type Entry struct { // The Authenticator Attestation ID. Aaid string `json:"aaid"` + // The Authenticator Attestation GUID. - AaGUID string `json:"aaguid"` + AaGUID uuid.UUID `json:"aaguid"` + // A list of the attestation certificate public key identifiers encoded as hex string. AttestationCertificateKeyIdentifiers []string `json:"attestationCertificateKeyIdentifiers"` + // The metadataStatement JSON object as defined in FIDOMetadataStatement. - MetadataStatement MetadataStatement `json:"metadataStatement"` + MetadataStatement Statement `json:"metadataStatement"` + // Status of the FIDO Biometric Certification of one or more biometric components of the Authenticator BiometricStatusReports []BiometricStatusReport `json:"biometricStatusReports"` + // An array of status reports applicable to this authenticator. StatusReports []StatusReport `json:"statusReports"` + // ISO-8601 formatted date since when the status report array was set to the current value. - TimeOfLastStatusChange string `json:"timeOfLastStatusChange"` + TimeOfLastStatusChange time.Time + // URL of a list of rogue (i.e. untrusted) individual authenticators. - RogueListURL string `json:"rogueListURL"` + RogueListURL *url.URL + // The hash value computed over the Base64url encoding of the UTF-8 representation of the JSON encoded rogueList available at rogueListURL (with type rogueListEntry[]). - RogueListHash string `json:"rogueListHash"` + RogueListHash string +} + +// EntryJSON is an intermediary JSON/JWT structure representing the Entry MDS3 dictionary. 
+// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#metadata-blob-payload-entry-dictionary +type EntryJSON struct { + Aaid string `json:"aaid"` + AaGUID string `json:"aaguid"` + AttestationCertificateKeyIdentifiers []string `json:"attestationCertificateKeyIdentifiers"` + + MetadataStatement StatementJSON `json:"metadataStatement"` + BiometricStatusReports []BiometricStatusReportJSON `json:"biometricStatusReports"` + StatusReports []StatusReportJSON `json:"statusReports"` + + TimeOfLastStatusChange string `json:"timeOfLastStatusChange"` + RogueListURL string `json:"rogueListURL"` + RogueListHash string `json:"rogueListHash"` +} + +func (j EntryJSON) Parse() (entry Entry, err error) { + var aaguid uuid.UUID + + if len(j.AaGUID) != 0 { + if aaguid, err = uuid.Parse(j.AaGUID); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error parsing AAGUID: %w", j.AaGUID, err) + } + } + + var statement Statement + + if statement, err = j.MetadataStatement.Parse(); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': %w", j.AaGUID, err) + } + + var i, n int + + n = len(j.BiometricStatusReports) + + bsrs := make([]BiometricStatusReport, n) + + for i = 0; i < n; i++ { + if bsrs[i], err = j.BiometricStatusReports[i].Parse(); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error occurred parsing biometric status report %d: %w", j.AaGUID, i, err) + } + } + + n = len(j.StatusReports) + + srs := make([]StatusReport, n) + + for i = 0; i < n; i++ { + if srs[i], err = j.StatusReports[i].Parse(); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error occurred parsing status report %d: %w", j.AaGUID, i, err) + } + } + + var change time.Time + + if change, err = time.Parse(time.DateOnly, j.TimeOfLastStatusChange); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error occurred parsing time of last status change value: %w", j.AaGUID, err) + } + + var rogues *url.URL + + if len(j.RogueListURL) != 0 { + if rogues, err = url.ParseRequestURI(j.RogueListURL); err != nil { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error occurred parsing rogue list URL value: %w", j.AaGUID, err) + } + + if len(j.RogueListHash) == 0 { + return entry, fmt.Errorf("error occurred parsing metadata entry with AAGUID '%s': error occurred validating rogue list URL value: the rogue list hash was absent", j.AaGUID) + } + } + + return Entry{ + Aaid: j.Aaid, + AaGUID: aaguid, + AttestationCertificateKeyIdentifiers: j.AttestationCertificateKeyIdentifiers, + MetadataStatement: statement, + BiometricStatusReports: bsrs, + StatusReports: srs, + TimeOfLastStatusChange: change, + RogueListURL: rogues, + RogueListHash: j.RogueListHash, + }, nil +} + +// Statement is a structure representing the Statement MDS3 dictionary. +// Authenticator metadata statements are used directly by the FIDO server at a relying party, but the information +// contained in the authoritative statement is used in several other places. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#metadata-keys +type Statement struct { + // The legalHeader, if present, contains a legal guide for accessing and using metadata, which itself MAY contain URL(s) pointing to further information, such as a full Terms and Conditions statement. 
+	LegalHeader string + + // The Authenticator Attestation ID. + Aaid string + + // The Authenticator Attestation GUID. + AaGUID uuid.UUID + + // A list of the attestation certificate public key identifiers encoded as hex string. + AttestationCertificateKeyIdentifiers []string + + // A human-readable, short description of the authenticator, in English. + Description string + + // A list of human-readable short descriptions of the authenticator in different languages. + AlternativeDescriptions map[string]string + + // Earliest (i.e. lowest) trustworthy authenticatorVersion meeting the requirements specified in this metadata statement. + AuthenticatorVersion uint32 + + // The FIDO protocol family. The values "uaf", "u2f", and "fido2" are supported. + ProtocolFamily string + + // The Metadata Schema version. + Schema uint16 + + // The FIDO unified protocol version(s) (related to the specific protocol family) supported by this authenticator. + Upv []Version + + // The list of authentication algorithms supported by the authenticator. + AuthenticationAlgorithms []AuthenticationAlgorithm + + // The list of public key formats supported by the authenticator during registration operations. + PublicKeyAlgAndEncodings []PublicKeyAlgAndEncoding + + // The supported attestation type(s). + AttestationTypes AuthenticatorAttestationTypes + + // A list of alternative VerificationMethodANDCombinations. + UserVerificationDetails [][]VerificationMethodDescriptor + + // A 16-bit number representing the bit fields defined by the KEY_PROTECTION constants in the FIDO Registry of Predefined Values + KeyProtection []string + + // This entry is set to true or it is omitted, if the Uauth private key is restricted by the authenticator to only sign valid FIDO signature assertions. + // This entry is set to false, if the authenticator doesn't restrict the Uauth key to only sign valid FIDO signature assertions. + IsKeyRestricted bool + + // This entry is set to true or it is omitted, if Uauth key usage always requires a fresh user verification + // This entry is set to false, if the Uauth key can be used without requiring a fresh user verification, e.g. without any additional user interaction, if the user was verified a (potentially configurable) caching time ago. + IsFreshUserVerificationRequired bool + + // A 16-bit number representing the bit fields defined by the MATCHER_PROTECTION constants in the FIDO Registry of Predefined Values + MatcherProtection []string + + // The authenticator's overall claimed cryptographic strength in bits (sometimes also called security strength or security level). + CryptoStrength uint16 + + // A 32-bit number representing the bit fields defined by the ATTACHMENT_HINT constants in the FIDO Registry of Predefined Values + AttachmentHint []string + + // A 16-bit number representing a combination of the bit flags defined by the TRANSACTION_CONFIRMATION_DISPLAY constants in the FIDO Registry of Predefined Values + TcDisplay []string + + // Supported MIME content type [RFC2049] for the transaction confirmation display, such as text/plain or image/png. + TcDisplayContentType string + + // A list of alternative DisplayPNGCharacteristicsDescriptor. Each of these entries is one alternative of supported image characteristics for displaying a PNG image. + TcDisplayPNGCharacteristics []DisplayPNGCharacteristicsDescriptor + + // Each element of this array represents a PKIX [RFC5280] X.509 certificate that is a valid trust anchor for this authenticator model. 
+ // Multiple certificates might be used for different batches of the same model. + // The array does not represent a certificate chain, but only the trust anchor of that chain. + // A trust anchor can be a root certificate, an intermediate CA certificate or even the attestation certificate itself. + AttestationRootCertificates []*x509.Certificate + + // A list of trust anchors used for ECDAA attestation. This entry MUST be present if and only if attestationType includes ATTESTATION_ECDAA. + EcdaaTrustAnchors []EcdaaTrustAnchor + + // A data: url [RFC2397] encoded PNG [PNG] icon for the Authenticator. + Icon *url.URL + + // List of extensions supported by the authenticator. + SupportedExtensions []ExtensionDescriptor + + // Describes supported versions, extensions, AAGUID of the device and its capabilities + AuthenticatorGetInfo AuthenticatorGetInfo +} + +func (s *Statement) Verifier() (opts x509.VerifyOptions) { + roots := x509.NewCertPool() + + for _, root := range s.AttestationRootCertificates { + roots.AddCert(root) + } + + return x509.VerifyOptions{ + Roots: roots, + } +} + +// StatementJSON is an intermediary JSON/JWT structure representing the Statement MDS3 dictionary. +// Authenticator metadata statements are used directly by the FIDO server at a relying party, but the information +// contained in the authoritative statement is used in several other places. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#metadata-keys +type StatementJSON struct { + LegalHeader string `json:"legalHeader"` + Aaid string `json:"aaid"` + AaGUID string `json:"aaguid"` + AttestationCertificateKeyIdentifiers []string `json:"attestationCertificateKeyIdentifiers"` + Description string `json:"description"` + AlternativeDescriptions map[string]string `json:"alternativeDescriptions"` + AuthenticatorVersion uint32 `json:"authenticatorVersion"` + ProtocolFamily string `json:"protocolFamily"` + Schema uint16 `json:"schema"` + Upv []Version `json:"upv"` + AuthenticationAlgorithms []AuthenticationAlgorithm `json:"authenticationAlgorithms"` + PublicKeyAlgAndEncodings []PublicKeyAlgAndEncoding `json:"publicKeyAlgAndEncodings"` + AttestationTypes []AuthenticatorAttestationType `json:"attestationTypes"` + UserVerificationDetails [][]VerificationMethodDescriptor `json:"userVerificationDetails"` + KeyProtection []string `json:"keyProtection"` + IsKeyRestricted bool `json:"isKeyRestricted"` + IsFreshUserVerificationRequired bool `json:"isFreshUserVerificationRequired"` + MatcherProtection []string `json:"matcherProtection"` + CryptoStrength uint16 `json:"cryptoStrength"` + AttachmentHint []string `json:"attachmentHint"` + TcDisplay []string `json:"tcDisplay"` + TcDisplayContentType string `json:"tcDisplayContentType"` + TcDisplayPNGCharacteristics []DisplayPNGCharacteristicsDescriptor `json:"tcDisplayPNGCharacteristics"` + AttestationRootCertificates []string `json:"attestationRootCertificates"` + EcdaaTrustAnchors []EcdaaTrustAnchor `json:"ecdaaTrustAnchors"` + Icon string `json:"icon"` + SupportedExtensions []ExtensionDescriptor `json:"supportedExtensions"` + AuthenticatorGetInfo AuthenticatorGetInfoJSON `json:"authenticatorGetInfo"` +} + +func (j StatementJSON) Parse() (statement Statement, err error) { + var aaguid uuid.UUID + + if len(j.AaGUID) != 0 { + if aaguid, err = uuid.Parse(j.AaGUID); err != nil { + return statement, fmt.Errorf("error occurred parsing statement with description '%s': error occurred parsing AAGUID value: %w", j.Description, err) + } + } + + n := 
len(j.AttestationRootCertificates) + + certificates := make([]*x509.Certificate, n) + + for i := 0; i < n; i++ { + if certificates[i], err = mdsParseX509Certificate(j.AttestationRootCertificates[i]); err != nil { + return statement, fmt.Errorf("error occurred parsing statement with description '%s': error occurred parsing attestation root certificate %d value: %w", j.Description, i, err) + } + } + + var icon *url.URL + + if len(j.Icon) != 0 { + if icon, err = url.ParseRequestURI(j.Icon); err != nil { + return statement, fmt.Errorf("error occurred parsing statement with description '%s': error occurred parsing icon value: %w", j.Description, err) + } + } + + var info AuthenticatorGetInfo + + if info, err = j.AuthenticatorGetInfo.Parse(); err != nil { + return statement, fmt.Errorf("error occurred parsing statement with description '%s': error occurred parsing authenticator get info value: %w", j.Description, err) + } + + return Statement{ + LegalHeader: j.LegalHeader, + Aaid: j.Aaid, + AaGUID: aaguid, + AttestationCertificateKeyIdentifiers: j.AttestationCertificateKeyIdentifiers, + Description: j.Description, + AlternativeDescriptions: j.AlternativeDescriptions, + AuthenticatorVersion: j.AuthenticatorVersion, + ProtocolFamily: j.ProtocolFamily, + Schema: j.Schema, + Upv: j.Upv, + AuthenticationAlgorithms: j.AuthenticationAlgorithms, + PublicKeyAlgAndEncodings: j.PublicKeyAlgAndEncodings, + AttestationTypes: j.AttestationTypes, + UserVerificationDetails: j.UserVerificationDetails, + KeyProtection: j.KeyProtection, + IsKeyRestricted: j.IsKeyRestricted, + IsFreshUserVerificationRequired: j.IsFreshUserVerificationRequired, + MatcherProtection: j.MatcherProtection, + CryptoStrength: j.CryptoStrength, + AttachmentHint: j.AttachmentHint, + TcDisplay: j.TcDisplay, + TcDisplayContentType: j.TcDisplayContentType, + TcDisplayPNGCharacteristics: j.TcDisplayPNGCharacteristics, + AttestationRootCertificates: certificates, + EcdaaTrustAnchors: j.EcdaaTrustAnchors, + Icon: icon, + SupportedExtensions: j.SupportedExtensions, + AuthenticatorGetInfo: info, + }, nil } -// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#biometricstatusreport-dictionary -// BiometricStatusReport - Contains the current BiometricStatusReport of one of the authenticator's biometric component. +// BiometricStatusReport is a structure representing the BiometricStatusReport MDS3 dictionary. +// Contains the current status of the authenticator's biometric component. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#biometricstatusreport-dictionary type BiometricStatusReport struct { // Achieved level of the biometric certification of this biometric component of the authenticator - CertLevel uint16 `json:"certLevel"` + CertLevel uint16 + // A single USER_VERIFY constant indicating the modality of the biometric component - Modality string `json:"modality"` + Modality string + // ISO-8601 formatted date since when the certLevel achieved, if applicable. If no date is given, the status is assumed to be effective while present. - EffectiveDate string `json:"effectiveDate"` + EffectiveDate time.Time + // Describes the externally visible aspects of the Biometric Certification evaluation. - CertificationDescriptor string `json:"certificationDescriptor"` + CertificationDescriptor string + // The unique identifier for the issued Biometric Certification. 
- CertificateNumber string `json:"certificateNumber"` + CertificateNumber string + // The version of the Biometric Certification Policy the implementation is Certified to, e.g. "1.0.0". - CertificationPolicyVersion string `json:"certificationPolicyVersion"` + CertificationPolicyVersion string + // The version of the Biometric Requirements [FIDOBiometricsRequirements] the implementation is certified to, e.g. "1.0.0". + CertificationRequirementsVersion string +} + +// BiometricStatusReportJSON is a structure representing the BiometricStatusReport MDS3 dictionary. +// Contains the current status of the authenticator's biometric component. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#biometricstatusreport-dictionary +type BiometricStatusReportJSON struct { + CertLevel uint16 `json:"certLevel"` + Modality string `json:"modality"` + EffectiveDate string `json:"effectiveDate"` + CertificationDescriptor string `json:"certificationDescriptor"` + CertificateNumber string `json:"certificateNumber"` + + CertificationPolicyVersion string `json:"certificationPolicyVersion"` CertificationRequirementsVersion string `json:"certificationRequirementsVersion"` } -// StatusReport - Contains the current BiometricStatusReport of one of the authenticator's biometric component. -// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#statusreport-dictionary +func (j BiometricStatusReportJSON) Parse() (report BiometricStatusReport, err error) { + var effective time.Time + + if effective, err = time.Parse(time.DateOnly, j.EffectiveDate); err != nil { + return report, fmt.Errorf("error occurred parsing effective date value: %w", err) + } + + return BiometricStatusReport{ + CertLevel: j.CertLevel, + Modality: j.Modality, + EffectiveDate: effective, + CertificationDescriptor: j.CertificationDescriptor, + CertificateNumber: j.CertificateNumber, + CertificationPolicyVersion: j.CertificationPolicyVersion, + CertificationRequirementsVersion: j.CertificationRequirementsVersion, + }, nil +} + +// StatusReport is a structure representing the StatusReport MDS3 dictionary. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#statusreport-dictionary type StatusReport struct { // Status of the authenticator. Additional fields MAY be set depending on this value. - Status AuthenticatorStatus `json:"status"` + Status AuthenticatorStatus + // ISO-8601 formatted date since when the status code was set, if applicable. If no date is given, the status is assumed to be effective while present. - EffectiveDate string `json:"effectiveDate"` + EffectiveDate time.Time + // The authenticatorVersion that this status report relates to. In the case of FIDO_CERTIFIED* status values, the status applies to higher authenticatorVersions until there is a new statusReport. - AuthenticatorVersion uint32 `json:"authenticatorVersion"` + AuthenticatorVersion uint32 + // Base64-encoded [RFC4648] (not base64url!) DER [ITU-X690-2008] PKIX certificate value related to the current status, if applicable. - Certificate string `json:"certificate"` + Certificate *x509.Certificate + // HTTPS URL where additional information may be found related to the current status, if applicable. - URL string `json:"url"` + URL *url.URL + // Describes the externally visible aspects of the Authenticator Certification evaluation. 
- CertificationDescriptor string `json:"certificationDescriptor"` + CertificationDescriptor string + // The unique identifier for the issued Certification. - CertificateNumber string `json:"certificateNumber"` + CertificateNumber string + // The version of the Authenticator Certification Policy the implementation is Certified to, e.g. "1.0.0". - CertificationPolicyVersion string `json:"certificationPolicyVersion"` + CertificationPolicyVersion string + // The Document Version of the Authenticator Security Requirements (DV) [FIDOAuthenticatorSecurityRequirements] the implementation is certified to, e.g. "1.2.0". - CertificationRequirementsVersion string `json:"certificationRequirementsVersion"` + CertificationRequirementsVersion string } -// AuthenticatorAttestationType - The ATTESTATION constants are 16 bit long integers indicating the specific attestation that authenticator supports. -// Each constant has a case-sensitive string representation (in quotes), which is used in the authoritative metadata for FIDO authenticators. -type AuthenticatorAttestationType string - -const ( - // BasicFull - Indicates full basic attestation, based on an attestation private key shared among a class of authenticators (e.g. same model). Authenticators must provide its attestation signature during the registration process for the same reason. The attestation trust anchor is shared with FIDO Servers out of band (as part of the Metadata). This sharing process should be done according to [UAFMetadataService]. - BasicFull AuthenticatorAttestationType = "basic_full" - // BasicSurrogate - Just syntactically a Basic Attestation. The attestation object self-signed, i.e. it is signed using the UAuth.priv key, i.e. the key corresponding to the UAuth.pub key included in the attestation object. As a consequence it does not provide a cryptographic proof of the security characteristics. But it is the best thing we can do if the authenticator is not able to have an attestation private key. - BasicSurrogate AuthenticatorAttestationType = "basic_surrogate" - // Ecdaa - Indicates use of elliptic curve based direct anonymous attestation as defined in [FIDOEcdaaAlgorithm]. Support for this attestation type is optional at this time. It might be required by FIDO Certification. - Ecdaa AuthenticatorAttestationType = "ecdaa" - // AttCA - Indicates PrivacyCA attestation as defined in [TCG-CMCProfile-AIKCertEnroll]. Support for this attestation type is optional at this time. It might be required by FIDO Certification. - AttCA AuthenticatorAttestationType = "attca" - // AnonCA In this case, the authenticator uses an Anonymization CA which dynamically generates per-credential attestation certificates such that the attestation statements presented to Relying Parties do not provide uniquely identifiable information, e.g., that might be used for tracking purposes. The applicable [WebAuthn] attestation formats "fmt" are Google SafetyNet Attestation "android-safetynet", Android Keystore Attestation "android-key", Apple Anonymous Attestation "apple", and Apple Application Attestation "apple-appattest". - AnonCA AuthenticatorAttestationType = "anonca" - // None - Indicates absence of attestation - None AuthenticatorAttestationType = "none" -) +// StatusReportJSON is an intermediary JSON/JWT structure representing the StatusReport MDS3 dictionary. 
+// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#statusreport-dictionary +type StatusReportJSON struct { + Status AuthenticatorStatus `json:"status"` + EffectiveDate string `json:"effectiveDate"` + AuthenticatorVersion uint32 `json:"authenticatorVersion"` + Certificate string `json:"certificate"` + URL string `json:"url"` + CertificationDescriptor string `json:"certificationDescriptor"` + CertificateNumber string `json:"certificateNumber"` + CertificationPolicyVersion string `json:"certificationPolicyVersion"` + CertificationRequirementsVersion string `json:"certificationRequirementsVersion"` +} -// AuthenticatorStatus - This enumeration describes the status of an authenticator model as identified by its AAID and potentially some additional information (such as a specific attestation key). -// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#authenticatorstatus-enum -type AuthenticatorStatus string - -const ( - // NotFidoCertified - This authenticator is not FIDO certified. - NotFidoCertified AuthenticatorStatus = "NOT_FIDO_CERTIFIED" - // FidoCertified - This authenticator has passed FIDO functional certification. This certification scheme is phased out and will be replaced by FIDO_CERTIFIED_L1. - FidoCertified AuthenticatorStatus = "FIDO_CERTIFIED" - // UserVerificationBypass - Indicates that malware is able to bypass the user verification. This means that the authenticator could be used without the user's consent and potentially even without the user's knowledge. - UserVerificationBypass AuthenticatorStatus = "USER_VERIFICATION_BYPASS" - // AttestationKeyCompromise - Indicates that an attestation key for this authenticator is known to be compromised. Additional data should be supplied, including the key identifier and the date of compromise, if known. - AttestationKeyCompromise AuthenticatorStatus = "ATTESTATION_KEY_COMPROMISE" - // UserKeyRemoteCompromise - This authenticator has identified weaknesses that allow registered keys to be compromised and should not be trusted. This would include both, e.g. weak entropy that causes predictable keys to be generated or side channels that allow keys or signatures to be forged, guessed or extracted. - UserKeyRemoteCompromise AuthenticatorStatus = "USER_KEY_REMOTE_COMPROMISE" - // UserKeyPhysicalCompromise - This authenticator has known weaknesses in its key protection mechanism(s) that allow user keys to be extracted by an adversary in physical possession of the device. - UserKeyPhysicalCompromise AuthenticatorStatus = "USER_KEY_PHYSICAL_COMPROMISE" - // UpdateAvailable - A software or firmware update is available for the device. Additional data should be supplied including a URL where users can obtain an update and the date the update was published. - UpdateAvailable AuthenticatorStatus = "UPDATE_AVAILABLE" - // Revoked - The FIDO Alliance has determined that this authenticator should not be trusted for any reason, for example if it is known to be a fraudulent product or contain a deliberate backdoor. - Revoked AuthenticatorStatus = "REVOKED" - // SelfAssertionSubmitted - The authenticator vendor has completed and submitted the self-certification checklist to the FIDO Alliance. If this completed checklist is publicly available, the URL will be specified in StatusReport.url. - SelfAssertionSubmitted AuthenticatorStatus = "SELF_ASSERTION_SUBMITTED" - // FidoCertifiedL1 - The authenticator has passed FIDO Authenticator certification at level 1. 
This level is the more strict successor of FIDO_CERTIFIED. - FidoCertifiedL1 AuthenticatorStatus = "FIDO_CERTIFIED_L1" - // FidoCertifiedL1plus - The authenticator has passed FIDO Authenticator certification at level 1+. This level is the more than level 1. - FidoCertifiedL1plus AuthenticatorStatus = "FIDO_CERTIFIED_L1plus" - // FidoCertifiedL2 - The authenticator has passed FIDO Authenticator certification at level 2. This level is more strict than level 1+. - FidoCertifiedL2 AuthenticatorStatus = "FIDO_CERTIFIED_L2" - // FidoCertifiedL2plus - The authenticator has passed FIDO Authenticator certification at level 2+. This level is more strict than level 2. - FidoCertifiedL2plus AuthenticatorStatus = "FIDO_CERTIFIED_L2plus" - // FidoCertifiedL3 - The authenticator has passed FIDO Authenticator certification at level 3. This level is more strict than level 2+. - FidoCertifiedL3 AuthenticatorStatus = "FIDO_CERTIFIED_L3" - // FidoCertifiedL3plus - The authenticator has passed FIDO Authenticator certification at level 3+. This level is more strict than level 3. - FidoCertifiedL3plus AuthenticatorStatus = "FIDO_CERTIFIED_L3plus" -) +func (j StatusReportJSON) Parse() (report StatusReport, err error) { + var certificate *x509.Certificate -// UndesiredAuthenticatorStatus is an array of undesirable authenticator statuses -var UndesiredAuthenticatorStatus = [...]AuthenticatorStatus{ - AttestationKeyCompromise, - UserVerificationBypass, - UserKeyRemoteCompromise, - UserKeyPhysicalCompromise, - Revoked, -} + if len(j.Certificate) != 0 { + if certificate, err = mdsParseX509Certificate(j.Certificate); err != nil { + return report, fmt.Errorf("error occurred parsing certificate value: %w", err) + } + } + + var effective time.Time + + if effective, err = time.Parse(time.DateOnly, j.EffectiveDate); err != nil { + return report, fmt.Errorf("error occurred parsing effective date value: %w", err) + } + + var uri *url.URL + + if len(j.URL) != 0 { + if uri, err = url.ParseRequestURI(j.URL); err != nil { + if !strings.HasPrefix(j.URL, "http") { + var e error -// IsUndesiredAuthenticatorStatus returns whether the supplied authenticator status is desirable or not -func IsUndesiredAuthenticatorStatus(status AuthenticatorStatus) bool { - for _, s := range UndesiredAuthenticatorStatus { - if s == status { - return true + if uri, e = url.ParseRequestURI(fmt.Sprintf("https://%s", j.URL)); e != nil { + return report, fmt.Errorf("error occurred parsing URL value: %w", err) + } + } } } - return false + return StatusReport{ + Status: j.Status, + EffectiveDate: effective, + AuthenticatorVersion: j.AuthenticatorVersion, + Certificate: certificate, + URL: uri, + CertificationDescriptor: j.CertificationDescriptor, + CertificateNumber: j.CertificateNumber, + CertificationPolicyVersion: j.CertificationPolicyVersion, + CertificationRequirementsVersion: j.CertificationRequirementsVersion, + }, nil } -// RogueListEntry - Contains a list of individual authenticators known to be rogue +// RogueListEntry is a structure representing the RogueListEntry MDS3 dictionary. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#roguelistentry-dictionary type RogueListEntry struct { // Base64url encoding of the rogue authenticator's secret key Sk string `json:"sk"` + // ISO-8601 formatted date since when this entry is effective. 
Date string `json:"date"` } -// MetadataBLOBPayload - Represents the MetadataBLOBPayload -type MetadataBLOBPayload struct { - // The legalHeader, if present, contains a legal guide for accessing and using metadata, which itself MAY contain URL(s) pointing to further information, such as a full Terms and Conditions statement. - LegalHeader string `json:"legalHeader"` - // The serial number of this UAF Metadata TOC Payload. Serial numbers MUST be consecutive and strictly monotonic, i.e. the successor TOC will have a no value exactly incremented by one. - Number int `json:"no"` - // ISO-8601 formatted date when the next update will be provided at latest. - NextUpdate string `json:"nextUpdate"` - // List of zero or more MetadataTOCPayloadEntry objects. - Entries []MetadataBLOBPayloadEntry `json:"entries"` -} - -// CodeAccuracyDescriptor describes the relevant accuracy/complexity aspects of passcode user verification methods. +// CodeAccuracyDescriptor is a structure representing the CodeAccuracyDescriptor MDS3 dictionary. +// It describes the relevant accuracy/complexity aspects of passcode user verification methods. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#codeaccuracydescriptor-dictionary type CodeAccuracyDescriptor struct { // The numeric system base (radix) of the code, e.g. 10 in the case of decimal digits. Base uint16 `json:"base"` + // The minimum number of digits of the given base required for that code, e.g. 4 in the case of 4 digits. MinLength uint16 `json:"minLength"` + // Maximum number of false attempts before the authenticator will block this method (at least for some time). 0 means it will never block. MaxRetries uint16 `json:"maxRetries"` + // Enforced minimum number of seconds wait time after blocking (e.g. due to forced reboot or similar). // 0 means this user verification method will be blocked, either permanently or until an alternative user verification method method succeeded. // All alternative user verification methods MUST be specified appropriately in the Metadata in userVerificationDetails. BlockSlowdown uint16 `json:"blockSlowdown"` } -// The BiometricAccuracyDescriptor describes relevant accuracy/complexity aspects in the case of a biometric user verification method. +// BiometricAccuracyDescriptor is a structure representing the BiometricAccuracyDescriptor MDS3 dictionary. +// It describes relevant accuracy/complexity aspects in the case of a biometric user verification method. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#biometricaccuracydescriptor-dictionary type BiometricAccuracyDescriptor struct { // The false rejection rate [ISO19795-1] for a single template, i.e. the percentage of verification transactions with truthful claims of identity that are incorrectly denied. - SelfAttestedFRR int64 `json:"selfAttestedFRR "` + SelfAttestedFRR int64 `json:"selfAttestedFRR"` + // The false acceptance rate [ISO19795-1] for a single template, i.e. the percentage of verification transactions with wrongful claims of identity that are incorrectly confirmed. - SelfAttestedFAR int64 `json:"selfAttestedFAR "` + SelfAttestedFAR int64 `json:"selfAttestedFAR"` + // Maximum number of alternative templates from different fingers allowed. MaxTemplates uint16 `json:"maxTemplates"` + // Maximum number of false attempts before the authenticator will block this method (at least for some time). 0 means it will never block. 
MaxRetries uint16 `json:"maxRetries"` + // Enforced minimum number of seconds wait time after blocking (e.g. due to forced reboot or similar). // 0 means that this user verification method will be blocked either permanently or until an alternative user verification method succeeded. // All alternative user verification methods MUST be specified appropriately in the metadata in userVerificationDetails. BlockSlowdown uint16 `json:"blockSlowdown"` } -// The PatternAccuracyDescriptor describes relevant accuracy/complexity aspects in the case that a pattern is used as the user verification method. +// PatternAccuracyDescriptor is a structure representing the PatternAccuracyDescriptor MDS3 dictionary. +// It describes relevant accuracy/complexity aspects in the case that a pattern is used as the user verification method. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#patternaccuracydescriptor-dictionary type PatternAccuracyDescriptor struct { // Number of possible patterns (having the minimum length) out of which exactly one would be the right one, i.e. 1/probability in the case of equal distribution. MinComplexity uint32 `json:"minComplexity"` + // Maximum number of false attempts before the authenticator will block authentication using this method (at least temporarily). 0 means it will never block. MaxRetries uint16 `json:"maxRetries"` + // Enforced minimum number of seconds wait time after blocking (due to forced reboot or similar mechanism). // 0 means this user verification method will be blocked, either permanently or until an alternative user verification method method succeeded. // All alternative user verification methods MUST be specified appropriately in the metadata under userVerificationDetails. BlockSlowdown uint16 `json:"blockSlowdown"` } -// VerificationMethodDescriptor - A descriptor for a specific base user verification method as implemented by the authenticator. +// VerificationMethodDescriptor is a structure representing the VerificationMethodDescriptor MDS3 dictionary. +// It describes a descriptor for a specific base user verification method as implemented by the authenticator. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#verificationmethoddescriptor-dictionary type VerificationMethodDescriptor struct { // a single USER_VERIFY constant (see [FIDORegistry]), not a bit flag combination. This value MUST be non-zero. - UserVerificationMethod string `json:"userVerification"` + UserVerificationMethod string `json:"userVerificationMethod"` + // May optionally be used in the case of method USER_VERIFY_PASSCODE. CaDesc CodeAccuracyDescriptor `json:"caDesc"` + // May optionally be used in the case of method USER_VERIFY_FINGERPRINT, USER_VERIFY_VOICEPRINT, USER_VERIFY_FACEPRINT, USER_VERIFY_EYEPRINT, or USER_VERIFY_HANDPRINT. BaDesc BiometricAccuracyDescriptor `json:"baDesc"` + // May optionally be used in case of method USER_VERIFY_PATTERN. PaDesc PatternAccuracyDescriptor `json:"paDesc"` } -// The rgbPaletteEntry is an RGB three-sample tuple palette entry -type rgbPaletteEntry struct { +// RGBPaletteEntry is a structure representing the RGBPaletteEntry MDS3 dictionary. +// It describes an RGB three-sample tuple palette entry. 
+// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#rgbpaletteentry-dictionary +type RGBPaletteEntry struct { // Red channel sample value R uint16 `json:"r"` + // Green channel sample value G uint16 `json:"g"` + // Blue channel sample value B uint16 `json:"b"` } -// The DisplayPNGCharacteristicsDescriptor describes a PNG image characteristics as defined in the PNG [PNG] spec for IHDR (image header) and PLTE (palette table) +// DisplayPNGCharacteristicsDescriptor is a structure representing the DisplayPNGCharacteristicsDescriptor MDS3 dictionary. +// It describes a PNG image characteristics as defined in the PNG [PNG] spec for IHDR (image header) and PLTE (palette table)/ +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#displaypngcharacteristicsdescriptor-dictionary type DisplayPNGCharacteristicsDescriptor struct { // image width Width uint32 `json:"width"` + // image height Height uint32 `json:"height"` + // Bit depth - bits per sample or per palette index. BitDepth byte `json:"bitDepth"` + // Color type defines the PNG image type. ColorType byte `json:"colorType"` + // Compression method used to compress the image data. Compression byte `json:"compression"` + // Filter method is the preprocessing method applied to the image data before compression. Filter byte `json:"filter"` + // Interlace method is the transmission order of the image data. Interlace byte `json:"interlace"` + // 1 to 256 palette entries - Plte []rgbPaletteEntry `json:"plte"` + Plte []RGBPaletteEntry `json:"plte"` } -// EcdaaTrustAnchor - In the case of ECDAA attestation, the ECDAA-Issuer's trust anchor MUST be specified in this field. +// EcdaaTrustAnchor is a structure representing the EcdaaTrustAnchor MDS3 dictionary. +// In the case of ECDAA attestation, the ECDAA-Issuer's trust anchor MUST be specified in this field. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#ecdaatrustanchor-dictionary type EcdaaTrustAnchor struct { // base64url encoding of the result of ECPoint2ToB of the ECPoint2 X X string `json:"X"` + // base64url encoding of the result of ECPoint2ToB of the ECPoint2 Y Y string `json:"Y"` + // base64url encoding of the result of BigNumberToB(c) C string `json:"c"` + // base64url encoding of the result of BigNumberToB(sx) SX string `json:"sx"` + // base64url encoding of the result of BigNumberToB(sy) SY string `json:"sy"` + // Name of the Barreto-Naehrig elliptic curve for G1. "BN_P256", "BN_P638", "BN_ISOP256", and "BN_ISOP512" are supported. G1Curve string `json:"G1Curve"` } -// ExtensionDescriptor - This descriptor contains an extension supported by the authenticator. +// ExtensionDescriptor is a structure representing the ExtensionDescriptor MDS3 dictionary. +// This descriptor contains an extension supported by the authenticator. +// +// See: https://fidoalliance.org/specs/mds/fido-metadata-statement-v3.0-ps-20210518.html#extensiondescriptor-dictionary type ExtensionDescriptor struct { // Identifies the extension. ID string `json:"id"` + // The TAG of the extension if this was assigned. TAGs are assigned to extensions if they could appear in an assertion. Tag uint16 `json:"tag"` + // Contains arbitrary data further describing the extension and/or data needed to correctly process the extension. 
Data string `json:"data"` + // Indicates whether unknown extensions must be ignored (false) or must lead to an error (true) when the extension is to be processed by the FIDO Server, FIDO Client, ASM, or FIDO Authenticator. FailIfUnknown bool `json:"fail_if_unknown"` } -// MetadataStatement - Authenticator metadata statements are used directly by the FIDO server at a relying party, but the information contained in the authoritative statement is used in several other places. -type MetadataStatement struct { - // The legalHeader, if present, contains a legal guide for accessing and using metadata, which itself MAY contain URL(s) pointing to further information, such as a full Terms and Conditions statement. - LegalHeader string `json:"legalHeader"` - // The Authenticator Attestation ID. - Aaid string `json:"aaid"` - // The Authenticator Attestation GUID. - AaGUID string `json:"aaguid"` - // A list of the attestation certificate public key identifiers encoded as hex string. - AttestationCertificateKeyIdentifiers []string `json:"attestationCertificateKeyIdentifiers"` - // A human-readable, short description of the authenticator, in English. - Description string `json:"description"` - // A list of human-readable short descriptions of the authenticator in different languages. - AlternativeDescriptions map[string]string `json:"alternativeDescriptions"` - // Earliest (i.e. lowest) trustworthy authenticatorVersion meeting the requirements specified in this metadata statement. - AuthenticatorVersion uint32 `json:"authenticatorVersion"` - // The FIDO protocol family. The values "uaf", "u2f", and "fido2" are supported. - ProtocolFamily string `json:"protocolFamily"` - // The FIDO unified protocol version(s) (related to the specific protocol family) supported by this authenticator. - Upv []Version `json:"upv"` - // The list of authentication algorithms supported by the authenticator. - AuthenticationAlgorithms []AuthenticationAlgorithm `json:"authenticationAlgorithms"` - // The list of public key formats supported by the authenticator during registration operations. - PublicKeyAlgAndEncodings []PublicKeyAlgAndEncoding `json:"publicKeyAlgAndEncodings"` - // The supported attestation type(s). - AttestationTypes []AuthenticatorAttestationType `json:"attestationTypes"` - // A list of alternative VerificationMethodANDCombinations. - UserVerificationDetails [][]VerificationMethodDescriptor `json:"userVerificationDetails"` - // A 16-bit number representing the bit fields defined by the KEY_PROTECTION constants in the FIDO Registry of Predefined Values - KeyProtection []string `json:"keyProtection"` - // This entry is set to true or it is omitted, if the Uauth private key is restricted by the authenticator to only sign valid FIDO signature assertions. - // This entry is set to false, if the authenticator doesn't restrict the Uauth key to only sign valid FIDO signature assertions. - IsKeyRestricted bool `json:"isKeyRestricted"` - // This entry is set to true or it is omitted, if Uauth key usage always requires a fresh user verification - // This entry is set to false, if the Uauth key can be used without requiring a fresh user verification, e.g. without any additional user interaction, if the user was verified a (potentially configurable) caching time ago. 
- IsFreshUserVerificationRequired bool `json:"isFreshUserVerificationRequired"` - // A 16-bit number representing the bit fields defined by the MATCHER_PROTECTION constants in the FIDO Registry of Predefined Values - MatcherProtection []string `json:"matcherProtection"` - // The authenticator's overall claimed cryptographic strength in bits (sometimes also called security strength or security level). - CryptoStrength uint16 `json:"cryptoStrength"` - // A 32-bit number representing the bit fields defined by the ATTACHMENT_HINT constants in the FIDO Registry of Predefined Values - AttachmentHint []string `json:"attachmentHint"` - // A 16-bit number representing a combination of the bit flags defined by the TRANSACTION_CONFIRMATION_DISPLAY constants in the FIDO Registry of Predefined Values - TcDisplay []string `json:"tcDisplay"` - // Supported MIME content type [RFC2049] for the transaction confirmation display, such as text/plain or image/png. - TcDisplayContentType string `json:"tcDisplayContentType"` - // A list of alternative DisplayPNGCharacteristicsDescriptor. Each of these entries is one alternative of supported image characteristics for displaying a PNG image. - TcDisplayPNGCharacteristics []DisplayPNGCharacteristicsDescriptor `json:"tcDisplayPNGCharacteristics"` - // Each element of this array represents a PKIX [RFC5280] X.509 certificate that is a valid trust anchor for this authenticator model. - // Multiple certificates might be used for different batches of the same model. - // The array does not represent a certificate chain, but only the trust anchor of that chain. - // A trust anchor can be a root certificate, an intermediate CA certificate or even the attestation certificate itself. - AttestationRootCertificates []string `json:"attestationRootCertificates"` - // A list of trust anchors used for ECDAA attestation. This entry MUST be present if and only if attestationType includes ATTESTATION_ECDAA. - EcdaaTrustAnchors []EcdaaTrustAnchor `json:"ecdaaTrustAnchors"` - // A data: url [RFC2397] encoded PNG [PNG] icon for the Authenticator. - Icon string `json:"icon"` - // List of extensions supported by the authenticator. - SupportedExtensions []ExtensionDescriptor `json:"supportedExtensions"` - // Describes supported versions, extensions, AAGUID of the device and its capabilities - AuthenticatorGetInfo AuthenticatorGetInfo `json:"authenticatorGetInfo"` -} - -type AuthenticationAlgorithm string - -const ( - // An ECDSA signature on the NIST secp256r1 curve which must have raw R and S buffers, encoded in big-endian order. - ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_raw" - // DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the NIST secp256r1 curve. - ALG_SIGN_SECP256R1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_der" - // RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian order RFC4055 RFC4056. - ALG_SIGN_RSASSA_PSS_SHA256_RAW AuthenticationAlgorithm = "rsassa_pss_sha256_raw" - // DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the RSASSA-PSS RFC3447 signature RFC4055 RFC4056. - ALG_SIGN_RSASSA_PSS_SHA256_DER AuthenticationAlgorithm = "rsassa_pss_sha256_der" - // An ECDSA signature on the secp256k1 curve which must have raw R and S buffers, encoded in big-endian order. - ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_raw" - // DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the secp256k1 curve. 
- ALG_SIGN_SECP256K1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_der" - // Chinese SM2 elliptic curve based signature algorithm combined with SM3 hash algorithm OSCCA-SM2 OSCCA-SM3. - ALG_SIGN_SM2_SM3_RAW AuthenticationAlgorithm = "sm2_sm3_raw" - // This is the EMSA-PKCS1-v1_5 signature as defined in RFC3447. - ALG_SIGN_RSA_EMSA_PKCS1_SHA256_RAW AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_raw" - // DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the EMSA-PKCS1-v1_5 signature as defined in RFC3447. - ALG_SIGN_RSA_EMSA_PKCS1_SHA256_DER AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_der" - // RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian order RFC4055 RFC4056. - ALG_SIGN_RSASSA_PSS_SHA384_RAW AuthenticationAlgorithm = "rsassa_pss_sha384_raw" - // RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian order RFC4055 RFC4056. - ALG_SIGN_RSASSA_PSS_SHA512_RAW AuthenticationAlgorithm = "rsassa_pss_sha512_raw" - // RSASSA-PKCS1-v1_5 RFC3447 with SHA256(aka RS256) signature must have raw S buffers, encoded in big-endian order RFC8017 RFC4056 - ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha256_raw" - // RSASSA-PKCS1-v1_5 RFC3447 with SHA384(aka RS384) signature must have raw S buffers, encoded in big-endian order RFC8017 RFC4056 - ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha384_raw" - // RSASSA-PKCS1-v1_5 RFC3447 with SHA512(aka RS512) signature must have raw S buffers, encoded in big-endian order RFC8017 RFC4056 - ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha512_raw" - // RSASSA-PKCS1-v1_5 RFC3447 with SHA1(aka RS1) signature must have raw S buffers, encoded in big-endian order RFC8017 RFC4056 - ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha1_raw" - // An ECDSA signature on the NIST secp384r1 curve with SHA384(aka: ES384) which must have raw R and S buffers, encoded in big-endian order. - ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW AuthenticationAlgorithm = "secp384r1_ecdsa_sha384_raw" - // An ECDSA signature on the NIST secp512r1 curve with SHA512(aka: ES512) which must have raw R and S buffers, encoded in big-endian order. - ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW AuthenticationAlgorithm = "secp521r1_ecdsa_sha512_raw" - // An EdDSA signature on the curve 25519, which must have raw R and S buffers, encoded in big-endian order. - ALG_SIGN_ED25519_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed25519_eddsa_sha512_raw" - // An EdDSA signature on the curve Ed448, which must have raw R and S buffers, encoded in big-endian order. 
- ALG_SIGN_ED448_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed448_eddsa_sha512_raw" -) - -// TODO: this goes away after webauthncose.CredentialPublicKey gets implemented -type algKeyCose struct { - KeyType webauthncose.COSEKeyType - Algorithm webauthncose.COSEAlgorithmIdentifier - Curve webauthncose.COSEEllipticCurve -} - -func algKeyCoseDictionary() func(AuthenticationAlgorithm) algKeyCose { - mapping := map[AuthenticationAlgorithm]algKeyCose{ - ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256}, - ALG_SIGN_SECP256R1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256}, - ALG_SIGN_RSASSA_PSS_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256}, - ALG_SIGN_RSASSA_PSS_SHA256_DER: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256}, - ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1}, - ALG_SIGN_SECP256K1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1}, - ALG_SIGN_RSASSA_PSS_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS384}, - ALG_SIGN_RSASSA_PSS_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS512}, - ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS256}, - ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS384}, - ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS512}, - ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS1}, - ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES384, Curve: webauthncose.P384}, - ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES512, Curve: webauthncose.P521}, - ALG_SIGN_ED25519_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed25519}, - ALG_SIGN_ED448_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed448}, - } - - return func(key AuthenticationAlgorithm) algKeyCose { - return mapping[key] - } -} - -func AlgKeyMatch(key algKeyCose, algs []AuthenticationAlgorithm) bool { - for _, alg := range algs { - if reflect.DeepEqual(algKeyCoseDictionary()(alg), key) { - return true - } - } - - return false -} - -type PublicKeyAlgAndEncoding string - -const ( - // Raw ANSI X9.62 formatted Elliptic Curve public key. - ALG_KEY_ECC_X962_RAW PublicKeyAlgAndEncoding = "ecc_x962_raw" - // DER ITU-X690-2008 encoded ANSI X.9.62 formatted SubjectPublicKeyInfo RFC5480 specifying an elliptic curve public key. - ALG_KEY_ECC_X962_DER PublicKeyAlgAndEncoding = "ecc_x962_der" - // Raw encoded 2048-bit RSA public key RFC3447. - ALG_KEY_RSA_2048_RAW PublicKeyAlgAndEncoding = "rsa_2048_raw" - // ASN.1 DER [ITU-X690-2008] encoded 2048-bit RSA RFC3447 public key RFC4055. - ALG_KEY_RSA_2048_DER PublicKeyAlgAndEncoding = "rsa_2048_der" - // COSE_Key format, as defined in Section 7 of RFC8152. This encoding includes its own field for indicating the public key algorithm. - ALG_KEY_COSE PublicKeyAlgAndEncoding = "cose" -) - -// Version - Represents a generic version with major and minor fields. 
+// Version represents a generic version with major and minor fields. type Version struct { // Major version. Major uint16 `json:"major"` + // Minor version. Minor uint16 `json:"minor"` } type AuthenticatorGetInfo struct { // List of supported versions. - Versions []string `json:"versions"` + Versions []string + // List of supported extensions. - Extensions []string `json:"extensions"` + Extensions []string + // The claimed AAGUID. - AaGUID string `json:"aaguid"` + AaGUID uuid.UUID + // List of supported options. - Options map[string]bool `json:"options"` + Options map[string]bool + // Maximum message size supported by the authenticator. - MaxMsgSize uint `json:"maxMsgSize"` + MaxMsgSize uint + // List of supported PIN/UV auth protocols in order of decreasing authenticator preference. - PivUvAuthProtocols []uint `json:"pinUvAuthProtocols"` + PivUvAuthProtocols []uint + // Maximum number of credentials supported in credentialID list at a time by the authenticator. - MaxCredentialCountInList uint `json:"maxCredentialCountInList"` + MaxCredentialCountInList uint + // Maximum Credential ID Length supported by the authenticator. - MaxCredentialIdLength uint `json:"maxCredentialLength"` + MaxCredentialIdLength uint + // List of supported transports. - Transports []string `json:"transports"` + Transports []string + // List of supported algorithms for credential generation, as specified in WebAuthn. - Algorithms []PublicKeyCredentialParameters `json:"algorithms"` + Algorithms []PublicKeyCredentialParameters + // The maximum size, in bytes, of the serialized large-blob array that this authenticator can store. - MaxSerializedLargeBlobArray uint `json:"maxSerializedLargeBlobArray"` + MaxSerializedLargeBlobArray uint + // If this member is present and set to true, the PIN must be changed. - ForcePINChange bool `json:"forcePINChange"` + ForcePINChange bool + // This specifies the current minimum PIN length, in Unicode code points, the authenticator enforces for ClientPIN. - MinPINLength uint `json:"minPINLength"` + MinPINLength uint + // Indicates the firmware version of the authenticator model identified by AAGUID. - FirmwareVersion uint `json:"firmwareVersion"` + FirmwareVersion uint + // Maximum credBlob length in bytes supported by the authenticator. - MaxCredBlobLength uint `json:"maxCredBlobLength"` + MaxCredBlobLength uint + // This specifies the max number of RP IDs that authenticator can set via setMinPINLength subcommand. - MaxRPIDsForSetMinPINLength uint `json:"maxRPIDsForSetMinPINLength"` + MaxRPIDsForSetMinPINLength uint + // This specifies the preferred number of invocations of the getPinUvAuthTokenUsingUvWithPermissions subCommand the platform may attempt before falling back to the getPinUvAuthTokenUsingPinWithPermissions subCommand or displaying an error. - PreferredPlatformUvAttempts uint `json:"preferredPlatformUvAttempts"` + PreferredPlatformUvAttempts uint + // This specifies the user verification modality supported by the authenticator via authenticatorClientPIN's getPinUvAuthTokenUsingUvWithPermissions subcommand. - UvModality uint `json:"uvModality"` + UvModality uint + // This specifies a list of authenticator certifications. - Certifications map[string]float64 `json:"certifications"` + Certifications map[string]float64 + // If this member is present it indicates the estimated number of additional discoverable credentials that can be stored. 
- RemainingDiscoverableCredentials uint `json:"remainingDiscoverableCredentials"` + RemainingDiscoverableCredentials uint + // If present the authenticator supports the authenticatorConfig vendorPrototype subcommand, and its value is a list of authenticatorConfig vendorCommandId values supported, which MAY be empty. - VendorPrototypeConfigCommands []uint `json:"vendorPrototypeConfigCommands"` + VendorPrototypeConfigCommands []uint +} + +type AuthenticatorGetInfoJSON struct { + Versions []string `json:"versions"` + Extensions []string `json:"extensions"` + AaGUID string `json:"aaguid"` + Options map[string]bool `json:"options"` + MaxMsgSize uint `json:"maxMsgSize"` + PivUvAuthProtocols []uint `json:"pinUvAuthProtocols"` + MaxCredentialCountInList uint `json:"maxCredentialCountInList"` + MaxCredentialIdLength uint `json:"maxCredentialIdLength"` + Transports []string `json:"transports"` + Algorithms []PublicKeyCredentialParameters `json:"algorithms"` + MaxSerializedLargeBlobArray uint `json:"maxSerializedLargeBlobArray"` + ForcePINChange bool `json:"forcePINChange"` + MinPINLength uint `json:"minPINLength"` + FirmwareVersion uint `json:"firmwareVersion"` + MaxCredBlobLength uint `json:"maxCredBlobLength"` + MaxRPIDsForSetMinPINLength uint `json:"maxRPIDsForSetMinPINLength"` + PreferredPlatformUvAttempts uint `json:"preferredPlatformUvAttempts"` + UvModality uint `json:"uvModality"` + Certifications map[string]float64 `json:"certifications"` + RemainingDiscoverableCredentials uint `json:"remainingDiscoverableCredentials"` + VendorPrototypeConfigCommands []uint `json:"vendorPrototypeConfigCommands"` +} + +func (j AuthenticatorGetInfoJSON) Parse() (info AuthenticatorGetInfo, err error) { + var aaguid uuid.UUID + + if len(j.AaGUID) != 0 { + if aaguid, err = uuid.Parse(j.AaGUID); err != nil { + return info, fmt.Errorf("error occurred parsing AAGUID value: %w", err) + } + } + + return AuthenticatorGetInfo{ + Versions: j.Versions, + Extensions: j.Extensions, + AaGUID: aaguid, + Options: j.Options, + MaxMsgSize: j.MaxMsgSize, + PivUvAuthProtocols: j.PivUvAuthProtocols, + MaxCredentialCountInList: j.MaxCredentialCountInList, + MaxCredentialIdLength: j.MaxCredentialIdLength, + Transports: j.Transports, + Algorithms: j.Algorithms, + MaxSerializedLargeBlobArray: j.MaxSerializedLargeBlobArray, + ForcePINChange: j.ForcePINChange, + MinPINLength: j.MinPINLength, + FirmwareVersion: j.FirmwareVersion, + MaxCredBlobLength: j.MaxCredBlobLength, + MaxRPIDsForSetMinPINLength: j.MaxRPIDsForSetMinPINLength, + PreferredPlatformUvAttempts: j.PreferredPlatformUvAttempts, + UvModality: j.UvModality, + Certifications: j.Certifications, + RemainingDiscoverableCredentials: j.RemainingDiscoverableCredentials, + VendorPrototypeConfigCommands: j.VendorPrototypeConfigCommands, + }, nil } // MDSGetEndpointsRequest is the request sent to the conformance metadata getEndpoints endpoint. @@ -541,189 +899,21 @@ type MDSGetEndpointsRequest struct { type MDSGetEndpointsResponse struct { // The status of the response. Status string `json:"status"` + // An array of urls, each pointing to a MetadataTOCPayload. Result []string `json:"result"` } -func unmarshalMDSBLOB(body []byte, c http.Client) (MetadataBLOBPayload, error) { - var payload MetadataBLOBPayload - - token, err := jwt.Parse(string(body), func(token *jwt.Token) (interface{}, error) { - // 2. 
If the x5u attribute is present in the JWT Header, then - if _, ok := token.Header["x5u"].([]interface{}); ok { - // never seen an x5u here, although it is in the spec - return nil, errors.New("x5u encountered in header of metadata TOC payload") - } - var chain []interface{} - // 3. If the x5u attribute is missing, the chain should be retrieved from the x5c attribute. - - if x5c, ok := token.Header["x5c"].([]interface{}); !ok { - // If that attribute is missing as well, Metadata TOC signing trust anchor is considered the TOC signing certificate chain. - chain[0] = MDSRoot - } else { - chain = x5c - } - - // The certificate chain MUST be verified to properly chain to the metadata TOC signing trust anchor. - valid, err := validateChain(chain, c) - if !valid || err != nil { - return nil, err - } - - // Chain validated, extract the TOC signing certificate from the chain. Create a buffer large enough to hold the - // certificate bytes. - o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string)))) - - // base64 decode the certificate into the buffer. - n, err := base64.StdEncoding.Decode(o, []byte(chain[0].(string))) - if err != nil { - return nil, err - } - - // Parse the certificate from the buffer. - cert, err := x509.ParseCertificate(o[:n]) - if err != nil { - return nil, err - } - - // 4. Verify the signature of the Metadata TOC object using the TOC signing certificate chain - // jwt.Parse() uses the TOC signing certificate public key internally to verify the signature. - return cert.PublicKey, err - }) - - if err != nil { - return payload, err - } - - err = mapstructure.Decode(token.Claims, &payload) - - return payload, err -} - -func validateChain(chain []interface{}, c http.Client) (bool, error) { - oRoot := make([]byte, base64.StdEncoding.DecodedLen(len(MDSRoot))) - - nRoot, err := base64.StdEncoding.Decode(oRoot, []byte(MDSRoot)) - if err != nil { - return false, err - } - - rootcert, err := x509.ParseCertificate(oRoot[:nRoot]) - if err != nil { - return false, err - } - - roots := x509.NewCertPool() - - roots.AddCert(rootcert) - - o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[1].(string)))) - - n, err := base64.StdEncoding.Decode(o, []byte(chain[1].(string))) - if err != nil { - return false, err - } - - intcert, err := x509.ParseCertificate(o[:n]) - if err != nil { - return false, err - } - - if revoked, ok := revoke.VerifyCertificate(intcert); !ok { - issuer := intcert.IssuingCertificateURL - - if issuer != nil { - return false, errCRLUnavailable - } - } else if revoked { - return false, errIntermediateCertRevoked - } - - ints := x509.NewCertPool() - ints.AddCert(intcert) - - l := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string)))) - - n, err = base64.StdEncoding.Decode(l, []byte(chain[0].(string))) - if err != nil { - return false, err - } - - leafcert, err := x509.ParseCertificate(l[:n]) - if err != nil { - return false, err - } - - if revoked, ok := revoke.VerifyCertificate(leafcert); !ok { - return false, errCRLUnavailable - } else if revoked { - return false, errLeafCertRevoked - } - - opts := x509.VerifyOptions{ - Roots: roots, - Intermediates: ints, - } - - _, err = leafcert.Verify(opts) - - return err == nil, err -} - -type MetadataError struct { - // Short name for the type of error that has occurred. - Type string `json:"type"` - // Additional details about the error. - Details string `json:"error"` - // Information to help debug the error. 
- DevInfo string `json:"debug"` -} +// DefaultUndesiredAuthenticatorStatuses returns a copy of the defaultUndesiredAuthenticatorStatus slice. +func DefaultUndesiredAuthenticatorStatuses() []AuthenticatorStatus { + undesired := make([]AuthenticatorStatus, len(defaultUndesiredAuthenticatorStatus)) -var ( - errIntermediateCertRevoked = &MetadataError{ - Type: "intermediate_revoked", - Details: "Intermediate certificate is on issuers revocation list", - } - errLeafCertRevoked = &MetadataError{ - Type: "leaf_revoked", - Details: "Leaf certificate is on issuers revocation list", - } - errCRLUnavailable = &MetadataError{ - Type: "crl_unavailable", - Details: "Certificate revocation list is unavailable", - } -) + copy(undesired, defaultUndesiredAuthenticatorStatus[:]) -func (err *MetadataError) Error() string { - return err.Details + return undesired } -func PopulateMetadata(url string) error { - c := &http.Client{ - Timeout: time.Second * 30, - } - - res, err := c.Get(url) - if err != nil { - return err - } - - defer res.Body.Close() - - body, err := io.ReadAll(res.Body) - if err != nil { - return err - } - - blob, err := unmarshalMDSBLOB(body, *c) - if err != nil { - return err - } - - for _, entry := range blob.Entries { - aaguid, _ := uuid.Parse(entry.AaGUID) - Metadata[aaguid] = entry - } - - return err +type EntryError struct { + Error error + EntryJSON } diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/passkey_authenticator.go b/vendor/github.com/go-webauthn/webauthn/metadata/passkey_authenticator.go new file mode 100644 index 0000000..bd993d0 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/passkey_authenticator.go @@ -0,0 +1,16 @@ +package metadata + +// PasskeyAuthenticator is a type that represents the schema from the Passkey Developer AAGUID listing. +// +// See: https://github.com/passkeydeveloper/passkey-authenticator-aaguids +type PasskeyAuthenticator map[string]PassKeyAuthenticatorAAGUID + +// PassKeyAuthenticatorAAGUID is a type that represents the indivudal schema entry from the Passkey Developer AAGUID +// listing. Used with PasskeyAuthenticator. +// +// See: https://github.com/passkeydeveloper/passkey-authenticator-aaguids +type PassKeyAuthenticatorAAGUID struct { + Name string `json:"name"` + IconDark string `json:"icon_dark,omitempty"` + IconLight string `json:"icon_light,omitempty"` +} diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/status.go b/vendor/github.com/go-webauthn/webauthn/metadata/status.go new file mode 100644 index 0000000..158e37c --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/status.go @@ -0,0 +1,62 @@ +package metadata + +import ( + "fmt" + "strings" +) + +// ValidateStatusReports checks a list of StatusReport's against a list of desired and undesired AuthenticatorStatus +// values. If the reports contain all of the desired and none of the undesired status reports then no error is returned +// otherwise an error describing the issue is returned. 
+func ValidateStatusReports(reports []StatusReport, desired, undesired []AuthenticatorStatus) (err error) { + if len(desired) == 0 && (len(undesired) == 0 || len(reports) == 0) { + return nil + } + + var present, absent []string + + if len(undesired) != 0 { + for _, report := range reports { + for _, status := range undesired { + if report.Status == status { + present = append(present, string(status)) + + continue + } + } + } + } + + if len(desired) != 0 { + desired: + for _, status := range desired { + for _, report := range reports { + if report.Status == status { + continue desired + } + } + + absent = append(absent, string(status)) + } + } + + switch { + case len(present) == 0 && len(absent) == 0: + return nil + case len(present) != 0 && len(absent) == 0: + return &MetadataError{ + Type: "invalid_status", + Details: fmt.Sprintf("The following undesired status reports were present: %s", strings.Join(present, ", ")), + } + case len(present) == 0 && len(absent) != 0: + return &MetadataError{ + Type: "invalid_status", + Details: fmt.Sprintf("The following desired status reports were absent: %s", strings.Join(absent, ", ")), + } + default: + return &MetadataError{ + Type: "invalid_status", + Details: fmt.Sprintf("The following undesired status reports were present: %s; the following desired status reports were absent: %s", strings.Join(present, ", "), strings.Join(absent, ", ")), + } + } +} diff --git a/vendor/github.com/go-webauthn/webauthn/metadata/types.go b/vendor/github.com/go-webauthn/webauthn/metadata/types.go new file mode 100644 index 0000000..1562b63 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/metadata/types.go @@ -0,0 +1,329 @@ +package metadata + +import ( + "context" + "errors" + "reflect" + "time" + + "github.com/google/uuid" + + "github.com/go-webauthn/webauthn/protocol/webauthncose" +) + +// The Provider is an interface which describes the elements required to satisfy validation of metadata. +type Provider interface { + // GetEntry returns a MDS3 payload entry given a AAGUID. This + GetEntry(ctx context.Context, aaguid uuid.UUID) (entry *Entry, err error) + + // GetValidateEntry returns true if this provider requires an entry to exist with a AAGUID matching the attestation + // statement during registration. + GetValidateEntry(ctx context.Context) (validate bool) + + // GetValidateEntryPermitZeroAAGUID returns true if attestation statements with zerod AAGUID should be permitted + // when considering the result from GetValidateEntry. i.e. if the AAGUID is zeroed, and GetValidateEntry returns + // true, and this implementation returns true, the attestation statement will pass validation. + GetValidateEntryPermitZeroAAGUID(ctx context.Context) (skip bool) + + // GetValidateTrustAnchor returns true if trust anchor validation of attestation statements is enforced during + // registration. + GetValidateTrustAnchor(ctx context.Context) (validate bool) + + // GetValidateStatus returns true if the status reports for an authenticator should be validated against desired and + // undesired statuses. + GetValidateStatus(ctx context.Context) (validate bool) + + // GetValidateAttestationTypes if true will enforce checking that the provided attestation is possible with the + // given authenticator. + GetValidateAttestationTypes(ctx context.Context) (validate bool) + + // ValidateStatusReports returns nil if the provided authenticator status reports are desired. 
+ ValidateStatusReports(ctx context.Context, reports []StatusReport) (err error) +} + +var ( + ErrNotInitialized = errors.New("metadata: not initialized") +) + +type PublicKeyCredentialParameters struct { + Type string `json:"type"` + Alg webauthncose.COSEAlgorithmIdentifier `json:"alg"` +} + +type AuthenticatorAttestationTypes []AuthenticatorAttestationType + +func (t AuthenticatorAttestationTypes) HasBasicFull() bool { + for _, a := range t { + if a == BasicFull || a == AttCA { + return true + } + } + + return false +} + +// AuthenticatorAttestationType - The ATTESTATION constants are 16 bit long integers indicating the specific attestation that authenticator supports. +// Each constant has a case-sensitive string representation (in quotes), which is used in the authoritative metadata for FIDO authenticators. +type AuthenticatorAttestationType string + +const ( + // BasicFull - Indicates full basic attestation, based on an attestation private key shared among a class of authenticators (e.g. same model). Authenticators must provide its attestation signature during the registration process for the same reason. The attestation trust anchor is shared with FIDO Servers out of band (as part of the Metadata). This sharing process should be done according to [UAFMetadataService]. + BasicFull AuthenticatorAttestationType = "basic_full" + + // BasicSurrogate - Just syntactically a Basic Attestation. The attestation object self-signed, i.e. it is signed using the UAuth.priv key, i.e. the key corresponding to the UAuth.pub key included in the attestation object. As a consequence it does not provide a cryptographic proof of the security characteristics. But it is the best thing we can do if the authenticator is not able to have an attestation private key. + BasicSurrogate AuthenticatorAttestationType = "basic_surrogate" + + // Ecdaa - Indicates use of elliptic curve based direct anonymous attestation as defined in [FIDOEcdaaAlgorithm]. Support for this attestation type is optional at this time. It might be required by FIDO Certification. + Ecdaa AuthenticatorAttestationType = "ecdaa" + + // AttCA - Indicates PrivacyCA attestation as defined in [TCG-CMCProfile-AIKCertEnroll]. Support for this attestation type is optional at this time. It might be required by FIDO Certification. + AttCA AuthenticatorAttestationType = "attca" + + // AnonCA In this case, the authenticator uses an Anonymization CA which dynamically generates per-credential attestation certificates such that the attestation statements presented to Relying Parties do not provide uniquely identifiable information, e.g., that might be used for tracking purposes. The applicable [WebAuthn] attestation formats "fmt" are Google SafetyNet Attestation "android-safetynet", Android Keystore Attestation "android-key", Apple Anonymous Attestation "apple", and Apple Application Attestation "apple-appattest". + AnonCA AuthenticatorAttestationType = "anonca" + + // None - Indicates absence of attestation + None AuthenticatorAttestationType = "none" +) + +// AuthenticatorStatus - This enumeration describes the status of an authenticator model as identified by its AAID and potentially some additional information (such as a specific attestation key). +// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.0-ps-20210518.html#authenticatorstatus-enum +type AuthenticatorStatus string + +const ( + // NotFidoCertified - This authenticator is not FIDO certified. 
+ NotFidoCertified AuthenticatorStatus = "NOT_FIDO_CERTIFIED" + // FidoCertified - This authenticator has passed FIDO functional certification. This certification scheme is phased out and will be replaced by FIDO_CERTIFIED_L1. + FidoCertified AuthenticatorStatus = "FIDO_CERTIFIED" + // UserVerificationBypass - Indicates that malware is able to bypass the user verification. This means that the authenticator could be used without the user's consent and potentially even without the user's knowledge. + UserVerificationBypass AuthenticatorStatus = "USER_VERIFICATION_BYPASS" + // AttestationKeyCompromise - Indicates that an attestation key for this authenticator is known to be compromised. Additional data should be supplied, including the key identifier and the date of compromise, if known. + AttestationKeyCompromise AuthenticatorStatus = "ATTESTATION_KEY_COMPROMISE" + // UserKeyRemoteCompromise - This authenticator has identified weaknesses that allow registered keys to be compromised and should not be trusted. This would include both, e.g. weak entropy that causes predictable keys to be generated or side channels that allow keys or signatures to be forged, guessed or extracted. + UserKeyRemoteCompromise AuthenticatorStatus = "USER_KEY_REMOTE_COMPROMISE" + // UserKeyPhysicalCompromise - This authenticator has known weaknesses in its key protection mechanism(s) that allow user keys to be extracted by an adversary in physical possession of the device. + UserKeyPhysicalCompromise AuthenticatorStatus = "USER_KEY_PHYSICAL_COMPROMISE" + // UpdateAvailable - A software or firmware update is available for the device. Additional data should be supplied including a URL where users can obtain an update and the date the update was published. + UpdateAvailable AuthenticatorStatus = "UPDATE_AVAILABLE" + // Revoked - The FIDO Alliance has determined that this authenticator should not be trusted for any reason, for example if it is known to be a fraudulent product or contain a deliberate backdoor. + Revoked AuthenticatorStatus = "REVOKED" + // SelfAssertionSubmitted - The authenticator vendor has completed and submitted the self-certification checklist to the FIDO Alliance. If this completed checklist is publicly available, the URL will be specified in StatusReportJSON.url. + SelfAssertionSubmitted AuthenticatorStatus = "SELF_ASSERTION_SUBMITTED" + // FidoCertifiedL1 - The authenticator has passed FIDO Authenticator certification at level 1. This level is the more strict successor of FIDO_CERTIFIED. + FidoCertifiedL1 AuthenticatorStatus = "FIDO_CERTIFIED_L1" + // FidoCertifiedL1plus - The authenticator has passed FIDO Authenticator certification at level 1+. This level is the more than level 1. + FidoCertifiedL1plus AuthenticatorStatus = "FIDO_CERTIFIED_L1plus" + // FidoCertifiedL2 - The authenticator has passed FIDO Authenticator certification at level 2. This level is more strict than level 1+. + FidoCertifiedL2 AuthenticatorStatus = "FIDO_CERTIFIED_L2" + // FidoCertifiedL2plus - The authenticator has passed FIDO Authenticator certification at level 2+. This level is more strict than level 2. + FidoCertifiedL2plus AuthenticatorStatus = "FIDO_CERTIFIED_L2plus" + // FidoCertifiedL3 - The authenticator has passed FIDO Authenticator certification at level 3. This level is more strict than level 2+. + FidoCertifiedL3 AuthenticatorStatus = "FIDO_CERTIFIED_L3" + // FidoCertifiedL3plus - The authenticator has passed FIDO Authenticator certification at level 3+. This level is more strict than level 3. 
+ FidoCertifiedL3plus AuthenticatorStatus = "FIDO_CERTIFIED_L3plus" +) + +// defaultUndesiredAuthenticatorStatus is an array of undesirable authenticator statuses +var defaultUndesiredAuthenticatorStatus = [...]AuthenticatorStatus{ + AttestationKeyCompromise, + UserVerificationBypass, + UserKeyRemoteCompromise, + UserKeyPhysicalCompromise, + Revoked, +} + +// IsUndesiredAuthenticatorStatus returns whether the supplied authenticator status is desirable or not +func IsUndesiredAuthenticatorStatus(status AuthenticatorStatus) bool { + for _, s := range defaultUndesiredAuthenticatorStatus { + if s == status { + return true + } + } + + return false +} + +// IsUndesiredAuthenticatorStatusSlice returns whether the supplied authenticator status is desirable or not +func IsUndesiredAuthenticatorStatusSlice(status AuthenticatorStatus, values []AuthenticatorStatus) bool { + for _, s := range values { + if s == status { + return true + } + } + + return false +} + +// IsUndesiredAuthenticatorStatusMap returns whether the supplied authenticator status is desirable or not +func IsUndesiredAuthenticatorStatusMap(status AuthenticatorStatus, values map[AuthenticatorStatus]bool) bool { + _, ok := values[status] + + return ok +} + +type AuthenticationAlgorithm string + +const ( + // ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW is an ECDSA signature on the NIST secp256r1 curve which must have raw R and + // S buffers, encoded in big-endian order. + ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_raw" + + // ALG_SIGN_SECP256R1_ECDSA_SHA256_DER is a DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the NIST secp256r1 + // curve. + ALG_SIGN_SECP256R1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_der" + + // ALG_SIGN_RSASSA_PSS_SHA256_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian + // order RFC4055 RFC4056. + ALG_SIGN_RSASSA_PSS_SHA256_RAW AuthenticationAlgorithm = "rsassa_pss_sha256_raw" + + // ALG_SIGN_RSASSA_PSS_SHA256_DER is a DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the + // RSASSA-PSS RFC3447 signature RFC4055 RFC4056. + ALG_SIGN_RSASSA_PSS_SHA256_DER AuthenticationAlgorithm = "rsassa_pss_sha256_der" + + // ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW is an ECDSA signature on the secp256k1 curve which must have raw R and S + // buffers, encoded in big-endian order. + ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_raw" + + // ALG_SIGN_SECP256K1_ECDSA_SHA256_DER is a DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the secp256k1 curve. + ALG_SIGN_SECP256K1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_der" + + // ALG_SIGN_SM2_SM3_RAW is a Chinese SM2 elliptic curve based signature algorithm combined with SM3 hash algorithm + // OSCCA-SM2 OSCCA-SM3. + ALG_SIGN_SM2_SM3_RAW AuthenticationAlgorithm = "sm2_sm3_raw" + + // ALG_SIGN_RSA_EMSA_PKCS1_SHA256_RAW is the EMSA-PKCS1-v1_5 signature as defined in RFC3447. + ALG_SIGN_RSA_EMSA_PKCS1_SHA256_RAW AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_raw" + + // ALG_SIGN_RSA_EMSA_PKCS1_SHA256_DER is a DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the + // EMSA-PKCS1-v1_5 signature as defined in RFC3447. + ALG_SIGN_RSA_EMSA_PKCS1_SHA256_DER AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_der" + + // ALG_SIGN_RSASSA_PSS_SHA384_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian + // order RFC4055 RFC4056. 
+ ALG_SIGN_RSASSA_PSS_SHA384_RAW AuthenticationAlgorithm = "rsassa_pss_sha384_raw" + + // ALG_SIGN_RSASSA_PSS_SHA512_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian + // order RFC4055 RFC4056. + ALG_SIGN_RSASSA_PSS_SHA512_RAW AuthenticationAlgorithm = "rsassa_pss_sha512_raw" + + // ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA256(aka RS256) signature must have raw + // S buffers, encoded in big-endian order RFC8017 RFC4056 + ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha256_raw" + + // RSASSA-PKCS1-v1_5 RFC3447 with SHA384(aka RS384) signature must have raw S buffers, encoded in big-endian order RFC8017 RFC4056 + ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha384_raw" + + // ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA512(aka RS512) signature must have raw + // S buffers, encoded in big-endian order RFC8017 RFC4056 + ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha512_raw" + + // ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA1(aka RS1) signature must have raw S + // buffers, encoded in big-endian order RFC8017 RFC4056 + ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha1_raw" + + // ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW is an ECDSA signature on the NIST secp384r1 curve with SHA384(aka: ES384) + // which must have raw R and S buffers, encoded in big-endian order. + ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW AuthenticationAlgorithm = "secp384r1_ecdsa_sha384_raw" + + // ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW is an ECDSA signature on the NIST secp512r1 curve with SHA512(aka: ES512) + // which must have raw R and S buffers, encoded in big-endian order. + ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW AuthenticationAlgorithm = "secp521r1_ecdsa_sha512_raw" + + // ALG_SIGN_ED25519_EDDSA_SHA512_RAW is an EdDSA signature on the curve 25519, which must have raw R and S buffers, + // encoded in big-endian order. + ALG_SIGN_ED25519_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed25519_eddsa_sha512_raw" + + // ALG_SIGN_ED448_EDDSA_SHA512_RAW is an EdDSA signature on the curve Ed448, which must have raw R and S buffers, + // encoded in big-endian order. 
+ ALG_SIGN_ED448_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed448_eddsa_sha512_raw" +) + +// TODO: this goes away after webauthncose.CredentialPublicKey gets implemented +type algKeyCose struct { + KeyType webauthncose.COSEKeyType + Algorithm webauthncose.COSEAlgorithmIdentifier + Curve webauthncose.COSEEllipticCurve +} + +func algKeyCoseDictionary() func(AuthenticationAlgorithm) algKeyCose { + mapping := map[AuthenticationAlgorithm]algKeyCose{ + ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256}, + ALG_SIGN_SECP256R1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256}, + ALG_SIGN_RSASSA_PSS_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256}, + ALG_SIGN_RSASSA_PSS_SHA256_DER: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256}, + ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1}, + ALG_SIGN_SECP256K1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1}, + ALG_SIGN_RSASSA_PSS_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS384}, + ALG_SIGN_RSASSA_PSS_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS512}, + ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS256}, + ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS384}, + ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS512}, + ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS1}, + ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES384, Curve: webauthncose.P384}, + ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES512, Curve: webauthncose.P521}, + ALG_SIGN_ED25519_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed25519}, + ALG_SIGN_ED448_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed448}, + } + + return func(key AuthenticationAlgorithm) algKeyCose { + return mapping[key] + } +} + +func AlgKeyMatch(key algKeyCose, algs []AuthenticationAlgorithm) bool { + for _, alg := range algs { + if reflect.DeepEqual(algKeyCoseDictionary()(alg), key) { + return true + } + } + + return false +} + +type PublicKeyAlgAndEncoding string + +const ( + // ALG_KEY_ECC_X962_RAW is a raw ANSI X9.62 formatted Elliptic Curve public key. + ALG_KEY_ECC_X962_RAW PublicKeyAlgAndEncoding = "ecc_x962_raw" + + // ALG_KEY_ECC_X962_DER is a DER ITU-X690-2008 encoded ANSI X.9.62 formatted SubjectPublicKeyInfo RFC5480 specifying an elliptic curve public key. + ALG_KEY_ECC_X962_DER PublicKeyAlgAndEncoding = "ecc_x962_der" + + // ALG_KEY_RSA_2048_RAW is a raw encoded 2048-bit RSA public key RFC3447. + ALG_KEY_RSA_2048_RAW PublicKeyAlgAndEncoding = "rsa_2048_raw" + + // ALG_KEY_RSA_2048_DER is a ASN.1 DER [ITU-X690-2008] encoded 2048-bit RSA RFC3447 public key RFC4055. + ALG_KEY_RSA_2048_DER PublicKeyAlgAndEncoding = "rsa_2048_der" + + // ALG_KEY_COSE is a COSE_Key format, as defined in Section 7 of RFC8152. This encoding includes its own field for indicating the public key algorithm. 
+ ALG_KEY_COSE PublicKeyAlgAndEncoding = "cose" +) + +type MetadataError struct { + // Short name for the type of error that has occurred. + Type string `json:"type"` + + // Additional details about the error. + Details string `json:"error"` + + // Information to help debug the error. + DevInfo string `json:"debug"` +} + +func (err *MetadataError) Error() string { + return err.Details +} + +// Clock is an interface used to implement clock functionality in various metadata areas. +type Clock interface { + // Now returns the current time. + Now() time.Time +} + +// RealClock is just a real clock. +type RealClock struct{} + +// Now returns the current time. +func (RealClock) Now() time.Time { + return time.Now() +} diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/assertion.go b/vendor/github.com/go-webauthn/webauthn/protocol/assertion.go index 897a56c..331d9fe 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/assertion.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/assertion.go @@ -15,6 +15,7 @@ import ( // credential for login/assertion. type CredentialAssertionResponse struct { PublicKeyCredential + AssertionResponse AuthenticatorAssertionResponse `json:"response"` } @@ -22,6 +23,7 @@ type CredentialAssertionResponse struct { // that allows us to verify the client and authenticator data inside the response. type ParsedCredentialAssertionData struct { ParsedPublicKeyCredential + Response ParsedAssertionResponse Raw CredentialAssertionResponse } @@ -30,6 +32,7 @@ type ParsedCredentialAssertionData struct { // ParsedAssertionResponse. type AuthenticatorAssertionResponse struct { AuthenticatorResponse + AuthenticatorData URLEncodedBase64 `json:"authenticatorData"` Signature URLEncodedBase64 `json:"signature"` UserHandle URLEncodedBase64 `json:"userHandle,omitempty"` @@ -70,6 +73,18 @@ func ParseCredentialRequestResponseBody(body io.Reader) (par *ParsedCredentialAs return car.Parse() } +// ParseCredentialRequestResponseBytes is an alternative version of ParseCredentialRequestResponseBody that just takes +// a byte slice. +func ParseCredentialRequestResponseBytes(data []byte) (par *ParsedCredentialAssertionData, err error) { + var car CredentialAssertionResponse + + if err = decodeBytes(data, &car); err != nil { + return nil, ErrBadRequest.WithDetails("Parse error for Assertion").WithInfo(err.Error()) + } + + return car.Parse() +} + // Parse validates and parses the CredentialAssertionResponse into a ParseCredentialCreationResponseBody. This receiver // is unlikely to be expressly guaranteed under the versioning policy. Users looking for this guarantee should see // ParseCredentialRequestResponseBody instead, and this receiver should only be used if that function is inadequate @@ -124,14 +139,14 @@ func (car CredentialAssertionResponse) Parse() (par *ParsedCredentialAssertionDa // documentation. 
// // Specification: §7.2 Verifying an Authentication Assertion (https://www.w3.org/TR/webauthn/#sctn-verifying-assertion) -func (p *ParsedCredentialAssertionData) Verify(storedChallenge string, relyingPartyID string, relyingPartyOrigins []string, appID string, verifyUser bool, credentialBytes []byte) error { +func (p *ParsedCredentialAssertionData) Verify(storedChallenge string, relyingPartyID string, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode, appID string, verifyUser bool, credentialBytes []byte) error { // Steps 4 through 6 in verifying the assertion data (https://www.w3.org/TR/webauthn/#verifying-assertion) are // "assertive" steps, i.e "Let JSONtext be the result of running UTF-8 decode on the value of cData." // We handle these steps in part as we verify but also beforehand // Handle steps 7 through 10 of assertion by verifying stored data against the Collected Client Data // returned by the authenticator - validError := p.Response.CollectedClientData.Verify(storedChallenge, AssertCeremony, relyingPartyOrigins) + validError := p.Response.CollectedClientData.Verify(storedChallenge, AssertCeremony, rpOrigins, rpTopOrigins, rpTopOriginsVerify) if validError != nil { return validError } @@ -161,7 +176,7 @@ func (p *ParsedCredentialAssertionData) Verify(storedChallenge string, relyingPa sigData := append(p.Raw.AssertionResponse.AuthenticatorData, clientDataHash[:]...) var ( - key interface{} + key any err error ) diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation.go index 54716de..adddce3 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation.go @@ -1,6 +1,7 @@ package protocol import ( + "context" "crypto/sha256" "crypto/x509" "encoding/json" @@ -22,6 +23,14 @@ type AuthenticatorAttestationResponse struct { // The byte slice of clientDataJSON, which becomes CollectedClientData AuthenticatorResponse + Transports []string `json:"transports,omitempty"` + + AuthenticatorData URLEncodedBase64 `json:"authenticatorData"` + + PublicKey URLEncodedBase64 `json:"publicKey"` + + PublicKeyAlgorithm int64 `json:"publicKeyAlgorithm"` + // AttestationObject is the byte slice version of attestationObject. // This attribute contains an attestation object, which is opaque to, and // cryptographically protected against tampering by, the client. The @@ -33,8 +42,6 @@ type AuthenticatorAttestationResponse struct { // requires to validate the attestation statement, as well as to decode and // validate the authenticator data along with the JSON-serialized client data. AttestationObject URLEncodedBase64 `json:"attestationObject"` - - Transports []string `json:"transports,omitempty"` } // ParsedAttestationResponse is the parsed version of AuthenticatorAttestationResponse. @@ -60,21 +67,24 @@ type ParsedAttestationResponse struct { type AttestationObject struct { // The authenticator data, including the newly created public key. See AuthenticatorData for more info AuthData AuthenticatorData + // The byteform version of the authenticator data, used in part for signature validation RawAuthData []byte `json:"authData"` + // The format of the Attestation data. Format string `json:"fmt"` + // The attestation statement data sent back if attestation is requested. 
- AttStatement map[string]interface{} `json:"attStmt,omitempty"` + AttStatement map[string]any `json:"attStmt,omitempty"` } -type attestationFormatValidationHandler func(AttestationObject, []byte) (string, []interface{}, error) +type attestationFormatValidationHandler func(AttestationObject, []byte, metadata.Provider) (string, []any, error) -var attestationRegistry = make(map[string]attestationFormatValidationHandler) +var attestationRegistry = make(map[AttestationFormat]attestationFormatValidationHandler) // RegisterAttestationFormat is a method to register attestation formats with the library. Generally using one of the // locally registered attestation formats is sufficient. -func RegisterAttestationFormat(format string, handler attestationFormatValidationHandler) { +func RegisterAttestationFormat(format AttestationFormat, handler attestationFormatValidationHandler) { attestationRegistry[format] = handler } @@ -114,15 +124,20 @@ func (ccr *AuthenticatorAttestationResponse) Parse() (p *ParsedAttestationRespon // // Steps 9 through 12 are verified against the auth data. These steps are identical to 11 through 14 for assertion so we // handle them with AuthData. -func (attestationObject *AttestationObject) Verify(relyingPartyID string, clientDataHash []byte, verificationRequired bool) error { +func (a *AttestationObject) Verify(relyingPartyID string, clientDataHash []byte, userVerificationRequired bool, mds metadata.Provider) (err error) { rpIDHash := sha256.Sum256([]byte(relyingPartyID)) // Begin Step 9 through 12. Verify that the rpIdHash in authData is the SHA-256 hash of the RP ID expected by the RP. - authDataVerificationError := attestationObject.AuthData.Verify(rpIDHash[:], nil, verificationRequired) - if authDataVerificationError != nil { - return authDataVerificationError + if err = a.AuthData.Verify(rpIDHash[:], nil, userVerificationRequired); err != nil { + return err } + return a.VerifyAttestation(clientDataHash, mds) +} + +// VerifyAttestation only verifies the attestation object excluding the AuthData values. If you wish to also verify the +// AuthData values you should use Verify. +func (a *AttestationObject) VerifyAttestation(clientDataHash []byte, mds metadata.Provider) (err error) { // Step 13. Determine the attestation statement format by performing a // USASCII case-sensitive match on fmt against the set of supported // WebAuthn Attestation Statement Format Identifier values. The up-to-date @@ -135,61 +150,114 @@ func (attestationObject *AttestationObject) Verify(relyingPartyID string, client // But first let's make sure attestation is present. If it isn't, we don't need to handle // any of the following steps - if attestationObject.Format == "none" { - if len(attestationObject.AttStatement) != 0 { + if AttestationFormat(a.Format) == AttestationFormatNone { + if len(a.AttStatement) != 0 { return ErrAttestationFormat.WithInfo("Attestation format none with attestation present") } return nil } - formatHandler, valid := attestationRegistry[attestationObject.Format] + formatHandler, valid := attestationRegistry[AttestationFormat(a.Format)] if !valid { - return ErrAttestationFormat.WithInfo(fmt.Sprintf("Attestation format %s is unsupported", attestationObject.Format)) + return ErrAttestationFormat.WithInfo(fmt.Sprintf("Attestation format %s is unsupported", a.Format)) } // Step 14. 
Verify that attStmt is a correct attestation statement, conveying a valid attestation signature, by using // the attestation statement format fmt’s verification procedure given attStmt, authData and the hash of the serialized // client data computed in step 7. - attestationType, x5c, err := formatHandler(*attestationObject, clientDataHash) + attestationType, x5cs, err := formatHandler(*a, clientDataHash, mds) if err != nil { return err.(*Error).WithInfo(attestationType) } - aaguid, err := uuid.FromBytes(attestationObject.AuthData.AttData.AAGUID) - if err != nil { - return err + var ( + aaguid uuid.UUID + entry *metadata.Entry + ) + + if len(a.AuthData.AttData.AAGUID) != 0 { + if aaguid, err = uuid.FromBytes(a.AuthData.AttData.AAGUID); err != nil { + return ErrInvalidAttestation.WithInfo("Error occurred parsing AAGUID during attestation validation").WithDetails(err.Error()) + } } - if meta, ok := metadata.Metadata[aaguid]; ok { - for _, s := range meta.StatusReports { - if metadata.IsUndesiredAuthenticatorStatus(s.Status) { - return ErrInvalidAttestation.WithDetails("Authenticator with undesirable status encountered") - } + if mds == nil { + return nil + } + + ctx := context.Background() + + if entry, err = mds.GetEntry(ctx, aaguid); err != nil { + return ErrInvalidAttestation.WithInfo(fmt.Sprintf("Error occurred retrieving metadata entry during attestation validation: %+v", err)).WithDetails(fmt.Sprintf("Error occurred looking up entry for AAGUID %s", aaguid.String())) + } + + if entry == nil { + if aaguid == uuid.Nil && mds.GetValidateEntryPermitZeroAAGUID(ctx) { + return nil } - if x5c != nil { - x5cAtt, err := x509.ParseCertificate(x5c[0].([]byte)) - if err != nil { - return ErrInvalidAttestation.WithDetails("Unable to parse attestation certificate from x5c") + if mds.GetValidateEntry(ctx) { + return ErrInvalidAttestation.WithDetails(fmt.Sprintf("AAGUID %s not found in metadata during attestation validation", aaguid.String())) + } + + return nil + } + + if mds.GetValidateAttestationTypes(ctx) { + found := false + + for _, atype := range entry.MetadataStatement.AttestationTypes { + if string(atype) == attestationType { + found = true + + break } + } - if x5cAtt.Subject.CommonName != x5cAtt.Issuer.CommonName { - var hasBasicFull = false + if !found { + return ErrInvalidAttestation.WithDetails(fmt.Sprintf("Authenticator with invalid attestation type encountered during attestation validation. The attestation type '%s' is not known to be used by AAGUID '%s'", attestationType, aaguid.String())) + } + } - for _, a := range meta.MetadataStatement.AttestationTypes { - if a == metadata.BasicFull || a == metadata.AttCA { - hasBasicFull = true - } - } + if mds.GetValidateStatus(ctx) { + if err = mds.ValidateStatusReports(ctx, entry.StatusReports); err != nil { + return ErrInvalidAttestation.WithDetails(fmt.Sprintf("Authenticator with invalid status encountered during attestation validation. 
%s", err.Error())) + } + } + + if mds.GetValidateTrustAnchor(ctx) { + if x5cs == nil { + return nil + } + + var ( + x5c *x509.Certificate + raw []byte + ok bool + ) + + if len(x5cs) == 0 { + return ErrInvalidAttestation.WithDetails("Unable to parse attestation certificate from x5c during attestation validation").WithInfo("The attestation had no certificates") + } + + if raw, ok = x5cs[0].([]byte); !ok { + return ErrInvalidAttestation.WithDetails("Unable to parse attestation certificate from x5c during attestation validation").WithInfo(fmt.Sprintf("The first certificate in the attestation was type '%T' but '[]byte' was expected", x5cs[0])) + } + + if x5c, err = x509.ParseCertificate(raw); err != nil { + return ErrInvalidAttestation.WithDetails("Unable to parse attestation certificate from x5c during attestation validation").WithInfo(fmt.Sprintf("Error returned from x509.ParseCertificate: %+v", err)) + } + + if x5c.Subject.CommonName != x5c.Issuer.CommonName { + if !entry.MetadataStatement.AttestationTypes.HasBasicFull() { + return ErrInvalidAttestation.WithDetails("Unable to validate attestation statement signature during attestation validation: attestation with full attestation from authenticator that does not support full attestation") + } - if !hasBasicFull { - return ErrInvalidAttestation.WithDetails("Attestation with full attestation from authenticator that does not support full attestation") - } + if _, err = x5c.Verify(entry.MetadataStatement.Verifier()); err != nil { + return ErrInvalidAttestation.WithDetails(fmt.Sprintf("Unable to validate attestation signature statement during attestation validation: invalid certificate chain from MDS: %v", err)) } } - } else if metadata.Conformance { - return ErrInvalidAttestation.WithDetails(fmt.Sprintf("AAGUID %s not found in metadata during conformance testing", aaguid.String())) } return nil diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go index de43783..2201303 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go @@ -10,10 +10,8 @@ import ( "github.com/go-webauthn/webauthn/protocol/webauthncose" ) -var androidAttestationKey = "android-key" - func init() { - RegisterAttestationFormat(androidAttestationKey, verifyAndroidKeyFormat) + RegisterAttestationFormat(AttestationFormatAndroidKey, verifyAndroidKeyFormat) } // The android-key attestation statement looks like: @@ -31,26 +29,26 @@ func init() { // } // // Specification: §8.4. Android Key Attestation Statement Format (https://www.w3.org/TR/webauthn/#sctn-android-key-attestation) -func verifyAndroidKeyFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifyAndroidKeyFormat(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (string, []any, error) { // Given the verification procedure inputs attStmt, authenticatorData and clientDataHash, the verification procedure is as follows: // §8.4.1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it to extract // the contained fields. // Get the alg value - A COSEAlgorithmIdentifier containing the identifier of the algorithm // used to generate the attestation signature. 
- alg, present := att.AttStatement["alg"].(int64) + alg, present := att.AttStatement[stmtAlgorithm].(int64) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving alg value") } // Get the sig value - A byte string containing the attestation signature. - sig, present := att.AttStatement["sig"].([]byte) + sig, present := att.AttStatement[stmtSignature].([]byte) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving sig value") } // If x5c is not present, return an error - x5c, x509present := att.AttStatement["x5c"].([]interface{}) + x5c, x509present := att.AttStatement[stmtX5C].([]any) if !x509present { // Handle Basic Attestation steps for the x509 Certificate return "", nil, ErrAttestationFormat.WithDetails("Error retrieving x5c value") @@ -165,19 +163,19 @@ type authorizationList struct { Padding []int `asn1:"tag:6,explicit,set,optional"` EcCurve int `asn1:"tag:10,explicit,optional"` RsaPublicExponent int `asn1:"tag:200,explicit,optional"` - RollbackResistance interface{} `asn1:"tag:303,explicit,optional"` + RollbackResistance any `asn1:"tag:303,explicit,optional"` ActiveDateTime int `asn1:"tag:400,explicit,optional"` OriginationExpireDateTime int `asn1:"tag:401,explicit,optional"` UsageExpireDateTime int `asn1:"tag:402,explicit,optional"` - NoAuthRequired interface{} `asn1:"tag:503,explicit,optional"` + NoAuthRequired any `asn1:"tag:503,explicit,optional"` UserAuthType int `asn1:"tag:504,explicit,optional"` AuthTimeout int `asn1:"tag:505,explicit,optional"` - AllowWhileOnBody interface{} `asn1:"tag:506,explicit,optional"` - TrustedUserPresenceRequired interface{} `asn1:"tag:507,explicit,optional"` - TrustedConfirmationRequired interface{} `asn1:"tag:508,explicit,optional"` - UnlockedDeviceRequired interface{} `asn1:"tag:509,explicit,optional"` - AllApplications interface{} `asn1:"tag:600,explicit,optional"` - ApplicationID interface{} `asn1:"tag:601,explicit,optional"` + AllowWhileOnBody any `asn1:"tag:506,explicit,optional"` + TrustedUserPresenceRequired any `asn1:"tag:507,explicit,optional"` + TrustedConfirmationRequired any `asn1:"tag:508,explicit,optional"` + UnlockedDeviceRequired any `asn1:"tag:509,explicit,optional"` + AllApplications any `asn1:"tag:600,explicit,optional"` + ApplicationID any `asn1:"tag:601,explicit,optional"` CreationDateTime int `asn1:"tag:701,explicit,optional"` Origin int `asn1:"tag:702,explicit,optional"` RootOfTrust rootOfTrust `asn1:"tag:704,explicit,optional"` diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go index 935218f..c828e7b 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go @@ -14,10 +14,8 @@ import ( "github.com/go-webauthn/webauthn/protocol/webauthncose" ) -var appleAttestationKey = "apple" - func init() { - RegisterAttestationFormat(appleAttestationKey, verifyAppleFormat) + RegisterAttestationFormat(AttestationFormatApple, verifyAppleFormat) } // The apple attestation statement looks like: @@ -33,12 +31,12 @@ func init() { // } // // Specification: §8.8. Apple Anonymous Attestation Statement Format (https://www.w3.org/TR/webauthn/#sctn-apple-anonymous-attestation) -func verifyAppleFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifyAppleFormat(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (string, []any, error) { // Step 1. 
Verify that attStmt is valid CBOR conforming to the syntax defined // above and perform CBOR decoding on it to extract the contained fields. // If x5c is not present, return an error - x5c, x509present := att.AttStatement["x5c"].([]interface{}) + x5c, x509present := att.AttStatement[stmtX5C].([]any) if !x509present { // Handle Basic Attestation steps for the x509 Certificate return "", nil, ErrAttestationFormat.WithDetails("Error retrieving x5c value") diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go index 8b0940a..7176947 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go @@ -12,10 +12,8 @@ import ( "github.com/go-webauthn/webauthn/protocol/webauthncose" ) -var packedAttestationKey = "packed" - func init() { - RegisterAttestationFormat(packedAttestationKey, verifyPackedFormat) + RegisterAttestationFormat(AttestationFormatPacked, verifyPackedFormat) } // The packed attestation statement looks like: @@ -36,26 +34,26 @@ func init() { // } // // Specification: §8.2. Packed Attestation Statement Format (https://www.w3.org/TR/webauthn/#sctn-packed-attestation) -func verifyPackedFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifyPackedFormat(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (string, []any, error) { // Step 1. Verify that attStmt is valid CBOR conforming to the syntax defined // above and perform CBOR decoding on it to extract the contained fields. // Get the alg value - A COSEAlgorithmIdentifier containing the identifier of the algorithm // used to generate the attestation signature. - alg, present := att.AttStatement["alg"].(int64) + alg, present := att.AttStatement[stmtAlgorithm].(int64) if !present { - return packedAttestationKey, nil, ErrAttestationFormat.WithDetails("Error retrieving alg value") + return string(AttestationFormatPacked), nil, ErrAttestationFormat.WithDetails("Error retrieving alg value") } // Get the sig value - A byte string containing the attestation signature. - sig, present := att.AttStatement["sig"].([]byte) + sig, present := att.AttStatement[stmtSignature].([]byte) if !present { - return packedAttestationKey, nil, ErrAttestationFormat.WithDetails("Error retrieving sig value") + return string(AttestationFormatPacked), nil, ErrAttestationFormat.WithDetails("Error retrieving sig value") } // Step 2. If x5c is present, this indicates that the attestation type is not ECDAA. - x5c, x509present := att.AttStatement["x5c"].([]interface{}) + x5c, x509present := att.AttStatement[stmtX5C].([]any) if x509present { // Handle Basic Attestation steps for the x509 Certificate return handleBasicAttestation(sig, clientDataHash, att.RawAuthData, att.AuthData.AttData.AAGUID, alg, x5c) @@ -63,7 +61,7 @@ func verifyPackedFormat(att AttestationObject, clientDataHash []byte) (string, [ // Step 3. If ecdaaKeyId is present, then the attestation type is ECDAA. 
// Also make sure the we did not have an x509 then - ecdaaKeyID, ecdaaKeyPresent := att.AttStatement["ecdaaKeyId"].([]byte) + ecdaaKeyID, ecdaaKeyPresent := att.AttStatement[stmtECDAAKID].([]byte) if ecdaaKeyPresent { // Handle ECDAA Attestation steps for the x509 Certificate return handleECDAAAttestation(sig, clientDataHash, ecdaaKeyID) @@ -74,7 +72,7 @@ func verifyPackedFormat(att AttestationObject, clientDataHash []byte) (string, [ } // Handle the attestation steps laid out in -func handleBasicAttestation(signature, clientDataHash, authData, aaguid []byte, alg int64, x5c []interface{}) (string, []interface{}, error) { +func handleBasicAttestation(signature, clientDataHash, authData, aaguid []byte, alg int64, x5c []any) (string, []any, error) { // Step 2.1. Verify that sig is a valid signature over the concatenation of authenticatorData // and clientDataHash using the attestation public key in attestnCert with the algorithm specified in alg. for _, c := range x5c { @@ -201,11 +199,11 @@ func handleBasicAttestation(signature, clientDataHash, authData, aaguid []byte, return string(metadata.BasicFull), x5c, nil } -func handleECDAAAttestation(signature, clientDataHash, ecdaaKeyID []byte) (string, []interface{}, error) { +func handleECDAAAttestation(signature, clientDataHash, ecdaaKeyID []byte) (string, []any, error) { return "Packed (ECDAA)", nil, ErrNotSpecImplemented } -func handleSelfAttestation(alg int64, pubKey, authData, clientDataHash, signature []byte) (string, []interface{}, error) { +func handleSelfAttestation(alg int64, pubKey, authData, clientDataHash, signature []byte) (string, []any, error) { // §4.1 Validate that alg matches the algorithm of the credentialPublicKey in authenticatorData. // §4.2 Verify that sig is a valid signature over the concatenation of authenticatorData and diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go index 8e94ad1..849cda1 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go @@ -2,6 +2,7 @@ package protocol import ( "bytes" + "context" "crypto/sha256" "crypto/x509" "encoding/base64" @@ -14,20 +15,18 @@ import ( "github.com/go-webauthn/webauthn/metadata" ) -var safetyNetAttestationKey = "android-safetynet" - func init() { - RegisterAttestationFormat(safetyNetAttestationKey, verifySafetyNetFormat) + RegisterAttestationFormat(AttestationFormatAndroidSafetyNet, verifySafetyNetFormat) } type SafetyNetResponse struct { - Nonce string `json:"nonce"` - TimestampMs int64 `json:"timestampMs"` - ApkPackageName string `json:"apkPackageName"` - ApkDigestSha256 string `json:"apkDigestSha256"` - CtsProfileMatch bool `json:"ctsProfileMatch"` - ApkCertificateDigestSha256 []interface{} `json:"apkCertificateDigestSha256"` - BasicIntegrity bool `json:"basicIntegrity"` + Nonce string `json:"nonce"` + TimestampMs int64 `json:"timestampMs"` + ApkPackageName string `json:"apkPackageName"` + ApkDigestSha256 string `json:"apkDigestSha256"` + CtsProfileMatch bool `json:"ctsProfileMatch"` + ApkCertificateDigestSha256 []any `json:"apkCertificateDigestSha256"` + BasicIntegrity bool `json:"basicIntegrity"` } // Thanks to @koesie10 and @herrjemand for outlining how to support this type really well @@ -42,7 +41,7 @@ type SafetyNetResponse struct { // authenticators SHOULD make use of the Android Key Attestation when available, even if the SafetyNet API is also 
present. // // Specification: §8.5. Android SafetyNet Attestation Statement Format (https://www.w3.org/TR/webauthn/#sctn-android-safetynet-attestation) -func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte, mds metadata.Provider) (string, []any, error) { // The syntax of an Android Attestation statement is defined as follows: // $$attStmtType //= ( // fmt: "android-safetynet", @@ -59,7 +58,7 @@ func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte) (string // We have done this // §8.5.2 Verify that response is a valid SafetyNet response of version ver. - version, present := att.AttStatement["ver"].(string) + version, present := att.AttStatement[stmtVersion].(string) if !present { return "", nil, ErrAttestationFormat.WithDetails("Unable to find the version of SafetyNet") } @@ -75,8 +74,8 @@ func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte) (string return "", nil, ErrAttestationFormat.WithDetails("Unable to find the SafetyNet response") } - token, err := jwt.Parse(string(response), func(token *jwt.Token) (interface{}, error) { - chain := token.Header["x5c"].([]interface{}) + token, err := jwt.Parse(string(response), func(token *jwt.Token) (any, error) { + chain := token.Header[stmtX5C].([]any) o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string)))) @@ -110,7 +109,7 @@ func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte) (string } // §8.5.4 Let attestationCert be the attestation certificate (https://www.w3.org/TR/webauthn/#attestation-certificate) - certChain := token.Header["x5c"].([]interface{}) + certChain := token.Header[stmtX5C].([]any) l := make([]byte, base64.StdEncoding.DecodedLen(len(certChain[0].(string)))) n, err := base64.StdEncoding.Decode(l, []byte(certChain[0].(string))) @@ -134,19 +133,13 @@ func verifySafetyNetFormat(att AttestationObject, clientDataHash []byte) (string return "", nil, ErrInvalidAttestation.WithDetails("ctsProfileMatch attribute of the JWT payload is false") } - // Verify sanity of timestamp in the payload - now := time.Now() - oneMinuteAgo := now.Add(-time.Minute) - - if t := time.Unix(safetyNetResponse.TimestampMs/1000, 0); t.After(now) { - // zero tolerance for post-dated timestamps + if t := time.Unix(safetyNetResponse.TimestampMs/1000, 0); t.After(time.Now()) { + // Zero tolerance for post-dated timestamps. return "", nil, ErrInvalidAttestation.WithDetails("SafetyNet response with timestamp after current time") - } else if t.Before(oneMinuteAgo) { - // allow old timestamp for testing purposes - // TODO: Make this user configurable - msg := "SafetyNet response with timestamp before one minute ago" - if metadata.Conformance { - return "", nil, ErrInvalidAttestation.WithDetails(msg) + } else if t.Before(time.Now().Add(-time.Minute)) { + // Small tolerance for pre-dated timestamps. 
+ if mds != nil && mds.GetValidateEntry(context.Background()) { + return "", nil, ErrInvalidAttestation.WithDetails("SafetyNet response with timestamp before one minute ago") } } diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go index 892bdd8..e077739 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go @@ -15,20 +15,18 @@ import ( "github.com/go-webauthn/webauthn/protocol/webauthncose" ) -var tpmAttestationKey = "tpm" - func init() { - RegisterAttestationFormat(tpmAttestationKey, verifyTPMFormat) + RegisterAttestationFormat(AttestationFormatTPM, verifyTPMFormat) } -func verifyTPMFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifyTPMFormat(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (string, []any, error) { // Given the verification procedure inputs attStmt, authenticatorData // and clientDataHash, the verification procedure is as follows // Verify that attStmt is valid CBOR conforming to the syntax defined // above and perform CBOR decoding on it to extract the contained fields - ver, present := att.AttStatement["ver"].(string) + ver, present := att.AttStatement[stmtVersion].(string) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving ver value") } @@ -37,35 +35,35 @@ func verifyTPMFormat(att AttestationObject, clientDataHash []byte) (string, []in return "", nil, ErrAttestationFormat.WithDetails("WebAuthn only supports TPM 2.0 currently") } - alg, present := att.AttStatement["alg"].(int64) + alg, present := att.AttStatement[stmtAlgorithm].(int64) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving alg value") } coseAlg := webauthncose.COSEAlgorithmIdentifier(alg) - x5c, x509present := att.AttStatement["x5c"].([]interface{}) + x5c, x509present := att.AttStatement[stmtX5C].([]any) if !x509present { // Handle Basic Attestation steps for the x509 Certificate return "", nil, ErrNotImplemented } - _, ecdaaKeyPresent := att.AttStatement["ecdaaKeyId"].([]byte) + _, ecdaaKeyPresent := att.AttStatement[stmtECDAAKID].([]byte) if ecdaaKeyPresent { return "", nil, ErrNotImplemented } - sigBytes, present := att.AttStatement["sig"].([]byte) + sigBytes, present := att.AttStatement[stmtSignature].([]byte) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving sig value") } - certInfoBytes, present := att.AttStatement["certInfo"].([]byte) + certInfoBytes, present := att.AttStatement[stmtCertInfo].([]byte) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving certInfo value") } - pubAreaBytes, present := att.AttStatement["pubArea"].([]byte) + pubAreaBytes, present := att.AttStatement[stmtPubArea].([]byte) if !present { return "", nil, ErrAttestationFormat.WithDetails("Error retrieving pubArea value") } diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_u2f.go b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_u2f.go index e203f07..211aab7 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/attestation_u2f.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/attestation_u2f.go @@ -11,14 +11,12 @@ import ( "github.com/go-webauthn/webauthn/protocol/webauthncose" ) -var u2fAttestationKey = "fido-u2f" - func init() { - RegisterAttestationFormat(u2fAttestationKey, verifyU2FFormat) + 
RegisterAttestationFormat(AttestationFormatFIDOUniversalSecondFactor, verifyU2FFormat) } // verifyU2FFormat - Follows verification steps set out by https://www.w3.org/TR/webauthn/#fido-u2f-attestation -func verifyU2FFormat(att AttestationObject, clientDataHash []byte) (string, []interface{}, error) { +func verifyU2FFormat(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (string, []any, error) { if !bytes.Equal(att.AuthData.AttData.AAGUID, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) { return "", nil, ErrUnsupportedAlgorithm.WithDetails("U2F attestation format AAGUID not set to 0x00") } @@ -42,7 +40,7 @@ func verifyU2FFormat(att AttestationObject, clientDataHash []byte) (string, []in // } // Check for "x5c" which is a single element array containing the attestation certificate in X.509 format. - x5c, present := att.AttStatement["x5c"].([]interface{}) + x5c, present := att.AttStatement[stmtX5C].([]any) if !present { return "", nil, ErrAttestationFormat.WithDetails("Missing properly formatted x5c data") } @@ -50,7 +48,7 @@ func verifyU2FFormat(att AttestationObject, clientDataHash []byte) (string, []in // Check for "sig" which is The attestation signature. The signature was calculated over the (raw) U2F // registration response message https://www.w3.org/TR/webauthn/#biblio-fido-u2f-message-formats] // received by the client from the authenticator. - signature, present := att.AttStatement["sig"].([]byte) + signature, present := att.AttStatement[stmtSignature].([]byte) if !present { return "", nil, ErrAttestationFormat.WithDetails("Missing sig data") } diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go b/vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go index 8424fe5..b10b72d 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go @@ -135,6 +135,11 @@ const ( // BLE indicates the respective authenticator can be contacted over Bluetooth Smart (Bluetooth Low Energy / BLE). BLE AuthenticatorTransport = "ble" + // SmartCard indicates the respective authenticator can be contacted over ISO/IEC 7816 smart card with contacts. + // + // WebAuthn Level 3. + SmartCard AuthenticatorTransport = "smart-card" + // Hybrid indicates the respective authenticator can be contacted using a combination of (often separate) // data-transport and proximity mechanisms. This supports, for example, authentication on a desktop computer using // a smartphone. @@ -322,7 +327,7 @@ func (a *AuthenticatorData) unmarshalAttestedData(rawAuthData []byte) (err error // Unmarshall the credential's Public Key into CBOR encoding. func unmarshalCredentialPublicKey(keyBytes []byte) (rawBytes []byte, err error) { - var m interface{} + var m any if err = webauthncbor.Unmarshal(keyBytes, &m); err != nil { return nil, err @@ -348,11 +353,6 @@ func ResidentKeyNotRequired() *bool { return &required } -// Deprecated: ResidentKeyUnrequired is an alias for ResidentKeyNotRequired and will be completely removed in the future. -func ResidentKeyUnrequired() *bool { - return ResidentKeyNotRequired() -} - // Verify on AuthenticatorData handles Steps 9 through 12 for Registration // and Steps 11 through 14 for Assertion. 
func (a *AuthenticatorData) Verify(rpIdHash []byte, appIDHash []byte, userVerificationRequired bool) error { diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/client.go b/vendor/github.com/go-webauthn/webauthn/protocol/client.go index c98577a..ab9b6ab 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/client.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/client.go @@ -20,6 +20,8 @@ type CollectedClientData struct { Type CeremonyType `json:"type"` Challenge string `json:"challenge"` Origin string `json:"origin"` + TopOrigin string `json:"topOrigin,omitempty"` + CrossOrigin bool `json:"crossOrigin,omitempty"` TokenBinding *TokenBinding `json:"tokenBinding,omitempty"` // Chromium (Chrome) returns a hint sometimes about how to handle clientDataJSON in a safe manner. @@ -77,7 +79,10 @@ func FullyQualifiedOrigin(rawOrigin string) (fqOrigin string, err error) { // new credential and steps 7 through 10 of verifying an authentication assertion // See https://www.w3.org/TR/webauthn/#registering-a-new-credential // and https://www.w3.org/TR/webauthn/#verifying-assertion -func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyType, rpOrigins []string) error { +// +// Note: the rpTopOriginsVerify parameter does not accept the TopOriginVerificationMode value of +// TopOriginDefaultVerificationMode as it's expected this value is updated by the config validation process. +func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyType, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode) (err error) { // Registration Step 3. Verify that the value of C.type is webauthn.create. // Assertion Step 7. Verify that the value of C.type is the string webauthn.get. @@ -101,8 +106,9 @@ func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyTy // Registration Step 5 & Assertion Step 9. Verify that the value of C.origin matches // the Relying Party's origin. - fqOrigin, err := FullyQualifiedOrigin(c.Origin) - if err != nil { + var fqOrigin string + + if fqOrigin, err = FullyQualifiedOrigin(c.Origin); err != nil { return ErrParsingData.WithDetails("Error decoding clientData origin as URL") } @@ -121,6 +127,54 @@ func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyTy WithInfo(fmt.Sprintf("Expected Values: %s, Received: %s", rpOrigins, fqOrigin)) } + if rpTopOriginsVerify != TopOriginIgnoreVerificationMode { + switch len(c.TopOrigin) { + case 0: + break + default: + if !c.CrossOrigin { + return ErrVerification. + WithDetails("Error validating topOrigin"). + WithInfo("The topOrigin can't have values unless crossOrigin is true.") + } + + var ( + fqTopOrigin string + possibleTopOrigins []string + ) + + if fqTopOrigin, err = FullyQualifiedOrigin(c.TopOrigin); err != nil { + return ErrParsingData.WithDetails("Error decoding clientData topOrigin as URL") + } + + switch rpTopOriginsVerify { + case TopOriginExplicitVerificationMode: + possibleTopOrigins = rpTopOrigins + case TopOriginAutoVerificationMode: + possibleTopOrigins = append(rpTopOrigins, rpOrigins...) + case TopOriginImplicitVerificationMode: + possibleTopOrigins = rpOrigins + default: + return ErrNotImplemented.WithDetails("Error handling unknown Top Origin verification mode") + } + + found = false + + for _, origin := range possibleTopOrigins { + if strings.EqualFold(fqTopOrigin, origin) { + found = true + break + } + } + + if !found { + return ErrVerification. + WithDetails("Error validating top origin"). 
+ WithInfo(fmt.Sprintf("Expected Values: %s, Received: %s", possibleTopOrigins, fqTopOrigin)) + } + } + } + // Registration Step 6 and Assertion Step 10. Verify that the value of C.tokenBinding.status // matches the state of Token Binding for the TLS connection over which the assertion was // obtained. If Token Binding was used on that TLS connection, also verify that C.tokenBinding.id @@ -140,3 +194,28 @@ func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyTy return nil } + +type TopOriginVerificationMode int + +const ( + // TopOriginDefaultVerificationMode represents the default verification mode for the Top Origin. At this time this + // mode is the same as TopOriginIgnoreVerificationMode until such a time as the specification becomes stable. This + // value is intended as a fallback value and implementers should very intentionally pick another option if they want + // stability. + TopOriginDefaultVerificationMode TopOriginVerificationMode = iota + + // TopOriginIgnoreVerificationMode ignores verification entirely. + TopOriginIgnoreVerificationMode + + // TopOriginAutoVerificationMode represents the automatic verification mode for the Top Origin. In this mode the + // If the Top Origins parameter has values it checks against this, otherwise it checks against the Origins parameter. + TopOriginAutoVerificationMode + + // TopOriginImplicitVerificationMode represents the implicit verification mode for the Top Origin. In this mode the + // Top Origin is verified against the allowed Origins values. + TopOriginImplicitVerificationMode + + // TopOriginExplicitVerificationMode represents the explicit verification mode for the Top Origin. In this mode the + // Top Origin is verified against the allowed Top Origins values. + TopOriginExplicitVerificationMode +) diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/const.go b/vendor/github.com/go-webauthn/webauthn/protocol/const.go new file mode 100644 index 0000000..a1560f2 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/protocol/const.go @@ -0,0 +1,11 @@ +package protocol + +const ( + stmtX5C = "x5c" + stmtSignature = "sig" + stmtAlgorithm = "alg" + stmtVersion = "ver" + stmtECDAAKID = "ecdaaKeyId" + stmtCertInfo = "certInfo" + stmtPubArea = "pubArea" +) diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/credential.go b/vendor/github.com/go-webauthn/webauthn/protocol/credential.go index bb9782b..5c9f6b0 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/credential.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/credential.go @@ -5,6 +5,8 @@ import ( "encoding/base64" "io" "net/http" + + "github.com/go-webauthn/webauthn/metadata" ) // Credential is the basic credential type from the Credential Management specification that is inherited by WebAuthn's @@ -31,6 +33,7 @@ type ParsedCredential struct { type PublicKeyCredential struct { Credential + RawID URLEncodedBase64 `json:"rawId"` ClientExtensionResults AuthenticationExtensionsClientOutputs `json:"clientExtensionResults,omitempty"` AuthenticatorAttachment string `json:"authenticatorAttachment,omitempty"` @@ -38,6 +41,7 @@ type PublicKeyCredential struct { type ParsedPublicKeyCredential struct { ParsedCredential + RawID []byte `json:"rawId"` ClientExtensionResults AuthenticationExtensionsClientOutputs `json:"clientExtensionResults,omitempty"` AuthenticatorAttachment AuthenticatorAttachment `json:"authenticatorAttachment,omitempty"` @@ -45,17 +49,13 @@ type ParsedPublicKeyCredential struct { type CredentialCreationResponse 
struct { PublicKeyCredential - AttestationResponse AuthenticatorAttestationResponse `json:"response"` - // Deprecated: Transports is deprecated due to upstream changes to the API. - // Use the Transports field of AuthenticatorAttestationResponse - // instead. Transports is kept for backward compatibility, and should not - // be used by new clients. - Transports []string `json:"transports,omitempty"` + AttestationResponse AuthenticatorAttestationResponse `json:"response"` } type ParsedCredentialCreationData struct { ParsedPublicKeyCredential + Response ParsedAttestationResponse Raw CredentialCreationResponse } @@ -85,6 +85,18 @@ func ParseCredentialCreationResponseBody(body io.Reader) (pcc *ParsedCredentialC return ccr.Parse() } +// ParseCredentialCreationResponseBytes is an alternative version of ParseCredentialCreationResponseBody that just takes +// a byte slice. +func ParseCredentialCreationResponseBytes(data []byte) (pcc *ParsedCredentialCreationData, err error) { + var ccr CredentialCreationResponse + + if err = decodeBytes(data, &ccr); err != nil { + return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo(err.Error()) + } + + return ccr.Parse() +} + // Parse validates and parses the CredentialCreationResponse into a ParsedCredentialCreationData. This receiver // is unlikely to be expressly guaranteed under the versioning policy. Users looking for this guarantee should see // ParseCredentialCreationResponseBody instead, and this receiver should only be used if that function is inadequate @@ -112,13 +124,6 @@ func (ccr CredentialCreationResponse) Parse() (pcc *ParsedCredentialCreationData return nil, ErrParsingData.WithDetails("Error parsing attestation response") } - // TODO: Remove this as it's a backwards compatibility layer. - if len(response.Transports) == 0 && len(ccr.Transports) != 0 { - for _, t := range ccr.Transports { - response.Transports = append(response.Transports, AuthenticatorTransport(t)) - } - } - var attachment AuthenticatorAttachment switch ccr.AuthenticatorAttachment { @@ -140,15 +145,15 @@ func (ccr CredentialCreationResponse) Parse() (pcc *ParsedCredentialCreationData // Verify the Client and Attestation data. // // Specification: §7.1. Registering a New Credential (https://www.w3.org/TR/webauthn/#sctn-registering-a-new-credential) -func (pcc *ParsedCredentialCreationData) Verify(storedChallenge string, verifyUser bool, relyingPartyID string, relyingPartyOrigins []string) error { +func (pcc *ParsedCredentialCreationData) Verify(storedChallenge string, verifyUser bool, relyingPartyID string, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode, mds metadata.Provider) (clientDataHash []byte, err error) { // Handles steps 3 through 6 - Verifying the Client Data against the Relying Party's stored data - verifyError := pcc.Response.CollectedClientData.Verify(storedChallenge, CreateCeremony, relyingPartyOrigins) - if verifyError != nil { - return verifyError + if err = pcc.Response.CollectedClientData.Verify(storedChallenge, CreateCeremony, rpOrigins, rpTopOrigins, rpTopOriginsVerify); err != nil { + return nil, err } // Step 7. Compute the hash of response.clientDataJSON using SHA-256. - clientDataHash := sha256.Sum256(pcc.Raw.AttestationResponse.ClientDataJSON) + sum := sha256.Sum256(pcc.Raw.AttestationResponse.ClientDataJSON) + clientDataHash = sum[:] // Step 8. 
Perform CBOR decoding on the attestationObject field of the AuthenticatorAttestationResponse // structure to obtain the attestation statement format fmt, the authenticator data authData, and the @@ -156,9 +161,8 @@ func (pcc *ParsedCredentialCreationData) Verify(storedChallenge string, verifyUs // We do the above step while parsing and decoding the CredentialCreationResponse // Handle steps 9 through 14 - This verifies the attestation object. - verifyError = pcc.Response.AttestationObject.Verify(relyingPartyID, clientDataHash[:], verifyUser) - if verifyError != nil { - return verifyError + if err = pcc.Response.AttestationObject.Verify(relyingPartyID, clientDataHash, verifyUser, mds); err != nil { + return clientDataHash, err } // Step 15. If validation is successful, obtain a list of acceptable trust anchors (attestation root @@ -197,7 +201,7 @@ func (pcc *ParsedCredentialCreationData) Verify(storedChallenge string, verifyUs // TODO: Not implemented for the reasons mentioned under Step 16 - return nil + return clientDataHash, nil } // GetAppID takes a AuthenticationExtensions object or nil. It then performs the following checks in order: diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/decoder.go b/vendor/github.com/go-webauthn/webauthn/protocol/decoder.go index 92e8a81..bd76316 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/decoder.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/decoder.go @@ -1,6 +1,7 @@ package protocol import ( + "bytes" "encoding/json" "errors" "io" @@ -21,3 +22,19 @@ func decodeBody(body io.Reader, v any) (err error) { return nil } + +func decodeBytes(data []byte, v any) (err error) { + decoder := json.NewDecoder(bytes.NewReader(data)) + + if err = decoder.Decode(v); err != nil { + return err + } + + _, err = decoder.Token() + + if !errors.Is(err, io.EOF) { + return errors.New("The body contains trailing data") + } + + return nil +} diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/entities.go b/vendor/github.com/go-webauthn/webauthn/protocol/entities.go index 1d2f6e8..b0ba2ad 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/entities.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/entities.go @@ -14,16 +14,6 @@ type CredentialEntity struct { // intended only for display, i.e., aiding the user in determining the difference between user accounts with similar // displayNames. For example, "alexm", "alex.p.mueller@example.com" or "+14255551234". Name string `json:"name"` - - // A serialized URL which resolves to an image associated with the entity. For example, - // this could be a user’s avatar or a Relying Party's logo. This URL MUST be an a priori - // authenticated URL. Authenticators MUST accept and store a 128-byte minimum length for - // an icon member’s value. Authenticators MAY ignore an icon member’s value if its length - // is greater than 128 bytes. The URL’s scheme MAY be "data" to avoid fetches of the URL, - // at the cost of needing more storage. - // - // Deprecated: this has been removed from the specification recommendations. - Icon string `json:"icon,omitempty"` } // The RelyingPartyEntity represents the PublicKeyCredentialRpEntity IDL and is used to supply additional Relying Party @@ -32,6 +22,7 @@ type CredentialEntity struct { // Specification: §5.4.2. 
Relying Party Parameters for Credential Generation (https://www.w3.org/TR/webauthn/#dictionary-rp-credential-params) type RelyingPartyEntity struct { CredentialEntity + // A unique identifier for the Relying Party entity, which sets the RP ID. ID string `json:"id"` } @@ -51,5 +42,5 @@ type UserEntity struct { // authentication and authorization decisions MUST be made on the basis of this id // member, not the displayName nor name members. See Section 6.1 of // [RFC8266](https://www.w3.org/TR/webauthn/#biblio-rfc8266). - ID interface{} `json:"id"` + ID any `json:"id"` } diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/extensions.go b/vendor/github.com/go-webauthn/webauthn/protocol/extensions.go index f925eb2..e226f7c 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/extensions.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/extensions.go @@ -5,7 +5,7 @@ package protocol // For a list of commonly supported extensions, see §10. Defined Extensions // (https://www.w3.org/TR/webauthn/#sctn-defined-extensions). -type AuthenticationExtensionsClientOutputs map[string]interface{} +type AuthenticationExtensionsClientOutputs map[string]any const ( ExtensionAppID = "appid" diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/metadata.go b/vendor/github.com/go-webauthn/webauthn/protocol/metadata.go new file mode 100644 index 0000000..a7e6651 --- /dev/null +++ b/vendor/github.com/go-webauthn/webauthn/protocol/metadata.go @@ -0,0 +1,44 @@ +package protocol + +import ( + "context" + "fmt" + + "github.com/google/uuid" + + "github.com/go-webauthn/webauthn/metadata" +) + +func ValidateMetadata(ctx context.Context, aaguid uuid.UUID, mds metadata.Provider) (err error) { + if mds == nil { + return nil + } + + var ( + entry *metadata.Entry + ) + + if entry, err = mds.GetEntry(ctx, aaguid); err != nil { + return err + } + + if entry == nil { + if aaguid == uuid.Nil && mds.GetValidateEntryPermitZeroAAGUID(ctx) { + return nil + } + + if mds.GetValidateEntry(ctx) { + return fmt.Errorf("error occurred performing authenticator entry validation: AAGUID entry has not been registered with the metadata service") + } + + return nil + } + + if mds.GetValidateStatus(ctx) { + if err = mds.ValidateStatusReports(ctx, entry.StatusReports); err != nil { + return fmt.Errorf("error occurred performing authenticator status validation: %w", err) + } + } + + return nil +} diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/options.go b/vendor/github.com/go-webauthn/webauthn/protocol/options.go index 80a9e55..9085fcd 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/options.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/options.go @@ -17,26 +17,27 @@ type CredentialAssertion struct { // In order to create a Credential via create(), the caller specifies a few parameters in a // PublicKeyCredentialCreationOptions object. // -// TODO: There is one field missing from this for WebAuthn Level 3. A string slice named 'attestationFormats'. +// WebAuthn Level 3: hints,attestationFormats. // // Specification: §5.4. 
Options for Credential Creation (https://www.w3.org/TR/webauthn/#dictionary-makecredentialoptions) type PublicKeyCredentialCreationOptions struct { - RelyingParty RelyingPartyEntity `json:"rp"` - User UserEntity `json:"user"` - Challenge URLEncodedBase64 `json:"challenge"` - Parameters []CredentialParameter `json:"pubKeyCredParams,omitempty"` - Timeout int `json:"timeout,omitempty"` - CredentialExcludeList []CredentialDescriptor `json:"excludeCredentials,omitempty"` - AuthenticatorSelection AuthenticatorSelection `json:"authenticatorSelection,omitempty"` - Attestation ConveyancePreference `json:"attestation,omitempty"` - Extensions AuthenticationExtensions `json:"extensions,omitempty"` + RelyingParty RelyingPartyEntity `json:"rp"` + User UserEntity `json:"user"` + Challenge URLEncodedBase64 `json:"challenge"` + Parameters []CredentialParameter `json:"pubKeyCredParams,omitempty"` + Timeout int `json:"timeout,omitempty"` + CredentialExcludeList []CredentialDescriptor `json:"excludeCredentials,omitempty"` + AuthenticatorSelection AuthenticatorSelection `json:"authenticatorSelection,omitempty"` + Hints []PublicKeyCredentialHints `json:"hints,omitempty"` + Attestation ConveyancePreference `json:"attestation,omitempty"` + AttestationFormats []AttestationFormat `json:"attestationFormats,omitempty"` + Extensions AuthenticationExtensions `json:"extensions,omitempty"` } // The PublicKeyCredentialRequestOptions dictionary supplies get() with the data it needs to generate an assertion. // Its challenge member MUST be present, while its other members are OPTIONAL. // -// TODO: There are two fields missing from this for WebAuthn Level 3. A string type named 'attestation', and a string -// slice named 'attestationFormats'. +// WebAuthn Level 3: hints. // // Specification: §5.5. Options for Assertion Generation (https://www.w3.org/TR/webauthn/#dictionary-assertion-options) type PublicKeyCredentialRequestOptions struct { @@ -45,6 +46,7 @@ type PublicKeyCredentialRequestOptions struct { RelyingPartyID string `json:"rpId,omitempty"` AllowedCredentials []CredentialDescriptor `json:"allowCredentials,omitempty"` UserVerification UserVerificationRequirement `json:"userVerification,omitempty"` + Hints []PublicKeyCredentialHints `json:"hints,omitempty"` Extensions AuthenticationExtensions `json:"extensions,omitempty"` } @@ -98,7 +100,7 @@ const ( // parameters requesting additional processing by the client and authenticator. // // Specification: §5.7.1. Authentication Extensions Client Inputs (https://www.w3.org/TR/webauthn/#iface-authentication-extensions-client-inputs) -type AuthenticationExtensions map[string]interface{} +type AuthenticationExtensions map[string]any // AuthenticatorSelection represents the AuthenticatorSelectionCriteria IDL. // @@ -183,6 +185,72 @@ const ( PreferEnterpriseAttestation ConveyancePreference = "enterprise" ) +// AttestationFormat is an internal representation of the relevant inputs for registration. +// +// Specification: §5.4 Options for Credential Creation (https://w3c.github.io/webauthn/#dom-publickeycredentialcreationoptions-attestationformats) +// Registry: https://www.iana.org/assignments/webauthn/webauthn.xhtml +type AttestationFormat string + +const ( + // AttestationFormatPacked is the "packed" attestation statement format is a WebAuthn-optimized format for + // attestation. It uses a very compact but still extensible encoding method. This format is implementable by + //authenticators with limited resources (e.g., secure elements). 
+ AttestationFormatPacked AttestationFormat = "packed" + + // AttestationFormatTPM is the TPM attestation statement format returns an attestation statement in the same format + // as the packed attestation statement format, although the rawData and signature fields are computed differently. + AttestationFormatTPM AttestationFormat = "tpm" + + // AttestationFormatAndroidKey is the attestation statement format for platform authenticators on versions "N", and + // later, which may provide this proprietary "hardware attestation" statement. + AttestationFormatAndroidKey AttestationFormat = "android-key" + + // AttestationFormatAndroidSafetyNet is the attestation statement format that Android-based platform authenticators + // MAY produce an attestation statement based on the Android SafetyNet API. + AttestationFormatAndroidSafetyNet AttestationFormat = "android-safetynet" + + // AttestationFormatFIDOUniversalSecondFactor is the attestation statement format that is used with FIDO U2F + // authenticators. + AttestationFormatFIDOUniversalSecondFactor AttestationFormat = "fido-u2f" + + // AttestationFormatApple is the attestation statement format that is used with Apple devices' platform + // authenticators. + AttestationFormatApple AttestationFormat = "apple" + + // AttestationFormatNone is the attestation statement format that is used to replace any authenticator-provided + // attestation statement when a WebAuthn Relying Party indicates it does not wish to receive attestation information. + AttestationFormatNone AttestationFormat = "none" +) + +type PublicKeyCredentialHints string + +const ( + // PublicKeyCredentialHintSecurityKey is a PublicKeyCredentialHint that indicates that the Relying Party believes + // that users will satisfy this request with a physical security key. For example, an enterprise Relying Party may + // set this hint if they have issued security keys to their employees and will only accept those authenticators for + // registration and authentication. + // + // For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the + // authenticatorAttachment SHOULD be set to cross-platform. + PublicKeyCredentialHintSecurityKey PublicKeyCredentialHints = "security-key" + + // PublicKeyCredentialHintClientDevice is a PublicKeyCredentialHint that indicates that the Relying Party believes + // that users will satisfy this request with a platform authenticator attached to the client device. + // + // For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the + // authenticatorAttachment SHOULD be set to platform. + PublicKeyCredentialHintClientDevice PublicKeyCredentialHints = "client-device" + + // PublicKeyCredentialHintHybrid is a PublicKeyCredentialHint that indicates that the Relying Party believes that + // users will satisfy this request with general-purpose authenticators such as smartphones. For example, a consumer + // Relying Party may believe that only a small fraction of their customers possesses dedicated security keys. This + // option also implies that the local platform authenticator should not be promoted in the UI. + // + // For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the + // authenticatorAttachment SHOULD be set to cross-platform. 
+ PublicKeyCredentialHintHybrid PublicKeyCredentialHints = "hybrid" +) + func (a *PublicKeyCredentialRequestOptions) GetAllowedCredentialIDs() [][]byte { var allowedCredentialIDs = make([][]byte, len(a.AllowedCredentials)) @@ -193,7 +261,7 @@ func (a *PublicKeyCredentialRequestOptions) GetAllowedCredentialIDs() [][]byte { return allowedCredentialIDs } -type Extensions interface{} +type Extensions any type ServerResponse struct { Status ServerResponseStatus `json:"status"` diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go b/vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go index 2886d0f..aff1ac1 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go @@ -18,7 +18,7 @@ var ctap2CBOREncMode, _ = cbor.CTAP2EncOptions().EncMode() // Unmarshal parses the CBOR-encoded data into the value pointed to by v // following the CTAP2 canonical CBOR encoding form. // (https://fidoalliance.org/specs/fido-v2.0-ps-20190130/fido-client-to-authenticator-protocol-v2.0-ps-20190130.html#message-encoding) -func Unmarshal(data []byte, v interface{}) error { +func Unmarshal(data []byte, v any) error { // TODO (james-d-elliott): investigate the specific use case for Unmarshal vs UnmarshalFirst to determine the edge cases where this may be useful. _, err := ctap2CBORDecMode.UnmarshalFirst(data, v) @@ -28,6 +28,6 @@ func Unmarshal(data []byte, v interface{}) error { // Marshal encodes the value pointed to by v // following the CTAP2 canonical CBOR encoding form. // (https://fidoalliance.org/specs/fido-v2.0-ps-20190130/fido-client-to-authenticator-protocol-v2.0-ps-20190130.html#message-encoding) -func Marshal(v interface{}) ([]byte, error) { +func Marshal(v any) ([]byte, error) { return ctap2CBOREncMode.Marshal(v) } diff --git a/vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go b/vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go index 308adef..eb1f0d7 100644 --- a/vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go +++ b/vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go @@ -35,6 +35,7 @@ type PublicKeyData struct { // A COSEAlgorithmIdentifier for the algorithm used to derive the key signature. Algorithm int64 `cbor:"3,keyasint" json:"alg"` } + type EC2PublicKeyData struct { PublicKeyData @@ -178,7 +179,7 @@ func HasherFromCOSEAlg(coseAlg COSEAlgorithmIdentifier) func() hash.Hash { } // ParsePublicKey figures out what kind of COSE material was provided and create the data for the new key. -func ParsePublicKey(keyBytes []byte) (interface{}, error) { +func ParsePublicKey(keyBytes []byte) (any, error) { pk := PublicKeyData{} // TODO (james-d-elliott): investigate the ignored errors. 
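
The Level 3 hints and attestationFormats members are plain slices on the options structs, so a relying party can set them directly when assembling creation options. A small sketch, assuming the options are being built by hand rather than through the higher-level webauthn package helpers:

package main

import (
	"fmt"

	"github.com/go-webauthn/webauthn/protocol"
)

func main() {
	opts := protocol.PublicKeyCredentialCreationOptions{
		// Hint to Level 3 capable browsers that users are expected to register
		// with roaming security keys.
		Hints: []protocol.PublicKeyCredentialHints{
			protocol.PublicKeyCredentialHintSecurityKey,
		},
		// Restrict the attestation statement formats the RP is willing to accept.
		AttestationFormats: []protocol.AttestationFormat{
			protocol.AttestationFormatPacked,
			protocol.AttestationFormatTPM,
		},
	}

	fmt.Println(opts.Hints, opts.AttestationFormats)
}
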
webauthncbor.Unmarshal(keyBytes, &pk) @@ -342,7 +343,7 @@ func (k *EC2PublicKeyData) TPMCurveID() tpm2.EllipticCurve { } } -func VerifySignature(key interface{}, data []byte, sig []byte) (bool, error) { +func VerifySignature(key any, data []byte, sig []byte) (bool, error) { switch k := key.(type) { case OKPPublicKeyData: return k.Verify(data, sig) diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/const.go b/vendor/github.com/go-webauthn/webauthn/webauthn/const.go index 9ab74f2..7f08c3d 100644 --- a/vendor/github.com/go-webauthn/webauthn/webauthn/const.go +++ b/vendor/github.com/go-webauthn/webauthn/webauthn/const.go @@ -5,7 +5,6 @@ import ( ) const ( - errFmtFieldEmpty = "the field '%s' must be configured but it is empty" errFmtFieldNotValidURI = "field '%s' is not a valid URI: %w" errFmtConfigValidate = "error occurred validating the configuration: %w" ) diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/credential.go b/vendor/github.com/go-webauthn/webauthn/webauthn/credential.go index 81bcc9a..19e45f9 100644 --- a/vendor/github.com/go-webauthn/webauthn/webauthn/credential.go +++ b/vendor/github.com/go-webauthn/webauthn/webauthn/credential.go @@ -1,18 +1,22 @@ package webauthn import ( + "crypto/sha256" + "fmt" + + "github.com/go-webauthn/webauthn/metadata" "github.com/go-webauthn/webauthn/protocol" ) -// Credential contains all needed information about a WebAuthn credential for storage. +// Credential contains all needed information about a WebAuthn credential for storage. This struct is effectively the +// Credential Record as described in the specification. +// +// See: §4. Terminology: Credential Record (https://www.w3.org/TR/webauthn-3/#credential-record) type Credential struct { - // A probabilistically-unique byte sequence identifying a public key credential source and its authentication assertions. + // The Credential ID of the public key credential source. Described by the Credential Record 'id' field. ID []byte `json:"id"` - // The public key portion of a Relying Party-specific credential key pair, generated by an authenticator and returned to - // a Relying Party at registration time (see also public key credential). The private key portion of the credential key - // pair is known as the credential private key. Note that in the case of self attestation, the credential key pair is also - // used as the attestation key pair, see self attestation for details. + // The credential public key of the public key credential source. Described by the Credential Record 'publicKey field. PublicKey []byte `json:"publicKey"` // The attestation format used (if any) by the authenticator when creating the credential. @@ -26,6 +30,9 @@ type Credential struct { // The Authenticator information for a given certificate. Authenticator Authenticator `json:"authenticator"` + + // The attestation values that can be used to validate this credential via the MDS3 at a later date. + Attestation CredentialAttestation `json:"attestation"` } type CredentialFlags struct { @@ -43,6 +50,14 @@ type CredentialFlags struct { BackupState bool `json:"backupState"` } +type CredentialAttestation struct { + ClientDataJSON []byte `json:"clientDataJSON"` + ClientDataHash []byte `json:"clientDataHash"` + AuthenticatorData []byte `json:"authenticatorData"` + PublicKeyAlgorithm int64 `json:"publicKeyAlgorithm"` + Object []byte `json:"object"` +} + // Descriptor converts a Credential into a protocol.CredentialDescriptor. 
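
Because the attestation inputs are now retained on the stored credential via CredentialAttestation, a relying party can re-run attestation and MDS checks long after registration using the Verify receiver added a little further on. A minimal sketch, assuming mds is an already-initialised metadata.Provider and cred was loaded from storage with its attestation fields populated:

package main

import (
	"fmt"

	"github.com/go-webauthn/webauthn/metadata"
	"github.com/go-webauthn/webauthn/webauthn"
)

// revalidate re-runs the attestation checks for a credential whose
// CredentialAttestation values were captured at registration time.
func revalidate(cred webauthn.Credential, mds metadata.Provider) error {
	if err := cred.Verify(mds); err != nil {
		return fmt.Errorf("stored credential failed metadata re-validation: %w", err)
	}

	return nil
}

func main() {}
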
 func (c Credential) Descriptor() (descriptor protocol.CredentialDescriptor) {
 	return protocol.CredentialDescriptor{
@@ -53,9 +68,9 @@ func (c Credential) Descriptor() (descriptor protocol.CredentialDescriptor) {
 	}
 }
 
-// MakeNewCredential will return a credential pointer on successful validation of a registration response.
-func MakeNewCredential(c *protocol.ParsedCredentialCreationData) (*Credential, error) {
-	newCredential := &Credential{
+// NewCredential will return a credential pointer on successful validation of a registration response.
+func NewCredential(clientDataHash []byte, c *protocol.ParsedCredentialCreationData) (credential *Credential, err error) {
+	credential = &Credential{
 		ID:              c.Response.AttestationObject.AuthData.AttData.CredentialID,
 		PublicKey:       c.Response.AttestationObject.AuthData.AttData.CredentialPublicKey,
 		AttestationType: c.Response.AttestationObject.Format,
@@ -71,7 +86,56 @@ func MakeNewCredential(c *protocol.ParsedCredentialCreationData) (*Credential, e
 			SignCount:  c.Response.AttestationObject.AuthData.Counter,
 			Attachment: c.AuthenticatorAttachment,
 		},
+		Attestation: CredentialAttestation{
+			ClientDataJSON:     c.Raw.AttestationResponse.ClientDataJSON,
+			ClientDataHash:     clientDataHash,
+			AuthenticatorData:  c.Raw.AttestationResponse.AuthenticatorData,
+			PublicKeyAlgorithm: c.Raw.AttestationResponse.PublicKeyAlgorithm,
+			Object:             c.Raw.AttestationResponse.AttestationObject,
+		},
+	}
+
+	return credential, nil
+}
+
+// Verify this credentials against the metadata.Provider given.
+func (c Credential) Verify(mds metadata.Provider) (err error) {
+	if mds == nil {
+		return fmt.Errorf("error verifying credential: the metadata provider must be provided but it's nil")
+	}
+
+	raw := &protocol.AuthenticatorAttestationResponse{
+		AuthenticatorResponse: protocol.AuthenticatorResponse{
+			ClientDataJSON: c.Attestation.ClientDataJSON,
+		},
+		Transports:         make([]string, len(c.Transport)),
+		AuthenticatorData:  c.Attestation.AuthenticatorData,
+		PublicKey:          c.PublicKey,
+		PublicKeyAlgorithm: c.Attestation.PublicKeyAlgorithm,
+		AttestationObject:  c.Attestation.Object,
+	}
+
+	for i, transport := range c.Transport {
+		raw.Transports[i] = string(transport)
+	}
+
+	var attestation *protocol.ParsedAttestationResponse
+
+	if attestation, err = raw.Parse(); err != nil {
+		return fmt.Errorf("error verifying credential: error parsing attestation: %w", err)
+	}
+
+	clientDataHash := c.Attestation.ClientDataHash
+
+	if len(clientDataHash) == 0 {
+		sum := sha256.Sum256(c.Attestation.ClientDataJSON)
+
+		clientDataHash = sum[:]
+	}
+
+	if err = attestation.AttestationObject.VerifyAttestation(clientDataHash, mds); err != nil {
+		return fmt.Errorf("error verifying credential: error verifying attestation: %w", err)
 	}
 
-	return newCredential, nil
+	return nil
 }
diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/login.go b/vendor/github.com/go-webauthn/webauthn/webauthn/login.go
index 73e69af..89ff5f8 100644
--- a/vendor/github.com/go-webauthn/webauthn/webauthn/login.go
+++ b/vendor/github.com/go-webauthn/webauthn/webauthn/login.go
@@ -2,10 +2,14 @@ package webauthn
 
 import (
 	"bytes"
+	"context"
 	"fmt"
 	"net/http"
+	"net/url"
 	"time"
 
+	"github.com/google/uuid"
+
 	"github.com/go-webauthn/webauthn/protocol"
 )
 
@@ -70,6 +74,12 @@ func (webauthn *WebAuthn) beginLogin(userID []byte, allowedCredentials []protoco
 		opt(&assertion.Response)
 	}
 
+	if len(assertion.Response.RelyingPartyID) == 0 {
+		return nil, nil, fmt.Errorf("error generating assertion: the relying party id must be provided via the configuration or a functional option for a login")
+	} else if _, err = url.Parse(assertion.Response.RelyingPartyID); err != nil {
+		return nil, nil, fmt.Errorf("error generating assertion: the relying party id failed to validate as it's not a valid uri with error: %w", err)
+	}
+
 	if assertion.Response.Timeout == 0 {
 		switch {
 		case assertion.Response.UserVerification == protocol.VerificationDiscouraged:
@@ -81,6 +91,7 @@ func (webauthn *WebAuthn) beginLogin(userID []byte, allowedCredentials []protoco
 
 	session = &SessionData{
 		Challenge:            challenge.String(),
+		RelyingPartyID:       assertion.Response.RelyingPartyID,
 		UserID:               userID,
 		AllowedCredentialIDs: assertion.Response.GetAllowedCredentialIDs(),
 		UserVerification:     assertion.Response.UserVerification,
@@ -115,6 +126,15 @@ func WithUserVerification(userVerification protocol.UserVerificationRequirement)
 	}
 }
 
+// WithAssertionPublicKeyCredentialHints adjusts the non-default hints for credential types to select during login.
+//
+// WebAuthn Level 3.
+func WithAssertionPublicKeyCredentialHints(hints []protocol.PublicKeyCredentialHints) LoginOption {
+	return func(cco *protocol.PublicKeyCredentialRequestOptions) {
+		cco.Hints = hints
+	}
+}
+
 // WithAssertionExtensions adjusts the requested extensions.
 func WithAssertionExtensions(extensions protocol.AuthenticationExtensions) LoginOption {
 	return func(cco *protocol.PublicKeyCredentialRequestOptions) {
@@ -129,7 +149,7 @@ func WithAppIdExtension(appid string) LoginOption {
 		for _, credential := range cco.AllowedCredentials {
 			if credential.AttestationType == protocol.CredentialTypeFIDOU2F {
 				if cco.Extensions == nil {
-					cco.Extensions = map[string]interface{}{}
+					cco.Extensions = map[string]any{}
 				}
 
 				cco.Extensions[protocol.ExtensionAppID] = appid
@@ -138,6 +158,13 @@ func WithAppIdExtension(appid string) LoginOption {
 	}
 }
 
+// WithLoginRelyingPartyID sets the Relying Party ID for this particular login.
+func WithLoginRelyingPartyID(id string) LoginOption {
+	return func(cco *protocol.PublicKeyCredentialRequestOptions) {
+		cco.RelyingPartyID = id
+	}
+}
+
 // FinishLogin takes the response from the client and validate it against the user credentials and stored session data.
 func (webauthn *WebAuthn) FinishLogin(user User, session SessionData, response *http.Request) (*Credential, error) {
 	parsedResponse, err := protocol.ParseCredentialRequestResponse(response)
@@ -174,21 +201,33 @@ func (webauthn *WebAuthn) ValidateLogin(user User, session SessionData, parsedRe
 }
 
 // ValidateDiscoverableLogin is an overloaded version of ValidateLogin that allows for discoverable credentials.
-func (webauthn *WebAuthn) ValidateDiscoverableLogin(handler DiscoverableUserHandler, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (*Credential, error) {
+//
+// Note: this is just a backwards compatibility layer over ValidatePasskeyLogin which returns more information.
+func (webauthn *WebAuthn) ValidateDiscoverableLogin(handler DiscoverableUserHandler, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (credential *Credential, err error) {
+	_, credential, err = webauthn.ValidatePasskeyLogin(handler, session, parsedResponse)
+
+	return credential, err
+}
+
+// ValidatePasskeyLogin is an overloaded version of ValidateLogin that allows for passkey credentials.
+func (webauthn *WebAuthn) ValidatePasskeyLogin(handler DiscoverableUserHandler, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (user User, credential *Credential, err error) {
 	if session.UserID != nil {
-		return nil, protocol.ErrBadRequest.WithDetails("Session was not initiated as a client-side discoverable login")
+		return nil, nil, protocol.ErrBadRequest.WithDetails("Session was not initiated as a client-side discoverable login")
 	}
 
 	if parsedResponse.Response.UserHandle == nil {
-		return nil, protocol.ErrBadRequest.WithDetails("Client-side Discoverable Assertion was attempted with a blank User Handle")
+		return nil, nil, protocol.ErrBadRequest.WithDetails("Client-side Discoverable Assertion was attempted with a blank User Handle")
 	}
 
-	user, err := handler(parsedResponse.RawID, parsedResponse.Response.UserHandle)
-	if err != nil {
-		return nil, protocol.ErrBadRequest.WithDetails(fmt.Sprintf("Failed to lookup Client-side Discoverable Credential: %s", err))
+	if user, err = handler(parsedResponse.RawID, parsedResponse.Response.UserHandle); err != nil {
+		return nil, nil, protocol.ErrBadRequest.WithDetails(fmt.Sprintf("Failed to lookup Client-side Discoverable Credential: %s", err))
 	}
 
-	return webauthn.validateLogin(user, session, parsedResponse)
+	if credential, err = webauthn.validateLogin(user, session, parsedResponse); err != nil {
+		return nil, nil, err
+	}
+
+	return user, credential, nil
 }
 
 // ValidateLogin takes a parsed response and validates it against the user credentials and session data.
@@ -198,16 +237,19 @@ func (webauthn *WebAuthn) validateLogin(user User, session SessionData, parsedRe
 	// allowCredentials.
 
 	// NON-NORMATIVE Prior Step: Verify that the allowCredentials for the session are owned by the user provided.
-	userCredentials := user.WebAuthnCredentials()
+	credentials := user.WebAuthnCredentials()
 
-	var credentialFound bool
+	var (
+		found      bool
+		credential Credential
+	)
 
 	if len(session.AllowedCredentialIDs) > 0 {
 		var credentialsOwned bool
 
 		for _, allowedCredentialID := range session.AllowedCredentialIDs {
-			for _, userCredential := range userCredentials {
-				if bytes.Equal(userCredential.ID, allowedCredentialID) {
+			for _, credential = range credentials {
+				if bytes.Equal(credential.ID, allowedCredentialID) {
 					credentialsOwned = true
 
 					break
@@ -223,13 +265,13 @@ func (webauthn *WebAuthn) validateLogin(user User, session SessionData, parsedRe
 		for _, allowedCredentialID := range session.AllowedCredentialIDs {
 			if bytes.Equal(parsedResponse.RawID, allowedCredentialID) {
-				credentialFound = true
+				found = true
 
 				break
 			}
 		}
 
-		if !credentialFound {
+		if !found {
 			return nil, protocol.ErrBadRequest.WithDetails("User does not own the credential returned")
 		}
 	}
@@ -248,48 +290,70 @@ func (webauthn *WebAuthn) validateLogin(user User, session SessionData, parsedRe
 	// Step 3. Using credential’s id attribute (or the corresponding rawId, if base64url encoding is inappropriate
 	// for your use case), look up the corresponding credential public key.
-	var loginCredential Credential
-
-	for _, cred := range userCredentials {
-		if bytes.Equal(cred.ID, parsedResponse.RawID) {
-			loginCredential = cred
-			credentialFound = true
+	for _, credential = range credentials {
+		if bytes.Equal(credential.ID, parsedResponse.RawID) {
+			found = true
 
 			break
 		}
 
-		credentialFound = false
+		found = false
 	}
 
-	if !credentialFound {
+	if !found {
 		return nil, protocol.ErrBadRequest.WithDetails("Unable to find the credential for the returned credential ID")
 	}
 
+	var (
+		appID string
+		err   error
+	)
+
+	// Ensure authenticators with a bad status is not used.
+	if webauthn.Config.MDS != nil {
+		var aaguid uuid.UUID
+
+		if aaguid, err = uuid.FromBytes(credential.Authenticator.AAGUID); err != nil {
+			return nil, protocol.ErrBadRequest.WithDetails("Failed to decode AAGUID").WithInfo(fmt.Sprintf("Error occurred decoding AAGUID from the credential record: %s", err))
+		}
+
+		if err = protocol.ValidateMetadata(context.Background(), aaguid, webauthn.Config.MDS); err != nil {
+			return nil, protocol.ErrBadRequest.WithDetails("Failed to validate credential record metadata").WithInfo(fmt.Sprintf("Error occurred validating authenticator metadata from the credential record: %s", err))
+		}
+	}
+
 	shouldVerifyUser := session.UserVerification == protocol.VerificationRequired
 
 	rpID := webauthn.Config.RPID
 	rpOrigins := webauthn.Config.RPOrigins
+	rpTopOrigins := webauthn.Config.RPTopOrigins
 
-	appID, err := parsedResponse.GetAppID(session.Extensions, loginCredential.AttestationType)
-	if err != nil {
+	if appID, err = parsedResponse.GetAppID(session.Extensions, credential.AttestationType); err != nil {
 		return nil, err
 	}
 
 	// Handle steps 4 through 16.
-	validError := parsedResponse.Verify(session.Challenge, rpID, rpOrigins, appID, shouldVerifyUser, loginCredential.PublicKey)
-	if validError != nil {
-		return nil, validError
+	if err = parsedResponse.Verify(session.Challenge, rpID, rpOrigins, rpTopOrigins, webauthn.Config.RPTopOriginVerificationMode, appID, shouldVerifyUser, credential.PublicKey); err != nil {
+		return nil, err
 	}
 
 	// Handle step 17.
-	loginCredential.Authenticator.UpdateCounter(parsedResponse.Response.AuthenticatorData.Counter)
+	credential.Authenticator.UpdateCounter(parsedResponse.Response.AuthenticatorData.Counter)
 
+	// Check if the BackupEligible flag has changed.
+	if credential.Flags.BackupEligible != parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible() {
+		return nil, protocol.ErrBadRequest.WithDetails("BackupEligible flag inconsistency detected during login validation")
+	}
+
+	// Check for the invalid combination BE=0 and BS=1.
+	if !parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible() && parsedResponse.Response.AuthenticatorData.Flags.HasBackupState() {
+		return nil, protocol.ErrBadRequest.WithDetails("Invalid flag combination: BE=0 and BS=1")
+	}
 
-	// TODO: The backup eligible flag shouldn't change. Should decide if we want to error if it does.
 	// Update flags from response data.
-	loginCredential.Flags.UserPresent = parsedResponse.Response.AuthenticatorData.Flags.HasUserPresent()
-	loginCredential.Flags.UserVerified = parsedResponse.Response.AuthenticatorData.Flags.HasUserVerified()
-	loginCredential.Flags.BackupEligible = parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible()
-	loginCredential.Flags.BackupState = parsedResponse.Response.AuthenticatorData.Flags.HasBackupState()
+	credential.Flags.UserPresent = parsedResponse.Response.AuthenticatorData.Flags.HasUserPresent()
+	credential.Flags.UserVerified = parsedResponse.Response.AuthenticatorData.Flags.HasUserVerified()
+	credential.Flags.BackupEligible = parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible()
+	credential.Flags.BackupState = parsedResponse.Response.AuthenticatorData.Flags.HasBackupState()
 
-	return &loginCredential, nil
+	return &credential, nil
 }
diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/registration.go b/vendor/github.com/go-webauthn/webauthn/webauthn/registration.go
index 9715246..79590da 100644
--- a/vendor/github.com/go-webauthn/webauthn/webauthn/registration.go
+++ b/vendor/github.com/go-webauthn/webauthn/webauthn/registration.go
@@ -4,6 +4,7 @@ import (
 	"bytes"
 	"fmt"
 	"net/http"
+	"net/url"
 	"time"
 
 	"github.com/go-webauthn/webauthn/protocol"
@@ -29,7 +30,7 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 		return nil, nil, err
 	}
 
-	var entityUserID interface{}
+	var entityUserID any
 
 	if webauthn.Config.EncodeUserIDAsString {
 		entityUserID = string(user.WebAuthnID())
@@ -42,7 +43,6 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 		DisplayName: user.WebAuthnDisplayName(),
 		CredentialEntity: protocol.CredentialEntity{
 			Name: user.WebAuthnName(),
-			Icon: user.WebAuthnIcon(),
 		},
 	}
 
@@ -50,7 +50,6 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 		ID: webauthn.Config.RPID,
 		CredentialEntity: protocol.CredentialEntity{
 			Name: webauthn.Config.RPDisplayName,
-			Icon: webauthn.Config.RPIcon,
 		},
 	}
 
@@ -71,6 +70,16 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 		opt(&creation.Response)
 	}
 
+	if len(creation.Response.RelyingParty.ID) == 0 {
+		return nil, nil, fmt.Errorf("error generating credential creation: the relying party id must be provided via the configuration or a functional option for a creation")
+	} else if _, err = url.Parse(creation.Response.RelyingParty.ID); err != nil {
+		return nil, nil, fmt.Errorf("error generating credential creation: the relying party id failed to validate as it's not a valid uri with error: %w", err)
+	}
+
+	if len(creation.Response.RelyingParty.Name) == 0 {
+		return nil, nil, fmt.Errorf("error generating credential creation: the relying party display name must be provided via the configuration or a functional option for a creation")
+	}
+
 	if creation.Response.Timeout == 0 {
 		switch {
 		case creation.Response.AuthenticatorSelection.UserVerification == protocol.VerificationDiscouraged:
@@ -82,6 +91,7 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 
 	session = &SessionData{
 		Challenge:        challenge.String(),
+		RelyingPartyID:   creation.Response.RelyingParty.ID,
 		UserID:           user.WebAuthnID(),
 		UserVerification: creation.Response.AuthenticatorSelection.UserVerification,
 	}
@@ -93,6 +103,20 @@ func (webauthn *WebAuthn) BeginRegistration(user User, opts ...RegistrationOptio
 	return creation, session, nil
 }
 
+// WithCredentialParameters adjusts the credential parameters in the registration options.
+func WithCredentialParameters(credentialParams []protocol.CredentialParameter) RegistrationOption {
+	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
+		cco.Parameters = credentialParams
+	}
+}
+
+// WithExclusions adjusts the non-default parameters regarding credentials to exclude from registration.
+func WithExclusions(excludeList []protocol.CredentialDescriptor) RegistrationOption {
+	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
+		cco.CredentialExcludeList = excludeList
+	}
+}
+
 // WithAuthenticatorSelection adjusts the non-default parameters regarding the authenticator to select during
 // registration.
 func WithAuthenticatorSelection(authenticatorSelection protocol.AuthenticatorSelection) RegistrationOption {
@@ -101,10 +125,26 @@ func WithAuthenticatorSelection(authenticatorSel
 	}
 }
 
-// WithExclusions adjusts the non-default parameters regarding credentials to exclude from registration.
-func WithExclusions(excludeList []protocol.CredentialDescriptor) RegistrationOption {
+// WithResidentKeyRequirement sets both the resident key and require resident key protocol options.
+func WithResidentKeyRequirement(requirement protocol.ResidentKeyRequirement) RegistrationOption {
 	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
-		cco.CredentialExcludeList = excludeList
+		cco.AuthenticatorSelection.ResidentKey = requirement
+
+		switch requirement {
+		case protocol.ResidentKeyRequirementRequired:
+			cco.AuthenticatorSelection.RequireResidentKey = protocol.ResidentKeyRequired()
+		default:
+			cco.AuthenticatorSelection.RequireResidentKey = protocol.ResidentKeyNotRequired()
+		}
+	}
+}
+
+// WithPublicKeyCredentialHints adjusts the non-default hints for credential types to select during registration.
+//
+// WebAuthn Level 3.
+func WithPublicKeyCredentialHints(hints []protocol.PublicKeyCredentialHints) RegistrationOption {
+	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
+		cco.Hints = hints
 	}
 }
 
@@ -116,17 +156,19 @@ func WithConveyancePreference(preference protocol.ConveyancePreference) Registra
 	}
 }
 
-// WithExtensions adjusts the extension parameter in the registration options.
-func WithExtensions(extension protocol.AuthenticationExtensions) RegistrationOption {
+// WithAttestationFormats adjusts the non-default formats for credential types to select during registration.
+//
+// WebAuthn Level 3.
+func WithAttestationFormats(formats []protocol.AttestationFormat) RegistrationOption {
 	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
-		cco.Extensions = extension
+		cco.AttestationFormats = formats
 	}
 }
 
-// WithCredentialParameters adjusts the credential parameters in the registration options.
-func WithCredentialParameters(credentialParams []protocol.CredentialParameter) RegistrationOption {
+// WithExtensions adjusts the extension parameter in the registration options.
+func WithExtensions(extension protocol.AuthenticationExtensions) RegistrationOption {
 	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
-		cco.Parameters = credentialParams
+		cco.Extensions = extension
 	}
 }
 
@@ -137,7 +179,7 @@ func WithAppIdExcludeExtension(appid string) RegistrationOption {
 		for _, credential := range cco.CredentialExcludeList {
 			if credential.AttestationType == protocol.CredentialTypeFIDOU2F {
 				if cco.Extensions == nil {
-					cco.Extensions = map[string]interface{}{}
+					cco.Extensions = map[string]any{}
 				}
 
 				cco.Extensions[protocol.ExtensionAppIDExclude] = appid
@@ -146,17 +188,17 @@ func WithAppIdExcludeExtension(appid string) RegistrationOption {
 	}
 }
 
-// WithResidentKeyRequirement sets both the resident key and require resident key protocol options.
-func WithResidentKeyRequirement(requirement protocol.ResidentKeyRequirement) RegistrationOption {
+// WithRegistrationRelyingPartyID sets the relying party id for the registration.
+func WithRegistrationRelyingPartyID(id string) RegistrationOption {
 	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
-		cco.AuthenticatorSelection.ResidentKey = requirement
+		cco.RelyingParty.ID = id
+	}
+}
 
-		switch requirement {
-		case protocol.ResidentKeyRequirementRequired:
-			cco.AuthenticatorSelection.RequireResidentKey = protocol.ResidentKeyRequired()
-		default:
-			cco.AuthenticatorSelection.RequireResidentKey = protocol.ResidentKeyNotRequired()
-		}
+// WithRegistrationRelyingPartyName sets the relying party name for the registration.
+func WithRegistrationRelyingPartyName(name string) RegistrationOption {
+	return func(cco *protocol.PublicKeyCredentialCreationOptions) {
+		cco.RelyingParty.Name = name
 	}
 }
 
@@ -172,7 +214,7 @@ func (webauthn *WebAuthn) FinishRegistration(user User, session SessionData, res
 }
 
 // CreateCredential verifies a parsed response against the user's credentials and session data.
-func (webauthn *WebAuthn) CreateCredential(user User, session SessionData, parsedResponse *protocol.ParsedCredentialCreationData) (*Credential, error) {
+func (webauthn *WebAuthn) CreateCredential(user User, session SessionData, parsedResponse *protocol.ParsedCredentialCreationData) (credential *Credential, err error) {
 	if !bytes.Equal(user.WebAuthnID(), session.UserID) {
 		return nil, protocol.ErrBadRequest.WithDetails("ID mismatch for User and Session")
 	}
@@ -183,12 +225,13 @@ func (webauthn *WebAuthn) CreateCredential(user User, session SessionData, parse
 
 	shouldVerifyUser := session.UserVerification == protocol.VerificationRequired
 
-	invalidErr := parsedResponse.Verify(session.Challenge, shouldVerifyUser, webauthn.Config.RPID, webauthn.Config.RPOrigins)
-	if invalidErr != nil {
-		return nil, invalidErr
+	var clientDataHash []byte
+
+	if clientDataHash, err = parsedResponse.Verify(session.Challenge, shouldVerifyUser, webauthn.Config.RPID, webauthn.Config.RPOrigins, webauthn.Config.RPTopOrigins, webauthn.Config.RPTopOriginVerificationMode, webauthn.Config.MDS); err != nil {
+		return nil, err
 	}
 
-	return MakeNewCredential(parsedResponse)
+	return NewCredential(clientDataHash, parsedResponse)
 }
 
 func defaultRegistrationCredentialParameters() []protocol.CredentialParameter {
diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/types.go b/vendor/github.com/go-webauthn/webauthn/webauthn/types.go
index bb93f31..5ce2f93 100644
--- a/vendor/github.com/go-webauthn/webauthn/webauthn/types.go
+++ b/vendor/github.com/go-webauthn/webauthn/webauthn/types.go
@@ -5,6 +5,7 @@ import (
 	"net/url"
 	"time"
 
+	"github.com/go-webauthn/webauthn/metadata"
 	"github.com/go-webauthn/webauthn/protocol"
 )
 
@@ -36,6 +37,15 @@ type Config struct {
 	// qualified origins.
 	RPOrigins []string
 
+	// RPTopOrigins configures the list of Relying Party Server Top Origins that are permitted. These should be fully
+	// qualified origins.
+	RPTopOrigins []string
+
+	// RPTopOriginVerificationMode determines the verification mode for the Top Origin value. By default the
+	// TopOriginIgnoreVerificationMode is used however this is going to change at such a time as WebAuthn Level 3
+	// becomes recommended, implementers should explicitly set this value if they want stability.
+	RPTopOriginVerificationMode protocol.TopOriginVerificationMode
+
 	// AttestationPreference sets the default attestation conveyance preferences.
 	AttestationPreference protocol.ConveyancePreference
 
@@ -53,22 +63,10 @@ type Config struct {
 	// Timeouts configures various timeouts.
 	Timeouts TimeoutsConfig
 
-	validated bool
-
-	// RPIcon sets the icon URL for the Relying Party Server.
-	//
-	// Deprecated: this option has been removed from newer specifications due to security considerations.
-	RPIcon string
-
-	// RPOrigin configures the permitted Relying Party Server Origin.
-	//
-	// Deprecated: Use RPOrigins instead.
-	RPOrigin string
+	// MDS is a metadata.Provider and enables various metadata validations if configured.
+	MDS metadata.Provider
 
-	// Timeout configures the default timeout in milliseconds.
-	//
-	// Deprecated: Use Timeouts instead.
-	Timeout int
+	validated bool
 }
 
 // TimeoutsConfig represents the WebAuthn timeouts configuration.
@@ -97,34 +95,17 @@ func (config *Config) validate() error {
 		return nil
 	}
 
-	if len(config.RPDisplayName) == 0 {
-		return fmt.Errorf(errFmtFieldEmpty, "RPDisplayName")
-	}
-
-	if len(config.RPID) == 0 {
-		return fmt.Errorf(errFmtFieldEmpty, "RPID")
-	}
-
 	var err error
 
-	if _, err = url.Parse(config.RPID); err != nil {
-		return fmt.Errorf(errFmtFieldNotValidURI, "RPID", err)
-	}
-
-	if config.RPIcon != "" {
-		if _, err = url.Parse(config.RPIcon); err != nil {
-			return fmt.Errorf(errFmtFieldNotValidURI, "RPIcon", err)
+	if len(config.RPID) != 0 {
+		if _, err = url.Parse(config.RPID); err != nil {
+			return fmt.Errorf(errFmtFieldNotValidURI, "RPID", err)
 		}
 	}
 
 	defaultTimeoutConfig := defaultTimeout
 	defaultTimeoutUVDConfig := defaultTimeoutUVD
 
-	if config.Timeout != 0 {
-		defaultTimeoutConfig = time.Millisecond * time.Duration(config.Timeout)
-		defaultTimeoutUVDConfig = defaultTimeoutConfig
-	}
-
 	if config.Timeouts.Login.Timeout.Milliseconds() == 0 {
 		config.Timeouts.Login.Timeout = defaultTimeoutConfig
 	}
@@ -141,24 +122,17 @@ func (config *Config) validate() error {
 		config.Timeouts.Registration.TimeoutUVD = defaultTimeoutUVDConfig
 	}
 
-	if len(config.RPOrigin) > 0 {
-		if len(config.RPOrigins) != 0 {
-			return fmt.Errorf("deprecated field 'RPOrigin' can't be defined at the same tme as the replacement field 'RPOrigins'")
-		}
-
-		config.RPOrigins = []string{config.RPOrigin}
-	}
-
 	if len(config.RPOrigins) == 0 {
 		return fmt.Errorf("must provide at least one value to the 'RPOrigins' field")
 	}
 
-	if config.AuthenticatorSelection.RequireResidentKey == nil {
-		config.AuthenticatorSelection.RequireResidentKey = protocol.ResidentKeyNotRequired()
-	}
-
-	if config.AuthenticatorSelection.UserVerification == "" {
-		config.AuthenticatorSelection.UserVerification = protocol.VerificationPreferred
+	switch config.RPTopOriginVerificationMode {
+	case protocol.TopOriginDefaultVerificationMode:
+		config.RPTopOriginVerificationMode = protocol.TopOriginIgnoreVerificationMode
+	case protocol.TopOriginImplicitVerificationMode:
+		if len(config.RPTopOrigins) == 0 {
+			return fmt.Errorf("must provide at least one value to the 'RPTopOrigins' field when 'RPTopOriginVerificationMode' field is set to protocol.TopOriginImplicitVerificationMode")
+		}
 	}
 
 	config.validated = true
@@ -166,6 +140,34 @@ func (config *Config) validate() error {
 	return nil
 }
 
+func (c *Config) GetRPID() string {
+	return c.RPID
+}
+
+func (c *Config) GetOrigins() []string {
+	return c.RPOrigins
+}
+
+func (c *Config) GetTopOrigins() []string {
+	return c.RPTopOrigins
+}
+
+func (c *Config) GetTopOriginVerificationMode() protocol.TopOriginVerificationMode {
+	return c.RPTopOriginVerificationMode
+}
+
+func (c *Config) GetMetaDataProvider() metadata.Provider {
+	return c.MDS
+}
+
+type ConfigProvider interface {
+	GetRPID() string
+	GetOrigins() []string
+	GetTopOrigins() []string
+	GetTopOriginVerificationMode() protocol.TopOriginVerificationMode
+	GetMetaDataProvider() metadata.Provider
+}
+
 // User is an interface with the Relying Party's User entry and provides the fields and methods needed for WebAuthn
 // registration operations.
 type User interface {
@@ -196,16 +198,13 @@ type User interface {
 
 	// WebAuthnCredentials provides the list of Credential objects owned by the user.
 	WebAuthnCredentials() []Credential
-
-	// WebAuthnIcon is a deprecated option.
-	// Deprecated: this has been removed from the specification recommendation. Suggest a blank string.
-	WebAuthnIcon() string
 }
 
 // SessionData is the data that should be stored by the Relying Party for the duration of the web authentication
 // ceremony.
 type SessionData struct {
 	Challenge            string    `json:"challenge"`
+	RelyingPartyID       string    `json:"rpId"`
 	UserID               []byte    `json:"user_id"`
 	AllowedCredentialIDs [][]byte  `json:"allowed_credentials,omitempty"`
 	Expires              time.Time `json:"expires"`
diff --git a/vendor/github.com/go-webauthn/webauthn/webauthn/user.go b/vendor/github.com/go-webauthn/webauthn/webauthn/user.go
deleted file mode 100644
index 045ed8f..0000000
--- a/vendor/github.com/go-webauthn/webauthn/webauthn/user.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package webauthn
-
-// TODO: move this to a _test.go file.
-type defaultUser struct {
-	id []byte
-}
-
-var _ User = (*defaultUser)(nil)
-
-func (user *defaultUser) WebAuthnID() []byte {
-	return user.id
-}
-
-func (user *defaultUser) WebAuthnName() string {
-	return "newUser"
-}
-
-func (user *defaultUser) WebAuthnDisplayName() string {
-	return "New User"
-}
-
-func (user *defaultUser) WebAuthnIcon() string {
-	return "https://pics.com/avatar.png"
-}
-
-func (user *defaultUser) WebAuthnCredentials() []Credential {
-	return []Credential{}
-}
diff --git a/vendor/github.com/google/go-tpm/legacy/tpm2/tpm2.go b/vendor/github.com/google/go-tpm/legacy/tpm2/tpm2.go
index 8de0c40..18d5a96 100644
--- a/vendor/github.com/google/go-tpm/legacy/tpm2/tpm2.go
+++ b/vendor/github.com/google/go-tpm/legacy/tpm2/tpm2.go
@@ -1276,7 +1276,6 @@ func NVDefineSpace(rw io.ReadWriter, owner, handle tpmutil.Handle, ownerAuth, au
 		Auth: []byte(ownerAuth),
 	}
 	return NVDefineSpaceEx(rw, owner, authString, nvPub, authArea)
-
 }
 
 // NVDefineSpaceEx accepts NVPublic structure and AuthCommand, allowing more flexibility.
@@ -2121,12 +2120,12 @@ func RSAEncrypt(rw io.ReadWriter, key tpmutil.Handle, message []byte, scheme *As
 	return decodeRSAEncrypt(resp)
 }
 
-func encodeRSADecrypt(key tpmutil.Handle, password string, message tpmutil.U16Bytes, scheme *AsymScheme, label string) ([]byte, error) {
+func encodeRSADecrypt(sessionHandle, key tpmutil.Handle, password string, message tpmutil.U16Bytes, scheme *AsymScheme, label string) ([]byte, error) {
 	ha, err := tpmutil.Pack(key)
 	if err != nil {
 		return nil, err
 	}
-	auth, err := encodeAuthArea(AuthCommand{Session: HandlePasswordSession, Attributes: AttrContinueSession, Auth: []byte(password)})
+	auth, err := encodeAuthArea(AuthCommand{Session: sessionHandle, Attributes: AttrContinueSession, Auth: []byte(password)})
 	if err != nil {
 		return nil, err
 	}
@@ -2160,7 +2159,15 @@ func decodeRSADecrypt(resp []byte) ([]byte, error) {
 // label, a null byte is appended to the label and the null byte is included in the
 // padding scheme.
 func RSADecrypt(rw io.ReadWriter, key tpmutil.Handle, password string, message []byte, scheme *AsymScheme, label string) ([]byte, error) {
-	Cmd, err := encodeRSADecrypt(key, password, message, scheme, label)
+	return RSADecryptWithSession(rw, HandlePasswordSession, key, password, message, scheme, label)
+}
+
+// RSADecryptWithSession performs RSA decryption in the TPM according to RFC 3447. The key must be
+// a private RSA key in the TPM with FlagDecrypt set. Note that when using OAEP with a
+// label, a null byte is appended to the label and the null byte is included in the
+// padding scheme.
+func RSADecryptWithSession(rw io.ReadWriter, sessionHandle, key tpmutil.Handle, password string, message []byte, scheme *AsymScheme, label string) ([]byte, error) {
+	Cmd, err := encodeRSADecrypt(sessionHandle, key, password, message, scheme, label)
 	if err != nil {
 		return nil, err
 	}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index d02a38c..6aa9d7c 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -1,24 +1,24 @@
 # github.com/davecgh/go-spew v1.1.1
 ## explicit
 github.com/davecgh/go-spew/spew
-# github.com/fxamacker/cbor/v2 v2.6.0
+# github.com/fxamacker/cbor/v2 v2.7.0
 ## explicit; go 1.17
 github.com/fxamacker/cbor/v2
-# github.com/go-webauthn/webauthn v0.10.2
-## explicit; go 1.21
+# github.com/go-webauthn/webauthn v0.11.0
+## explicit; go 1.22
 github.com/go-webauthn/webauthn/metadata
 github.com/go-webauthn/webauthn/protocol
 github.com/go-webauthn/webauthn/protocol/webauthncbor
 github.com/go-webauthn/webauthn/protocol/webauthncose
 github.com/go-webauthn/webauthn/webauthn
-# github.com/go-webauthn/x v0.1.9
-## explicit; go 1.21
+# github.com/go-webauthn/x v0.1.12
+## explicit; go 1.22
 github.com/go-webauthn/x/revoke
 # github.com/golang-jwt/jwt/v5 v5.2.1
 ## explicit; go 1.18
 github.com/golang-jwt/jwt/v5
-# github.com/google/go-tpm v0.9.0
-## explicit; go 1.20
+# github.com/google/go-tpm v0.9.1
+## explicit; go 1.22
 github.com/google/go-tpm/legacy/tpm2
 github.com/google/go-tpm/tpmutil
 github.com/google/go-tpm/tpmutil/tbs
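Taken together, the webauthn/types.go changes above drop the deprecated RPIcon, RPOrigin and Timeout fields and introduce RPTopOrigins, RPTopOriginVerificationMode and MDS. A minimal v0.11.0 configuration therefore looks roughly like the sketch below; the RPID, display name and origin values are placeholders rather than values taken from this repository.

```go
package main

import (
	"log"

	"github.com/go-webauthn/webauthn/protocol"
	"github.com/go-webauthn/webauthn/webauthn"
)

func main() {
	// Placeholder relying-party values; substitute your own RPID and origins.
	cfg := &webauthn.Config{
		RPID:          "example.com",
		RPDisplayName: "Example RP",
		RPOrigins:     []string{"https://example.com"},

		// New in v0.11.0: top origins are ignored unless a stricter mode is set.
		RPTopOrigins:                []string{"https://example.com"},
		RPTopOriginVerificationMode: protocol.TopOriginIgnoreVerificationMode,

		// MDS (a metadata.Provider) is optional; leaving it nil skips the new
		// authenticator-metadata checks performed during login validation.
	}

	w, err := webauthn.New(cfg)
	if err != nil {
		log.Fatalf("configuring webauthn: %v", err)
	}

	_ = w // hand the instance to your registration/login handlers
}
```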
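Since WebAuthnIcon is gone from the User interface (matching the earlier removals in handlers_test.go and mock_User.go), an implementation now only needs the four remaining methods. A hypothetical sketch:

```go
package rp

import "github.com/go-webauthn/webauthn/webauthn"

// user is a hypothetical User implementation; v0.11.0 removed WebAuthnIcon
// from the interface, so no icon method is declared here.
type user struct {
	id          []byte
	name        string
	displayName string
	credentials []webauthn.Credential
}

func (u *user) WebAuthnID() []byte                         { return u.id }
func (u *user) WebAuthnName() string                       { return u.name }
func (u *user) WebAuthnDisplayName() string                { return u.displayName }
func (u *user) WebAuthnCredentials() []webauthn.Credential { return u.credentials }

// Compile-time check that the interface is satisfied without WebAuthnIcon.
var _ webauthn.User = (*user)(nil)
```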
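BeginRegistration now fails fast when the relying party ID or display name is missing, and both can be supplied per ceremony with the new WithRegistrationRelyingPartyID and WithRegistrationRelyingPartyName options. The sketch below assumes placeholder values and a hypothetical saveSession helper:

```go
package rp

import (
	"net/http"

	"github.com/go-webauthn/webauthn/webauthn"
)

// beginRegistration shows the per-ceremony relying-party options; w, u and
// saveSession are hypothetical stand-ins for your instance, user and session store.
func beginRegistration(w *webauthn.WebAuthn, u webauthn.User, saveSession func(*webauthn.SessionData)) http.HandlerFunc {
	return func(rw http.ResponseWriter, r *http.Request) {
		creation, session, err := w.BeginRegistration(
			u,
			// Values that BeginRegistration now validates before issuing a challenge.
			webauthn.WithRegistrationRelyingPartyID("example.com"),
			webauthn.WithRegistrationRelyingPartyName("Example RP"),
		)
		if err != nil {
			http.Error(rw, err.Error(), http.StatusInternalServerError)
			return
		}

		saveSession(session)

		_ = creation // normally marshalled to JSON and sent back to the browser
	}
}
```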
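On the login side, ValidateDiscoverableLogin is now a thin wrapper over ValidatePasskeyLogin, which also returns the resolved user. A sketch of a finish-login handler built on it; loadSession and findUserByHandle are hypothetical stand-ins for the application's session and user stores:

```go
package rp

import (
	"log"
	"net/http"

	"github.com/go-webauthn/webauthn/protocol"
	"github.com/go-webauthn/webauthn/webauthn"
)

// finishPasskeyLogin sketches the assertion-finish step with the new API.
func finishPasskeyLogin(
	w *webauthn.WebAuthn,
	loadSession func(*http.Request) webauthn.SessionData,
	findUserByHandle func(rawID, userHandle []byte) (webauthn.User, error),
) http.HandlerFunc {
	return func(rw http.ResponseWriter, r *http.Request) {
		parsed, err := protocol.ParseCredentialRequestResponse(r)
		if err != nil {
			http.Error(rw, err.Error(), http.StatusBadRequest)
			return
		}

		session := loadSession(r)

		// ValidatePasskeyLogin returns the resolved user as well as the credential,
		// so the handler no longer has to look the user up a second time.
		user, credential, err := w.ValidatePasskeyLogin(findUserByHandle, session, parsed)
		if err != nil {
			http.Error(rw, err.Error(), http.StatusUnauthorized)
			return
		}

		log.Printf("user %s logged in with credential %x (sign count %d)",
			user.WebAuthnName(), credential.ID, credential.Authenticator.SignCount)
	}
}
```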