Add Nix flake for mciasctl and mciasgrpcctl
Vendor dependencies and expose the control-program binaries via `nix build`. Uses nixpkgs-unstable for Go 1.26 support.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
27
flake.lock
generated
Normal file
27
flake.lock
generated
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1774273680,
|
||||||
|
"narHash": "sha256-a++tZ1RQsDb1I0NHrFwdGuRlR5TORvCEUksM459wKUA=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "fdc7b8f7b30fdbedec91b71ed82f36e1637483ed",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "nixpkgs-unstable",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": "nixpkgs"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
34
flake.nix
Normal file
34
flake.nix
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
{
  description = "mcias - Metacircular Identity and Access Service";

  inputs = {
    # nixpkgs-unstable: tracked for a sufficiently recent Go toolchain.
    nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
  };

  outputs =
    { self, nixpkgs }:
    let
      # Only x86_64-linux is exposed; add further systems here if needed.
      system = "x86_64-linux";
      pkgs = nixpkgs.legacyPackages.${system};
      # Version baked into the binaries via the ldflags -X directive below.
      version = "1.7.0";
    in
    {
      packages.${system} = {
        # Builds both control binaries from this repository's Go module.
        default = pkgs.buildGoModule {
          pname = "mciasctl";
          inherit version;
          src = ./.;
          # null vendorHash: use the checked-in vendor/ directory instead
          # of a fixed-output vendor derivation.
          vendorHash = null;
          subPackages = [
            "cmd/mciasctl"
            "cmd/mciasgrpcctl"
          ];
          ldflags = [
            # -s/-w strip the symbol table and DWARF debug info.
            "-s"
            "-w"
            # Embed the flake version into each binary's main.version.
            "-X main.version=${version}"
          ];
        };
      };
    };
}
|
||||||
21
vendor/github.com/dustin/go-humanize/.travis.yml
generated
vendored
Normal file
21
vendor/github.com/dustin/go-humanize/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
sudo: false
|
||||||
|
language: go
|
||||||
|
go_import_path: github.com/dustin/go-humanize
|
||||||
|
go:
|
||||||
|
- 1.13.x
|
||||||
|
- 1.14.x
|
||||||
|
- 1.15.x
|
||||||
|
- 1.16.x
|
||||||
|
- stable
|
||||||
|
- master
|
||||||
|
matrix:
|
||||||
|
allow_failures:
|
||||||
|
- go: master
|
||||||
|
fast_finish: true
|
||||||
|
install:
|
||||||
|
- # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
|
||||||
|
script:
|
||||||
|
- diff -u <(echo -n) <(gofmt -d -s .)
|
||||||
|
- go vet .
|
||||||
|
- go install -v -race ./...
|
||||||
|
- go test -v -race ./...
|
||||||
21
vendor/github.com/dustin/go-humanize/LICENSE
generated
vendored
Normal file
21
vendor/github.com/dustin/go-humanize/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
Copyright (c) 2005-2008 Dustin Sallings <dustin@spy.net>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
|
<http://www.opensource.org/licenses/mit-license.php>
|
||||||
124
vendor/github.com/dustin/go-humanize/README.markdown
generated
vendored
Normal file
124
vendor/github.com/dustin/go-humanize/README.markdown
generated
vendored
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
# Humane Units [](https://travis-ci.org/dustin/go-humanize) [](https://godoc.org/github.com/dustin/go-humanize)
|
||||||
|
|
||||||
|
Just a few functions for helping humanize times and sizes.
|
||||||
|
|
||||||
|
`go get` it as `github.com/dustin/go-humanize`, import it as
|
||||||
|
`"github.com/dustin/go-humanize"`, use it as `humanize`.
|
||||||
|
|
||||||
|
See [godoc](https://pkg.go.dev/github.com/dustin/go-humanize) for
|
||||||
|
complete documentation.
|
||||||
|
|
||||||
|
## Sizes
|
||||||
|
|
||||||
|
This lets you take numbers like `82854982` and convert them to useful
|
||||||
|
strings like, `83 MB` or `79 MiB` (whichever you prefer).
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
fmt.Printf("That file is %s.", humanize.Bytes(82854982)) // That file is 83 MB.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Times
|
||||||
|
|
||||||
|
This lets you take a `time.Time` and spit it out in relative terms.
|
||||||
|
For example, `12 seconds ago` or `3 days from now`.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
fmt.Printf("This was touched %s.", humanize.Time(someTimeInstance)) // This was touched 7 hours ago.
|
||||||
|
```
|
||||||
|
|
||||||
|
Thanks to Kyle Lemons for the time implementation from an IRC
|
||||||
|
conversation one day. It's pretty neat.
|
||||||
|
|
||||||
|
## Ordinals
|
||||||
|
|
||||||
|
From a [mailing list discussion][odisc] where a user wanted to be able
|
||||||
|
to label ordinals.
|
||||||
|
|
||||||
|
0 -> 0th
|
||||||
|
1 -> 1st
|
||||||
|
2 -> 2nd
|
||||||
|
3 -> 3rd
|
||||||
|
4 -> 4th
|
||||||
|
[...]
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
fmt.Printf("You're my %s best friend.", humanize.Ordinal(193)) // You are my 193rd best friend.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commas
|
||||||
|
|
||||||
|
Want to shove commas into numbers? Be my guest.
|
||||||
|
|
||||||
|
0 -> 0
|
||||||
|
100 -> 100
|
||||||
|
1000 -> 1,000
|
||||||
|
1000000000 -> 1,000,000,000
|
||||||
|
-100000 -> -100,000
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
fmt.Printf("You owe $%s.\n", humanize.Comma(6582491)) // You owe $6,582,491.
|
||||||
|
```
|
||||||
|
|
||||||
|
## Ftoa
|
||||||
|
|
||||||
|
Nicer float64 formatter that removes trailing zeros.
|
||||||
|
|
||||||
|
```go
|
||||||
|
fmt.Printf("%f", 2.24) // 2.240000
|
||||||
|
fmt.Printf("%s", humanize.Ftoa(2.24)) // 2.24
|
||||||
|
fmt.Printf("%f", 2.0) // 2.000000
|
||||||
|
fmt.Printf("%s", humanize.Ftoa(2.0)) // 2
|
||||||
|
```
|
||||||
|
|
||||||
|
## SI notation
|
||||||
|
|
||||||
|
Format numbers with [SI notation][sinotation].
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```go
|
||||||
|
humanize.SI(0.00000000223, "M") // 2.23 nM
|
||||||
|
```
|
||||||
|
|
||||||
|
## English-specific functions
|
||||||
|
|
||||||
|
The following functions are in the `humanize/english` subpackage.
|
||||||
|
|
||||||
|
### Plurals
|
||||||
|
|
||||||
|
Simple English pluralization
|
||||||
|
|
||||||
|
```go
|
||||||
|
english.PluralWord(1, "object", "") // object
|
||||||
|
english.PluralWord(42, "object", "") // objects
|
||||||
|
english.PluralWord(2, "bus", "") // buses
|
||||||
|
english.PluralWord(99, "locus", "loci") // loci
|
||||||
|
|
||||||
|
english.Plural(1, "object", "") // 1 object
|
||||||
|
english.Plural(42, "object", "") // 42 objects
|
||||||
|
english.Plural(2, "bus", "") // 2 buses
|
||||||
|
english.Plural(99, "locus", "loci") // 99 loci
|
||||||
|
```
|
||||||
|
|
||||||
|
### Word series
|
||||||
|
|
||||||
|
Format comma-separated words lists with conjuctions:
|
||||||
|
|
||||||
|
```go
|
||||||
|
english.WordSeries([]string{"foo"}, "and") // foo
|
||||||
|
english.WordSeries([]string{"foo", "bar"}, "and") // foo and bar
|
||||||
|
english.WordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar and baz
|
||||||
|
|
||||||
|
english.OxfordWordSeries([]string{"foo", "bar", "baz"}, "and") // foo, bar, and baz
|
||||||
|
```
|
||||||
|
|
||||||
|
[odisc]: https://groups.google.com/d/topic/golang-nuts/l8NhI74jl-4/discussion
|
||||||
|
[sinotation]: http://en.wikipedia.org/wiki/Metric_prefix
|
||||||
31
vendor/github.com/dustin/go-humanize/big.go
generated
vendored
Normal file
31
vendor/github.com/dustin/go-humanize/big.go
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math/big"
|
||||||
|
)
|
||||||
|
|
||||||
|
// order of magnitude (to a max order)
|
||||||
|
func oomm(n, b *big.Int, maxmag int) (float64, int) {
|
||||||
|
mag := 0
|
||||||
|
m := &big.Int{}
|
||||||
|
for n.Cmp(b) >= 0 {
|
||||||
|
n.DivMod(n, b, m)
|
||||||
|
mag++
|
||||||
|
if mag == maxmag && maxmag >= 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
|
||||||
|
}
|
||||||
|
|
||||||
|
// total order of magnitude
|
||||||
|
// (same as above, but with no upper limit)
|
||||||
|
func oom(n, b *big.Int) (float64, int) {
|
||||||
|
mag := 0
|
||||||
|
m := &big.Int{}
|
||||||
|
for n.Cmp(b) >= 0 {
|
||||||
|
n.DivMod(n, b, m)
|
||||||
|
mag++
|
||||||
|
}
|
||||||
|
return float64(n.Int64()) + (float64(m.Int64()) / float64(b.Int64())), mag
|
||||||
|
}
|
||||||
189
vendor/github.com/dustin/go-humanize/bigbytes.go
generated
vendored
Normal file
189
vendor/github.com/dustin/go-humanize/bigbytes.go
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math/big"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// bigIECExp is the IEC (binary) unit step: 1024.
	bigIECExp = big.NewInt(1024)

	// BigByte is one byte in big.Ints
	BigByte = big.NewInt(1)
	// BigKiByte is 1,024 bytes in big.Ints
	BigKiByte = (&big.Int{}).Mul(BigByte, bigIECExp)
	// BigMiByte is 1,024 k bytes in big.Ints
	BigMiByte = (&big.Int{}).Mul(BigKiByte, bigIECExp)
	// BigGiByte is 1,024 m bytes in big.Ints
	BigGiByte = (&big.Int{}).Mul(BigMiByte, bigIECExp)
	// BigTiByte is 1,024 g bytes in big.Ints
	BigTiByte = (&big.Int{}).Mul(BigGiByte, bigIECExp)
	// BigPiByte is 1,024 t bytes in big.Ints
	BigPiByte = (&big.Int{}).Mul(BigTiByte, bigIECExp)
	// BigEiByte is 1,024 p bytes in big.Ints
	BigEiByte = (&big.Int{}).Mul(BigPiByte, bigIECExp)
	// BigZiByte is 1,024 e bytes in big.Ints
	BigZiByte = (&big.Int{}).Mul(BigEiByte, bigIECExp)
	// BigYiByte is 1,024 z bytes in big.Ints
	BigYiByte = (&big.Int{}).Mul(BigZiByte, bigIECExp)
	// BigRiByte is 1,024 y bytes in big.Ints
	BigRiByte = (&big.Int{}).Mul(BigYiByte, bigIECExp)
	// BigQiByte is 1,024 r bytes in big.Ints
	BigQiByte = (&big.Int{}).Mul(BigRiByte, bigIECExp)
)
|
||||||
|
|
||||||
|
var (
	// bigSIExp is the SI (decimal) unit step: 1000.
	bigSIExp = big.NewInt(1000)

	// BigSIByte is one SI byte in big.Ints
	BigSIByte = big.NewInt(1)
	// BigKByte is 1,000 SI bytes in big.Ints
	BigKByte = (&big.Int{}).Mul(BigSIByte, bigSIExp)
	// BigMByte is 1,000 SI k bytes in big.Ints
	BigMByte = (&big.Int{}).Mul(BigKByte, bigSIExp)
	// BigGByte is 1,000 SI m bytes in big.Ints
	BigGByte = (&big.Int{}).Mul(BigMByte, bigSIExp)
	// BigTByte is 1,000 SI g bytes in big.Ints
	BigTByte = (&big.Int{}).Mul(BigGByte, bigSIExp)
	// BigPByte is 1,000 SI t bytes in big.Ints
	BigPByte = (&big.Int{}).Mul(BigTByte, bigSIExp)
	// BigEByte is 1,000 SI p bytes in big.Ints
	BigEByte = (&big.Int{}).Mul(BigPByte, bigSIExp)
	// BigZByte is 1,000 SI e bytes in big.Ints
	BigZByte = (&big.Int{}).Mul(BigEByte, bigSIExp)
	// BigYByte is 1,000 SI z bytes in big.Ints
	BigYByte = (&big.Int{}).Mul(BigZByte, bigSIExp)
	// BigRByte is 1,000 SI y bytes in big.Ints
	BigRByte = (&big.Int{}).Mul(BigYByte, bigSIExp)
	// BigQByte is 1,000 SI r bytes in big.Ints
	BigQByte = (&big.Int{}).Mul(BigRByte, bigSIExp)
)
|
||||||
|
|
||||||
|
// bigBytesSizeTable maps lower-cased unit suffixes (with or without a
// trailing "b", and with no suffix at all meaning bytes) to their
// multipliers, for ParseBigBytes.
var bigBytesSizeTable = map[string]*big.Int{
	"b":   BigByte,
	"kib": BigKiByte,
	"kb":  BigKByte,
	"mib": BigMiByte,
	"mb":  BigMByte,
	"gib": BigGiByte,
	"gb":  BigGByte,
	"tib": BigTiByte,
	"tb":  BigTByte,
	"pib": BigPiByte,
	"pb":  BigPByte,
	"eib": BigEiByte,
	"eb":  BigEByte,
	"zib": BigZiByte,
	"zb":  BigZByte,
	"yib": BigYiByte,
	"yb":  BigYByte,
	"rib": BigRiByte,
	"rb":  BigRByte,
	"qib": BigQiByte,
	"qb":  BigQByte,
	// Without suffix
	"":   BigByte,
	"ki": BigKiByte,
	"k":  BigKByte,
	"mi": BigMiByte,
	"m":  BigMByte,
	"gi": BigGiByte,
	"g":  BigGByte,
	"ti": BigTiByte,
	"t":  BigTByte,
	"pi": BigPiByte,
	"p":  BigPByte,
	"ei": BigEiByte,
	"e":  BigEByte,
	"z":  BigZByte,
	"zi": BigZiByte,
	"y":  BigYByte,
	"yi": BigYiByte,
	"r":  BigRByte,
	"ri": BigRiByte,
	"q":  BigQByte,
	"qi": BigQiByte,
}
|
||||||
|
|
||||||
|
// ten is the threshold below which humanateBigBytes prints a plain "<n> B".
var ten = big.NewInt(10)
|
||||||
|
|
||||||
|
func humanateBigBytes(s, base *big.Int, sizes []string) string {
|
||||||
|
if s.Cmp(ten) < 0 {
|
||||||
|
return fmt.Sprintf("%d B", s)
|
||||||
|
}
|
||||||
|
c := (&big.Int{}).Set(s)
|
||||||
|
val, mag := oomm(c, base, len(sizes)-1)
|
||||||
|
suffix := sizes[mag]
|
||||||
|
f := "%.0f %s"
|
||||||
|
if val < 10 {
|
||||||
|
f = "%.1f %s"
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf(f, val, suffix)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// BigBytes produces a human readable representation of an SI size.
|
||||||
|
//
|
||||||
|
// See also: ParseBigBytes.
|
||||||
|
//
|
||||||
|
// BigBytes(82854982) -> 83 MB
|
||||||
|
func BigBytes(s *big.Int) string {
|
||||||
|
sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB", "RB", "QB"}
|
||||||
|
return humanateBigBytes(s, bigSIExp, sizes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BigIBytes produces a human readable representation of an IEC size.
|
||||||
|
//
|
||||||
|
// See also: ParseBigBytes.
|
||||||
|
//
|
||||||
|
// BigIBytes(82854982) -> 79 MiB
|
||||||
|
func BigIBytes(s *big.Int) string {
|
||||||
|
sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB", "RiB", "QiB"}
|
||||||
|
return humanateBigBytes(s, bigIECExp, sizes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseBigBytes parses a string representation of bytes into the number
// of bytes it represents.
//
// See also: BigBytes, BigIBytes.
//
// ParseBigBytes("42 MB") -> 42000000, nil
// ParseBigBytes("42 mib") -> 44040192, nil
func ParseBigBytes(s string) (*big.Int, error) {
	// Scan the numeric prefix: digits plus '.' and ',' (thousands separator).
	lastDigit := 0
	hasComma := false
	for _, r := range s {
		if !(unicode.IsDigit(r) || r == '.' || r == ',') {
			break
		}
		if r == ',' {
			hasComma = true
		}
		lastDigit++
	}

	num := s[:lastDigit]
	if hasComma {
		// Commas are cosmetic; drop them before numeric parsing.
		num = strings.Replace(num, ",", "", -1)
	}

	// big.Rat implements fmt.Scanner, so %f scans an arbitrary-precision
	// (possibly fractional) value without float64 rounding.
	val := &big.Rat{}
	_, err := fmt.Sscanf(num, "%f", val)
	if err != nil {
		return nil, err
	}

	// Whatever follows the number selects the unit multiplier
	// (case-insensitive; surrounding whitespace ignored).
	extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
	if m, ok := bigBytesSizeTable[extra]; ok {
		mv := (&big.Rat{}).SetInt(m)
		val.Mul(val, mv)
		rv := &big.Int{}
		// Keep only the integer part of the scaled value.
		rv.Div(val.Num(), val.Denom())
		return rv, nil
	}

	return nil, fmt.Errorf("unhandled size name: %v", extra)
}
|
||||||
143
vendor/github.com/dustin/go-humanize/bytes.go
generated
vendored
Normal file
143
vendor/github.com/dustin/go-humanize/bytes.go
generated
vendored
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
)
|
||||||
|
|
||||||
|
// IEC Sizes.
// kibis of bits
const (
	// Byte is the base unit; each successive constant is 1024x larger.
	Byte = 1 << (iota * 10)
	KiByte
	MiByte
	GiByte
	TiByte
	PiByte
	EiByte
)
|
||||||
|
|
||||||
|
// SI Sizes.
const (
	// IByte is the base unit; each successive constant is 1000x larger.
	IByte = 1
	KByte = IByte * 1000
	MByte = KByte * 1000
	GByte = MByte * 1000
	TByte = GByte * 1000
	PByte = TByte * 1000
	EByte = PByte * 1000
)
|
||||||
|
|
||||||
|
// bytesSizeTable maps lower-cased unit suffixes (with or without a
// trailing "b", and with no suffix at all meaning bytes) to their byte
// multipliers, for ParseBytes.
var bytesSizeTable = map[string]uint64{
	"b":   Byte,
	"kib": KiByte,
	"kb":  KByte,
	"mib": MiByte,
	"mb":  MByte,
	"gib": GiByte,
	"gb":  GByte,
	"tib": TiByte,
	"tb":  TByte,
	"pib": PiByte,
	"pb":  PByte,
	"eib": EiByte,
	"eb":  EByte,
	// Without suffix
	"":   Byte,
	"ki": KiByte,
	"k":  KByte,
	"mi": MiByte,
	"m":  MByte,
	"gi": GiByte,
	"g":  GByte,
	"ti": TiByte,
	"t":  TByte,
	"pi": PiByte,
	"p":  PByte,
	"ei": EiByte,
	"e":  EByte,
}
|
||||||
|
|
||||||
|
func logn(n, b float64) float64 {
|
||||||
|
return math.Log(n) / math.Log(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func humanateBytes(s uint64, base float64, sizes []string) string {
|
||||||
|
if s < 10 {
|
||||||
|
return fmt.Sprintf("%d B", s)
|
||||||
|
}
|
||||||
|
e := math.Floor(logn(float64(s), base))
|
||||||
|
suffix := sizes[int(e)]
|
||||||
|
val := math.Floor(float64(s)/math.Pow(base, e)*10+0.5) / 10
|
||||||
|
f := "%.0f %s"
|
||||||
|
if val < 10 {
|
||||||
|
f = "%.1f %s"
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Sprintf(f, val, suffix)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bytes produces a human readable representation of an SI size.
|
||||||
|
//
|
||||||
|
// See also: ParseBytes.
|
||||||
|
//
|
||||||
|
// Bytes(82854982) -> 83 MB
|
||||||
|
func Bytes(s uint64) string {
|
||||||
|
sizes := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"}
|
||||||
|
return humanateBytes(s, 1000, sizes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IBytes produces a human readable representation of an IEC size.
|
||||||
|
//
|
||||||
|
// See also: ParseBytes.
|
||||||
|
//
|
||||||
|
// IBytes(82854982) -> 79 MiB
|
||||||
|
func IBytes(s uint64) string {
|
||||||
|
sizes := []string{"B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB"}
|
||||||
|
return humanateBytes(s, 1024, sizes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseBytes parses a string representation of bytes into the number
// of bytes it represents.
//
// See Also: Bytes, IBytes.
//
// ParseBytes("42 MB") -> 42000000, nil
// ParseBytes("42 mib") -> 44040192, nil
func ParseBytes(s string) (uint64, error) {
	// Scan the numeric prefix: digits plus '.' and ',' (thousands separator).
	lastDigit := 0
	hasComma := false
	for _, r := range s {
		if !(unicode.IsDigit(r) || r == '.' || r == ',') {
			break
		}
		if r == ',' {
			hasComma = true
		}
		lastDigit++
	}

	num := s[:lastDigit]
	if hasComma {
		// Commas are cosmetic; drop them before numeric parsing.
		num = strings.Replace(num, ",", "", -1)
	}

	f, err := strconv.ParseFloat(num, 64)
	if err != nil {
		return 0, err
	}

	// Whatever follows the number selects the unit multiplier
	// (case-insensitive; surrounding whitespace ignored).
	extra := strings.ToLower(strings.TrimSpace(s[lastDigit:]))
	if m, ok := bytesSizeTable[extra]; ok {
		f *= float64(m)
		// float64(math.MaxUint64) rounds up to 2^64, so this also rejects
		// values that would overflow the uint64 conversion below.
		if f >= math.MaxUint64 {
			return 0, fmt.Errorf("too large: %v", s)
		}
		return uint64(f), nil
	}

	return 0, fmt.Errorf("unhandled size name: %v", extra)
}
|
||||||
116
vendor/github.com/dustin/go-humanize/comma.go
generated
vendored
Normal file
116
vendor/github.com/dustin/go-humanize/comma.go
generated
vendored
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"math"
|
||||||
|
"math/big"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Comma produces a string form of the given number in base 10 with
// commas after every three orders of magnitude.
//
// e.g. Comma(834142) -> 834,142
func Comma(v int64) string {
	// math.MinInt64 has no positive counterpart, so handle it explicitly.
	if v == math.MinInt64 {
		return "-9,223,372,036,854,775,808"
	}

	sign := ""
	if v < 0 {
		sign = "-"
		v = -v
	}

	// int64 has at most 7 three-digit groups; fill them from the back.
	groups := []string{"", "", "", "", "", "", ""}
	i := len(groups) - 1
	for v > 999 {
		g := strconv.FormatInt(v%1000, 10)
		// Interior groups are zero-padded to exactly three digits.
		for len(g) < 3 {
			g = "0" + g
		}
		groups[i] = g
		v /= 1000
		i--
	}
	groups[i] = strconv.Itoa(int(v))
	return sign + strings.Join(groups[i:], ",")
}
|
||||||
|
|
||||||
|
// Commaf produces a string form of the given number in base 10 with
// commas after every three orders of magnitude.
//
// e.g. Commaf(834142.32) -> 834,142.32
func Commaf(v float64) string {
	var out bytes.Buffer
	if v < 0 {
		out.WriteByte('-')
		v = -v
	}

	// Shortest decimal form, split into whole and fractional parts.
	parts := strings.Split(strconv.FormatFloat(v, 'f', -1, 64), ".")
	whole := parts[0]

	// Emit a short leading group when the length isn't a multiple of 3.
	pos := 0
	if len(whole)%3 != 0 {
		pos = len(whole) % 3
		out.WriteString(whole[:pos])
		out.WriteByte(',')
	}
	for ; pos < len(whole); pos += 3 {
		out.WriteString(whole[pos : pos+3])
		out.WriteByte(',')
	}
	// Drop the comma written after the final group.
	out.Truncate(out.Len() - 1)

	if len(parts) > 1 {
		out.WriteByte('.')
		out.WriteString(parts[1])
	}
	return out.String()
}
|
||||||
|
|
||||||
|
// CommafWithDigits works like Commaf but limits the resulting
// string to the given number of decimal places.
//
// e.g. CommafWithDigits(834142.32, 1) -> 834,142.3
func CommafWithDigits(f float64, decimals int) string {
	// Truncation (not rounding) of the fractional digits, per
	// stripTrailingDigits.
	return stripTrailingDigits(Commaf(f), decimals)
}
|
||||||
|
|
||||||
|
// BigComma produces a string form of the given big.Int in base 10
// with commas after every three orders of magnitude.
//
// NOTE: b is modified in place — Abs and the DivMod loop below reduce
// the caller's value to its leading group.
func BigComma(b *big.Int) string {
	sign := ""
	if b.Sign() < 0 {
		sign = "-"
		b.Abs(b)
	}

	athousand := big.NewInt(1000)
	// oom consumes its argument, so pass a copy; only the magnitude m is
	// needed here, to size the output slice.
	c := (&big.Int{}).Set(b)
	_, m := oom(c, athousand)
	parts := make([]string, m+1)
	j := len(parts) - 1

	// Peel off three-digit groups from least to most significant.
	mod := &big.Int{}
	for b.Cmp(athousand) >= 0 {
		b.DivMod(b, athousand, mod)
		parts[j] = strconv.FormatInt(mod.Int64(), 10)
		// Interior groups are zero-padded to exactly three digits.
		switch len(parts[j]) {
		case 2:
			parts[j] = "0" + parts[j]
		case 1:
			parts[j] = "00" + parts[j]
		}
		j--
	}
	parts[j] = strconv.Itoa(int(b.Int64()))
	return sign + strings.Join(parts[j:], ",")
}
|
||||||
41
vendor/github.com/dustin/go-humanize/commaf.go
generated
vendored
Normal file
41
vendor/github.com/dustin/go-humanize/commaf.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
//go:build go1.6
|
||||||
|
// +build go1.6
|
||||||
|
|
||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"math/big"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// BigCommaf produces a string form of the given big.Float in base 10
|
||||||
|
// with commas after every three orders of magnitude.
|
||||||
|
func BigCommaf(v *big.Float) string {
|
||||||
|
buf := &bytes.Buffer{}
|
||||||
|
if v.Sign() < 0 {
|
||||||
|
buf.Write([]byte{'-'})
|
||||||
|
v.Abs(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
comma := []byte{','}
|
||||||
|
|
||||||
|
parts := strings.Split(v.Text('f', -1), ".")
|
||||||
|
pos := 0
|
||||||
|
if len(parts[0])%3 != 0 {
|
||||||
|
pos += len(parts[0]) % 3
|
||||||
|
buf.WriteString(parts[0][:pos])
|
||||||
|
buf.Write(comma)
|
||||||
|
}
|
||||||
|
for ; pos < len(parts[0]); pos += 3 {
|
||||||
|
buf.WriteString(parts[0][pos : pos+3])
|
||||||
|
buf.Write(comma)
|
||||||
|
}
|
||||||
|
buf.Truncate(buf.Len() - 1)
|
||||||
|
|
||||||
|
if len(parts) > 1 {
|
||||||
|
buf.Write([]byte{'.'})
|
||||||
|
buf.WriteString(parts[1])
|
||||||
|
}
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
49
vendor/github.com/dustin/go-humanize/ftoa.go
generated
vendored
Normal file
49
vendor/github.com/dustin/go-humanize/ftoa.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// stripTrailingZeros removes trailing zeros after the decimal point, and
// the point itself if nothing follows it. Strings without a '.' are
// returned unchanged.
func stripTrailingZeros(s string) string {
	if !strings.ContainsRune(s, '.') {
		return s
	}
	i := len(s) - 1
scan:
	for i > 0 {
		switch s[i] {
		case '0':
			i--
		case '.':
			// Drop a bare trailing point as well.
			i--
			break scan
		default:
			break scan
		}
	}
	return s[:i+1]
}
|
||||||
|
|
||||||
|
// stripTrailingDigits truncates s to at most the given number of digits
// after the decimal point; digits <= 0 removes the fraction entirely.
// Strings without a '.' are returned unchanged.
func stripTrailingDigits(s string, digits int) string {
	dot := strings.Index(s, ".")
	if dot < 0 {
		return s
	}
	if digits <= 0 {
		return s[:dot]
	}
	cut := dot + 1 + digits
	if cut >= len(s) {
		return s
	}
	return s[:cut]
}
|
||||||
|
|
||||||
|
// Ftoa converts a float to a string with no trailing zeros.
func Ftoa(num float64) string {
	// Format with 6 decimal places, then drop trailing zeros (and a bare
	// trailing decimal point).
	return stripTrailingZeros(strconv.FormatFloat(num, 'f', 6, 64))
}
|
||||||
|
|
||||||
|
// FtoaWithDigits converts a float to a string but limits the resulting string
// to the given number of decimal places, and no trailing zeros.
func FtoaWithDigits(num float64, digits int) string {
	// Truncate to the requested precision first, then strip zeros.
	return stripTrailingZeros(stripTrailingDigits(strconv.FormatFloat(num, 'f', 6, 64), digits))
}
|
||||||
8
vendor/github.com/dustin/go-humanize/humanize.go
generated
vendored
Normal file
8
vendor/github.com/dustin/go-humanize/humanize.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/*
|
||||||
|
Package humanize converts boring ugly numbers to human-friendly strings and back.
|
||||||
|
|
||||||
|
Durations can be turned into strings such as "3 days ago", numbers
|
||||||
|
representing sizes like 82854982 into useful strings like, "83 MB" or
|
||||||
|
"79 MiB" (whichever you prefer).
|
||||||
|
*/
|
||||||
|
package humanize
|
||||||
192
vendor/github.com/dustin/go-humanize/number.go
generated
vendored
Normal file
192
vendor/github.com/dustin/go-humanize/number.go
generated
vendored
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
/*
|
||||||
|
Slightly adapted from the source to fit go-humanize.
|
||||||
|
|
||||||
|
Author: https://github.com/gorhill
|
||||||
|
Source: https://gist.github.com/gorhill/5285193
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
import (
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// renderFloatPrecisionMultipliers[p] == 10^p; used to scale a value to
	// p decimal digits (maximum supported precision is 9).
	renderFloatPrecisionMultipliers = [...]float64{
		1,
		10,
		100,
		1000,
		10000,
		100000,
		1000000,
		10000000,
		100000000,
		1000000000,
	}

	// renderFloatPrecisionRounders[p] == 0.5 * 10^-p; added before
	// truncation to round half up at precision p.
	renderFloatPrecisionRounders = [...]float64{
		0.5,
		0.05,
		0.005,
		0.0005,
		0.00005,
		0.000005,
		0.0000005,
		0.00000005,
		0.000000005,
		0.0000000005,
	}
)
|
||||||
|
|
||||||
|
// FormatFloat produces a formatted number as string based on the following user-specified criteria:
|
||||||
|
// * thousands separator
|
||||||
|
// * decimal separator
|
||||||
|
// * decimal precision
|
||||||
|
//
|
||||||
|
// Usage: s := RenderFloat(format, n)
|
||||||
|
// The format parameter tells how to render the number n.
|
||||||
|
//
|
||||||
|
// See examples: http://play.golang.org/p/LXc1Ddm1lJ
|
||||||
|
//
|
||||||
|
// Examples of format strings, given n = 12345.6789:
|
||||||
|
// "#,###.##" => "12,345.67"
|
||||||
|
// "#,###." => "12,345"
|
||||||
|
// "#,###" => "12345,678"
|
||||||
|
// "#\u202F###,##" => "12 345,68"
|
||||||
|
// "#.###,###### => 12.345,678900
|
||||||
|
// "" (aka default format) => 12,345.67
|
||||||
|
//
|
||||||
|
// The highest precision allowed is 9 digits after the decimal symbol.
|
||||||
|
// There is also a version for integer number, FormatInteger(),
|
||||||
|
// which is convenient for calls within template.
|
||||||
|
func FormatFloat(format string, n float64) string {
	// Special cases:
	//   NaN  -> "NaN"
	//   +Inf -> "Infinity"
	//   -Inf -> "-Infinity"
	if math.IsNaN(n) {
		return "NaN"
	}
	// +Inf compares greater than MaxFloat64, -Inf compares less than
	// -MaxFloat64, so these two checks catch both infinities.
	if n > math.MaxFloat64 {
		return "Infinity"
	}
	if n < (0.0 - math.MaxFloat64) {
		return "-Infinity"
	}

	// default format: two decimals, '.' decimal point, ',' thousands
	// separator, no sign on positive numbers
	precision := 2
	decimalStr := "."
	thousandStr := ","
	positiveStr := ""
	negativeStr := "-"

	if len(format) > 0 {
		// work rune-wise so multi-byte separators (e.g. \u202F) are
		// treated as single directives
		format := []rune(format)

		// If there is an explicit format directive,
		// then default values are these:
		precision = 9
		thousandStr = ""

		// collect indices of meaningful formatting directives
		// (anything that is not a digit placeholder '#' or '0')
		formatIndx := []int{}
		for i, char := range format {
			if char != '#' && char != '0' {
				formatIndx = append(formatIndx, i)
			}
		}

		if len(formatIndx) > 0 {
			// Directive at index 0:
			//   Must be a '+'
			//   Raise an error if not the case
			// index: 0123456789
			//        +0.000,000
			//        +000,000.0
			//        +0000.00
			//        +0000
			if formatIndx[0] == 0 {
				if format[formatIndx[0]] != '+' {
					panic("RenderFloat(): invalid positive sign directive")
				}
				positiveStr = "+"
				formatIndx = formatIndx[1:]
			}

			// Two directives:
			//   First is thousands separator
			//   Raise an error if not followed by 3-digit
			// 0123456789
			// 0.000,000
			// 000,000.00
			if len(formatIndx) == 2 {
				if (formatIndx[1] - formatIndx[0]) != 4 {
					panic("RenderFloat(): thousands separator directive must be followed by 3 digit-specifiers")
				}
				thousandStr = string(format[formatIndx[0]])
				formatIndx = formatIndx[1:]
			}

			// One directive:
			//   Directive is decimal separator
			//   The number of digit-specifier following the separator indicates wanted precision
			// 0123456789
			// 0.00
			// 000,0000
			if len(formatIndx) == 1 {
				decimalStr = string(format[formatIndx[0]])
				precision = len(format) - formatIndx[0] - 1
			}
		}
	}

	// generate sign part; values within ±1e-9 are flushed to exactly 0
	// so "-0.000000000" can never be produced
	var signStr string
	if n >= 0.000000001 {
		signStr = positiveStr
	} else if n <= -0.000000001 {
		signStr = negativeStr
		n = -n
	} else {
		signStr = ""
		n = 0.0
	}

	// split number into integer and fractional parts, rounding half-up at
	// the requested precision via the package-level rounders table.
	// NOTE(review): the table is indexed by precision; the doc above says
	// the maximum precision is 9 — confirm the table covers that range.
	intf, fracf := math.Modf(n + renderFloatPrecisionRounders[precision])

	// generate integer part string
	intStr := strconv.FormatInt(int64(intf), 10)

	// add thousand separator if required, inserting right-to-left every
	// three digits
	if len(thousandStr) > 0 {
		for i := len(intStr); i > 3; {
			i -= 3
			intStr = intStr[:i] + thousandStr + intStr[i:]
		}
	}

	// no fractional part, we can leave now
	if precision == 0 {
		return signStr + intStr
	}

	// generate fractional part by scaling the fraction up to an integer
	fracStr := strconv.Itoa(int(fracf * renderFloatPrecisionMultipliers[precision]))
	// may need zero-padding on the left (e.g. 0.05 at precision 2 -> "05")
	if len(fracStr) < precision {
		fracStr = "000000000000000"[:precision-len(fracStr)] + fracStr
	}

	return signStr + intStr + decimalStr + fracStr
}
|
||||||
|
|
||||||
|
// FormatInteger produces a formatted number as string.
|
||||||
|
// See FormatFloat.
|
||||||
|
func FormatInteger(format string, n int) string {
|
||||||
|
return FormatFloat(format, float64(n))
|
||||||
|
}
|
||||||
25
vendor/github.com/dustin/go-humanize/ordinals.go
generated
vendored
Normal file
25
vendor/github.com/dustin/go-humanize/ordinals.go
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import "strconv"
|
||||||
|
|
||||||
|
// Ordinal gives you the input number in a rank/ordinal format.
//
// Ordinal(3) -> 3rd
func Ordinal(x int) string {
	// 11th, 12th and 13th (and 111th, 212th, ...) are irregular in
	// English: they always take "th" regardless of the final digit.
	if r := x % 100; r == 11 || r == 12 || r == 13 {
		return strconv.Itoa(x) + "th"
	}
	switch x % 10 {
	case 1:
		return strconv.Itoa(x) + "st"
	case 2:
		return strconv.Itoa(x) + "nd"
	case 3:
		return strconv.Itoa(x) + "rd"
	}
	return strconv.Itoa(x) + "th"
}
|
||||||
127
vendor/github.com/dustin/go-humanize/si.go
generated
vendored
Normal file
127
vendor/github.com/dustin/go-humanize/si.go
generated
vendored
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"math"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// siPrefixTable maps base-10 exponents (multiples of 3) to the
// corresponding SI prefix symbol, covering the full 2022 SI range from
// quecto (1e-30) through quetta (1e30). Exponent 0 maps to the empty
// prefix.
var siPrefixTable = map[float64]string{
	-30: "q", // quecto
	-27: "r", // ronto
	-24: "y", // yocto
	-21: "z", // zepto
	-18: "a", // atto
	-15: "f", // femto
	-12: "p", // pico
	-9:  "n", // nano
	-6:  "µ", // micro
	-3:  "m", // milli
	0:   "",
	3:   "k", // kilo
	6:   "M", // mega
	9:   "G", // giga
	12:  "T", // tera
	15:  "P", // peta
	18:  "E", // exa
	21:  "Z", // zetta
	24:  "Y", // yotta
	27:  "R", // ronna
	30:  "Q", // quetta
}
|
||||||
|
|
||||||
|
// revSIPrefixTable maps each SI prefix symbol back to its multiplier
// (e.g. "k" -> 1e3); built once at package init time from siPrefixTable.
var revSIPrefixTable = revfmap(siPrefixTable)
|
||||||
|
|
||||||
|
// revfmap reverses the map and precomputes the power multiplier
|
||||||
|
func revfmap(in map[float64]string) map[string]float64 {
|
||||||
|
rv := map[string]float64{}
|
||||||
|
for k, v := range in {
|
||||||
|
rv[v] = math.Pow(10, k)
|
||||||
|
}
|
||||||
|
return rv
|
||||||
|
}
|
||||||
|
|
||||||
|
// riParseRegex matches an SI-formatted string: a numeric part, optional
// single whitespace, an optional SI prefix character, and the remaining
// unit text. It is built at init time from siPrefixTable.
var riParseRegex *regexp.Regexp

func init() {
	// Build a character class containing every known SI prefix symbol.
	// Map iteration order varies between runs, but a character class is
	// order-insensitive, so the resulting regex is always equivalent.
	ri := `^([\-0-9.]+)\s?([`
	for _, v := range siPrefixTable {
		ri += v
	}
	ri += `]?)(.*)`

	riParseRegex = regexp.MustCompile(ri)
}
|
||||||
|
|
||||||
|
// ComputeSI finds the most appropriate SI prefix for the given number
// and returns the prefix along with the value adjusted to be within
// that prefix.
//
// See also: SI, ParseSI.
//
// e.g. ComputeSI(2.2345e-12) -> (2.2345, "p")
func ComputeSI(input float64) (float64, string) {
	if input == 0 {
		return 0, ""
	}
	// Work on the magnitude; the sign is restored at the end.
	mag := math.Abs(input)
	// logn is a package-local base-n logarithm helper defined elsewhere
	// in this package. Snap the exponent down to a multiple of 3 so it
	// aligns with an SI prefix.
	exponent := math.Floor(logn(mag, 10))
	exponent = math.Floor(exponent/3) * 3

	value := mag / math.Pow(10, exponent)

	// Handle special case where value is exactly 1000.0
	// Should return 1 M instead of 1000 k
	if value == 1000.0 {
		exponent += 3
		value = mag / math.Pow(10, exponent)
	}

	value = math.Copysign(value, input)

	// NOTE(review): exponents outside ±30 fall through to the map's zero
	// value ("" prefix) and leave value unscaled into a named prefix —
	// confirm callers never rely on magnitudes beyond the SI range.
	prefix := siPrefixTable[exponent]
	return value, prefix
}
|
||||||
|
|
||||||
|
// SI returns a string with default formatting.
|
||||||
|
//
|
||||||
|
// SI uses Ftoa to format float value, removing trailing zeros.
|
||||||
|
//
|
||||||
|
// See also: ComputeSI, ParseSI.
|
||||||
|
//
|
||||||
|
// e.g. SI(1000000, "B") -> 1 MB
|
||||||
|
// e.g. SI(2.2345e-12, "F") -> 2.2345 pF
|
||||||
|
func SI(input float64, unit string) string {
|
||||||
|
value, prefix := ComputeSI(input)
|
||||||
|
return Ftoa(value) + " " + prefix + unit
|
||||||
|
}
|
||||||
|
|
||||||
|
// SIWithDigits works like SI but limits the resulting string to the
|
||||||
|
// given number of decimal places.
|
||||||
|
//
|
||||||
|
// e.g. SIWithDigits(1000000, 0, "B") -> 1 MB
|
||||||
|
// e.g. SIWithDigits(2.2345e-12, 2, "F") -> 2.23 pF
|
||||||
|
func SIWithDigits(input float64, decimals int, unit string) string {
|
||||||
|
value, prefix := ComputeSI(input)
|
||||||
|
return FtoaWithDigits(value, decimals) + " " + prefix + unit
|
||||||
|
}
|
||||||
|
|
||||||
|
var errInvalid = errors.New("invalid input")
|
||||||
|
|
||||||
|
// ParseSI parses an SI string back into the number and unit.
|
||||||
|
//
|
||||||
|
// See also: SI, ComputeSI.
|
||||||
|
//
|
||||||
|
// e.g. ParseSI("2.2345 pF") -> (2.2345e-12, "F", nil)
|
||||||
|
func ParseSI(input string) (float64, string, error) {
|
||||||
|
found := riParseRegex.FindStringSubmatch(input)
|
||||||
|
if len(found) != 4 {
|
||||||
|
return 0, "", errInvalid
|
||||||
|
}
|
||||||
|
mag := revSIPrefixTable[found[2]]
|
||||||
|
unit := found[3]
|
||||||
|
|
||||||
|
base, err := strconv.ParseFloat(found[1], 64)
|
||||||
|
return base * mag, unit, err
|
||||||
|
}
|
||||||
117
vendor/github.com/dustin/go-humanize/times.go
generated
vendored
Normal file
117
vendor/github.com/dustin/go-humanize/times.go
generated
vendored
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
package humanize
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"sort"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Relative time units used by the default magnitude table. Month and
// Year are deliberate approximations (30-day months, 12 such months per
// year), not exact civil durations.
const (
	Day      = 24 * time.Hour
	Week     = 7 * Day
	Month    = 30 * Day
	Year     = 12 * Month
	LongTime = 37 * Year
)
|
||||||
|
|
||||||
|
// Time formats a time into a relative string.
|
||||||
|
//
|
||||||
|
// Time(someT) -> "3 weeks ago"
|
||||||
|
func Time(then time.Time) string {
|
||||||
|
return RelTime(then, time.Now(), "ago", "from now")
|
||||||
|
}
|
||||||
|
|
||||||
|
// A RelTimeMagnitude struct contains a relative time point at which
// the relative format of time will switch to a new format string. A
// slice of these in ascending order by their "D" field is passed to
// CustomRelTime to format durations.
//
// The Format field is a string that may contain a "%s" which will be
// replaced with the appropriate signed label (e.g. "ago" or "from
// now") and a "%d" that will be replaced by the quantity.
//
// The DivBy field is the amount of time the time difference must be
// divided by in order to display correctly.
//
// e.g. if D is 2*time.Minute and you want to display "%d minutes %s"
// DivBy should be time.Minute so whatever the duration is will be
// expressed in minutes.
type RelTimeMagnitude struct {
	D      time.Duration // exclusive upper bound for which this entry applies
	Format string        // fmt string; may contain %d (quantity) and %s (label)
	DivBy  time.Duration // divisor converting the duration into the displayed quantity
}
|
||||||
|
|
||||||
|
// defaultMagnitudes is the magnitude table used by RelTime and Time,
// ordered by ascending D as CustomRelTime's binary search requires.
// Entries with DivBy == 1 are fixed phrases containing no %d verb.
var defaultMagnitudes = []RelTimeMagnitude{
	{time.Second, "now", time.Second},
	{2 * time.Second, "1 second %s", 1},
	{time.Minute, "%d seconds %s", time.Second},
	{2 * time.Minute, "1 minute %s", 1},
	{time.Hour, "%d minutes %s", time.Minute},
	{2 * time.Hour, "1 hour %s", 1},
	{Day, "%d hours %s", time.Hour},
	{2 * Day, "1 day %s", 1},
	{Week, "%d days %s", Day},
	{2 * Week, "1 week %s", 1},
	{Month, "%d weeks %s", Week},
	{2 * Month, "1 month %s", 1},
	{Year, "%d months %s", Month},
	{18 * Month, "1 year %s", 1},
	{2 * Year, "2 years %s", 1},
	{LongTime, "%d years %s", Year},
	{math.MaxInt64, "a long while %s", 1},
}
|
||||||
|
|
||||||
|
// RelTime formats a time into a relative string.
|
||||||
|
//
|
||||||
|
// It takes two times and two labels. In addition to the generic time
|
||||||
|
// delta string (e.g. 5 minutes), the labels are used applied so that
|
||||||
|
// the label corresponding to the smaller time is applied.
|
||||||
|
//
|
||||||
|
// RelTime(timeInPast, timeInFuture, "earlier", "later") -> "3 weeks earlier"
|
||||||
|
func RelTime(a, b time.Time, albl, blbl string) string {
|
||||||
|
return CustomRelTime(a, b, albl, blbl, defaultMagnitudes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// CustomRelTime formats a time into a relative string.
//
// It takes two times, two labels and a table of relative time formats.
// In addition to the generic time delta string (e.g. 5 minutes), the
// label corresponding to the smaller time is applied.
//
// magnitudes must be sorted in ascending order by D; the first entry
// whose D exceeds the absolute difference is used.
func CustomRelTime(a, b time.Time, albl, blbl string, magnitudes []RelTimeMagnitude) string {
	lbl := albl
	diff := b.Sub(a)

	// If a is the later time, swap the label and make diff non-negative.
	if a.After(b) {
		lbl = blbl
		diff = a.Sub(b)
	}

	// Binary search for the first magnitude whose bound exceeds diff.
	n := sort.Search(len(magnitudes), func(i int) bool {
		return magnitudes[i].D > diff
	})

	// diff beyond the last bound: clamp to the final entry.
	if n >= len(magnitudes) {
		n = len(magnitudes) - 1
	}
	mag := magnitudes[n]
	// Scan the format string for %s / %d verbs and build the argument
	// list in the order they appear, so Format may use them in any order
	// and combination. Other verbs after '%' are skipped (no argument).
	args := []interface{}{}
	escaped := false
	for _, ch := range mag.Format {
		if escaped {
			switch ch {
			case 's':
				args = append(args, lbl)
			case 'd':
				args = append(args, diff/mag.DivBy)
			}
			escaped = false
		} else {
			escaped = ch == '%'
		}
	}
	return fmt.Sprintf(mag.Format, args...)
}
|
||||||
12
vendor/github.com/fxamacker/cbor/v2/.gitignore
generated
vendored
Normal file
12
vendor/github.com/fxamacker/cbor/v2/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Binaries for programs and plugins
|
||||||
|
*.exe
|
||||||
|
*.exe~
|
||||||
|
*.dll
|
||||||
|
*.so
|
||||||
|
*.dylib
|
||||||
|
|
||||||
|
# Test binary, build with `go test -c`
|
||||||
|
*.test
|
||||||
|
|
||||||
|
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||||
|
*.out
|
||||||
104
vendor/github.com/fxamacker/cbor/v2/.golangci.yml
generated
vendored
Normal file
104
vendor/github.com/fxamacker/cbor/v2/.golangci.yml
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
# Do not delete linter settings. Linters like gocritic can be enabled on the command line.
|
||||||
|
|
||||||
|
linters-settings:
|
||||||
|
depguard:
|
||||||
|
rules:
|
||||||
|
prevent_unmaintained_packages:
|
||||||
|
list-mode: strict
|
||||||
|
files:
|
||||||
|
- $all
|
||||||
|
- "!$test"
|
||||||
|
allow:
|
||||||
|
- $gostd
|
||||||
|
- github.com/x448/float16
|
||||||
|
deny:
|
||||||
|
- pkg: io/ioutil
|
||||||
|
desc: "replaced by io and os packages since Go 1.16: https://tip.golang.org/doc/go1.16#ioutil"
|
||||||
|
dupl:
|
||||||
|
threshold: 100
|
||||||
|
funlen:
|
||||||
|
lines: 100
|
||||||
|
statements: 50
|
||||||
|
goconst:
|
||||||
|
ignore-tests: true
|
||||||
|
min-len: 2
|
||||||
|
min-occurrences: 3
|
||||||
|
gocritic:
|
||||||
|
enabled-tags:
|
||||||
|
- diagnostic
|
||||||
|
- experimental
|
||||||
|
- opinionated
|
||||||
|
- performance
|
||||||
|
- style
|
||||||
|
disabled-checks:
|
||||||
|
- commentedOutCode
|
||||||
|
- dupImport # https://github.com/go-critic/go-critic/issues/845
|
||||||
|
- ifElseChain
|
||||||
|
- octalLiteral
|
||||||
|
- paramTypeCombine
|
||||||
|
- whyNoLint
|
||||||
|
gofmt:
|
||||||
|
simplify: false
|
||||||
|
goimports:
|
||||||
|
local-prefixes: github.com/fxamacker/cbor
|
||||||
|
golint:
|
||||||
|
min-confidence: 0
|
||||||
|
govet:
|
||||||
|
check-shadowing: true
|
||||||
|
lll:
|
||||||
|
line-length: 140
|
||||||
|
maligned:
|
||||||
|
suggest-new: true
|
||||||
|
misspell:
|
||||||
|
locale: US
|
||||||
|
staticcheck:
|
||||||
|
checks: ["all"]
|
||||||
|
|
||||||
|
linters:
|
||||||
|
disable-all: true
|
||||||
|
enable:
|
||||||
|
- asciicheck
|
||||||
|
- bidichk
|
||||||
|
- depguard
|
||||||
|
- errcheck
|
||||||
|
- exportloopref
|
||||||
|
- goconst
|
||||||
|
- gocritic
|
||||||
|
- gocyclo
|
||||||
|
- gofmt
|
||||||
|
- goimports
|
||||||
|
- goprintffuncname
|
||||||
|
- gosec
|
||||||
|
- gosimple
|
||||||
|
- govet
|
||||||
|
- ineffassign
|
||||||
|
- misspell
|
||||||
|
- nilerr
|
||||||
|
- revive
|
||||||
|
- staticcheck
|
||||||
|
- stylecheck
|
||||||
|
- typecheck
|
||||||
|
- unconvert
|
||||||
|
- unused
|
||||||
|
|
||||||
|
issues:
|
||||||
|
# max-issues-per-linter default is 50. Set to 0 to disable limit.
|
||||||
|
max-issues-per-linter: 0
|
||||||
|
# max-same-issues default is 3. Set to 0 to disable limit.
|
||||||
|
max-same-issues: 0
|
||||||
|
|
||||||
|
exclude-rules:
|
||||||
|
- path: decode.go
|
||||||
|
text: "string ` overflows ` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: decode.go
|
||||||
|
text: "string ` \\(range is \\[` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: decode.go
|
||||||
|
text: "string `, ` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: decode.go
|
||||||
|
text: "string ` overflows Go's int64` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: decode.go
|
||||||
|
text: "string `\\]\\)` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: valid.go
|
||||||
|
text: "string ` for type ` has (\\d+) occurrences, make it a constant"
|
||||||
|
- path: valid.go
|
||||||
|
text: "string `cbor: ` has (\\d+) occurrences, make it a constant"
|
||||||
133
vendor/github.com/fxamacker/cbor/v2/CODE_OF_CONDUCT.md
generated
vendored
Normal file
133
vendor/github.com/fxamacker/cbor/v2/CODE_OF_CONDUCT.md
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
|
||||||
|
# Contributor Covenant Code of Conduct
|
||||||
|
|
||||||
|
## Our Pledge
|
||||||
|
|
||||||
|
We as members, contributors, and leaders pledge to make participation in our
|
||||||
|
community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||||
|
identity and expression, level of experience, education, socio-economic status,
|
||||||
|
nationality, personal appearance, race, caste, color, religion, or sexual
|
||||||
|
identity and orientation.
|
||||||
|
|
||||||
|
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||||
|
diverse, inclusive, and healthy community.
|
||||||
|
|
||||||
|
## Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to a positive environment for our
|
||||||
|
community include:
|
||||||
|
|
||||||
|
* Demonstrating empathy and kindness toward other people
|
||||||
|
* Being respectful of differing opinions, viewpoints, and experiences
|
||||||
|
* Giving and gracefully accepting constructive feedback
|
||||||
|
* Accepting responsibility and apologizing to those affected by our mistakes,
|
||||||
|
and learning from the experience
|
||||||
|
* Focusing on what is best not just for us as individuals, but for the overall
|
||||||
|
community
|
||||||
|
|
||||||
|
Examples of unacceptable behavior include:
|
||||||
|
|
||||||
|
* The use of sexualized language or imagery, and sexual attention or advances of
|
||||||
|
any kind
|
||||||
|
* Trolling, insulting or derogatory comments, and personal or political attacks
|
||||||
|
* Public or private harassment
|
||||||
|
* Publishing others' private information, such as a physical or email address,
|
||||||
|
without their explicit permission
|
||||||
|
* Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
## Enforcement Responsibilities
|
||||||
|
|
||||||
|
Community leaders are responsible for clarifying and enforcing our standards of
|
||||||
|
acceptable behavior and will take appropriate and fair corrective action in
|
||||||
|
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||||
|
or harmful.
|
||||||
|
|
||||||
|
Community leaders have the right and responsibility to remove, edit, or reject
|
||||||
|
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||||
|
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||||
|
decisions when appropriate.
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies within all community spaces, and also applies when
|
||||||
|
an individual is officially representing the community in public spaces.
|
||||||
|
Examples of representing our community include using an official e-mail address,
|
||||||
|
posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event.
|
||||||
|
|
||||||
|
## Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported to the community leaders responsible for enforcement at
|
||||||
|
faye.github@gmail.com.
|
||||||
|
All complaints will be reviewed and investigated promptly and fairly.
|
||||||
|
|
||||||
|
All community leaders are obligated to respect the privacy and security of the
|
||||||
|
reporter of any incident.
|
||||||
|
|
||||||
|
## Enforcement Guidelines
|
||||||
|
|
||||||
|
Community leaders will follow these Community Impact Guidelines in determining
|
||||||
|
the consequences for any action they deem in violation of this Code of Conduct:
|
||||||
|
|
||||||
|
### 1. Correction
|
||||||
|
|
||||||
|
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||||
|
unprofessional or unwelcome in the community.
|
||||||
|
|
||||||
|
**Consequence**: A private, written warning from community leaders, providing
|
||||||
|
clarity around the nature of the violation and an explanation of why the
|
||||||
|
behavior was inappropriate. A public apology may be requested.
|
||||||
|
|
||||||
|
### 2. Warning
|
||||||
|
|
||||||
|
**Community Impact**: A violation through a single incident or series of
|
||||||
|
actions.
|
||||||
|
|
||||||
|
**Consequence**: A warning with consequences for continued behavior. No
|
||||||
|
interaction with the people involved, including unsolicited interaction with
|
||||||
|
those enforcing the Code of Conduct, for a specified period of time. This
|
||||||
|
includes avoiding interactions in community spaces as well as external channels
|
||||||
|
like social media. Violating these terms may lead to a temporary or permanent
|
||||||
|
ban.
|
||||||
|
|
||||||
|
### 3. Temporary Ban
|
||||||
|
|
||||||
|
**Community Impact**: A serious violation of community standards, including
|
||||||
|
sustained inappropriate behavior.
|
||||||
|
|
||||||
|
**Consequence**: A temporary ban from any sort of interaction or public
|
||||||
|
communication with the community for a specified period of time. No public or
|
||||||
|
private interaction with the people involved, including unsolicited interaction
|
||||||
|
with those enforcing the Code of Conduct, is allowed during this period.
|
||||||
|
Violating these terms may lead to a permanent ban.
|
||||||
|
|
||||||
|
### 4. Permanent Ban
|
||||||
|
|
||||||
|
**Community Impact**: Demonstrating a pattern of violation of community
|
||||||
|
standards, including sustained inappropriate behavior, harassment of an
|
||||||
|
individual, or aggression toward or disparagement of classes of individuals.
|
||||||
|
|
||||||
|
**Consequence**: A permanent ban from any sort of public interaction within the
|
||||||
|
community.
|
||||||
|
|
||||||
|
## Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||||
|
version 2.1, available at
|
||||||
|
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
|
||||||
|
|
||||||
|
Community Impact Guidelines were inspired by
|
||||||
|
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
|
||||||
|
|
||||||
|
For answers to common questions about this code of conduct, see the FAQ at
|
||||||
|
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
|
||||||
|
[https://www.contributor-covenant.org/translations][translations].
|
||||||
|
|
||||||
|
[homepage]: https://www.contributor-covenant.org
|
||||||
|
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
|
||||||
|
[Mozilla CoC]: https://github.com/mozilla/diversity
|
||||||
|
[FAQ]: https://www.contributor-covenant.org/faq
|
||||||
|
[translations]: https://www.contributor-covenant.org/translations
|
||||||
41
vendor/github.com/fxamacker/cbor/v2/CONTRIBUTING.md
generated
vendored
Normal file
41
vendor/github.com/fxamacker/cbor/v2/CONTRIBUTING.md
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# How to contribute
|
||||||
|
|
||||||
|
You can contribute by using the library, opening issues, or opening pull requests.
|
||||||
|
|
||||||
|
## Bug reports and security vulnerabilities
|
||||||
|
|
||||||
|
Most issues are tracked publicly on [GitHub](https://github.com/fxamacker/cbor/issues).
|
||||||
|
|
||||||
|
To report security vulnerabilities, please email faye.github@gmail.com and allow time for the problem to be resolved before disclosing it to the public. For more info, see [Security Policy](https://github.com/fxamacker/cbor#security-policy).
|
||||||
|
|
||||||
|
Please do not send data that might contain personally identifiable information, even if you think you have permission. That type of support requires payment and a signed contract where I'm indemnified, held harmless, and defended by you for any data you send to me.
|
||||||
|
|
||||||
|
## Pull requests
|
||||||
|
|
||||||
|
Please [create an issue](https://github.com/fxamacker/cbor/issues/new/choose) before you begin work on a PR. The improvement may have already been considered, etc.
|
||||||
|
|
||||||
|
Pull requests have signing requirements and must not be anonymous. Exceptions are usually made for docs and CI scripts.
|
||||||
|
|
||||||
|
See the [Pull Request Template](https://github.com/fxamacker/cbor/blob/master/.github/pull_request_template.md) for details.
|
||||||
|
|
||||||
|
Pull requests have a greater chance of being approved if:
|
||||||
|
- it does not reduce speed, increase memory use, reduce security, etc. for people not using the new option or feature.
|
||||||
|
- it has > 97% code coverage.
|
||||||
|
|
||||||
|
## Describe your issue
|
||||||
|
|
||||||
|
Clearly describe the issue:
|
||||||
|
* If it's a bug, please provide: **version of this library** and **Go** (`go version`), **unmodified error message**, and describe **how to reproduce it**. Also state **what you expected to happen** instead of the error.
|
||||||
|
* If you propose a change or addition, try to give an example how the improved code could look like or how to use it.
|
||||||
|
* If you found a compilation error, please confirm you're using a supported version of Go. If you are, then provide the output of `go version` first, followed by the complete error message.
|
||||||
|
|
||||||
|
## Please don't
|
||||||
|
|
||||||
|
Please don't send data containing personally identifiable information, even if you think you have permission. That type of support requires payment and a contract where I'm indemnified, held harmless, and defended for any data you send to me.
|
||||||
|
|
||||||
|
Please don't send CBOR data larger than 1024 bytes by email. If you want to send crash-producing CBOR data > 1024 bytes by email, please get my permission before sending it to me.
|
||||||
|
|
||||||
|
## Credits
|
||||||
|
|
||||||
|
- This guide used nlohmann/json contribution guidelines for inspiration as suggested in issue #22.
|
||||||
|
- Special thanks to @lukseven for pointing out the contribution guidelines didn't mention signing requirements.
|
||||||
21
vendor/github.com/fxamacker/cbor/v2/LICENSE
generated
vendored
Normal file
21
vendor/github.com/fxamacker/cbor/v2/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2019-present Faye Amacker
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
934
vendor/github.com/fxamacker/cbor/v2/README.md
generated
vendored
Normal file
934
vendor/github.com/fxamacker/cbor/v2/README.md
generated
vendored
Normal file
@@ -0,0 +1,934 @@
|
|||||||
|
<h1>CBOR Codec <a href="https://pkg.go.dev/github.com/fxamacker/cbor/v2"><img src="https://raw.githubusercontent.com/fxamacker/images/refs/heads/master/cbor/go-logo-blue.svg" alt="Go logo" style="height: 1em;" align="right"></a></h1>
|
||||||
|
|
||||||
|
[fxamacker/cbor](https://github.com/fxamacker/cbor) is a library for encoding and decoding [CBOR](https://www.rfc-editor.org/info/std94) and [CBOR Sequences](https://www.rfc-editor.org/rfc/rfc8742.html).
|
||||||
|
|
||||||
|
CBOR is a [trusted alternative](https://www.rfc-editor.org/rfc/rfc8949.html#name-comparison-of-other-binary-) to JSON, MessagePack, Protocol Buffers, etc. CBOR is an Internet Standard defined by [IETF STD 94 (RFC 8949)](https://www.rfc-editor.org/info/std94) and is designed to be relevant for decades.
|
||||||
|
|
||||||
|
`fxamacker/cbor` is used in projects by Arm Ltd., EdgeX Foundry, Flow Foundation, Fraunhofer‑AISEC, IBM, Kubernetes[*](https://github.com/search?q=org%3Akubernetes%20fxamacker%2Fcbor&type=code), Let's Encrypt, Linux Foundation, Microsoft, Oasis Protocol, Red Hat[*](https://github.com/search?q=org%3Aopenshift+fxamacker%2Fcbor&type=code), Tailscale[*](https://github.com/search?q=org%3Atailscale+fxamacker%2Fcbor&type=code), Veraison[*](https://github.com/search?q=org%3Averaison+fxamacker%2Fcbor&type=code), [etc](https://github.com/fxamacker/cbor#who-uses-fxamackercbor).
|
||||||
|
|
||||||
|
See [Quick Start](#quick-start) and [Releases](https://github.com/fxamacker/cbor/releases/). 🆕 `UnmarshalFirst` and `DiagnoseFirst` can decode CBOR Sequences. `MarshalToBuffer` and `UserBufferEncMode` accepts user-specified buffer.
|
||||||
|
|
||||||
|
## fxamacker/cbor
|
||||||
|
|
||||||
|
[](https://github.com/fxamacker/cbor/actions?query=workflow%3Aci)
|
||||||
|
[](https://github.com/fxamacker/cbor/actions?query=workflow%3A%22cover+%E2%89%A597%25%22)
|
||||||
|
[](https://github.com/fxamacker/cbor/actions/workflows/codeql-analysis.yml)
|
||||||
|
[](#fuzzing-and-code-coverage)
|
||||||
|
[](https://goreportcard.com/report/github.com/fxamacker/cbor)
|
||||||
|
[](https://github.com/fxamacker/cbor#fuzzing-and-code-coverage)
|
||||||
|
|
||||||
|
`fxamacker/cbor` is a CBOR codec in full conformance with [IETF STD 94 (RFC 8949)](https://www.rfc-editor.org/info/std94). It also supports CBOR Sequences ([RFC 8742](https://www.rfc-editor.org/rfc/rfc8742.html)) and Extended Diagnostic Notation ([Appendix G of RFC 8610](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G)).
|
||||||
|
|
||||||
|
Features include full support for CBOR tags, [Core Deterministic Encoding](https://www.rfc-editor.org/rfc/rfc8949.html#name-core-deterministic-encoding), duplicate map key detection, etc.
|
||||||
|
|
||||||
|
API is mostly same as `encoding/json`, plus interfaces that simplify concurrency and CBOR options.
|
||||||
|
|
||||||
|
Design balances trade-offs between security, speed, concurrency, encoded data size, usability, etc.
|
||||||
|
|
||||||
|
<details><summary> 🔎 Highlights</summary><p/>
|
||||||
|
|
||||||
|
__🚀 Speed__
|
||||||
|
|
||||||
|
Encoding and decoding is fast without using Go's `unsafe` package. Slower settings are opt-in. Default limits allow very fast and memory efficient rejection of malformed CBOR data.
|
||||||
|
|
||||||
|
__🔒 Security__
|
||||||
|
|
||||||
|
Decoder has configurable limits that defend against malicious inputs. Duplicate map key detection is supported. By contrast, `encoding/gob` is [not designed to be hardened against adversarial inputs](https://pkg.go.dev/encoding/gob#hdr-Security).
|
||||||
|
|
||||||
|
Codec passed multiple confidential security assessments in 2022. No vulnerabilities found in subset of codec in a [nonconfidential security assessment](https://github.com/veraison/go-cose/blob/v1.0.0-rc.1/reports/NCC_Microsoft-go-cose-Report_2022-05-26_v1.0.pdf) prepared by NCC Group for Microsoft Corporation.
|
||||||
|
|
||||||
|
__🗜️ Data Size__
|
||||||
|
|
||||||
|
Struct tag options (`toarray`, `keyasint`, `omitempty`, `omitzero`) and field tag "-" automatically reduce size of encoded structs. Encoding optionally shrinks float64→32→16 when values fit.
|
||||||
|
|
||||||
|
__:jigsaw: Usability__
|
||||||
|
|
||||||
|
API is mostly same as `encoding/json` plus interfaces that simplify concurrency for CBOR options. Encoding and decoding modes can be created at startup and reused by any goroutines.
|
||||||
|
|
||||||
|
Presets include Core Deterministic Encoding, Preferred Serialization, CTAP2 Canonical CBOR, etc.
|
||||||
|
|
||||||
|
__📆 Extensibility__
|
||||||
|
|
||||||
|
Features include CBOR [extension points](https://www.rfc-editor.org/rfc/rfc8949.html#section-7.1) (e.g. CBOR tags) and extensive settings. API has interfaces that allow users to create custom encoding and decoding without modifying this library.
|
||||||
|
|
||||||
|
<hr/>
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### Secure Decoding with Configurable Settings
|
||||||
|
|
||||||
|
`fxamacker/cbor` has configurable limits, etc. that defend against malicious CBOR data.
|
||||||
|
|
||||||
|
Notably, `fxamacker/cbor` is fast at rejecting malformed CBOR data.
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> Benchmarks rejecting 10 bytes of malicious CBOR data decoding to `[]byte`:
|
||||||
|
>
|
||||||
|
> | Codec | Speed (ns/op) | Memory | Allocs |
|
||||||
|
> | :---- | ------------: | -----: | -----: |
|
||||||
|
> | fxamacker/cbor 2.7.0 | 47 ± 7% | 32 B/op | 2 allocs/op |
|
||||||
|
> | ugorji/go 1.2.12 | 5878187 ± 3% | 67111556 B/op | 13 allocs/op |
|
||||||
|
>
|
||||||
|
> Faster hardware (overclocked DDR4 or DDR5) can reduce speed difference.
|
||||||
|
>
|
||||||
|
> <details><summary> 🔎 Benchmark details </summary><p/>
|
||||||
|
>
|
||||||
|
> Latest comparison for decoding CBOR data to Go `[]byte`:
|
||||||
|
> - Input: `[]byte{0x9B, 0x00, 0x00, 0x42, 0xFA, 0x42, 0xFA, 0x42, 0xFA, 0x42}`
|
||||||
|
> - go1.22.7, linux/amd64, i5-13600K (DDR4-2933, disabled e-cores)
|
||||||
|
> - go test -bench=. -benchmem -count=20
|
||||||
|
>
|
||||||
|
> #### Prior comparisons
|
||||||
|
>
|
||||||
|
> | Codec | Speed (ns/op) | Memory | Allocs |
|
||||||
|
> | :---- | ------------: | -----: | -----: |
|
||||||
|
> | fxamacker/cbor 2.5.0-beta2 | 44.33 ± 2% | 32 B/op | 2 allocs/op |
|
||||||
|
> | fxamacker/cbor 0.1.0 - 2.4.0 | ~44.68 ± 6% | 32 B/op | 2 allocs/op |
|
||||||
|
> | ugorji/go 1.2.10 | 5524792.50 ± 3% | 67110491 B/op | 12 allocs/op |
|
||||||
|
> | ugorji/go 1.1.0 - 1.2.6 | 💥 runtime: | out of memory: | cannot allocate |
|
||||||
|
>
|
||||||
|
> - Input: `[]byte{0x9B, 0x00, 0x00, 0x42, 0xFA, 0x42, 0xFA, 0x42, 0xFA, 0x42}`
|
||||||
|
> - go1.19.6, linux/amd64, i5-13600K (DDR4)
|
||||||
|
> - go test -bench=. -benchmem -count=20
|
||||||
|
>
|
||||||
|
> </details>
|
||||||
|
|
||||||
|
In contrast, some codecs can crash or use excessive resources while decoding bad data.
|
||||||
|
|
||||||
|
> [!WARNING]
|
||||||
|
> Go's `encoding/gob` is [not designed to be hardened against adversarial inputs](https://pkg.go.dev/encoding/gob#hdr-Security).
|
||||||
|
>
|
||||||
|
> <details><summary> 🔎 gob fatal error (out of memory) 💥 decoding 181 bytes</summary><p/>
|
||||||
|
>
|
||||||
|
> ```Go
|
||||||
|
> // Example of encoding/gob having "fatal error: runtime: out of memory"
|
||||||
|
> // while decoding 181 bytes (all Go versions as of Dec. 8, 2024).
|
||||||
|
> package main
|
||||||
|
> import (
|
||||||
|
> "bytes"
|
||||||
|
> "encoding/gob"
|
||||||
|
> "encoding/hex"
|
||||||
|
> "fmt"
|
||||||
|
> )
|
||||||
|
>
|
||||||
|
> // Example data is from https://github.com/golang/go/issues/24446
|
||||||
|
> // (shortened to 181 bytes).
|
||||||
|
> const data = "4dffb503010102303001ff30000109010130010800010130010800010130" +
|
||||||
|
> "01ffb80001014a01ffb60001014b01ff860001013001ff860001013001ff" +
|
||||||
|
> "860001013001ff860001013001ffb80000001eff850401010e3030303030" +
|
||||||
|
> "30303030303030303001ff3000010c0104000016ffb70201010830303030" +
|
||||||
|
> "3030303001ff3000010c000030ffb6040405fcff00303030303030303030" +
|
||||||
|
> "303030303030303030303030303030303030303030303030303030303030" +
|
||||||
|
> "30"
|
||||||
|
>
|
||||||
|
> type X struct {
|
||||||
|
> J *X
|
||||||
|
> K map[string]int
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> func main() {
|
||||||
|
> raw, _ := hex.DecodeString(data)
|
||||||
|
> decoder := gob.NewDecoder(bytes.NewReader(raw))
|
||||||
|
>
|
||||||
|
> var x X
|
||||||
|
> decoder.Decode(&x) // fatal error: runtime: out of memory
|
||||||
|
> fmt.Println("Decoding finished.")
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
>
|
||||||
|
> </details>
|
||||||
|
|
||||||
|
### Smaller Encodings with Struct Tag Options
|
||||||
|
|
||||||
|
Struct tags automatically reduce encoded size of structs and improve speed.
|
||||||
|
|
||||||
|
We can write less code by using struct tag options:
|
||||||
|
- `toarray`: encode without field names (decode back to original struct)
|
||||||
|
- `keyasint`: encode field names as integers (decode back to original struct)
|
||||||
|
- `omitempty`: omit empty field when encoding
|
||||||
|
- `omitzero`: omit zero-value field when encoding
|
||||||
|
|
||||||
|
As a special case, struct field tag "-" omits the field.
|
||||||
|
|
||||||
|
NOTE: When a struct uses `toarray`, the encoder will ignore `omitempty` and `omitzero` to prevent position of encoded array elements from changing. This allows decoder to match encoded elements to their Go struct field.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> `fxamacker/cbor` can encode a 3-level nested Go struct to 1 byte!
|
||||||
|
> - `encoding/json`: 18 bytes of JSON
|
||||||
|
> - `fxamacker/cbor`: 1 byte of CBOR
|
||||||
|
>
|
||||||
|
> <details><summary> 🔎 Encoding 3-level nested Go struct with omitempty</summary><p/>
|
||||||
|
>
|
||||||
|
> https://go.dev/play/p/YxwvfPdFQG2
|
||||||
|
>
|
||||||
|
> ```Go
|
||||||
|
> // Example encoding nested struct (with omitempty tag)
|
||||||
|
> // - encoding/json: 18 byte JSON
|
||||||
|
> // - fxamacker/cbor: 1 byte CBOR
|
||||||
|
>
|
||||||
|
> package main
|
||||||
|
>
|
||||||
|
> import (
|
||||||
|
> "encoding/hex"
|
||||||
|
> "encoding/json"
|
||||||
|
> "fmt"
|
||||||
|
>
|
||||||
|
> "github.com/fxamacker/cbor/v2"
|
||||||
|
> )
|
||||||
|
>
|
||||||
|
> type GrandChild struct {
|
||||||
|
> Quux int `json:",omitempty"`
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> type Child struct {
|
||||||
|
> Baz int `json:",omitempty"`
|
||||||
|
> Qux GrandChild `json:",omitempty"`
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> type Parent struct {
|
||||||
|
> Foo Child `json:",omitempty"`
|
||||||
|
> Bar int `json:",omitempty"`
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> func cb() {
|
||||||
|
> results, _ := cbor.Marshal(Parent{})
|
||||||
|
> fmt.Println("hex(CBOR): " + hex.EncodeToString(results))
|
||||||
|
>
|
||||||
|
> text, _ := cbor.Diagnose(results) // Diagnostic Notation
|
||||||
|
> fmt.Println("DN: " + text)
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> func js() {
|
||||||
|
> results, _ := json.Marshal(Parent{})
|
||||||
|
> fmt.Println("hex(JSON): " + hex.EncodeToString(results))
|
||||||
|
>
|
||||||
|
> text := string(results) // JSON
|
||||||
|
> fmt.Println("JSON: " + text)
|
||||||
|
> }
|
||||||
|
>
|
||||||
|
> func main() {
|
||||||
|
> cb()
|
||||||
|
> fmt.Println("-------------")
|
||||||
|
> js()
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> Output (DN is Diagnostic Notation):
|
||||||
|
> ```
|
||||||
|
> hex(CBOR): a0
|
||||||
|
> DN: {}
|
||||||
|
> -------------
|
||||||
|
> hex(JSON): 7b22466f6f223a7b22517578223a7b7d7d7d
|
||||||
|
> JSON: {"Foo":{"Qux":{}}}
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> </details>
|
||||||
|
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
__Install__: `go get github.com/fxamacker/cbor/v2` and `import "github.com/fxamacker/cbor/v2"`.
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
>
|
||||||
|
> Tinygo users can try beta/experimental branch [feature/cbor-tinygo-beta](https://github.com/fxamacker/cbor/tree/feature/cbor-tinygo-beta).
|
||||||
|
>
|
||||||
|
> <details><summary> 🔎 More about tinygo feature branch</summary>
|
||||||
|
>
|
||||||
|
> ### Tinygo
|
||||||
|
>
|
||||||
|
> Branch [feature/cbor-tinygo-beta](https://github.com/fxamacker/cbor/tree/feature/cbor-tinygo-beta) is based on fxamacker/cbor v2.7.0 and it can be compiled using tinygo v0.33 (also compiles with golang/go).
|
||||||
|
>
|
||||||
|
> It passes unit tests (with both go1.22 and tinygo v0.33) and is considered beta/experimental for tinygo.
|
||||||
|
>
|
||||||
|
> :warning: The `feature/cbor-tinygo-beta` branch does not get fuzz tested yet.
|
||||||
|
>
|
||||||
|
> Changes in this feature branch only affect tinygo compiled software. Summary of changes:
|
||||||
|
> - default `DecOptions.MaxNestedLevels` is reduced to 16 (was 32). User can specify higher limit but 24+ crashes tests when compiled with tinygo v0.33.
|
||||||
|
> - disabled decoding CBOR tag data to Go interface because tinygo v0.33 is missing needed feature.
|
||||||
|
> - encoding error message can be different when encoding function type.
|
||||||
|
>
|
||||||
|
> Related tinygo issues:
|
||||||
|
> - https://github.com/tinygo-org/tinygo/issues/4277
|
||||||
|
> - https://github.com/tinygo-org/tinygo/issues/4458
|
||||||
|
>
|
||||||
|
> </details>
|
||||||
|
|
||||||
|
|
||||||
|
### Key Points
|
||||||
|
|
||||||
|
This library can encode and decode CBOR (RFC 8949) and CBOR Sequences (RFC 8742).
|
||||||
|
|
||||||
|
- __CBOR data item__ is a single piece of CBOR data and its structure may contain 0 or more nested data items.
|
||||||
|
- __CBOR sequence__ is a concatenation of 0 or more encoded CBOR data items.
|
||||||
|
|
||||||
|
Configurable limits and options can be used to balance trade-offs.
|
||||||
|
|
||||||
|
- Encoding and decoding modes are created from options (settings).
|
||||||
|
- Modes can be created at startup and reused.
|
||||||
|
- Modes are safe for concurrent use.
|
||||||
|
|
||||||
|
### Default Mode
|
||||||
|
|
||||||
|
Package level functions only use this library's default settings.
|
||||||
|
They provide the "default mode" of encoding and decoding.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// API matches encoding/json for Marshal, Unmarshal, Encode, Decode, etc.
|
||||||
|
b, err = cbor.Marshal(v) // encode v to []byte b
|
||||||
|
err = cbor.Unmarshal(b, &v) // decode []byte b to v
|
||||||
|
decoder = cbor.NewDecoder(r) // create decoder with io.Reader r
|
||||||
|
err = decoder.Decode(&v) // decode a CBOR data item to v
|
||||||
|
|
||||||
|
// v2.7.0 added MarshalToBuffer() and UserBufferEncMode interface.
|
||||||
|
err = cbor.MarshalToBuffer(v, b) // encode v to b instead of using built-in buf pool.
|
||||||
|
|
||||||
|
// v2.5.0 added new functions that return remaining bytes.
|
||||||
|
|
||||||
|
// UnmarshalFirst decodes first CBOR data item and returns remaining bytes.
|
||||||
|
rest, err = cbor.UnmarshalFirst(b, &v) // decode []byte b to v
|
||||||
|
|
||||||
|
// DiagnoseFirst translates first CBOR data item to text and returns remaining bytes.
|
||||||
|
text, rest, err = cbor.DiagnoseFirst(b) // decode []byte b to Diagnostic Notation text
|
||||||
|
|
||||||
|
// NOTE: Unmarshal() returns ExtraneousDataError if there are remaining bytes, but
|
||||||
|
// UnmarshalFirst() and DiagnoseFirst() allow trailing bytes.
|
||||||
|
```
|
||||||
|
|
||||||
|
> [!IMPORTANT]
|
||||||
|
> CBOR settings allow trade-offs between speed, security, encoding size, etc.
|
||||||
|
>
|
||||||
|
> - Different CBOR libraries may use different default settings.
|
||||||
|
> - CBOR-based formats or protocols usually require specific settings.
|
||||||
|
>
|
||||||
|
> For example, WebAuthn uses "CTAP2 Canonical CBOR" which is available as a preset.
|
||||||
|
|
||||||
|
### Presets
|
||||||
|
|
||||||
|
Presets can be used as-is or as a starting point for custom settings.
|
||||||
|
|
||||||
|
```go
|
||||||
|
// EncOptions is a struct of encoder settings.
|
||||||
|
func CoreDetEncOptions() EncOptions // RFC 8949 Core Deterministic Encoding
|
||||||
|
func PreferredUnsortedEncOptions() EncOptions // RFC 8949 Preferred Serialization
|
||||||
|
func CanonicalEncOptions() EncOptions // RFC 7049 Canonical CBOR
|
||||||
|
func CTAP2EncOptions() EncOptions // FIDO2 CTAP2 Canonical CBOR
|
||||||
|
```
|
||||||
|
|
||||||
|
Presets are used to create custom modes.
|
||||||
|
|
||||||
|
### Custom Modes
|
||||||
|
|
||||||
|
Modes are created from settings. Once created, modes have immutable settings.
|
||||||
|
|
||||||
|
💡 Create the mode at startup and reuse it. It is safe for concurrent use.
|
||||||
|
|
||||||
|
```Go
|
||||||
|
// Create encoding mode.
|
||||||
|
opts := cbor.CoreDetEncOptions() // use preset options as a starting point
|
||||||
|
opts.Time = cbor.TimeUnix // change any settings if needed
|
||||||
|
em, err := opts.EncMode() // create an immutable encoding mode
|
||||||
|
|
||||||
|
// Reuse the encoding mode. It is safe for concurrent use.
|
||||||
|
|
||||||
|
// API matches encoding/json.
|
||||||
|
b, err := em.Marshal(v) // encode v to []byte b
|
||||||
|
encoder := em.NewEncoder(w) // create encoder with io.Writer w
|
||||||
|
err := encoder.Encode(v) // encode v to io.Writer w
|
||||||
|
```
|
||||||
|
|
||||||
|
Default mode and custom modes automatically apply struct tags.
|
||||||
|
|
||||||
|
### User Specified Buffer for Encoding (v2.7.0)
|
||||||
|
|
||||||
|
`UserBufferEncMode` interface extends `EncMode` interface to add `MarshalToBuffer()`. It accepts a user-specified buffer instead of using built-in buffer pool.
|
||||||
|
|
||||||
|
```Go
|
||||||
|
em, err := myEncOptions.UserBufferEncMode() // create UserBufferEncMode mode
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
err = em.MarshalToBuffer(v, &buf) // encode v to provided buf
|
||||||
|
```
|
||||||
|
|
||||||
|
### Struct Tags
|
||||||
|
|
||||||
|
Struct tag options (`toarray`, `keyasint`, `omitempty`, `omitzero`) reduce encoded size of structs.
|
||||||
|
|
||||||
|
As a special case, struct field tag "-" omits the field.
|
||||||
|
|
||||||
|
<details><summary> 🔎 Example encoding with struct field tag "-"</summary><p/>
|
||||||
|
|
||||||
|
https://go.dev/play/p/aWEIFxd7InX
|
||||||
|
|
||||||
|
```Go
|
||||||
|
// https://github.com/fxamacker/cbor/issues/652
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/fxamacker/cbor/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// The `cbor:"-"` tag omits the Type field when encoding to CBOR.
|
||||||
|
type Entity struct {
|
||||||
|
_ struct{} `cbor:",toarray"`
|
||||||
|
ID uint64 `json:"id"`
|
||||||
|
Type string `cbor:"-" json:"typeOf"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
entity := Entity{
|
||||||
|
ID: 1,
|
||||||
|
Type: "int64",
|
||||||
|
Name: "Identifier",
|
||||||
|
}
|
||||||
|
|
||||||
|
c, _ := cbor.Marshal(entity)
|
||||||
|
diag, _ := cbor.Diagnose(c)
|
||||||
|
fmt.Printf("CBOR in hex: %x\n", c)
|
||||||
|
fmt.Printf("CBOR in edn: %s\n", diag)
|
||||||
|
|
||||||
|
j, _ := json.Marshal(entity)
|
||||||
|
fmt.Printf("JSON: %s\n", string(j))
|
||||||
|
|
||||||
|
fmt.Printf("JSON encoding is %d bytes\n", len(j))
|
||||||
|
fmt.Printf("CBOR encoding is %d bytes\n", len(c))
|
||||||
|
|
||||||
|
// Output:
|
||||||
|
// CBOR in hex: 82016a4964656e746966696572
|
||||||
|
// CBOR in edn: [1, "Identifier"]
|
||||||
|
// JSON: {"id":1,"typeOf":"int64","name":"Identifier"}
|
||||||
|
// JSON encoding is 45 bytes
|
||||||
|
// CBOR encoding is 13 bytes
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<details><summary> 🔎 Example encoding 3-level nested Go struct to 1 byte CBOR</summary><p/>
|
||||||
|
|
||||||
|
https://go.dev/play/p/YxwvfPdFQG2
|
||||||
|
|
||||||
|
```Go
|
||||||
|
// Example encoding nested struct (with omitempty tag)
|
||||||
|
// - encoding/json: 18 byte JSON
|
||||||
|
// - fxamacker/cbor: 1 byte CBOR
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/hex"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/fxamacker/cbor/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
type GrandChild struct {
|
||||||
|
Quux int `json:",omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Child struct {
|
||||||
|
Baz int `json:",omitempty"`
|
||||||
|
Qux GrandChild `json:",omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Parent struct {
|
||||||
|
Foo Child `json:",omitempty"`
|
||||||
|
Bar int `json:",omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func cb() {
|
||||||
|
results, _ := cbor.Marshal(Parent{})
|
||||||
|
fmt.Println("hex(CBOR): " + hex.EncodeToString(results))
|
||||||
|
|
||||||
|
text, _ := cbor.Diagnose(results) // Diagnostic Notation
|
||||||
|
fmt.Println("DN: " + text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func js() {
|
||||||
|
results, _ := json.Marshal(Parent{})
|
||||||
|
fmt.Println("hex(JSON): " + hex.EncodeToString(results))
|
||||||
|
|
||||||
|
text := string(results) // JSON
|
||||||
|
fmt.Println("JSON: " + text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
cb()
|
||||||
|
fmt.Println("-------------")
|
||||||
|
js()
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Output (DN is Diagnostic Notation):
|
||||||
|
```
|
||||||
|
hex(CBOR): a0
|
||||||
|
DN: {}
|
||||||
|
-------------
|
||||||
|
hex(JSON): 7b22466f6f223a7b22517578223a7b7d7d7d
|
||||||
|
JSON: {"Foo":{"Qux":{}}}
|
||||||
|
```
|
||||||
|
|
||||||
|
<hr/>
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<details><summary> 🔎 Example using struct tag options</summary><p/>
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
Struct tag options simplify use of CBOR-based protocols that require CBOR arrays or maps with integer keys.
|
||||||
|
|
||||||
|
### CBOR Tags
|
||||||
|
|
||||||
|
CBOR tags are specified in a `TagSet`.
|
||||||
|
|
||||||
|
Custom modes can be created with a `TagSet` to handle CBOR tags.
|
||||||
|
|
||||||
|
```go
|
||||||
|
em, err := opts.EncMode() // no CBOR tags
|
||||||
|
em, err := opts.EncModeWithTags(ts) // immutable CBOR tags
|
||||||
|
em, err := opts.EncModeWithSharedTags(ts) // mutable shared CBOR tags
|
||||||
|
```
|
||||||
|
|
||||||
|
`TagSet` and modes using it are safe for concurrent use. Equivalent API is available for `DecMode`.
|
||||||
|
|
||||||
|
<details><summary> 🔎 Example using TagSet and TagOptions</summary><p/>
|
||||||
|
|
||||||
|
```go
|
||||||
|
// Use signedCWT struct defined in "Decoding CWT" example.
|
||||||
|
|
||||||
|
// Create TagSet (safe for concurrency).
|
||||||
|
tags := cbor.NewTagSet()
|
||||||
|
// Register tag COSE_Sign1 18 with signedCWT type.
|
||||||
|
tags.Add(
|
||||||
|
cbor.TagOptions{EncTag: cbor.EncTagRequired, DecTag: cbor.DecTagRequired},
|
||||||
|
reflect.TypeOf(signedCWT{}),
|
||||||
|
18)
|
||||||
|
|
||||||
|
// Create DecMode with immutable tags.
|
||||||
|
dm, _ := cbor.DecOptions{}.DecModeWithTags(tags)
|
||||||
|
|
||||||
|
// Unmarshal to signedCWT with tag support.
|
||||||
|
var v signedCWT
|
||||||
|
if err := dm.Unmarshal(data, &v); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create EncMode with immutable tags.
|
||||||
|
em, _ := cbor.EncOptions{}.EncModeWithTags(tags)
|
||||||
|
|
||||||
|
// Marshal signedCWT with tag number.
|
||||||
|
if data, err := em.Marshal(v); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
👉 `fxamacker/cbor` allows user apps to use almost any current or future CBOR tag number by implementing `cbor.Marshaler` and `cbor.Unmarshaler` interfaces.
|
||||||
|
|
||||||
|
Basically, `MarshalCBOR` and `UnmarshalCBOR` functions can be implemented by user apps and those functions will automatically be called by this CBOR codec's `Marshal`, `Unmarshal`, etc.
|
||||||
|
|
||||||
|
The following [example](https://github.com/fxamacker/cbor/blob/master/example_embedded_json_tag_for_cbor_test.go) shows how to encode and decode a tagged CBOR data item with tag number 262. The tag content is a JSON object "embedded" as a CBOR byte string (major type 2).
|
||||||
|
|
||||||
|
<details><summary> 🔎 Example using Embedded JSON Tag for CBOR (tag 262)</summary>
|
||||||
|
|
||||||
|
```go
|
||||||
|
// https://github.com/fxamacker/cbor/issues/657
|
||||||
|
|
||||||
|
package cbor_test
|
||||||
|
|
||||||
|
// NOTE: RFC 8949 does not mention tag number 262. IANA assigned
|
||||||
|
// CBOR tag number 262 as "Embedded JSON Object" specified by the
|
||||||
|
// document Embedded JSON Tag for CBOR:
|
||||||
|
//
|
||||||
|
// "Tag 262 can be applied to a byte string (major type 2) to indicate
|
||||||
|
// that the byte string is a JSON Object. The length of the byte string
|
||||||
|
// indicates the content."
|
||||||
|
//
|
||||||
|
// For more info, see Embedded JSON Tag for CBOR at:
|
||||||
|
// https://github.com/toravir/CBOR-Tag-Specs/blob/master/embeddedJSON.md
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/fxamacker/cbor/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// cborTagNumForEmbeddedJSON is the CBOR tag number 262.
|
||||||
|
const cborTagNumForEmbeddedJSON = 262
|
||||||
|
|
||||||
|
// EmbeddedJSON represents a Go value to be encoded as a tagged CBOR data item
|
||||||
|
// with tag number 262 and the tag content is a JSON object "embedded" as a
|
||||||
|
// CBOR byte string (major type 2).
|
||||||
|
type EmbeddedJSON struct {
|
||||||
|
any
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewEmbeddedJSON(val any) EmbeddedJSON {
|
||||||
|
return EmbeddedJSON{val}
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalCBOR encodes EmbeddedJSON to a tagged CBOR data item with the
|
||||||
|
// tag number 262 and the tag content is a JSON object that is
|
||||||
|
// "embedded" as a CBOR byte string.
|
||||||
|
func (v EmbeddedJSON) MarshalCBOR() ([]byte, error) {
|
||||||
|
// Encode v to JSON object.
|
||||||
|
data, err := json.Marshal(v)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create cbor.Tag representing a tagged CBOR data item.
|
||||||
|
tag := cbor.Tag{
|
||||||
|
Number: cborTagNumForEmbeddedJSON,
|
||||||
|
Content: data,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshal to a tagged CBOR data item.
|
||||||
|
return cbor.Marshal(tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalCBOR decodes a tagged CBOR data item to EmbeddedJSON.
|
||||||
|
// The byte slice provided to this function must contain a single
|
||||||
|
// tagged CBOR data item with the tag number 262 and tag content
|
||||||
|
// must be a JSON object "embedded" as a CBOR byte string.
|
||||||
|
func (v *EmbeddedJSON) UnmarshalCBOR(b []byte) error {
|
||||||
|
// Unmarshal tagged CBOR data item.
|
||||||
|
var tag cbor.Tag
|
||||||
|
if err := cbor.Unmarshal(b, &tag); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check tag number.
|
||||||
|
if tag.Number != cborTagNumForEmbeddedJSON {
|
||||||
|
return fmt.Errorf("got tag number %d, expect tag number %d", tag.Number, cborTagNumForEmbeddedJSON)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check tag content.
|
||||||
|
jsonData, isByteString := tag.Content.([]byte)
|
||||||
|
if !isByteString {
|
||||||
|
return fmt.Errorf("got tag content type %T, expect tag content []byte", tag.Content)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshal JSON object.
|
||||||
|
return json.Unmarshal(jsonData, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalJSON encodes EmbeddedJSON to a JSON object.
|
||||||
|
func (v EmbeddedJSON) MarshalJSON() ([]byte, error) {
|
||||||
|
return json.Marshal(v.any)
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalJSON decodes a JSON object.
|
||||||
|
func (v *EmbeddedJSON) UnmarshalJSON(b []byte) error {
|
||||||
|
dec := json.NewDecoder(bytes.NewReader(b))
|
||||||
|
dec.UseNumber()
|
||||||
|
return dec.Decode(&v.any)
|
||||||
|
}
|
||||||
|
|
||||||
|
func Example_embeddedJSONTagForCBOR() {
|
||||||
|
value := NewEmbeddedJSON(map[string]any{
|
||||||
|
"name": "gopher",
|
||||||
|
"id": json.Number("42"),
|
||||||
|
})
|
||||||
|
|
||||||
|
data, err := cbor.Marshal(value)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("cbor: %x\n", data)
|
||||||
|
|
||||||
|
var v EmbeddedJSON
|
||||||
|
err = cbor.Unmarshal(data, &v)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("%+v\n", v.any)
|
||||||
|
for k, v := range v.any.(map[string]any) {
|
||||||
|
fmt.Printf(" %s: %v (%T)\n", k, v, v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
|
||||||
|
### Functions and Interfaces
|
||||||
|
|
||||||
|
<details><summary> 🔎 Functions and interfaces at a glance</summary><p/>
|
||||||
|
|
||||||
|
Common functions with same API as `encoding/json`:
|
||||||
|
- `Marshal`, `Unmarshal`
|
||||||
|
- `NewEncoder`, `(*Encoder).Encode`
|
||||||
|
- `NewDecoder`, `(*Decoder).Decode`
|
||||||
|
|
||||||
|
NOTE: `Unmarshal` will return `ExtraneousDataError` if there are remaining bytes
|
||||||
|
because RFC 8949 treats CBOR data item with remaining bytes as malformed.
|
||||||
|
- 💡 Use `UnmarshalFirst` to decode first CBOR data item and return any remaining bytes.
|
||||||
|
|
||||||
|
Other useful functions:
|
||||||
|
- `Diagnose`, `DiagnoseFirst` produce human-readable [Extended Diagnostic Notation](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G) from CBOR data.
|
||||||
|
- `UnmarshalFirst` decodes first CBOR data item and return any remaining bytes.
|
||||||
|
- `Wellformed` returns true if the CBOR data item is well-formed.
|
||||||
|
|
||||||
|
Interfaces identical or comparable to Go `encoding` packages include:
|
||||||
|
`Marshaler`, `Unmarshaler`, `BinaryMarshaler`, and `BinaryUnmarshaler`.
|
||||||
|
|
||||||
|
The `RawMessage` type can be used to delay CBOR decoding or precompute CBOR encoding.
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
### Security Tips
|
||||||
|
|
||||||
|
🔒 Use Go's `io.LimitReader` to limit size when decoding very large or indefinite size data.
|
||||||
|
|
||||||
|
Default limits may need to be increased for systems handling very large data (e.g. blockchains).
|
||||||
|
|
||||||
|
`DecOptions` can be used to modify default limits for `MaxArrayElements`, `MaxMapPairs`, and `MaxNestedLevels`.
|
||||||
|
|
||||||
|
## Status
|
||||||
|
|
||||||
|
[v2.9.0](https://github.com/fxamacker/cbor/releases/tag/v2.9.0) (Jul 13, 2025) improved interoperability/transcoding between CBOR & JSON, refactored tests, and improved docs.
|
||||||
|
- Add opt-in support for `encoding.TextMarshaler` and `encoding.TextUnmarshaler` to encode and decode from CBOR text string.
|
||||||
|
- Add opt-in support for `json.Marshaler` and `json.Unmarshaler` via user-provided transcoding function.
|
||||||
|
- Update docs for TimeMode, Tag, RawTag, and add example for Embedded JSON Tag for CBOR.
|
||||||
|
|
||||||
|
v2.9.0 passed fuzz tests and is production quality.
|
||||||
|
|
||||||
|
The minimum version of Go required to build:
|
||||||
|
- v2.8.0 and newer releases require go 1.20+.
|
||||||
|
- v2.7.1 and older releases require go 1.17+.
|
||||||
|
|
||||||
|
For more details, see [release notes](https://github.com/fxamacker/cbor/releases).
|
||||||
|
|
||||||
|
### Prior Releases
|
||||||
|
|
||||||
|
[v2.8.0](https://github.com/fxamacker/cbor/releases/tag/v2.8.0) (March 30, 2025) is a small release primarily to add `omitzero` option to struct field tags and fix bugs. It passed fuzz tests (billions of executions) and is production quality.
|
||||||
|
|
||||||
|
[v2.7.0](https://github.com/fxamacker/cbor/releases/tag/v2.7.0) (June 23, 2024) adds features and improvements that help large projects (e.g. Kubernetes) use CBOR as an alternative to JSON and Protocol Buffers. Other improvements include speedups, improved memory use, bug fixes, new serialization options, etc. It passed fuzz tests (5+ billion executions) and is production quality.
|
||||||
|
|
||||||
|
[v2.6.0](https://github.com/fxamacker/cbor/releases/tag/v2.6.0) (February 2024) adds important new features, optimizations, and bug fixes. It is especially useful to systems that need to convert data between CBOR and JSON. New options and optimizations improve handling of bignum, integers, maps, and strings.
|
||||||
|
|
||||||
|
[v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) was released on Sunday, August 13, 2023 with new features and important bug fixes. It is fuzz tested and production quality after extended beta [v2.5.0-beta](https://github.com/fxamacker/cbor/releases/tag/v2.5.0-beta) (Dec 2022) -> [v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) (Aug 2023).
|
||||||
|
|
||||||
|
__IMPORTANT__: 👉 Before upgrading from v2.4 or older release, please read the notable changes highlighted in the release notes. v2.5.0 is a large release with bug fixes to error handling for extraneous data in `Unmarshal`, etc. that should be reviewed before upgrading.
|
||||||
|
|
||||||
|
See [v2.5.0 release notes](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) for list of new features, improvements, and bug fixes.
|
||||||
|
|
||||||
|
See ["Version and API Changes"](https://github.com/fxamacker/cbor#versions-and-api-changes) section for more info about version numbering, etc.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
<details><summary> 🔎 Benchmark Comparison: v2.4.0 vs v2.5.0</summary><p/>
|
||||||
|
|
||||||
|
TODO: Update to v2.4.0 vs 2.5.0 (not beta2).
|
||||||
|
|
||||||
|
Comparison of v2.4.0 vs v2.5.0-beta2 provided by @448 (edited to fit width).
|
||||||
|
|
||||||
|
PR [#382](https://github.com/fxamacker/cbor/pull/382) returns buffer to pool in `Encode()`. It adds a bit of overhead to `Encode()` but `NewEncoder().Encode()` is a lot faster and uses less memory as shown here:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ benchstat bench-v2.4.0.log bench-f9e6291.log
|
||||||
|
goos: linux
|
||||||
|
goarch: amd64
|
||||||
|
pkg: github.com/fxamacker/cbor/v2
|
||||||
|
cpu: 12th Gen Intel(R) Core(TM) i7-12700H
|
||||||
|
│ bench-v2.4.0.log │ bench-f9e6291.log │
|
||||||
|
│ sec/op │ sec/op vs base │
|
||||||
|
NewEncoderEncode/Go_bool_to_CBOR_bool-20 236.70n ± 2% 58.04n ± 1% -75.48% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 238.00n ± 2% 63.93n ± 1% -73.14% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 238.65n ± 2% 64.88n ± 1% -72.81% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_float64_to_CBOR_float-20 242.00n ± 2% 63.00n ± 1% -73.97% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 245.60n ± 1% 68.55n ± 1% -72.09% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_string_to_CBOR_text-20 243.20n ± 3% 68.39n ± 1% -71.88% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]int_to_CBOR_array-20 563.0n ± 2% 378.3n ± 0% -32.81% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 2.043µ ± 2% 1.906µ ± 2% -6.75% (p=0.000 n=10)
|
||||||
|
geomean 349.7n 122.7n -64.92%
|
||||||
|
|
||||||
|
│ bench-v2.4.0.log │ bench-f9e6291.log │
|
||||||
|
│ B/op │ B/op vs base │
|
||||||
|
NewEncoderEncode/Go_bool_to_CBOR_bool-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_float64_to_CBOR_float-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_string_to_CBOR_text-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]int_to_CBOR_array-20 128.0 ± 0% 0.0 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 544.0 ± 0% 416.0 ± 0% -23.53% (p=0.000 n=10)
|
||||||
|
geomean 153.4 ? ¹ ²
|
||||||
|
¹ summaries must be >0 to compute geomean
|
||||||
|
² ratios must be >0 to compute geomean
|
||||||
|
|
||||||
|
│ bench-v2.4.0.log │ bench-f9e6291.log │
|
||||||
|
│ allocs/op │ allocs/op vs base │
|
||||||
|
NewEncoderEncode/Go_bool_to_CBOR_bool-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_uint64_to_CBOR_positive_int-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_int64_to_CBOR_negative_int-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_float64_to_CBOR_float-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]uint8_to_CBOR_bytes-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_string_to_CBOR_text-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_[]int_to_CBOR_array-20 2.000 ± 0% 0.000 ± 0% -100.00% (p=0.000 n=10)
|
||||||
|
NewEncoderEncode/Go_map[string]string_to_CBOR_map-20 28.00 ± 0% 26.00 ± 0% -7.14% (p=0.000 n=10)
|
||||||
|
geomean 2.782 ? ¹ ²
|
||||||
|
¹ summaries must be >0 to compute geomean
|
||||||
|
² ratios must be >0 to compute geomean
|
||||||
|
```
|
||||||
|
|
||||||
|
</details>
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Who uses fxamacker/cbor
|
||||||
|
|
||||||
|
`fxamacker/cbor` is used in projects by Arm Ltd., Berlin Institute of Health at Charité, Chainlink, Confidential Computing Consortium, ConsenSys, EdgeX Foundry, F5, Flow Foundation, Fraunhofer‑AISEC, IBM, Kubernetes, Let's Encrypt (ISRG), Linaro, Linux Foundation, Matrix.org, Microsoft, National Cybersecurity Agency of France (govt), Netherlands (govt), Oasis Protocol, Red Hat OpenShift, Smallstep, Tailscale, Taurus SA, TIBCO, Veraison, and others.
|
||||||
|
|
||||||
|
`fxamacker/cbor` passed multiple confidential security assessments in 2022. A [nonconfidential security assessment](https://github.com/veraison/go-cose/blob/v1.0.0-rc.1/reports/NCC_Microsoft-go-cose-Report_2022-05-26_v1.0.pdf) (prepared by NCC Group for Microsoft Corporation) assessed a subset of fxamacker/cbor v2.4.
|
||||||
|
|
||||||
|
## Standards
|
||||||
|
|
||||||
|
`fxamacker/cbor` is a CBOR codec in full conformance with [IETF STD 94 (RFC 8949)](https://www.rfc-editor.org/info/std94). It also supports CBOR Sequences ([RFC 8742](https://www.rfc-editor.org/rfc/rfc8742.html)) and Extended Diagnostic Notation ([Appendix G of RFC 8610](https://www.rfc-editor.org/rfc/rfc8610.html#appendix-G)).
|
||||||
|
|
||||||
|
Notable CBOR features include:
|
||||||
|
|
||||||
|
| CBOR Feature | Description |
|
||||||
|
| :--- | :--- |
|
||||||
|
| CBOR tags | API supports built-in and user-defined tags. |
|
||||||
|
| Preferred serialization | Integers encode to fewest bytes. Optional float64 → float32 → float16. |
|
||||||
|
| Map key sorting | Unsorted, length-first (Canonical CBOR), and bytewise-lexicographic (CTAP2). |
|
||||||
|
| Duplicate map keys | Always forbid for encoding and option to allow/forbid for decoding. |
|
||||||
|
| Indefinite length data | Option to allow/forbid for encoding and decoding. |
|
||||||
|
| Well-formedness | Always checked and enforced. |
|
||||||
|
| Basic validity checks | Optionally check UTF-8 validity and duplicate map keys. |
|
||||||
|
| Security considerations | Prevent integer overflow and resource exhaustion (RFC 8949 Section 10). |
|
||||||
|
|
||||||
|
Known limitations are noted in the [Limitations section](#limitations).
|
||||||
|
|
||||||
|
Go nil values for slices, maps, pointers, etc. are encoded as CBOR null. Empty slices, maps, etc. are encoded as empty CBOR arrays and maps.
|
||||||
|
|
||||||
|
Decoder checks for all required well-formedness errors, including all "subkinds" of syntax errors and too little data.
|
||||||
|
|
||||||
|
After well-formedness is verified, basic validity errors are handled as follows:
|
||||||
|
|
||||||
|
* Invalid UTF-8 string: Decoder has option to check and return invalid UTF-8 string error. This check is enabled by default.
|
||||||
|
* Duplicate keys in a map: Decoder has options to ignore or enforce rejection of duplicate map keys.
|
||||||
|
|
||||||
|
When decoding well-formed CBOR arrays and maps, decoder saves the first error it encounters and continues with the next item. Options to handle this differently may be added in the future.
|
||||||
|
|
||||||
|
By default, decoder treats time values of floating-point NaN and Infinity as if they are CBOR Null or CBOR Undefined.
|
||||||
|
|
||||||
|
__Click to expand topic:__
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary> 🔎 Duplicate Map Keys</summary><p>
|
||||||
|
|
||||||
|
This library provides options for fast detection and rejection of duplicate map keys based on applying a Go-specific data model to CBOR's extended generic data model in order to determine duplicate vs distinct map keys. Detection relies on whether the CBOR map key would be a duplicate "key" when decoded and applied to the user-provided Go map or struct.
|
||||||
|
|
||||||
|
`DupMapKeyQuiet` turns off detection of duplicate map keys. It tries to use a "keep fastest" method by choosing either "keep first" or "keep last" depending on the Go data type.
|
||||||
|
|
||||||
|
`DupMapKeyEnforcedAPF` enforces detection and rejection of duplidate map keys. Decoding stops immediately and returns `DupMapKeyError` when the first duplicate key is detected. The error includes the duplicate map key and the index number.
|
||||||
|
|
||||||
|
APF suffix means "Allow Partial Fill" so the destination map or struct can contain some decoded values at the time of error. It is the caller's responsibility to respond to the `DupMapKeyError` by discarding the partially filled result if that's required by their protocol.
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
<details>
|
||||||
|
<summary> 🔎 Tag Validity</summary><p>
|
||||||
|
|
||||||
|
This library checks tag validity for built-in tags (currently tag numbers 0, 1, 2, 3, and 55799):
|
||||||
|
|
||||||
|
* Inadmissible type for tag content
|
||||||
|
* Inadmissible value for tag content
|
||||||
|
|
||||||
|
Unknown tag data items (not tag number 0, 1, 2, 3, or 55799) are handled in two ways:
|
||||||
|
|
||||||
|
* When decoding into an empty interface, unknown tag data item will be decoded into `cbor.Tag` data type, which contains tag number and tag content. The tag content will be decoded into the default Go data type for the CBOR data type.
|
||||||
|
* When decoding into other Go types, unknown tag data item is decoded into the specified Go type. If Go type is registered with a tag number, the tag number can optionally be verified.
|
||||||
|
|
||||||
|
Decoder also has an option to forbid tag data items (treat any tag data item as error) which is specified by protocols such as CTAP2 Canonical CBOR.
|
||||||
|
|
||||||
|
For more information, see [decoding options](#decoding-options-1) and [tag options](#tag-options).
|
||||||
|
|
||||||
|
</details>
|
||||||
|
|
||||||
|
## Limitations
|
||||||
|
|
||||||
|
If any of these limitations prevent you from using this library, please open an issue along with a link to your project.
|
||||||
|
|
||||||
|
* CBOR `Undefined` (0xf7) value decodes to Go's `nil` value. CBOR `Null` (0xf6) more closely matches Go's `nil`.
|
||||||
|
* CBOR map keys with data types not supported by Go for map keys are ignored and an error is returned after continuing to decode remaining items.
|
||||||
|
* When decoding registered CBOR tag data to interface type, decoder creates a pointer to registered Go type matching CBOR tag number. Requiring a pointer for this is a Go limitation.
|
||||||
|
|
||||||
|
## Fuzzing and Code Coverage
|
||||||
|
|
||||||
|
__Code coverage__ is always 95% or higher (with `go test -cover`) when tagging a release.
|
||||||
|
|
||||||
|
__Coverage-guided fuzzing__ must pass billions of execs using before tagging a release. Fuzzing is done using nonpublic code which may eventually get merged into this project. Until then, reports like OpenSSF Scorecard can't detect fuzz tests being used by this project.
|
||||||
|
|
||||||
|
<hr>
|
||||||
|
|
||||||
|
## Versions and API Changes
|
||||||
|
This project uses [Semantic Versioning](https://semver.org), so the API is always backwards compatible unless the major version number changes.
|
||||||
|
|
||||||
|
These functions have signatures identical to encoding/json and their API will continue to match `encoding/json` even after major new releases:
|
||||||
|
`Marshal`, `Unmarshal`, `NewEncoder`, `NewDecoder`, `(*Encoder).Encode`, and `(*Decoder).Decode`.
|
||||||
|
|
||||||
|
Exclusions from SemVer:
|
||||||
|
- Newly added API documented as "subject to change".
|
||||||
|
- Newly added API in the master branch that has never been tagged in non-beta release.
|
||||||
|
- If function parameters are unchanged, bug fixes that change behavior (e.g. return error for edge case was missed in prior version). We try to highlight these in the release notes and add extended beta period. E.g. [v2.5.0-beta](https://github.com/fxamacker/cbor/releases/tag/v2.5.0-beta) (Dec 2022) -> [v2.5.0](https://github.com/fxamacker/cbor/releases/tag/v2.5.0) (Aug 2023).
|
||||||
|
|
||||||
|
This project avoids breaking changes to behavior of encoding and decoding functions unless required to improve conformance with supported RFCs (e.g. RFC 8949, RFC 8742, etc.) Visible changes that don't improve conformance to standards are typically made available as new opt-in settings or new functions.
|
||||||
|
|
||||||
|
## Code of Conduct
|
||||||
|
|
||||||
|
This project has adopted the [Contributor Covenant Code of Conduct](CODE_OF_CONDUCT.md). Contact [faye.github@gmail.com](mailto:faye.github@gmail.com) with any questions or comments.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
Please open an issue before beginning work on a PR. The improvement may have already been considered, etc.
|
||||||
|
|
||||||
|
For more info, see [How to Contribute](CONTRIBUTING.md).
|
||||||
|
|
||||||
|
## Security Policy
|
||||||
|
|
||||||
|
Security fixes are provided for the latest released version of fxamacker/cbor.
|
||||||
|
|
||||||
|
For the full text of the Security Policy, see [SECURITY.md](SECURITY.md).
|
||||||
|
|
||||||
|
## Acknowledgements
|
||||||
|
|
||||||
|
Many thanks to all the contributors on this project!
|
||||||
|
|
||||||
|
I'm especially grateful to Bastian Müller and Dieter Shirley for suggesting and collaborating on CBOR stream mode, and much more.
|
||||||
|
|
||||||
|
I'm very grateful to Stefan Tatschner, Yawning Angel, Jernej Kos, x448, ZenGround0, and Jakob Borg for their contributions or support in the very early days.
|
||||||
|
|
||||||
|
Big thanks to Ben Luddy for his contributions in v2.6.0 and v2.7.0.
|
||||||
|
|
||||||
|
This library clearly wouldn't be possible without Carsten Bormann authoring CBOR RFCs.
|
||||||
|
|
||||||
|
Special thanks to Laurence Lundblade and Jeffrey Yasskin for their help on IETF mailing list or at [7049bis](https://github.com/cbor-wg/CBORbis).
|
||||||
|
|
||||||
|
Huge thanks to The Go Authors for creating a fun and practical programming language with batteries included!
|
||||||
|
|
||||||
|
This library uses `x448/float16` which used to be included. As a standalone package, `x448/float16` is useful to other projects as well.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Copyright © 2019-2024 [Faye Amacker](https://github.com/fxamacker).
|
||||||
|
|
||||||
|
fxamacker/cbor is licensed under the MIT License. See [LICENSE](LICENSE) for the full license text.
|
||||||
|
|
||||||
|
<hr>
|
||||||
7
vendor/github.com/fxamacker/cbor/v2/SECURITY.md
generated
vendored
Normal file
7
vendor/github.com/fxamacker/cbor/v2/SECURITY.md
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
# Security Policy
|
||||||
|
|
||||||
|
Security fixes are provided for the latest released version of fxamacker/cbor.
|
||||||
|
|
||||||
|
If the security vulnerability is already known to the public, then you can open an issue as a bug report.
|
||||||
|
|
||||||
|
To report security vulnerabilities not yet known to the public, please email faye.github@gmail.com and allow time for the problem to be resolved before reporting it to the public.
|
||||||
90
vendor/github.com/fxamacker/cbor/v2/bytestring.go
generated
vendored
Normal file
90
vendor/github.com/fxamacker/cbor/v2/bytestring.go
generated
vendored
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ByteString represents CBOR byte string (major type 2). ByteString can be used
|
||||||
|
// when using a Go []byte is not possible or convenient. For example, Go doesn't
|
||||||
|
// allow []byte as map key, so ByteString can be used to support data formats
|
||||||
|
// having CBOR map with byte string keys. ByteString can also be used to
|
||||||
|
// encode invalid UTF-8 string as CBOR byte string.
|
||||||
|
// See DecOption.MapKeyByteStringMode for more details.
|
||||||
|
type ByteString string
|
||||||
|
|
||||||
|
// Bytes returns bytes representing ByteString.
|
||||||
|
func (bs ByteString) Bytes() []byte {
|
||||||
|
return []byte(bs)
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalCBOR encodes ByteString as CBOR byte string (major type 2).
|
||||||
|
func (bs ByteString) MarshalCBOR() ([]byte, error) {
|
||||||
|
e := getEncodeBuffer()
|
||||||
|
defer putEncodeBuffer(e)
|
||||||
|
|
||||||
|
// Encode length
|
||||||
|
encodeHead(e, byte(cborTypeByteString), uint64(len(bs)))
|
||||||
|
|
||||||
|
// Encode data
|
||||||
|
buf := make([]byte, e.Len()+len(bs))
|
||||||
|
n := copy(buf, e.Bytes())
|
||||||
|
copy(buf[n:], bs)
|
||||||
|
|
||||||
|
return buf, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalCBOR decodes CBOR byte string (major type 2) to ByteString.
|
||||||
|
// Decoding CBOR null and CBOR undefined sets ByteString to be empty.
|
||||||
|
//
|
||||||
|
// Deprecated: No longer used by this codec; kept for compatibility
|
||||||
|
// with user apps that directly call this function.
|
||||||
|
func (bs *ByteString) UnmarshalCBOR(data []byte) error {
|
||||||
|
if bs == nil {
|
||||||
|
return errors.New("cbor.ByteString: UnmarshalCBOR on nil pointer")
|
||||||
|
}
|
||||||
|
|
||||||
|
d := decoder{data: data, dm: defaultDecMode}
|
||||||
|
|
||||||
|
// Check well-formedness of CBOR data item.
|
||||||
|
// ByteString.UnmarshalCBOR() is exported, so
|
||||||
|
// the codec needs to support same behavior for:
|
||||||
|
// - Unmarshal(data, *ByteString)
|
||||||
|
// - ByteString.UnmarshalCBOR(data)
|
||||||
|
err := d.wellformed(false, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return bs.unmarshalCBOR(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
// unmarshalCBOR decodes CBOR byte string (major type 2) to ByteString.
|
||||||
|
// Decoding CBOR null and CBOR undefined sets ByteString to be empty.
|
||||||
|
// This function assumes data is well-formed, and does not perform bounds checking.
|
||||||
|
// This function is called by Unmarshal().
|
||||||
|
func (bs *ByteString) unmarshalCBOR(data []byte) error {
|
||||||
|
if bs == nil {
|
||||||
|
return errors.New("cbor.ByteString: UnmarshalCBOR on nil pointer")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decoding CBOR null and CBOR undefined to ByteString resets data.
|
||||||
|
// This behavior is similar to decoding CBOR null and CBOR undefined to []byte.
|
||||||
|
if len(data) == 1 && (data[0] == 0xf6 || data[0] == 0xf7) {
|
||||||
|
*bs = ""
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
d := decoder{data: data, dm: defaultDecMode}
|
||||||
|
|
||||||
|
// Check if CBOR data type is byte string
|
||||||
|
if typ := d.nextCBORType(); typ != cborTypeByteString {
|
||||||
|
return &UnmarshalTypeError{CBORType: typ.String(), GoType: typeByteString.String()}
|
||||||
|
}
|
||||||
|
|
||||||
|
b, _ := d.parseByteString()
|
||||||
|
*bs = ByteString(b)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
370
vendor/github.com/fxamacker/cbor/v2/cache.go
generated
vendored
Normal file
370
vendor/github.com/fxamacker/cbor/v2/cache.go
generated
vendored
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
type encodeFuncs struct {
|
||||||
|
ef encodeFunc
|
||||||
|
ief isEmptyFunc
|
||||||
|
izf isZeroFunc
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
decodingStructTypeCache sync.Map // map[reflect.Type]*decodingStructType
|
||||||
|
encodingStructTypeCache sync.Map // map[reflect.Type]*encodingStructType
|
||||||
|
encodeFuncCache sync.Map // map[reflect.Type]encodeFuncs
|
||||||
|
typeInfoCache sync.Map // map[reflect.Type]*typeInfo
|
||||||
|
)
|
||||||
|
|
||||||
|
type specialType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
specialTypeNone specialType = iota
|
||||||
|
specialTypeUnmarshalerIface
|
||||||
|
specialTypeUnexportedUnmarshalerIface
|
||||||
|
specialTypeEmptyIface
|
||||||
|
specialTypeIface
|
||||||
|
specialTypeTag
|
||||||
|
specialTypeTime
|
||||||
|
specialTypeJSONUnmarshalerIface
|
||||||
|
)
|
||||||
|
|
||||||
|
type typeInfo struct {
|
||||||
|
elemTypeInfo *typeInfo
|
||||||
|
keyTypeInfo *typeInfo
|
||||||
|
typ reflect.Type
|
||||||
|
kind reflect.Kind
|
||||||
|
nonPtrType reflect.Type
|
||||||
|
nonPtrKind reflect.Kind
|
||||||
|
spclType specialType
|
||||||
|
}
|
||||||
|
|
||||||
|
func newTypeInfo(t reflect.Type) *typeInfo {
|
||||||
|
tInfo := typeInfo{typ: t, kind: t.Kind()}
|
||||||
|
|
||||||
|
for t.Kind() == reflect.Pointer {
|
||||||
|
t = t.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
k := t.Kind()
|
||||||
|
|
||||||
|
tInfo.nonPtrType = t
|
||||||
|
tInfo.nonPtrKind = k
|
||||||
|
|
||||||
|
if k == reflect.Interface {
|
||||||
|
if t.NumMethod() == 0 {
|
||||||
|
tInfo.spclType = specialTypeEmptyIface
|
||||||
|
} else {
|
||||||
|
tInfo.spclType = specialTypeIface
|
||||||
|
}
|
||||||
|
} else if t == typeTag {
|
||||||
|
tInfo.spclType = specialTypeTag
|
||||||
|
} else if t == typeTime {
|
||||||
|
tInfo.spclType = specialTypeTime
|
||||||
|
} else if reflect.PointerTo(t).Implements(typeUnexportedUnmarshaler) {
|
||||||
|
tInfo.spclType = specialTypeUnexportedUnmarshalerIface
|
||||||
|
} else if reflect.PointerTo(t).Implements(typeUnmarshaler) {
|
||||||
|
tInfo.spclType = specialTypeUnmarshalerIface
|
||||||
|
} else if reflect.PointerTo(t).Implements(typeJSONUnmarshaler) {
|
||||||
|
tInfo.spclType = specialTypeJSONUnmarshalerIface
|
||||||
|
}
|
||||||
|
|
||||||
|
switch k {
|
||||||
|
case reflect.Array, reflect.Slice:
|
||||||
|
tInfo.elemTypeInfo = getTypeInfo(t.Elem())
|
||||||
|
case reflect.Map:
|
||||||
|
tInfo.keyTypeInfo = getTypeInfo(t.Key())
|
||||||
|
tInfo.elemTypeInfo = getTypeInfo(t.Elem())
|
||||||
|
}
|
||||||
|
|
||||||
|
return &tInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type decodingStructType struct {
|
||||||
|
fields fields
|
||||||
|
fieldIndicesByName map[string]int
|
||||||
|
err error
|
||||||
|
toArray bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// The stdlib errors.Join was introduced in Go 1.20, and we still support Go 1.17, so instead,
|
||||||
|
// here's a very basic implementation of an aggregated error.
|
||||||
|
type multierror []error
|
||||||
|
|
||||||
|
func (m multierror) Error() string {
|
||||||
|
var sb strings.Builder
|
||||||
|
for i, err := range m {
|
||||||
|
sb.WriteString(err.Error())
|
||||||
|
if i < len(m)-1 {
|
||||||
|
sb.WriteString(", ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sb.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func getDecodingStructType(t reflect.Type) *decodingStructType {
|
||||||
|
if v, _ := decodingStructTypeCache.Load(t); v != nil {
|
||||||
|
return v.(*decodingStructType)
|
||||||
|
}
|
||||||
|
|
||||||
|
flds, structOptions := getFields(t)
|
||||||
|
|
||||||
|
toArray := hasToArrayOption(structOptions)
|
||||||
|
|
||||||
|
var errs []error
|
||||||
|
for i := 0; i < len(flds); i++ {
|
||||||
|
if flds[i].keyAsInt {
|
||||||
|
nameAsInt, numErr := strconv.Atoi(flds[i].name)
|
||||||
|
if numErr != nil {
|
||||||
|
errs = append(errs, errors.New("cbor: failed to parse field name \""+flds[i].name+"\" to int ("+numErr.Error()+")"))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
flds[i].nameAsInt = int64(nameAsInt)
|
||||||
|
}
|
||||||
|
|
||||||
|
flds[i].typInfo = getTypeInfo(flds[i].typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldIndicesByName := make(map[string]int, len(flds))
|
||||||
|
for i, fld := range flds {
|
||||||
|
if _, ok := fieldIndicesByName[fld.name]; ok {
|
||||||
|
errs = append(errs, fmt.Errorf("cbor: two or more fields of %v have the same name %q", t, fld.name))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
fieldIndicesByName[fld.name] = i
|
||||||
|
}
|
||||||
|
|
||||||
|
var err error
|
||||||
|
{
|
||||||
|
var multi multierror
|
||||||
|
for _, each := range errs {
|
||||||
|
if each != nil {
|
||||||
|
multi = append(multi, each)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(multi) == 1 {
|
||||||
|
err = multi[0]
|
||||||
|
} else if len(multi) > 1 {
|
||||||
|
err = multi
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
structType := &decodingStructType{
|
||||||
|
fields: flds,
|
||||||
|
fieldIndicesByName: fieldIndicesByName,
|
||||||
|
err: err,
|
||||||
|
toArray: toArray,
|
||||||
|
}
|
||||||
|
decodingStructTypeCache.Store(t, structType)
|
||||||
|
return structType
|
||||||
|
}
|
||||||
|
|
||||||
|
type encodingStructType struct {
|
||||||
|
fields fields
|
||||||
|
bytewiseFields fields
|
||||||
|
lengthFirstFields fields
|
||||||
|
omitEmptyFieldsIdx []int
|
||||||
|
err error
|
||||||
|
toArray bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (st *encodingStructType) getFields(em *encMode) fields {
|
||||||
|
switch em.sort {
|
||||||
|
case SortNone, SortFastShuffle:
|
||||||
|
return st.fields
|
||||||
|
case SortLengthFirst:
|
||||||
|
return st.lengthFirstFields
|
||||||
|
default:
|
||||||
|
return st.bytewiseFields
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type bytewiseFieldSorter struct {
|
||||||
|
fields fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *bytewiseFieldSorter) Len() int {
|
||||||
|
return len(x.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *bytewiseFieldSorter) Swap(i, j int) {
|
||||||
|
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *bytewiseFieldSorter) Less(i, j int) bool {
|
||||||
|
return bytes.Compare(x.fields[i].cborName, x.fields[j].cborName) <= 0
|
||||||
|
}
|
||||||
|
|
||||||
|
type lengthFirstFieldSorter struct {
|
||||||
|
fields fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *lengthFirstFieldSorter) Len() int {
|
||||||
|
return len(x.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *lengthFirstFieldSorter) Swap(i, j int) {
|
||||||
|
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *lengthFirstFieldSorter) Less(i, j int) bool {
|
||||||
|
if len(x.fields[i].cborName) != len(x.fields[j].cborName) {
|
||||||
|
return len(x.fields[i].cborName) < len(x.fields[j].cborName)
|
||||||
|
}
|
||||||
|
return bytes.Compare(x.fields[i].cborName, x.fields[j].cborName) <= 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// getEncodingStructType returns the cached encoding metadata for struct type t,
// computing it on first use: per-field encode functions, preencoded CBOR keys,
// the omit-empty index list, and the precomputed sort orders. Errors (e.g. an
// unsupported field type) are cached too, so repeated encodes of t fail fast.
// Concurrent callers may race to compute the same entry; the last Store wins,
// which is presumably harmless because the result is deterministic for a given
// t — TODO confirm.
func getEncodingStructType(t reflect.Type) (*encodingStructType, error) {
	if v, _ := encodingStructTypeCache.Load(t); v != nil {
		structType := v.(*encodingStructType)
		return structType, structType.err
	}

	flds, structOptions := getFields(t)

	// "toarray" structs encode positionally and need none of the key work below.
	if hasToArrayOption(structOptions) {
		return getEncodingStructToArrayType(t, flds)
	}

	var err error
	var hasKeyAsInt bool
	var hasKeyAsStr bool
	var omitEmptyIdx []int
	// Scratch buffer reused for every key; returned to the pool below.
	e := getEncodeBuffer()
	for i := 0; i < len(flds); i++ {
		// Get field's encodeFunc
		flds[i].ef, flds[i].ief, flds[i].izf = getEncodeFunc(flds[i].typ)
		if flds[i].ef == nil {
			err = &UnsupportedTypeError{t}
			break
		}

		// Encode field name once, up front, so per-value encoding can emit
		// the preencoded bytes directly.
		if flds[i].keyAsInt {
			nameAsInt, numErr := strconv.Atoi(flds[i].name)
			if numErr != nil {
				err = errors.New("cbor: failed to parse field name \"" + flds[i].name + "\" to int (" + numErr.Error() + ")")
				break
			}
			flds[i].nameAsInt = int64(nameAsInt)
			if nameAsInt >= 0 {
				encodeHead(e, byte(cborTypePositiveInt), uint64(nameAsInt))
			} else {
				// CBOR major type 1 encodes -1-n as argument n.
				n := nameAsInt*(-1) - 1
				encodeHead(e, byte(cborTypeNegativeInt), uint64(n))
			}
			flds[i].cborName = make([]byte, e.Len())
			copy(flds[i].cborName, e.Bytes())
			e.Reset()

			hasKeyAsInt = true
		} else {
			encodeHead(e, byte(cborTypeTextString), uint64(len(flds[i].name)))
			flds[i].cborName = make([]byte, e.Len()+len(flds[i].name))
			n := copy(flds[i].cborName, e.Bytes())
			copy(flds[i].cborName[n:], flds[i].name)
			e.Reset()

			// If cborName contains a text string, then cborNameByteString contains a
			// string that has the byte string major type but is otherwise identical to
			// cborName.
			flds[i].cborNameByteString = make([]byte, len(flds[i].cborName))
			copy(flds[i].cborNameByteString, flds[i].cborName)
			// Reset encoded CBOR type to byte string, preserving the "additional
			// information" bits:
			flds[i].cborNameByteString[0] = byte(cborTypeByteString) |
				getAdditionalInformation(flds[i].cborNameByteString[0])

			hasKeyAsStr = true
		}

		// Check if field can be omitted when empty
		if flds[i].omitEmpty {
			omitEmptyIdx = append(omitEmptyIdx, i)
		}
	}
	putEncodeBuffer(e)

	if err != nil {
		// Cache the failure so future encodes of t don't redo the work.
		structType := &encodingStructType{err: err}
		encodingStructTypeCache.Store(t, structType)
		return structType, structType.err
	}

	// Sort fields by canonical order
	bytewiseFields := make(fields, len(flds))
	copy(bytewiseFields, flds)
	sort.Sort(&bytewiseFieldSorter{bytewiseFields})

	// When all keys are the same kind, bytewise order already satisfies
	// length-first order for this key set, so the slice is shared; only a mix
	// of int and string keys needs a separately sorted copy.
	lengthFirstFields := bytewiseFields
	if hasKeyAsInt && hasKeyAsStr {
		lengthFirstFields = make(fields, len(flds))
		copy(lengthFirstFields, flds)
		sort.Sort(&lengthFirstFieldSorter{lengthFirstFields})
	}

	structType := &encodingStructType{
		fields:             flds,
		bytewiseFields:     bytewiseFields,
		lengthFirstFields:  lengthFirstFields,
		omitEmptyFieldsIdx: omitEmptyIdx,
	}

	encodingStructTypeCache.Store(t, structType)
	return structType, structType.err
}
|
||||||
|
|
||||||
|
// getEncodingStructToArrayType builds and caches encoding metadata for a
// struct tagged with the "toarray" option: fields encode positionally as a
// CBOR array, so no field keys are preencoded and no sort orders are built.
func getEncodingStructToArrayType(t reflect.Type, flds fields) (*encodingStructType, error) {
	for i := 0; i < len(flds); i++ {
		// Get field's encodeFunc
		flds[i].ef, flds[i].ief, flds[i].izf = getEncodeFunc(flds[i].typ)
		if flds[i].ef == nil {
			// Cache the failure too, so repeated encodes of t fail fast.
			structType := &encodingStructType{err: &UnsupportedTypeError{t}}
			encodingStructTypeCache.Store(t, structType)
			return structType, structType.err
		}
	}

	structType := &encodingStructType{
		fields:  flds,
		toArray: true,
	}
	encodingStructTypeCache.Store(t, structType)
	return structType, structType.err
}
|
||||||
|
|
||||||
|
func getEncodeFunc(t reflect.Type) (encodeFunc, isEmptyFunc, isZeroFunc) {
|
||||||
|
if v, _ := encodeFuncCache.Load(t); v != nil {
|
||||||
|
fs := v.(encodeFuncs)
|
||||||
|
return fs.ef, fs.ief, fs.izf
|
||||||
|
}
|
||||||
|
ef, ief, izf := getEncodeFuncInternal(t)
|
||||||
|
encodeFuncCache.Store(t, encodeFuncs{ef, ief, izf})
|
||||||
|
return ef, ief, izf
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTypeInfo(t reflect.Type) *typeInfo {
|
||||||
|
if v, _ := typeInfoCache.Load(t); v != nil {
|
||||||
|
return v.(*typeInfo)
|
||||||
|
}
|
||||||
|
tInfo := newTypeInfo(t)
|
||||||
|
typeInfoCache.Store(t, tInfo)
|
||||||
|
return tInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasToArrayOption reports whether tag contains the ",toarray" struct-tag
// option, i.e. ",toarray" appears and is terminated by either the end of the
// tag or another comma (so ",toarrayx" does not match).
func hasToArrayOption(tag string) bool {
	const opt = ",toarray"
	i := strings.Index(tag, opt)
	if i < 0 {
		return false
	}
	rest := tag[i+len(opt):]
	return rest == "" || rest[0] == ','
}
|
||||||
191
vendor/github.com/fxamacker/cbor/v2/common.go
generated
vendored
Normal file
191
vendor/github.com/fxamacker/cbor/v2/common.go
generated
vendored
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// cborType is a CBOR major type (RFC 8949 Section 3.1), stored pre-shifted
// into the high-order 3 bits so it can be OR-ed directly with the 5-bit
// "additional information" value to form an initial byte.
type cborType uint8

const (
	cborTypePositiveInt cborType = 0x00
	cborTypeNegativeInt cborType = 0x20
	cborTypeByteString  cborType = 0x40
	cborTypeTextString  cborType = 0x60
	cborTypeArray       cborType = 0x80
	cborTypeMap         cborType = 0xa0
	cborTypeTag         cborType = 0xc0
	cborTypePrimitives  cborType = 0xe0
)

// String returns a human-readable name for the major type, used in error
// messages and diagnostics.
func (t cborType) String() string {
	switch t {
	case cborTypePositiveInt:
		return "positive integer"
	case cborTypeNegativeInt:
		return "negative integer"
	case cborTypeByteString:
		return "byte string"
	case cborTypeTextString:
		return "UTF-8 text string"
	case cborTypeArray:
		return "array"
	case cborTypeMap:
		return "map"
	case cborTypeTag:
		return "tag"
	case cborTypePrimitives:
		return "primitives"
	default:
		// t is not one of the eight valid pre-shifted major type values.
		return "Invalid type " + strconv.Itoa(int(t))
	}
}
|
||||||
|
|
||||||
|
// additionalInformation is the low-order 5 bits of an initial byte
// (RFC 8949 Section 3): either a small literal argument or a code describing
// how many argument bytes follow.
type additionalInformation uint8

const (
	// Values 0..23 carry the argument directly in the initial byte;
	// 24..27 mean the argument follows in 1, 2, 4, or 8 bytes.
	maxAdditionalInformationWithoutArgument = 23
	additionalInformationWith1ByteArgument  = 24
	additionalInformationWith2ByteArgument  = 25
	additionalInformationWith4ByteArgument  = 26
	additionalInformationWith8ByteArgument  = 27

	// For major type 7.
	additionalInformationAsFalse     = 20
	additionalInformationAsTrue      = 21
	additionalInformationAsNull      = 22
	additionalInformationAsUndefined = 23
	additionalInformationAsFloat16   = 25
	additionalInformationAsFloat32   = 26
	additionalInformationAsFloat64   = 27

	// For major type 2, 3, 4, 5.
	additionalInformationAsIndefiniteLengthFlag = 31
)

// Boundaries for CBOR simple values: 0..23 fit in the additional-information
// bits; values encoded with a 1-byte argument start at 32.
const (
	maxSimpleValueInAdditionalInformation = 23
	minSimpleValueIn1ByteArgument         = 32
)

// isIndefiniteLength reports whether ai marks an indefinite-length item.
func (ai additionalInformation) isIndefiniteLength() bool {
	return ai == additionalInformationAsIndefiniteLengthFlag
}
|
||||||
|
|
||||||
|
const (
	// From RFC 8949 Section 3:
	// "The initial byte of each encoded data item contains both information about the major type
	// (the high-order 3 bits, described in Section 3.1) and additional information
	// (the low-order 5 bits)."

	// typeMask is used to extract major type in initial byte of encoded data item.
	typeMask = 0xe0

	// additionalInformationMask is used to extract additional information in initial byte of encoded data item.
	additionalInformationMask = 0x1f
)

// getType extracts the major type from an initial byte. The result keeps the
// pre-shifted (high 3 bits) representation used by the cborType constants.
func getType(raw byte) cborType {
	return cborType(raw & typeMask)
}

// getAdditionalInformation extracts the low 5 bits of an initial byte.
func getAdditionalInformation(raw byte) byte {
	return raw & additionalInformationMask
}

// isBreakFlag reports whether raw is the 0xff "break" stop code that
// terminates an indefinite-length item.
func isBreakFlag(raw byte) bool {
	return raw == cborBreakFlag
}

// parseInitialByte splits an initial byte into its major type and additional
// information fields.
func parseInitialByte(b byte) (t cborType, ai byte) {
	return getType(b), getAdditionalInformation(b)
}
|
||||||
|
|
||||||
|
// Tag numbers with built-in handling in this package.
const (
	tagNumRFC3339Time                    = 0
	tagNumEpochTime                      = 1
	tagNumUnsignedBignum                 = 2
	tagNumNegativeBignum                 = 3
	tagNumExpectedLaterEncodingBase64URL = 21
	tagNumExpectedLaterEncodingBase64    = 22
	tagNumExpectedLaterEncodingBase16    = 23
	tagNumSelfDescribedCBOR              = 55799
)

// Frequently tested initial bytes: the break stop code and the
// indefinite-length heads for major types 2-5 (additional information 31).
const (
	cborBreakFlag                          = byte(0xff)
	cborByteStringWithIndefiniteLengthHead = byte(0x5f)
	cborTextStringWithIndefiniteLengthHead = byte(0x7f)
	cborArrayWithIndefiniteLengthHead      = byte(0x9f)
	cborMapWithIndefiniteLengthHead        = byte(0xbf)
)

// Preencoded CBOR for common constant values; these byte slices can be
// emitted directly. NOTE(review): package-level slices are mutable —
// presumably never written to by callers.
var (
	cborFalse            = []byte{0xf4}
	cborTrue             = []byte{0xf5}
	cborNil              = []byte{0xf6}
	cborNaN              = []byte{0xf9, 0x7e, 0x00}
	cborPositiveInfinity = []byte{0xf9, 0x7c, 0x00}
	cborNegativeInfinity = []byte{0xf9, 0xfc, 0x00}
)
|
||||||
|
|
||||||
|
// validBuiltinTag checks that supported built-in tag numbers are followed by
// expected content types; contentHead is the initial byte of the tag content.
// Unrecognized tag numbers are accepted without validation.
func validBuiltinTag(tagNum uint64, contentHead byte) error {
	t := getType(contentHead)
	switch tagNum {
	case tagNumRFC3339Time:
		// Tag content (date/time text string in RFC 3339 format) must be string type.
		if t != cborTypeTextString {
			return newInadmissibleTagContentTypeError(
				tagNumRFC3339Time,
				"text string",
				t.String())
		}
		return nil

	case tagNumEpochTime:
		// Tag content (epoch date/time) must be uint, int, or float type.
		// 0xf9..0xfb are the major-type-7 float16/float32/float64 heads.
		if t != cborTypePositiveInt && t != cborTypeNegativeInt && (contentHead < 0xf9 || contentHead > 0xfb) {
			return newInadmissibleTagContentTypeError(
				tagNumEpochTime,
				"integer or floating-point number",
				t.String())
		}
		return nil

	case tagNumUnsignedBignum, tagNumNegativeBignum:
		// Tag content (bignum) must be byte type.
		if t != cborTypeByteString {
			return newInadmissibleTagContentTypeErrorf(
				fmt.Sprintf(
					"tag number %d or %d must be followed by byte string, got %s",
					tagNumUnsignedBignum,
					tagNumNegativeBignum,
					t.String(),
				))
		}
		return nil

	case tagNumExpectedLaterEncodingBase64URL, tagNumExpectedLaterEncodingBase64, tagNumExpectedLaterEncodingBase16:
		// From RFC 8949 3.4.5.2:
		// The data item tagged can be a byte string or any other data item. In the latter
		// case, the tag applies to all of the byte string data items contained in the data
		// item, except for those contained in a nested data item tagged with an expected
		// conversion.
		return nil
	}

	return nil
}
|
||||||
|
|
||||||
|
// Transcoder is a scheme for transcoding a single CBOR encoded data item to or from a different
// data format.
type Transcoder interface {
	// Transcode reads the data item in its source format from a Reader and writes a
	// corresponding representation in its destination format to a Writer.
	Transcode(dst io.Writer, src io.Reader) error
}
|
||||||
3318
vendor/github.com/fxamacker/cbor/v2/decode.go
generated
vendored
Normal file
3318
vendor/github.com/fxamacker/cbor/v2/decode.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
724
vendor/github.com/fxamacker/cbor/v2/diagnose.go
generated
vendored
Normal file
724
vendor/github.com/fxamacker/cbor/v2/diagnose.go
generated
vendored
Normal file
@@ -0,0 +1,724 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/base32"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/hex"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"math"
|
||||||
|
"math/big"
|
||||||
|
"strconv"
|
||||||
|
"unicode/utf16"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"github.com/x448/float16"
|
||||||
|
)
|
||||||
|
|
||||||
|
// DiagMode is the main interface for CBOR diagnostic notation.
// A DiagMode is created from user options via DiagOptions.DiagMode.
type DiagMode interface {
	// Diagnose returns extended diagnostic notation (EDN) of CBOR data items using this DiagMode.
	Diagnose([]byte) (string, error)

	// DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest.
	DiagnoseFirst([]byte) (string, []byte, error)

	// DiagOptions returns user specified options used to create this DiagMode.
	DiagOptions() DiagOptions
}
|
||||||
|
|
||||||
|
// ByteStringEncoding specifies the base encoding that byte strings are notated.
type ByteStringEncoding uint8

const (
	// ByteStringBase16Encoding encodes byte strings in base16, without padding.
	ByteStringBase16Encoding ByteStringEncoding = iota

	// ByteStringBase32Encoding encodes byte strings in base32, without padding.
	ByteStringBase32Encoding

	// ByteStringBase32HexEncoding encodes byte strings in base32hex, without padding.
	ByteStringBase32HexEncoding

	// ByteStringBase64Encoding encodes byte strings in base64url, without padding.
	ByteStringBase64Encoding

	// maxByteStringEncoding is a sentinel; every valid encoding is below it.
	maxByteStringEncoding
)

// valid returns an error if bse is not one of the defined encodings.
func (bse ByteStringEncoding) valid() error {
	if bse >= maxByteStringEncoding {
		return errors.New("cbor: invalid ByteStringEncoding " + strconv.Itoa(int(bse)))
	}
	return nil
}
|
||||||
|
|
||||||
|
// DiagOptions specifies Diag options.
type DiagOptions struct {
	// ByteStringEncoding specifies the base encoding that byte strings are notated.
	// Default is ByteStringBase16Encoding.
	ByteStringEncoding ByteStringEncoding

	// ByteStringHexWhitespace specifies notating with whitespace in byte string
	// when ByteStringEncoding is ByteStringBase16Encoding.
	ByteStringHexWhitespace bool

	// ByteStringText specifies notating with text in byte string
	// if it is a valid UTF-8 text.
	ByteStringText bool

	// ByteStringEmbeddedCBOR specifies notating embedded CBOR in byte string
	// if it is a valid CBOR bytes.
	ByteStringEmbeddedCBOR bool

	// CBORSequence specifies notating CBOR sequences;
	// otherwise an error is returned if there are more bytes after the first CBOR data item.
	CBORSequence bool

	// FloatPrecisionIndicator specifies appending a suffix to indicate float precision.
	// Refer to https://www.rfc-editor.org/rfc/rfc8949.html#name-encoding-indicators.
	FloatPrecisionIndicator bool

	// MaxNestedLevels specifies the max nested levels allowed for any combination of CBOR array, maps, and tags.
	// Default is 32 levels and it can be set to [4, 65535]. Note that higher maximum levels of nesting can
	// require larger amounts of stack to deserialize. Don't increase this higher than you require.
	MaxNestedLevels int

	// MaxArrayElements specifies the max number of elements for CBOR arrays.
	// Default is 128*1024=131072 and it can be set to [16, 2147483647]
	MaxArrayElements int

	// MaxMapPairs specifies the max number of key-value pairs for CBOR maps.
	// Default is 128*1024=131072 and it can be set to [16, 2147483647]
	MaxMapPairs int
}

// DiagMode returns a DiagMode with immutable options.
func (opts DiagOptions) DiagMode() (DiagMode, error) {
	return opts.diagMode()
}

// diagMode validates the options and builds the internal *diagMode, including
// the decode mode used to walk the input (which enforces the Max* limits).
func (opts DiagOptions) diagMode() (*diagMode, error) {
	if err := opts.ByteStringEncoding.valid(); err != nil {
		return nil, err
	}

	decMode, err := DecOptions{
		MaxNestedLevels:  opts.MaxNestedLevels,
		MaxArrayElements: opts.MaxArrayElements,
		MaxMapPairs:      opts.MaxMapPairs,
	}.decMode()
	if err != nil {
		return nil, err
	}

	return &diagMode{
		byteStringEncoding:      opts.ByteStringEncoding,
		byteStringHexWhitespace: opts.ByteStringHexWhitespace,
		byteStringText:          opts.ByteStringText,
		byteStringEmbeddedCBOR:  opts.ByteStringEmbeddedCBOR,
		cborSequence:            opts.CBORSequence,
		floatPrecisionIndicator: opts.FloatPrecisionIndicator,
		decMode:                 decMode,
	}, nil
}
|
||||||
|
|
||||||
|
// diagMode is the immutable DiagMode implementation built by
// DiagOptions.diagMode.
type diagMode struct {
	byteStringEncoding      ByteStringEncoding
	byteStringHexWhitespace bool
	byteStringText          bool
	byteStringEmbeddedCBOR  bool
	cborSequence            bool
	floatPrecisionIndicator bool
	decMode                 *decMode
}

// DiagOptions returns user specified options used to create this DiagMode.
func (dm *diagMode) DiagOptions() DiagOptions {
	return DiagOptions{
		ByteStringEncoding:      dm.byteStringEncoding,
		ByteStringHexWhitespace: dm.byteStringHexWhitespace,
		ByteStringText:          dm.byteStringText,
		ByteStringEmbeddedCBOR:  dm.byteStringEmbeddedCBOR,
		CBORSequence:            dm.cborSequence,
		FloatPrecisionIndicator: dm.floatPrecisionIndicator,
		MaxNestedLevels:         dm.decMode.maxNestedLevels,
		MaxArrayElements:        dm.decMode.maxArrayElements,
		MaxMapPairs:             dm.decMode.maxMapPairs,
	}
}

// Diagnose returns extended diagnostic notation (EDN) of CBOR data items using the DiagMode.
func (dm *diagMode) Diagnose(data []byte) (string, error) {
	return newDiagnose(data, dm.decMode, dm).diag(dm.cborSequence)
}

// DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the DiagMode. Any remaining bytes are returned in rest.
func (dm *diagMode) DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) {
	return newDiagnose(data, dm.decMode, dm).diagFirst()
}
|
||||||
|
|
||||||
|
// defaultDiagMode backs the package-level Diagnose/DiagnoseFirst functions.
// The error is discarded; zero DiagOptions presumably always validate — TODO confirm.
var defaultDiagMode, _ = DiagOptions{}.diagMode()

// Diagnose returns extended diagnostic notation (EDN) of CBOR data items
// using the default diagnostic mode.
//
// Refer to https://www.rfc-editor.org/rfc/rfc8949.html#name-diagnostic-notation.
func Diagnose(data []byte) (string, error) {
	return defaultDiagMode.Diagnose(data)
}

// DiagnoseFirst returns extended diagnostic notation (EDN) of the first CBOR data item using the default diagnostic mode. Any remaining bytes are returned in rest.
func DiagnoseFirst(data []byte) (diagNotation string, rest []byte, err error) {
	return defaultDiagMode.DiagnoseFirst(data)
}
|
||||||
|
|
||||||
|
// diagnose holds the state for one notation pass: the diagnostic mode, a
// decoder positioned over the input bytes, and the output text buffer.
type diagnose struct {
	dm *diagMode
	d  *decoder
	w  *bytes.Buffer
}

// newDiagnose builds a diagnose over data using decode mode decm and
// diagnostic mode diagm.
func newDiagnose(data []byte, decm *decMode, diagm *diagMode) *diagnose {
	return &diagnose{
		dm: diagm,
		d:  &decoder{data: data, dm: decm},
		w:  &bytes.Buffer{},
	}
}
|
||||||
|
|
||||||
|
// diag notates data items into EDN text. With cborSequence it loops over all
// items in the input, separating them with ", "; otherwise the wellformed
// check rejects trailing bytes after the first item. On error, the text
// produced so far is still returned alongside the error.
func (di *diagnose) diag(cborSequence bool) (string, error) {
	// CBOR Sequence
	firstItem := true
	for {
		switch err := di.wellformed(cborSequence); err {
		case nil:
			if !firstItem {
				di.w.WriteString(", ")
			}
			firstItem = false
			if itemErr := di.item(); itemErr != nil {
				return di.w.String(), itemErr
			}

		case io.EOF:
			// EOF before any item means empty input (error); EOF after at
			// least one item is normal sequence termination.
			if firstItem {
				return di.w.String(), err
			}
			return di.w.String(), nil

		default:
			return di.w.String(), err
		}
	}
}

// diagFirst notates only the first data item and returns the remaining
// unconsumed bytes; extra data after the first item is allowed.
func (di *diagnose) diagFirst() (diagNotation string, rest []byte, err error) {
	err = di.wellformed(true)
	if err == nil {
		err = di.item()
	}

	if err == nil {
		// Return EDN and the rest of the data slice (which might be len 0)
		return di.w.String(), di.d.data[di.d.off:], nil
	}

	return di.w.String(), nil, err
}

// wellformed checks that the next data item is well-formed without consuming
// it: the decoder offset is saved and restored around the check.
func (di *diagnose) wellformed(allowExtraData bool) error {
	off := di.d.off
	err := di.d.wellformed(allowExtraData, false)
	di.d.off = off
	return err
}
|
||||||
|
|
||||||
|
// item notates the next CBOR data item at di.d.off in extended diagnostic
// notation, advancing the decoder offset, and recursing for nested containers
// and tag content. Indexing di.d.data without bounds checks is presumably
// safe because diag/diagFirst run wellformed() before each top-level item —
// TODO confirm for recursive calls.
func (di *diagnose) item() error { //nolint:gocyclo
	initialByte := di.d.data[di.d.off]
	switch initialByte {
	case cborByteStringWithIndefiniteLengthHead,
		cborTextStringWithIndefiniteLengthHead: // indefinite-length byte/text string
		di.d.off++
		if isBreakFlag(di.d.data[di.d.off]) {
			di.d.off++
			switch initialByte {
			case cborByteStringWithIndefiniteLengthHead:
				// indefinite-length bytes with no chunks.
				di.w.WriteString(`''_`)
				return nil
			case cborTextStringWithIndefiniteLengthHead:
				// indefinite-length text with no chunks.
				di.w.WriteString(`""_`)
				return nil
			}
		}

		di.w.WriteString("(_ ")

		i := 0
		for !di.d.foundBreak() {
			if i > 0 {
				di.w.WriteString(", ")
			}

			i++
			// wellformedIndefiniteString() already checked that the next item is a byte/text string.
			if err := di.item(); err != nil {
				return err
			}
		}

		di.w.WriteByte(')')
		return nil

	case cborArrayWithIndefiniteLengthHead: // indefinite-length array
		di.d.off++
		di.w.WriteString("[_ ")

		i := 0
		for !di.d.foundBreak() {
			if i > 0 {
				di.w.WriteString(", ")
			}

			i++
			if err := di.item(); err != nil {
				return err
			}
		}

		di.w.WriteByte(']')
		return nil

	case cborMapWithIndefiniteLengthHead: // indefinite-length map
		di.d.off++
		di.w.WriteString("{_ ")

		i := 0
		for !di.d.foundBreak() {
			if i > 0 {
				di.w.WriteString(", ")
			}

			i++
			// key
			if err := di.item(); err != nil {
				return err
			}

			di.w.WriteString(": ")

			// value
			if err := di.item(); err != nil {
				return err
			}
		}

		di.w.WriteByte('}')
		return nil
	}

	t := di.d.nextCBORType()
	switch t {
	case cborTypePositiveInt:
		_, _, val := di.d.getHead()
		di.w.WriteString(strconv.FormatUint(val, 10))
		return nil

	case cborTypeNegativeInt:
		_, _, val := di.d.getHead()
		if val > math.MaxInt64 {
			// CBOR negative integer overflows int64, use big.Int to store value.
			bi := new(big.Int)
			bi.SetUint64(val)
			bi.Add(bi, big.NewInt(1))
			bi.Neg(bi)
			di.w.WriteString(bi.String())
			return nil
		}

		// -1 XOR val == -1 - val, the decoded value of major type 1.
		nValue := int64(-1) ^ int64(val)
		di.w.WriteString(strconv.FormatInt(nValue, 10))
		return nil

	case cborTypeByteString:
		b, _ := di.d.parseByteString()
		return di.encodeByteString(b)

	case cborTypeTextString:
		b, err := di.d.parseTextString()
		if err != nil {
			return err
		}
		return di.encodeTextString(string(b), '"')

	case cborTypeArray:
		_, _, val := di.d.getHead()
		count := int(val)
		di.w.WriteByte('[')

		for i := 0; i < count; i++ {
			if i > 0 {
				di.w.WriteString(", ")
			}
			if err := di.item(); err != nil {
				return err
			}
		}
		di.w.WriteByte(']')
		return nil

	case cborTypeMap:
		_, _, val := di.d.getHead()
		count := int(val)
		di.w.WriteByte('{')

		for i := 0; i < count; i++ {
			if i > 0 {
				di.w.WriteString(", ")
			}
			// key
			if err := di.item(); err != nil {
				return err
			}
			di.w.WriteString(": ")
			// value
			if err := di.item(); err != nil {
				return err
			}
		}
		di.w.WriteByte('}')
		return nil

	case cborTypeTag:
		_, _, tagNum := di.d.getHead()
		switch tagNum {
		case tagNumUnsignedBignum:
			if nt := di.d.nextCBORType(); nt != cborTypeByteString {
				return newInadmissibleTagContentTypeError(
					tagNumUnsignedBignum,
					"byte string",
					nt.String())
			}

			b, _ := di.d.parseByteString()
			bi := new(big.Int).SetBytes(b)
			di.w.WriteString(bi.String())
			return nil

		case tagNumNegativeBignum:
			if nt := di.d.nextCBORType(); nt != cborTypeByteString {
				return newInadmissibleTagContentTypeError(
					tagNumNegativeBignum,
					"byte string",
					nt.String(),
				)
			}

			// Tag 3 content encodes -1 - n for the byte-string value n.
			b, _ := di.d.parseByteString()
			bi := new(big.Int).SetBytes(b)
			bi.Add(bi, big.NewInt(1))
			bi.Neg(bi)
			di.w.WriteString(bi.String())
			return nil

		default:
			// Generic tag: number(content) notation.
			di.w.WriteString(strconv.FormatUint(tagNum, 10))
			di.w.WriteByte('(')
			if err := di.item(); err != nil {
				return err
			}
			di.w.WriteByte(')')
			return nil
		}

	case cborTypePrimitives:
		_, ai, val := di.d.getHead()
		switch ai {
		case additionalInformationAsFalse:
			di.w.WriteString("false")
			return nil

		case additionalInformationAsTrue:
			di.w.WriteString("true")
			return nil

		case additionalInformationAsNull:
			di.w.WriteString("null")
			return nil

		case additionalInformationAsUndefined:
			di.w.WriteString("undefined")
			return nil

		case additionalInformationAsFloat16,
			additionalInformationAsFloat32,
			additionalInformationAsFloat64:
			return di.encodeFloat(ai, val)

		default:
			di.w.WriteString("simple(")
			di.w.WriteString(strconv.FormatUint(val, 10))
			di.w.WriteByte(')')
			return nil
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// writeU16 formats val as a "\uxxxx" escape (4 hex digits). Only the low 16
// bits are written; callers split supplementary-plane runes into surrogate
// pairs first (see encodeTextString).
func (di *diagnose) writeU16(val rune) {
	di.w.WriteString("\\u")
	var in [2]byte
	in[0] = byte(val >> 8)
	in[1] = byte(val)
	sz := hex.EncodedLen(len(in))
	di.w.Grow(sz)
	// Hex-encode into the buffer's spare capacity, then Write the same bytes
	// to advance its length. Source and destination alias the same region, so
	// Write's internal copy is a same-address move. NOTE(review): presumably
	// safe because Grow guarantees capacity so no reallocation intervenes.
	dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
	hex.Encode(dst, in[:])
	di.w.Write(dst)
}

// Unpadded base32 variants used by encodeByteString for b32'/h32' notation.
var rawBase32Encoding = base32.StdEncoding.WithPadding(base32.NoPadding)
var rawBase32HexEncoding = base32.HexEncoding.WithPadding(base32.NoPadding)
|
||||||
|
|
||||||
|
// encodeByteString notates val per the mode's options: as a quoted text
// string if ByteStringText applies, as embedded CBOR (<<...>>) if
// ByteStringEmbeddedCBOR applies and the bytes parse as a CBOR sequence,
// otherwise in the configured base encoding (h''/b32''/h32''/b64'').
// The base-encoding branches encode into the buffer's spare capacity and then
// Write the same aliased bytes to advance the length (same trick as writeU16).
func (di *diagnose) encodeByteString(val []byte) error {
	if len(val) > 0 {
		if di.dm.byteStringText && utf8.Valid(val) {
			return di.encodeTextString(string(val), '\'')
		}

		if di.dm.byteStringEmbeddedCBOR {
			di2 := newDiagnose(val, di.dm.decMode, di.dm)
			// Always notate the embedded bytes as a CBOR sequence; fall
			// through to plain base encoding if they are not well-formed.
			if str, err := di2.diag(true); err == nil {
				di.w.WriteString("<<")
				di.w.WriteString(str)
				di.w.WriteString(">>")
				return nil
			}
		}
	}

	switch di.dm.byteStringEncoding {
	case ByteStringBase16Encoding:
		di.w.WriteString("h'")
		if di.dm.byteStringHexWhitespace {
			// Two hex digits per byte plus a space between adjacent bytes.
			sz := hex.EncodedLen(len(val))
			if len(val) > 0 {
				sz += len(val) - 1
			}
			di.w.Grow(sz)

			dst := di.w.Bytes()[di.w.Len():]
			for i := range val {
				if i > 0 {
					dst = append(dst, ' ')
				}
				hex.Encode(dst[len(dst):len(dst)+2], val[i:i+1])
				dst = dst[:len(dst)+2]
			}
			di.w.Write(dst)
		} else {
			sz := hex.EncodedLen(len(val))
			di.w.Grow(sz)
			dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
			hex.Encode(dst, val)
			di.w.Write(dst)
		}
		di.w.WriteByte('\'')
		return nil

	case ByteStringBase32Encoding:
		di.w.WriteString("b32'")
		sz := rawBase32Encoding.EncodedLen(len(val))
		di.w.Grow(sz)
		dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
		rawBase32Encoding.Encode(dst, val)
		di.w.Write(dst)
		di.w.WriteByte('\'')
		return nil

	case ByteStringBase32HexEncoding:
		di.w.WriteString("h32'")
		sz := rawBase32HexEncoding.EncodedLen(len(val))
		di.w.Grow(sz)
		dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
		rawBase32HexEncoding.Encode(dst, val)
		di.w.Write(dst)
		di.w.WriteByte('\'')
		return nil

	case ByteStringBase64Encoding:
		di.w.WriteString("b64'")
		sz := base64.RawURLEncoding.EncodedLen(len(val))
		di.w.Grow(sz)
		dst := di.w.Bytes()[di.w.Len() : di.w.Len()+sz]
		base64.RawURLEncoding.Encode(dst, val)
		di.w.Write(dst)
		di.w.WriteByte('\'')
		return nil

	default:
		// It should not be possible for users to construct a *diagMode with an invalid byte
		// string encoding (DiagOptions.diagMode validates it), so this is a
		// programmer-error invariant.
		panic(fmt.Sprintf("diagmode has invalid ByteStringEncoding %v", di.dm.byteStringEncoding))
	}
}
|
||||||
|
|
||||||
|
const utf16SurrSelf = rune(0x10000)
|
||||||
|
|
||||||
|
// quote should be either `'` or `"`
|
||||||
|
func (di *diagnose) encodeTextString(val string, quote byte) error {
|
||||||
|
di.w.WriteByte(quote)
|
||||||
|
|
||||||
|
for i := 0; i < len(val); {
|
||||||
|
if b := val[i]; b < utf8.RuneSelf {
|
||||||
|
switch {
|
||||||
|
case b == '\t', b == '\n', b == '\r', b == '\\', b == quote:
|
||||||
|
di.w.WriteByte('\\')
|
||||||
|
|
||||||
|
switch b {
|
||||||
|
case '\t':
|
||||||
|
b = 't'
|
||||||
|
case '\n':
|
||||||
|
b = 'n'
|
||||||
|
case '\r':
|
||||||
|
b = 'r'
|
||||||
|
}
|
||||||
|
di.w.WriteByte(b)
|
||||||
|
|
||||||
|
case b >= ' ' && b <= '~':
|
||||||
|
di.w.WriteByte(b)
|
||||||
|
|
||||||
|
default:
|
||||||
|
di.writeU16(rune(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
i++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
c, size := utf8.DecodeRuneInString(val[i:])
|
||||||
|
switch {
|
||||||
|
case c == utf8.RuneError:
|
||||||
|
return &SemanticError{"cbor: invalid UTF-8 string"}
|
||||||
|
|
||||||
|
case c < utf16SurrSelf:
|
||||||
|
di.writeU16(c)
|
||||||
|
|
||||||
|
default:
|
||||||
|
c1, c2 := utf16.EncodeRune(c)
|
||||||
|
di.writeU16(c1)
|
||||||
|
di.writeU16(c2)
|
||||||
|
}
|
||||||
|
|
||||||
|
i += size
|
||||||
|
}
|
||||||
|
|
||||||
|
di.w.WriteByte(quote)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (di *diagnose) encodeFloat(ai byte, val uint64) error {
|
||||||
|
f64 := float64(0)
|
||||||
|
switch ai {
|
||||||
|
case additionalInformationAsFloat16:
|
||||||
|
f16 := float16.Frombits(uint16(val))
|
||||||
|
switch {
|
||||||
|
case f16.IsNaN():
|
||||||
|
di.w.WriteString("NaN")
|
||||||
|
return nil
|
||||||
|
case f16.IsInf(1):
|
||||||
|
di.w.WriteString("Infinity")
|
||||||
|
return nil
|
||||||
|
case f16.IsInf(-1):
|
||||||
|
di.w.WriteString("-Infinity")
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
f64 = float64(f16.Float32())
|
||||||
|
}
|
||||||
|
|
||||||
|
case additionalInformationAsFloat32:
|
||||||
|
f32 := math.Float32frombits(uint32(val))
|
||||||
|
switch {
|
||||||
|
case f32 != f32:
|
||||||
|
di.w.WriteString("NaN")
|
||||||
|
return nil
|
||||||
|
case f32 > math.MaxFloat32:
|
||||||
|
di.w.WriteString("Infinity")
|
||||||
|
return nil
|
||||||
|
case f32 < -math.MaxFloat32:
|
||||||
|
di.w.WriteString("-Infinity")
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
f64 = float64(f32)
|
||||||
|
}
|
||||||
|
|
||||||
|
case additionalInformationAsFloat64:
|
||||||
|
f64 = math.Float64frombits(val)
|
||||||
|
switch {
|
||||||
|
case f64 != f64:
|
||||||
|
di.w.WriteString("NaN")
|
||||||
|
return nil
|
||||||
|
case f64 > math.MaxFloat64:
|
||||||
|
di.w.WriteString("Infinity")
|
||||||
|
return nil
|
||||||
|
case f64 < -math.MaxFloat64:
|
||||||
|
di.w.WriteString("-Infinity")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Use ES6 number to string conversion which should match most JSON generators.
|
||||||
|
// Inspired by https://github.com/golang/go/blob/4df10fba1687a6d4f51d7238a403f8f2298f6a16/src/encoding/json/encode.go#L585
|
||||||
|
const bitSize = 64
|
||||||
|
b := make([]byte, 0, 32)
|
||||||
|
if abs := math.Abs(f64); abs != 0 && (abs < 1e-6 || abs >= 1e21) {
|
||||||
|
b = strconv.AppendFloat(b, f64, 'e', -1, bitSize)
|
||||||
|
// clean up e-09 to e-9
|
||||||
|
n := len(b)
|
||||||
|
if n >= 4 && string(b[n-4:n-1]) == "e-0" {
|
||||||
|
b = append(b[:n-2], b[n-1])
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
b = strconv.AppendFloat(b, f64, 'f', -1, bitSize)
|
||||||
|
}
|
||||||
|
|
||||||
|
// add decimal point and trailing zero if needed
|
||||||
|
if bytes.IndexByte(b, '.') < 0 {
|
||||||
|
if i := bytes.IndexByte(b, 'e'); i < 0 {
|
||||||
|
b = append(b, '.', '0')
|
||||||
|
} else {
|
||||||
|
b = append(b[:i+2], b[i:]...)
|
||||||
|
b[i] = '.'
|
||||||
|
b[i+1] = '0'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
di.w.WriteString(string(b))
|
||||||
|
|
||||||
|
if di.dm.floatPrecisionIndicator {
|
||||||
|
switch ai {
|
||||||
|
case additionalInformationAsFloat16:
|
||||||
|
di.w.WriteString("_1")
|
||||||
|
return nil
|
||||||
|
|
||||||
|
case additionalInformationAsFloat32:
|
||||||
|
di.w.WriteString("_2")
|
||||||
|
return nil
|
||||||
|
|
||||||
|
case additionalInformationAsFloat64:
|
||||||
|
di.w.WriteString("_3")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
152
vendor/github.com/fxamacker/cbor/v2/doc.go
generated
vendored
Normal file
152
vendor/github.com/fxamacker/cbor/v2/doc.go
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package cbor is a modern CBOR codec (RFC 8949 & RFC 8742) with CBOR tags,
|
||||||
|
Go struct tag options (toarray/keyasint/omitempty/omitzero), Core Deterministic Encoding,
|
||||||
|
CTAP2, Canonical CBOR, float64->32->16, and duplicate map key detection.
|
||||||
|
|
||||||
|
Encoding options allow "preferred serialization" by encoding integers and floats
|
||||||
|
to their smallest forms (e.g. float16) when values fit.
|
||||||
|
|
||||||
|
Struct tag options "keyasint", "toarray", "omitempty", and "omitzero" reduce encoding size
|
||||||
|
and reduce programming effort.
|
||||||
|
|
||||||
|
For example, "toarray" tag makes struct fields encode to CBOR array elements. And
|
||||||
|
"keyasint" makes a field encode to an element of CBOR map with specified int key.
|
||||||
|
|
||||||
|
Latest docs can be viewed at https://github.com/fxamacker/cbor#cbor-library-in-go
|
||||||
|
|
||||||
|
# Basics
|
||||||
|
|
||||||
|
The Quick Start guide is at https://github.com/fxamacker/cbor#quick-start
|
||||||
|
|
||||||
|
Function signatures identical to encoding/json include:
|
||||||
|
|
||||||
|
Marshal, Unmarshal, NewEncoder, NewDecoder, (*Encoder).Encode, (*Decoder).Decode
|
||||||
|
|
||||||
|
Standard interfaces include:
|
||||||
|
|
||||||
|
BinaryMarshaler, BinaryUnmarshaler, Marshaler, and Unmarshaler
|
||||||
|
|
||||||
|
Diagnostic functions translate CBOR data item into Diagnostic Notation:
|
||||||
|
|
||||||
|
Diagnose, DiagnoseFirst
|
||||||
|
|
||||||
|
Functions that simplify using CBOR Sequences (RFC 8742) include:
|
||||||
|
|
||||||
|
UnmarshalFirst
|
||||||
|
|
||||||
|
Custom encoding and decoding is possible by implementing standard interfaces for
|
||||||
|
user-defined Go types.
|
||||||
|
|
||||||
|
Codec functions are available at package-level (using defaults options) or by
|
||||||
|
creating modes from options at runtime.
|
||||||
|
|
||||||
|
"Mode" in this API means definite way of encoding (EncMode) or decoding (DecMode).
|
||||||
|
|
||||||
|
EncMode and DecMode interfaces are created from EncOptions or DecOptions structs.
|
||||||
|
|
||||||
|
em, err := cbor.EncOptions{...}.EncMode()
|
||||||
|
em, err := cbor.CanonicalEncOptions().EncMode()
|
||||||
|
em, err := cbor.CTAP2EncOptions().EncMode()
|
||||||
|
|
||||||
|
Modes use immutable options to avoid side-effects and simplify concurrency. Behavior of
|
||||||
|
modes won't accidentally change at runtime after they're created.
|
||||||
|
|
||||||
|
Modes are intended to be reused and are safe for concurrent use.
|
||||||
|
|
||||||
|
EncMode and DecMode Interfaces
|
||||||
|
|
||||||
|
// EncMode interface uses immutable options and is safe for concurrent use.
|
||||||
|
type EncMode interface {
|
||||||
|
Marshal(v interface{}) ([]byte, error)
|
||||||
|
NewEncoder(w io.Writer) *Encoder
|
||||||
|
EncOptions() EncOptions // returns copy of options
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecMode interface uses immutable options and is safe for concurrent use.
|
||||||
|
type DecMode interface {
|
||||||
|
Unmarshal(data []byte, v interface{}) error
|
||||||
|
NewDecoder(r io.Reader) *Decoder
|
||||||
|
DecOptions() DecOptions // returns copy of options
|
||||||
|
}
|
||||||
|
|
||||||
|
Using Default Encoding Mode
|
||||||
|
|
||||||
|
b, err := cbor.Marshal(v)
|
||||||
|
|
||||||
|
encoder := cbor.NewEncoder(w)
|
||||||
|
err = encoder.Encode(v)
|
||||||
|
|
||||||
|
Using Default Decoding Mode
|
||||||
|
|
||||||
|
err := cbor.Unmarshal(b, &v)
|
||||||
|
|
||||||
|
decoder := cbor.NewDecoder(r)
|
||||||
|
err = decoder.Decode(&v)
|
||||||
|
|
||||||
|
Using Default Mode of UnmarshalFirst to Decode CBOR Sequences
|
||||||
|
|
||||||
|
// Decode the first CBOR data item and return remaining bytes:
|
||||||
|
rest, err = cbor.UnmarshalFirst(b, &v) // decode []byte b to v
|
||||||
|
|
||||||
|
Using Extended Diagnostic Notation (EDN) to represent CBOR data
|
||||||
|
|
||||||
|
// Translate the first CBOR data item into text and return remaining bytes.
|
||||||
|
text, rest, err = cbor.DiagnoseFirst(b) // decode []byte b to text
|
||||||
|
|
||||||
|
Creating and Using Encoding Modes
|
||||||
|
|
||||||
|
// Create EncOptions using either struct literal or a function.
|
||||||
|
opts := cbor.CanonicalEncOptions()
|
||||||
|
|
||||||
|
// If needed, modify encoding options
|
||||||
|
opts.Time = cbor.TimeUnix
|
||||||
|
|
||||||
|
// Create reusable EncMode interface with immutable options, safe for concurrent use.
|
||||||
|
em, err := opts.EncMode()
|
||||||
|
|
||||||
|
// Use EncMode like encoding/json, with same function signatures.
|
||||||
|
b, err := em.Marshal(v)
|
||||||
|
// or
|
||||||
|
encoder := em.NewEncoder(w)
|
||||||
|
err := encoder.Encode(v)
|
||||||
|
|
||||||
|
// NOTE: Both em.Marshal(v) and encoder.Encode(v) use encoding options
|
||||||
|
// specified during creation of em (encoding mode).
|
||||||
|
|
||||||
|
# CBOR Options
|
||||||
|
|
||||||
|
Predefined Encoding Options: https://github.com/fxamacker/cbor#predefined-encoding-options
|
||||||
|
|
||||||
|
Encoding Options: https://github.com/fxamacker/cbor#encoding-options
|
||||||
|
|
||||||
|
Decoding Options: https://github.com/fxamacker/cbor#decoding-options
|
||||||
|
|
||||||
|
# Struct Tags
|
||||||
|
|
||||||
|
Struct tags like `cbor:"name,omitempty"` and `json:"name,omitempty"` work as expected.
|
||||||
|
If both struct tags are specified then `cbor` is used.
|
||||||
|
|
||||||
|
Struct tag options like "keyasint", "toarray", "omitempty", and "omitzero" make it easy to use
|
||||||
|
very compact formats like COSE and CWT (CBOR Web Tokens) with structs.
|
||||||
|
|
||||||
|
The "omitzero" option omits zero values from encoding, matching
|
||||||
|
[stdlib encoding/json behavior](https://pkg.go.dev/encoding/json#Marshal).
|
||||||
|
When specified in the `cbor` tag, the option is always honored.
|
||||||
|
When specified in the `json` tag, the option is honored when building with Go 1.24+.
|
||||||
|
|
||||||
|
For example, "toarray" makes struct fields encode to array elements. And "keyasint"
|
||||||
|
makes struct fields encode to elements of CBOR map with int keys.
|
||||||
|
|
||||||
|
https://raw.githubusercontent.com/fxamacker/images/master/cbor/v2.0.0/cbor_easy_api.png
|
||||||
|
|
||||||
|
Struct tag options are listed at https://github.com/fxamacker/cbor#struct-tags-1
|
||||||
|
|
||||||
|
# Tests and Fuzzing
|
||||||
|
|
||||||
|
Over 375 tests are included in this package. Cover-guided fuzzing is handled by
|
||||||
|
a private fuzzer that replaced fxamacker/cbor-fuzz years ago.
|
||||||
|
*/
|
||||||
|
package cbor
|
||||||
2299
vendor/github.com/fxamacker/cbor/v2/encode.go
generated
vendored
Normal file
2299
vendor/github.com/fxamacker/cbor/v2/encode.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
92
vendor/github.com/fxamacker/cbor/v2/encode_map.go
generated
vendored
Normal file
92
vendor/github.com/fxamacker/cbor/v2/encode_map.go
generated
vendored
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"reflect"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
type mapKeyValueEncodeFunc struct {
|
||||||
|
kf, ef encodeFunc
|
||||||
|
kpool, vpool sync.Pool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (me *mapKeyValueEncodeFunc) encodeKeyValues(e *bytes.Buffer, em *encMode, v reflect.Value, kvs []keyValue) error {
|
||||||
|
iterk := me.kpool.Get().(*reflect.Value)
|
||||||
|
defer func() {
|
||||||
|
iterk.SetZero()
|
||||||
|
me.kpool.Put(iterk)
|
||||||
|
}()
|
||||||
|
iterv := me.vpool.Get().(*reflect.Value)
|
||||||
|
defer func() {
|
||||||
|
iterv.SetZero()
|
||||||
|
me.vpool.Put(iterv)
|
||||||
|
}()
|
||||||
|
|
||||||
|
if kvs == nil {
|
||||||
|
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
|
||||||
|
iterk.SetIterKey(iter)
|
||||||
|
iterv.SetIterValue(iter)
|
||||||
|
|
||||||
|
if err := me.kf(e, em, *iterk); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := me.ef(e, em, *iterv); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
initial := e.Len()
|
||||||
|
for i, iter := 0, v.MapRange(); iter.Next(); i++ {
|
||||||
|
iterk.SetIterKey(iter)
|
||||||
|
iterv.SetIterValue(iter)
|
||||||
|
|
||||||
|
offset := e.Len()
|
||||||
|
if err := me.kf(e, em, *iterk); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
valueOffset := e.Len()
|
||||||
|
if err := me.ef(e, em, *iterv); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
kvs[i] = keyValue{
|
||||||
|
offset: offset - initial,
|
||||||
|
valueOffset: valueOffset - initial,
|
||||||
|
nextOffset: e.Len() - initial,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getEncodeMapFunc(t reflect.Type) encodeFunc {
|
||||||
|
kf, _, _ := getEncodeFunc(t.Key())
|
||||||
|
ef, _, _ := getEncodeFunc(t.Elem())
|
||||||
|
if kf == nil || ef == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
mkv := &mapKeyValueEncodeFunc{
|
||||||
|
kf: kf,
|
||||||
|
ef: ef,
|
||||||
|
kpool: sync.Pool{
|
||||||
|
New: func() any {
|
||||||
|
rk := reflect.New(t.Key()).Elem()
|
||||||
|
return &rk
|
||||||
|
},
|
||||||
|
},
|
||||||
|
vpool: sync.Pool{
|
||||||
|
New: func() any {
|
||||||
|
rv := reflect.New(t.Elem()).Elem()
|
||||||
|
return &rv
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return mapEncodeFunc{
|
||||||
|
e: mkv.encodeKeyValues,
|
||||||
|
}.encode
|
||||||
|
}
|
||||||
8
vendor/github.com/fxamacker/cbor/v2/omitzero_go124.go
generated
vendored
Normal file
8
vendor/github.com/fxamacker/cbor/v2/omitzero_go124.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
//go:build go1.24
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
var jsonStdlibSupportsOmitzero = true
|
||||||
8
vendor/github.com/fxamacker/cbor/v2/omitzero_pre_go124.go
generated
vendored
Normal file
8
vendor/github.com/fxamacker/cbor/v2/omitzero_pre_go124.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
//go:build !go1.24
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
var jsonStdlibSupportsOmitzero = false
|
||||||
98
vendor/github.com/fxamacker/cbor/v2/simplevalue.go
generated
vendored
Normal file
98
vendor/github.com/fxamacker/cbor/v2/simplevalue.go
generated
vendored
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SimpleValue represents CBOR simple value.
|
||||||
|
// CBOR simple value is:
|
||||||
|
// - an extension point like CBOR tag.
|
||||||
|
// - a subset of CBOR major type 7 that isn't floating-point.
|
||||||
|
// - "identified by a number between 0 and 255, but distinct from that number itself".
|
||||||
|
// For example, "a simple value 2 is not equivalent to an integer 2" as a CBOR map key.
|
||||||
|
//
|
||||||
|
// CBOR simple values identified by 20..23 are: "false", "true" , "null", and "undefined".
|
||||||
|
// Other CBOR simple values are currently unassigned/reserved by IANA.
|
||||||
|
type SimpleValue uint8
|
||||||
|
|
||||||
|
var (
|
||||||
|
typeSimpleValue = reflect.TypeOf(SimpleValue(0))
|
||||||
|
)
|
||||||
|
|
||||||
|
// MarshalCBOR encodes SimpleValue as CBOR simple value (major type 7).
|
||||||
|
func (sv SimpleValue) MarshalCBOR() ([]byte, error) {
|
||||||
|
// RFC 8949 3.3. Floating-Point Numbers and Values with No Content says:
|
||||||
|
// "An encoder MUST NOT issue two-byte sequences that start with 0xf8
|
||||||
|
// (major type 7, additional information 24) and continue with a byte
|
||||||
|
// less than 0x20 (32 decimal). Such sequences are not well-formed.
|
||||||
|
// (This implies that an encoder cannot encode false, true, null, or
|
||||||
|
// undefined in two-byte sequences and that only the one-byte variants
|
||||||
|
// of these are well-formed; more generally speaking, each simple value
|
||||||
|
// only has a single representation variant)."
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case sv <= maxSimpleValueInAdditionalInformation:
|
||||||
|
return []byte{byte(cborTypePrimitives) | byte(sv)}, nil
|
||||||
|
|
||||||
|
case sv >= minSimpleValueIn1ByteArgument:
|
||||||
|
return []byte{byte(cborTypePrimitives) | additionalInformationWith1ByteArgument, byte(sv)}, nil
|
||||||
|
|
||||||
|
default:
|
||||||
|
return nil, &UnsupportedValueError{msg: fmt.Sprintf("SimpleValue(%d)", sv)}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalCBOR decodes CBOR simple value (major type 7) to SimpleValue.
|
||||||
|
//
|
||||||
|
// Deprecated: No longer used by this codec; kept for compatibility
|
||||||
|
// with user apps that directly call this function.
|
||||||
|
func (sv *SimpleValue) UnmarshalCBOR(data []byte) error {
|
||||||
|
if sv == nil {
|
||||||
|
return errors.New("cbor.SimpleValue: UnmarshalCBOR on nil pointer")
|
||||||
|
}
|
||||||
|
|
||||||
|
d := decoder{data: data, dm: defaultDecMode}
|
||||||
|
|
||||||
|
// Check well-formedness of CBOR data item.
|
||||||
|
// SimpleValue.UnmarshalCBOR() is exported, so
|
||||||
|
// the codec needs to support same behavior for:
|
||||||
|
// - Unmarshal(data, *SimpleValue)
|
||||||
|
// - SimpleValue.UnmarshalCBOR(data)
|
||||||
|
err := d.wellformed(false, false)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return sv.unmarshalCBOR(data)
|
||||||
|
}
|
||||||
|
|
||||||
|
// unmarshalCBOR decodes CBOR simple value (major type 7) to SimpleValue.
|
||||||
|
// This function assumes data is well-formed, and does not perform bounds checking.
|
||||||
|
// This function is called by Unmarshal().
|
||||||
|
func (sv *SimpleValue) unmarshalCBOR(data []byte) error {
|
||||||
|
if sv == nil {
|
||||||
|
return errors.New("cbor.SimpleValue: UnmarshalCBOR on nil pointer")
|
||||||
|
}
|
||||||
|
|
||||||
|
d := decoder{data: data, dm: defaultDecMode}
|
||||||
|
|
||||||
|
typ, ai, val := d.getHead()
|
||||||
|
|
||||||
|
if typ != cborTypePrimitives {
|
||||||
|
return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue"}
|
||||||
|
}
|
||||||
|
if ai > additionalInformationWith1ByteArgument {
|
||||||
|
return &UnmarshalTypeError{CBORType: typ.String(), GoType: "SimpleValue", errorMsg: "not simple values"}
|
||||||
|
}
|
||||||
|
|
||||||
|
// It is safe to cast val to uint8 here because
|
||||||
|
// - data is already verified to be well-formed CBOR simple value and
|
||||||
|
// - val is <= math.MaxUint8.
|
||||||
|
*sv = SimpleValue(val)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
277
vendor/github.com/fxamacker/cbor/v2/stream.go
generated
vendored
Normal file
277
vendor/github.com/fxamacker/cbor/v2/stream.go
generated
vendored
Normal file
@@ -0,0 +1,277 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
"reflect"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Decoder reads and decodes CBOR values from io.Reader.
|
||||||
|
type Decoder struct {
|
||||||
|
r io.Reader
|
||||||
|
d decoder
|
||||||
|
buf []byte
|
||||||
|
off int // next read offset in buf
|
||||||
|
bytesRead int
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDecoder returns a new decoder that reads and decodes from r using
|
||||||
|
// the default decoding options.
|
||||||
|
func NewDecoder(r io.Reader) *Decoder {
|
||||||
|
return defaultDecMode.NewDecoder(r)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode reads CBOR value and decodes it into the value pointed to by v.
|
||||||
|
func (dec *Decoder) Decode(v any) error {
|
||||||
|
_, err := dec.readNext()
|
||||||
|
if err != nil {
|
||||||
|
// Return validation error or read error.
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
dec.d.reset(dec.buf[dec.off:])
|
||||||
|
err = dec.d.value(v)
|
||||||
|
|
||||||
|
// Increment dec.off even if decoding err is not nil because
|
||||||
|
// dec.d.off points to the next CBOR data item if current
|
||||||
|
// CBOR data item is valid but failed to be decoded into v.
|
||||||
|
// This allows next CBOR data item to be decoded in next
|
||||||
|
// call to this function.
|
||||||
|
dec.off += dec.d.off
|
||||||
|
dec.bytesRead += dec.d.off
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip skips to the next CBOR data item (if there is any),
|
||||||
|
// otherwise it returns error such as io.EOF, io.UnexpectedEOF, etc.
|
||||||
|
func (dec *Decoder) Skip() error {
|
||||||
|
n, err := dec.readNext()
|
||||||
|
if err != nil {
|
||||||
|
// Return validation error or read error.
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
dec.off += n
|
||||||
|
dec.bytesRead += n
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NumBytesRead returns the number of bytes read.
|
||||||
|
func (dec *Decoder) NumBytesRead() int {
|
||||||
|
return dec.bytesRead
|
||||||
|
}
|
||||||
|
|
||||||
|
// Buffered returns a reader for data remaining in Decoder's buffer.
|
||||||
|
// Returned reader is valid until the next call to Decode or Skip.
|
||||||
|
func (dec *Decoder) Buffered() io.Reader {
|
||||||
|
return bytes.NewReader(dec.buf[dec.off:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// readNext() reads next CBOR data item from Reader to buffer.
|
||||||
|
// It returns the size of next CBOR data item.
|
||||||
|
// It also returns validation error or read error if any.
|
||||||
|
func (dec *Decoder) readNext() (int, error) {
|
||||||
|
var readErr error
|
||||||
|
var validErr error
|
||||||
|
|
||||||
|
for {
|
||||||
|
// Process any unread data in dec.buf.
|
||||||
|
if dec.off < len(dec.buf) {
|
||||||
|
dec.d.reset(dec.buf[dec.off:])
|
||||||
|
off := dec.off // Save offset before data validation
|
||||||
|
validErr = dec.d.wellformed(true, false)
|
||||||
|
dec.off = off // Restore offset
|
||||||
|
|
||||||
|
if validErr == nil {
|
||||||
|
return dec.d.off, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if validErr != io.ErrUnexpectedEOF {
|
||||||
|
return 0, validErr
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process last read error on io.ErrUnexpectedEOF.
|
||||||
|
if readErr != nil {
|
||||||
|
if readErr == io.EOF {
|
||||||
|
// current CBOR data item is incomplete.
|
||||||
|
return 0, io.ErrUnexpectedEOF
|
||||||
|
}
|
||||||
|
return 0, readErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// More data is needed and there was no read error.
|
||||||
|
var n int
|
||||||
|
for n == 0 {
|
||||||
|
n, readErr = dec.read()
|
||||||
|
if n == 0 && readErr != nil {
|
||||||
|
// No more data can be read and read error is encountered.
|
||||||
|
// At this point, validErr is either nil or io.ErrUnexpectedEOF.
|
||||||
|
if readErr == io.EOF {
|
||||||
|
if validErr == io.ErrUnexpectedEOF {
|
||||||
|
// current CBOR data item is incomplete.
|
||||||
|
return 0, io.ErrUnexpectedEOF
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, readErr
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// At this point, dec.buf contains new data from last read (n > 0).
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// read() reads data from Reader to buffer.
|
||||||
|
// It returns number of bytes read and any read error encountered.
|
||||||
|
// Postconditions:
|
||||||
|
// - dec.buf contains previously unread data and new data.
|
||||||
|
// - dec.off is 0.
|
||||||
|
func (dec *Decoder) read() (int, error) {
|
||||||
|
// Grow buf if needed.
|
||||||
|
const minRead = 512
|
||||||
|
if cap(dec.buf)-len(dec.buf)+dec.off < minRead {
|
||||||
|
oldUnreadBuf := dec.buf[dec.off:]
|
||||||
|
dec.buf = make([]byte, len(dec.buf)-dec.off, 2*cap(dec.buf)+minRead)
|
||||||
|
dec.overwriteBuf(oldUnreadBuf)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy unread data over read data and reset off to 0.
|
||||||
|
if dec.off > 0 {
|
||||||
|
dec.overwriteBuf(dec.buf[dec.off:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read from reader and reslice buf.
|
||||||
|
n, err := dec.r.Read(dec.buf[len(dec.buf):cap(dec.buf)])
|
||||||
|
dec.buf = dec.buf[0 : len(dec.buf)+n]
|
||||||
|
return n, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (dec *Decoder) overwriteBuf(newBuf []byte) {
|
||||||
|
n := copy(dec.buf, newBuf)
|
||||||
|
dec.buf = dec.buf[:n]
|
||||||
|
dec.off = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encoder writes CBOR values to io.Writer.
|
||||||
|
type Encoder struct {
|
||||||
|
w io.Writer
|
||||||
|
em *encMode
|
||||||
|
indefTypes []cborType
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEncoder returns a new encoder that writes to w using the default encoding options.
|
||||||
|
func NewEncoder(w io.Writer) *Encoder {
|
||||||
|
return defaultEncMode.NewEncoder(w)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode writes the CBOR encoding of v.
|
||||||
|
func (enc *Encoder) Encode(v any) error {
|
||||||
|
if len(enc.indefTypes) > 0 && v != nil {
|
||||||
|
indefType := enc.indefTypes[len(enc.indefTypes)-1]
|
||||||
|
if indefType == cborTypeTextString {
|
||||||
|
k := reflect.TypeOf(v).Kind()
|
||||||
|
if k != reflect.String {
|
||||||
|
return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length text string")
|
||||||
|
}
|
||||||
|
} else if indefType == cborTypeByteString {
|
||||||
|
t := reflect.TypeOf(v)
|
||||||
|
k := t.Kind()
|
||||||
|
if (k != reflect.Array && k != reflect.Slice) || t.Elem().Kind() != reflect.Uint8 {
|
||||||
|
return errors.New("cbor: cannot encode item type " + k.String() + " for indefinite-length byte string")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := getEncodeBuffer()
|
||||||
|
|
||||||
|
err := encode(buf, enc.em, reflect.ValueOf(v))
|
||||||
|
if err == nil {
|
||||||
|
_, err = enc.w.Write(buf.Bytes())
|
||||||
|
}
|
||||||
|
|
||||||
|
putEncodeBuffer(buf)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// StartIndefiniteByteString starts byte string encoding of indefinite length.
|
||||||
|
// Subsequent calls of (*Encoder).Encode() encodes definite length byte strings
|
||||||
|
// ("chunks") as one contiguous string until EndIndefinite is called.
|
||||||
|
func (enc *Encoder) StartIndefiniteByteString() error {
|
||||||
|
return enc.startIndefinite(cborTypeByteString)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StartIndefiniteTextString starts text string encoding of indefinite length.
|
||||||
|
// Subsequent calls of (*Encoder).Encode() encodes definite length text strings
|
||||||
|
// ("chunks") as one contiguous string until EndIndefinite is called.
|
||||||
|
func (enc *Encoder) StartIndefiniteTextString() error {
|
||||||
|
return enc.startIndefinite(cborTypeTextString)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StartIndefiniteArray starts array encoding of indefinite length.
|
||||||
|
// Subsequent calls of (*Encoder).Encode() encodes elements of the array
|
||||||
|
// until EndIndefinite is called.
|
||||||
|
func (enc *Encoder) StartIndefiniteArray() error {
|
||||||
|
return enc.startIndefinite(cborTypeArray)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StartIndefiniteMap starts array encoding of indefinite length.
|
||||||
|
// Subsequent calls of (*Encoder).Encode() encodes elements of the map
|
||||||
|
// until EndIndefinite is called.
|
||||||
|
func (enc *Encoder) StartIndefiniteMap() error {
|
||||||
|
return enc.startIndefinite(cborTypeMap)
|
||||||
|
}
|
||||||
|
|
||||||
|
// EndIndefinite closes last opened indefinite length value.
|
||||||
|
func (enc *Encoder) EndIndefinite() error {
|
||||||
|
if len(enc.indefTypes) == 0 {
|
||||||
|
return errors.New("cbor: cannot encode \"break\" code outside indefinite length values")
|
||||||
|
}
|
||||||
|
_, err := enc.w.Write([]byte{cborBreakFlag})
|
||||||
|
if err == nil {
|
||||||
|
enc.indefTypes = enc.indefTypes[:len(enc.indefTypes)-1]
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var cborIndefHeader = map[cborType][]byte{
|
||||||
|
cborTypeByteString: {cborByteStringWithIndefiniteLengthHead},
|
||||||
|
cborTypeTextString: {cborTextStringWithIndefiniteLengthHead},
|
||||||
|
cborTypeArray: {cborArrayWithIndefiniteLengthHead},
|
||||||
|
cborTypeMap: {cborMapWithIndefiniteLengthHead},
|
||||||
|
}
|
||||||
|
|
||||||
|
func (enc *Encoder) startIndefinite(typ cborType) error {
|
||||||
|
if enc.em.indefLength == IndefLengthForbidden {
|
||||||
|
return &IndefiniteLengthError{typ}
|
||||||
|
}
|
||||||
|
_, err := enc.w.Write(cborIndefHeader[typ])
|
||||||
|
if err == nil {
|
||||||
|
enc.indefTypes = append(enc.indefTypes, typ)
|
||||||
|
}
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// RawMessage is a raw encoded CBOR value.
|
||||||
|
type RawMessage []byte
|
||||||
|
|
||||||
|
// MarshalCBOR returns m or CBOR nil if m is nil.
|
||||||
|
func (m RawMessage) MarshalCBOR() ([]byte, error) {
|
||||||
|
if len(m) == 0 {
|
||||||
|
return cborNil, nil
|
||||||
|
}
|
||||||
|
return m, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnmarshalCBOR creates a copy of data and saves to *m.
|
||||||
|
func (m *RawMessage) UnmarshalCBOR(data []byte) error {
|
||||||
|
if m == nil {
|
||||||
|
return errors.New("cbor.RawMessage: UnmarshalCBOR on nil pointer")
|
||||||
|
}
|
||||||
|
*m = append((*m)[0:0], data...)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
268
vendor/github.com/fxamacker/cbor/v2/structfields.go
generated
vendored
Normal file
268
vendor/github.com/fxamacker/cbor/v2/structfields.go
generated
vendored
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// field holds the encoding/decoding metadata computed for one visible
// struct field: its CBOR name, index path from the root struct, cached
// encode helpers, and tag-derived options.
type field struct {
	name               string
	nameAsInt          int64  // used by decoder to match field name with CBOR int
	cborName           []byte
	cborNameByteString []byte // major type 2 name encoding iff cborName has major type 3
	idx                []int
	typ                reflect.Type
	ef                 encodeFunc
	ief                isEmptyFunc
	izf                isZeroFunc
	typInfo            *typeInfo // used by decoder to reuse type info
	tagged             bool      // used to choose dominant field (at the same level tagged fields dominate untagged fields)
	omitEmpty          bool      // used to skip empty field
	omitZero           bool      // used to skip zero field
	keyAsInt           bool      // used to encode/decode field name as int
}

// fields is the list of visible fields collected for a struct type.
type fields []*field
|
||||||
|
|
||||||
|
// indexFieldSorter sorts fields by field idx at each level, breaking ties with idx depth.
|
||||||
|
type indexFieldSorter struct {
|
||||||
|
fields fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *indexFieldSorter) Len() int {
|
||||||
|
return len(x.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *indexFieldSorter) Swap(i, j int) {
|
||||||
|
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *indexFieldSorter) Less(i, j int) bool {
|
||||||
|
iIdx, jIdx := x.fields[i].idx, x.fields[j].idx
|
||||||
|
for k := 0; k < len(iIdx) && k < len(jIdx); k++ {
|
||||||
|
if iIdx[k] != jIdx[k] {
|
||||||
|
return iIdx[k] < jIdx[k]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return len(iIdx) <= len(jIdx)
|
||||||
|
}
|
||||||
|
|
||||||
|
// nameLevelAndTagFieldSorter sorts fields by field name, idx depth, and presence of tag.
|
||||||
|
type nameLevelAndTagFieldSorter struct {
|
||||||
|
fields fields
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *nameLevelAndTagFieldSorter) Len() int {
|
||||||
|
return len(x.fields)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *nameLevelAndTagFieldSorter) Swap(i, j int) {
|
||||||
|
x.fields[i], x.fields[j] = x.fields[j], x.fields[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (x *nameLevelAndTagFieldSorter) Less(i, j int) bool {
|
||||||
|
fi, fj := x.fields[i], x.fields[j]
|
||||||
|
if fi.name != fj.name {
|
||||||
|
return fi.name < fj.name
|
||||||
|
}
|
||||||
|
if len(fi.idx) != len(fj.idx) {
|
||||||
|
return len(fi.idx) < len(fj.idx)
|
||||||
|
}
|
||||||
|
if fi.tagged != fj.tagged {
|
||||||
|
return fi.tagged
|
||||||
|
}
|
||||||
|
return i < j // Field i and j have the same name, depth, and tagged status. Nothing else matters.
|
||||||
|
}
|
||||||
|
|
||||||
|
// getFields returns visible fields of struct type t following visibility rules for JSON encoding.
// It walks anonymous (embedded) struct fields breadth-first, resolves name
// conflicts (shallower and tagged fields dominate), and returns the surviving
// fields sorted by index path, plus any tag options found on the special "_" field.
func getFields(t reflect.Type) (flds fields, structOptions string) {
	// Get special field "_" tag options
	if f, ok := t.FieldByName("_"); ok {
		tag := f.Tag.Get("cbor")
		if tag != "-" {
			structOptions = tag
		}
	}

	// nTypes contains next level anonymous fields' types and indexes
	// (there can be multiple fields of the same type at the same level)
	flds, nTypes := appendFields(t, nil, nil, nil)

	if len(nTypes) > 0 {

		var cTypes map[reflect.Type][][]int     // current level anonymous fields' types and indexes
		vTypes := map[reflect.Type]bool{t: true} // visited field types at less nested levels

		// Breadth-first traversal: process one nesting level at a time.
		for len(nTypes) > 0 {
			cTypes, nTypes = nTypes, nil

			for t, idx := range cTypes {
				// If there are multiple anonymous fields of the same struct type at the same level, all are ignored.
				if len(idx) > 1 {
					continue
				}

				// Anonymous field of the same type at deeper nested level is ignored.
				if vTypes[t] {
					continue
				}
				vTypes[t] = true

				flds, nTypes = appendFields(t, idx[0], flds, nTypes)
			}
		}
	}

	// Group same-named fields together so the dominant one can be picked below.
	sort.Sort(&nameLevelAndTagFieldSorter{flds})

	// Keep visible fields.
	j := 0 // index of next unique field
	for i := 0; i < len(flds); {
		name := flds[i].name
		if i == len(flds)-1 || // last field
			name != flds[i+1].name || // field i has unique field name
			len(flds[i].idx) < len(flds[i+1].idx) || // field i is at a less nested level than field i+1
			(flds[i].tagged && !flds[i+1].tagged) { // field i is tagged while field i+1 is not
			flds[j] = flds[i]
			j++
		}

		// Skip fields with the same field name.
		for i++; i < len(flds) && name == flds[i].name; i++ { //nolint:revive
		}
	}
	if j != len(flds) {
		flds = flds[:j]
	}

	// Sort fields by field index
	sort.Sort(&indexFieldSorter{flds})

	return flds, structOptions
}
|
||||||
|
|
||||||
|
// appendFields appends type t's exportable fields to flds and anonymous struct fields to nTypes.
// idx is the index path prefix leading to t from the root struct; each
// discovered field gets that prefix plus its own index. "cbor" struct tags
// take precedence over "json" tags; a tag of "-" skips the field.
func appendFields(
	t reflect.Type,
	idx []int,
	flds fields,
	nTypes map[reflect.Type][][]int,
) (
	_flds fields,
	_nTypes map[reflect.Type][][]int,
) {
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)

		// Reduce pointer fields to their base element type for kind checks.
		ft := f.Type
		for ft.Kind() == reflect.Pointer {
			ft = ft.Elem()
		}

		if !isFieldExportable(f, ft.Kind()) {
			continue
		}

		// Prefer the "cbor" tag; fall back to "json" when absent.
		cborTag := true
		tag := f.Tag.Get("cbor")
		if tag == "" {
			tag = f.Tag.Get("json")
			cborTag = false
		}
		if tag == "-" {
			continue
		}

		tagged := tag != ""

		// Parse field tag options
		var tagFieldName string
		var omitempty, omitzero, keyasint bool
		for j := 0; tag != ""; j++ {
			var token string
			idx := strings.IndexByte(tag, ',')
			if idx == -1 {
				token, tag = tag, ""
			} else {
				token, tag = tag[:idx], tag[idx+1:]
			}
			if j == 0 {
				// First token is the field name override.
				tagFieldName = token
			} else {
				switch token {
				case "omitempty":
					omitempty = true
				case "omitzero":
					// "omitzero" from a json tag is honored only when the
					// json stdlib in use supports it.
					if cborTag || jsonStdlibSupportsOmitzero {
						omitzero = true
					}
				case "keyasint":
					keyasint = true
				}
			}
		}

		fieldName := tagFieldName
		if tagFieldName == "" {
			fieldName = f.Name
		}

		// Full index path for this field: parent prefix + own index.
		fIdx := make([]int, len(idx)+1)
		copy(fIdx, idx)
		fIdx[len(fIdx)-1] = i

		// An anonymous struct field without a name override is queued for
		// flattening into the parent; everything else is a regular field.
		if !f.Anonymous || ft.Kind() != reflect.Struct || tagFieldName != "" {
			flds = append(flds, &field{
				name:      fieldName,
				idx:       fIdx,
				typ:       f.Type,
				omitEmpty: omitempty,
				omitZero:  omitzero,
				keyAsInt:  keyasint,
				tagged:    tagged})
		} else {
			if nTypes == nil {
				nTypes = make(map[reflect.Type][][]int)
			}
			nTypes[ft] = append(nTypes[ft], fIdx)
		}
	}

	return flds, nTypes
}
|
||||||
|
|
||||||
|
// isFieldExportable returns true if f is an exportable (regular or anonymous) field or
|
||||||
|
// a nonexportable anonymous field of struct type.
|
||||||
|
// Nonexportable anonymous field of struct type can contain exportable fields.
|
||||||
|
func isFieldExportable(f reflect.StructField, fk reflect.Kind) bool { //nolint:gocritic // ignore hugeParam
|
||||||
|
return f.IsExported() || (f.Anonymous && fk == reflect.Struct)
|
||||||
|
}
|
||||||
|
|
||||||
|
type embeddedFieldNullPtrFunc func(reflect.Value) (reflect.Value, error)
|
||||||
|
|
||||||
|
// getFieldValue returns field value of struct v by index. When encountering null pointer
|
||||||
|
// to anonymous (embedded) struct field, f is called with the last traversed field value.
|
||||||
|
func getFieldValue(v reflect.Value, idx []int, f embeddedFieldNullPtrFunc) (fv reflect.Value, err error) {
|
||||||
|
fv = v
|
||||||
|
for i, n := range idx {
|
||||||
|
fv = fv.Field(n)
|
||||||
|
|
||||||
|
if i < len(idx)-1 {
|
||||||
|
if fv.Kind() == reflect.Pointer && fv.Type().Elem().Kind() == reflect.Struct {
|
||||||
|
if fv.IsNil() {
|
||||||
|
// Null pointer to embedded struct field
|
||||||
|
fv, err = f(fv)
|
||||||
|
if err != nil || !fv.IsValid() {
|
||||||
|
return fv, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fv = fv.Elem()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return fv, nil
|
||||||
|
}
|
||||||
329
vendor/github.com/fxamacker/cbor/v2/tag.go
generated
vendored
Normal file
329
vendor/github.com/fxamacker/cbor/v2/tag.go
generated
vendored
Normal file
@@ -0,0 +1,329 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Tag represents a tagged data item (CBOR major type 6), comprising a tag number and the unmarshaled tag content.
// NOTE: The same encoding and decoding options that apply to untagged CBOR data items also applies to tag content
// during encoding and decoding.
type Tag struct {
	Number  uint64 // CBOR tag number
	Content any    // unmarshaled tag content
}

// RawTag represents a tagged data item (CBOR major type 6), comprising a tag number and the raw tag content.
// The raw tag content (enclosed data item) is a CBOR-encoded data item.
// RawTag can be used to delay decoding a CBOR data item or precompute encoding a CBOR data item.
type RawTag struct {
	Number  uint64     // CBOR tag number
	Content RawMessage // still-encoded tag content
}
|
||||||
|
|
||||||
|
// UnmarshalCBOR sets *t with the tag number and the raw tag content copied from data.
// It first verifies that data is a single well-formed CBOR data item, then
// delegates to unmarshalCBOR. Returns an error if t is nil.
//
// Deprecated: No longer used by this codec; kept for compatibility
// with user apps that directly call this function.
func (t *RawTag) UnmarshalCBOR(data []byte) error {
	if t == nil {
		return errors.New("cbor.RawTag: UnmarshalCBOR on nil pointer")
	}

	d := decoder{data: data, dm: defaultDecMode}

	// Check if data is a well-formed CBOR data item.
	// RawTag.UnmarshalCBOR() is exported, so
	// the codec needs to support same behavior for:
	// - Unmarshal(data, *RawTag)
	// - RawTag.UnmarshalCBOR(data)
	err := d.wellformed(false, false)
	if err != nil {
		return err
	}

	return t.unmarshalCBOR(data)
}
|
||||||
|
|
||||||
|
// unmarshalCBOR sets *t with the tag number and the raw tag content copied from data.
// This function assumes data is well-formed, and does not perform bounds checking.
// This function is called by Unmarshal().
func (t *RawTag) unmarshalCBOR(data []byte) error {
	if t == nil {
		return errors.New("cbor.RawTag: UnmarshalCBOR on nil pointer")
	}

	// Decoding CBOR null and undefined to cbor.RawTag is no-op.
	if len(data) == 1 && (data[0] == 0xf6 || data[0] == 0xf7) {
		return nil
	}

	d := decoder{data: data, dm: defaultDecMode}

	// Unmarshal tag number.
	typ, _, num := d.getHead()
	if typ != cborTypeTag {
		return &UnmarshalTypeError{CBORType: typ.String(), GoType: typeRawTag.String()}
	}
	t.Number = num

	// Unmarshal tag content: copy the remaining encoded bytes so *t does
	// not alias the caller's data slice.
	c := d.data[d.off:]
	t.Content = make([]byte, len(c))
	copy(t.Content, c)
	return nil
}
|
||||||
|
|
||||||
|
// MarshalCBOR returns CBOR encoding of t.
// An uninitialized RawTag (zero Number, empty Content) encodes to CBOR null;
// otherwise the tag head is written followed by the raw content (or CBOR
// null when Content is empty).
func (t RawTag) MarshalCBOR() ([]byte, error) {
	if t.Number == 0 && len(t.Content) == 0 {
		// Marshal uninitialized cbor.RawTag
		b := make([]byte, len(cborNil))
		copy(b, cborNil)
		return b, nil
	}

	e := getEncodeBuffer()

	encodeHead(e, byte(cborTypeTag), t.Number)

	content := t.Content
	if len(content) == 0 {
		content = cborNil
	}

	// Assemble head + content into a fresh buffer so the pooled encode
	// buffer can be returned before we hand bytes to the caller.
	buf := make([]byte, len(e.Bytes())+len(content))
	n := copy(buf, e.Bytes())
	copy(buf[n:], content)

	putEncodeBuffer(e)
	return buf, nil
}
|
||||||
|
|
||||||
|
// DecTagMode specifies how decoder handles tag number.
type DecTagMode int

const (
	// DecTagIgnored makes decoder ignore tag number (skips if present).
	DecTagIgnored DecTagMode = iota

	// DecTagOptional makes decoder verify tag number if it's present.
	DecTagOptional

	// DecTagRequired makes decoder verify tag number and tag number must be present.
	DecTagRequired

	maxDecTagMode
)

// valid reports whether dtm is one of the defined DecTagMode values.
func (dtm DecTagMode) valid() bool {
	switch dtm {
	case DecTagIgnored, DecTagOptional, DecTagRequired:
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
// EncTagMode specifies how encoder handles tag number.
type EncTagMode int

const (
	// EncTagNone makes encoder not encode tag number.
	EncTagNone EncTagMode = iota

	// EncTagRequired makes encoder encode tag number.
	EncTagRequired

	maxEncTagMode
)

// valid reports whether etm is one of the defined EncTagMode values.
func (etm EncTagMode) valid() bool {
	switch etm {
	case EncTagNone, EncTagRequired:
		return true
	}
	return false
}
|
||||||
|
|
||||||
|
// TagOptions specifies how encoder and decoder handle tag number.
type TagOptions struct {
	DecTag DecTagMode // how the decoder treats the tag number
	EncTag EncTagMode // whether the encoder writes the tag number
}
|
||||||
|
|
||||||
|
// TagSet is an interface to add and remove tag info. It is used by EncMode and DecMode
// to provide CBOR tag support.
type TagSet interface {
	// Add adds given tag number(s), content type, and tag options to TagSet.
	Add(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) error

	// Remove removes given tag content type from TagSet.
	Remove(contentType reflect.Type)

	tagProvider
}

// tagProvider is the read-only lookup interface used internally by the codec
// to resolve between registered Go types and tag number sequences.
type tagProvider interface {
	getTagItemFromType(t reflect.Type) *tagItem
	getTypeFromTagNum(num []uint64) reflect.Type
}
|
||||||
|
|
||||||
|
type tagItem struct {
|
||||||
|
num []uint64
|
||||||
|
cborTagNum []byte
|
||||||
|
contentType reflect.Type
|
||||||
|
opts TagOptions
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *tagItem) equalTagNum(num []uint64) bool {
|
||||||
|
// Fast path to compare 1 tag number
|
||||||
|
if len(t.num) == 1 && len(num) == 1 && t.num[0] == num[0] {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(t.num) != len(num) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
for i := 0; i < len(t.num); i++ {
|
||||||
|
if t.num[i] != num[i] {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
type (
	// tagSet maps a registered content type to its tag info.
	tagSet map[reflect.Type]*tagItem

	// syncTagSet wraps a tagSet with a RWMutex for concurrent use.
	syncTagSet struct {
		sync.RWMutex
		t tagSet
	}
)
|
||||||
|
|
||||||
|
func (t tagSet) getTagItemFromType(typ reflect.Type) *tagItem {
|
||||||
|
return t[typ]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t tagSet) getTypeFromTagNum(num []uint64) reflect.Type {
|
||||||
|
for typ, tag := range t {
|
||||||
|
if tag.equalTagNum(num) {
|
||||||
|
return typ
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTagSet returns TagSet (safe for concurrency).
|
||||||
|
func NewTagSet() TagSet {
|
||||||
|
return &syncTagSet{t: make(map[reflect.Type]*tagItem)}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add adds given tag number(s), content type, and tag options to TagSet.
// Pointer content types are reduced to their base element type. Returns an
// error if the content type is nil or invalid, or if either the content type
// or the tag number sequence is already registered.
func (t *syncTagSet) Add(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) error {
	if contentType == nil {
		return errors.New("cbor: cannot add nil content type to TagSet")
	}
	for contentType.Kind() == reflect.Pointer {
		contentType = contentType.Elem()
	}
	// Validate and build the tag item before taking the lock.
	tag, err := newTagItem(opts, contentType, num, nestedNum...)
	if err != nil {
		return err
	}
	t.Lock()
	defer t.Unlock()
	// Reject duplicate content types and duplicate tag number sequences.
	for typ, ti := range t.t {
		if typ == contentType {
			return errors.New("cbor: content type " + contentType.String() + " already exists in TagSet")
		}
		if ti.equalTagNum(tag.num) {
			return fmt.Errorf("cbor: tag number %v already exists in TagSet", tag.num)
		}
	}
	t.t[contentType] = tag
	return nil
}
|
||||||
|
|
||||||
|
// Remove removes given tag content type from TagSet.
|
||||||
|
func (t *syncTagSet) Remove(contentType reflect.Type) {
|
||||||
|
for contentType.Kind() == reflect.Pointer {
|
||||||
|
contentType = contentType.Elem()
|
||||||
|
}
|
||||||
|
t.Lock()
|
||||||
|
delete(t.t, contentType)
|
||||||
|
t.Unlock()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *syncTagSet) getTagItemFromType(typ reflect.Type) *tagItem {
|
||||||
|
t.RLock()
|
||||||
|
ti := t.t[typ]
|
||||||
|
t.RUnlock()
|
||||||
|
return ti
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *syncTagSet) getTypeFromTagNum(num []uint64) reflect.Type {
|
||||||
|
t.RLock()
|
||||||
|
rt := t.t.getTypeFromTagNum(num)
|
||||||
|
t.RUnlock()
|
||||||
|
return rt
|
||||||
|
}
|
||||||
|
|
||||||
|
// newTagItem validates the registration (options must not be a no-op, the
// content type must be a named non-interface type that isn't one of the
// built-in-handled types, and the tag number must not collide with built-in
// tags 0-3 or 55799) and returns a tagItem with the tag heads pre-encoded.
func newTagItem(opts TagOptions, contentType reflect.Type, num uint64, nestedNum ...uint64) (*tagItem, error) {
	if opts.DecTag == DecTagIgnored && opts.EncTag == EncTagNone {
		return nil, errors.New("cbor: cannot add tag with DecTagIgnored and EncTagNone options to TagSet")
	}
	if contentType.PkgPath() == "" || contentType.Kind() == reflect.Interface {
		return nil, errors.New("cbor: can only add named types to TagSet, got " + contentType.String())
	}
	if contentType == typeTime {
		return nil, errors.New("cbor: cannot add time.Time to TagSet, use EncOptions.TimeTag and DecOptions.TimeTag instead")
	}
	if contentType == typeBigInt {
		return nil, errors.New("cbor: cannot add big.Int to TagSet, it's built-in and supported automatically")
	}
	if contentType == typeTag {
		return nil, errors.New("cbor: cannot add cbor.Tag to TagSet")
	}
	if contentType == typeRawTag {
		return nil, errors.New("cbor: cannot add cbor.RawTag to TagSet")
	}
	if num == 0 || num == 1 {
		return nil, errors.New("cbor: cannot add tag number 0 or 1 to TagSet, use EncOptions.TimeTag and DecOptions.TimeTag instead")
	}
	if num == 2 || num == 3 {
		return nil, errors.New("cbor: cannot add tag number 2 or 3 to TagSet, it's built-in and supported automatically")
	}
	if num == tagNumSelfDescribedCBOR {
		return nil, errors.New("cbor: cannot add tag number 55799 to TagSet, it's built-in and ignored automatically")
	}

	te := tagItem{num: []uint64{num}, opts: opts, contentType: contentType}
	te.num = append(te.num, nestedNum...)

	// Cache encoded tag numbers
	e := getEncodeBuffer()
	for _, n := range te.num {
		encodeHead(e, byte(cborTypeTag), n)
	}
	te.cborTagNum = make([]byte, e.Len())
	copy(te.cborTagNum, e.Bytes())
	putEncodeBuffer(e)

	return &te, nil
}
|
||||||
|
|
||||||
|
// Reflected types of Tag and RawTag, cached for type comparisons
// (e.g. the checks in newTagItem).
var (
	typeTag    = reflect.TypeOf(Tag{})
	typeRawTag = reflect.TypeOf(RawTag{})
)
|
||||||
|
|
||||||
|
// WrongTagError describes mismatch between CBOR tag and registered tag.
|
||||||
|
type WrongTagError struct {
|
||||||
|
RegisteredType reflect.Type
|
||||||
|
RegisteredTagNum []uint64
|
||||||
|
TagNum []uint64
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *WrongTagError) Error() string {
|
||||||
|
return fmt.Sprintf("cbor: wrong tag number for %s, got %v, expected %v", e.RegisteredType.String(), e.TagNum, e.RegisteredTagNum)
|
||||||
|
}
|
||||||
394
vendor/github.com/fxamacker/cbor/v2/valid.go
generated
vendored
Normal file
394
vendor/github.com/fxamacker/cbor/v2/valid.go
generated
vendored
Normal file
@@ -0,0 +1,394 @@
|
|||||||
|
// Copyright (c) Faye Amacker. All rights reserved.
|
||||||
|
// Licensed under the MIT License. See LICENSE in the project root for license information.
|
||||||
|
|
||||||
|
package cbor
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/binary"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
"math"
|
||||||
|
"strconv"
|
||||||
|
|
||||||
|
"github.com/x448/float16"
|
||||||
|
)
|
||||||
|
|
||||||
|
// SyntaxError is a description of a CBOR syntax error.
type SyntaxError struct {
	msg string
}

func (e *SyntaxError) Error() string { return e.msg }

// SemanticError is a description of a CBOR semantic error.
type SemanticError struct {
	msg string
}

func (e *SemanticError) Error() string { return e.msg }

// MaxNestedLevelError indicates exceeded max nested level of any combination of CBOR arrays/maps/tags.
type MaxNestedLevelError struct {
	maxNestedLevels int // the configured limit that was exceeded
}

func (e *MaxNestedLevelError) Error() string {
	return "cbor: exceeded max nested level " + strconv.Itoa(e.maxNestedLevels)
}

// MaxArrayElementsError indicates exceeded max number of elements for CBOR arrays.
type MaxArrayElementsError struct {
	maxArrayElements int // the configured limit that was exceeded
}

func (e *MaxArrayElementsError) Error() string {
	return "cbor: exceeded max number of elements " + strconv.Itoa(e.maxArrayElements) + " for CBOR array"
}

// MaxMapPairsError indicates exceeded max number of key-value pairs for CBOR maps.
type MaxMapPairsError struct {
	maxMapPairs int // the configured limit that was exceeded
}

func (e *MaxMapPairsError) Error() string {
	return "cbor: exceeded max number of key-value pairs " + strconv.Itoa(e.maxMapPairs) + " for CBOR map"
}

// IndefiniteLengthError indicates found disallowed indefinite length items.
type IndefiniteLengthError struct {
	t cborType // the major type of the offending item
}

func (e *IndefiniteLengthError) Error() string {
	return "cbor: indefinite-length " + e.t.String() + " isn't allowed"
}

// TagsMdError indicates found disallowed CBOR tags.
type TagsMdError struct {
}

func (e *TagsMdError) Error() string {
	return "cbor: CBOR tag isn't allowed"
}

// ExtraneousDataError indicates found extraneous data following well-formed CBOR data item.
type ExtraneousDataError struct {
	numOfBytes int // number of bytes of extraneous data
	index      int // location of extraneous data
}

func (e *ExtraneousDataError) Error() string {
	return "cbor: " + strconv.Itoa(e.numOfBytes) + " bytes of extraneous data starting at index " + strconv.Itoa(e.index)
}
|
||||||
|
|
||||||
|
// wellformed checks whether the CBOR data item is well-formed.
// allowExtraData indicates if extraneous data is allowed after the CBOR data item.
//   - use allowExtraData = true when using Decoder.Decode()
//   - use allowExtraData = false when using Unmarshal()
//
// Returns io.EOF when no bytes remain at the current offset.
func (d *decoder) wellformed(allowExtraData bool, checkBuiltinTags bool) error {
	if len(d.data) == d.off {
		return io.EOF
	}
	_, err := d.wellformedInternal(0, checkBuiltinTags)
	if err == nil {
		// One complete item was consumed; any leftover bytes are
		// extraneous unless the caller allows them.
		if !allowExtraData && d.off != len(d.data) {
			err = &ExtraneousDataError{len(d.data) - d.off, d.off}
		}
	}
	return err
}
|
||||||
|
|
||||||
|
// wellformedInternal checks data's well-formedness and returns max depth and error.
// It consumes exactly one data item starting at d.off, recursing into
// container elements and tag content, and enforces the decode mode's limits
// (nesting depth, array element count, map pair count, indefinite-length and
// tag restrictions).
func (d *decoder) wellformedInternal(depth int, checkBuiltinTags bool) (int, error) { //nolint:gocyclo
	t, _, val, indefiniteLength, err := d.wellformedHeadWithIndefiniteLengthFlag()
	if err != nil {
		return 0, err
	}

	switch t {
	case cborTypeByteString, cborTypeTextString:
		if indefiniteLength {
			if d.dm.indefLength == IndefLengthForbidden {
				return 0, &IndefiniteLengthError{t}
			}
			return d.wellformedIndefiniteString(t, depth, checkBuiltinTags)
		}
		// Definite-length string: just skip over the payload bytes.
		valInt := int(val)
		if valInt < 0 {
			// Detect integer overflow
			return 0, errors.New("cbor: " + t.String() + " length " + strconv.FormatUint(val, 10) + " is too large, causing integer overflow")
		}
		if len(d.data)-d.off < valInt { // valInt+off may overflow integer
			return 0, io.ErrUnexpectedEOF
		}
		d.off += valInt

	case cborTypeArray, cborTypeMap:
		depth++
		if depth > d.dm.maxNestedLevels {
			return 0, &MaxNestedLevelError{d.dm.maxNestedLevels}
		}

		if indefiniteLength {
			if d.dm.indefLength == IndefLengthForbidden {
				return 0, &IndefiniteLengthError{t}
			}
			return d.wellformedIndefiniteArrayOrMap(t, depth, checkBuiltinTags)
		}

		valInt := int(val)
		if valInt < 0 {
			// Detect integer overflow
			return 0, errors.New("cbor: " + t.String() + " length " + strconv.FormatUint(val, 10) + " is too large, it would cause integer overflow")
		}

		if t == cborTypeArray {
			if valInt > d.dm.maxArrayElements {
				return 0, &MaxArrayElementsError{d.dm.maxArrayElements}
			}
		} else {
			if valInt > d.dm.maxMapPairs {
				return 0, &MaxMapPairsError{d.dm.maxMapPairs}
			}
		}

		// Arrays hold valInt items; maps hold valInt key-value pairs,
		// i.e. 2*valInt items.
		count := 1
		if t == cborTypeMap {
			count = 2
		}
		maxDepth := depth
		for j := 0; j < count; j++ {
			for i := 0; i < valInt; i++ {
				var dpt int
				if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
					return 0, err
				}
				if dpt > maxDepth {
					maxDepth = dpt // Save max depth
				}
			}
		}
		depth = maxDepth

	case cborTypeTag:
		if d.dm.tagsMd == TagsForbidden {
			return 0, &TagsMdError{}
		}

		tagNum := val

		// Scan nested tag numbers to avoid recursion.
		for {
			if len(d.data) == d.off { // Tag number must be followed by tag content.
				return 0, io.ErrUnexpectedEOF
			}
			if checkBuiltinTags {
				err = validBuiltinTag(tagNum, d.data[d.off])
				if err != nil {
					return 0, err
				}
			}
			if d.dm.bignumTag == BignumTagForbidden && (tagNum == 2 || tagNum == 3) {
				return 0, &UnacceptableDataItemError{
					CBORType: cborTypeTag.String(),
					Message:  "bignum",
				}
			}
			if getType(d.data[d.off]) != cborTypeTag {
				break
			}
			if _, _, tagNum, err = d.wellformedHead(); err != nil {
				return 0, err
			}
			depth++
			if depth > d.dm.maxNestedLevels {
				return 0, &MaxNestedLevelError{d.dm.maxNestedLevels}
			}
		}
		// Check tag content.
		return d.wellformedInternal(depth, checkBuiltinTags)
	}

	return depth, nil
}
|
||||||
|
|
||||||
|
// wellformedIndefiniteString checks indefinite length byte/text string's well-formedness and returns max depth and error.
// It consumes definite-length chunks of the same major type t until the
// "break" stop code, rejecting mismatched chunk types and nested
// indefinite-length chunks.
func (d *decoder) wellformedIndefiniteString(t cborType, depth int, checkBuiltinTags bool) (int, error) {
	var err error
	for {
		if len(d.data) == d.off {
			return 0, io.ErrUnexpectedEOF
		}
		if isBreakFlag(d.data[d.off]) {
			d.off++
			break
		}
		// Peek ahead to get next type and indefinite length status.
		nt, ai := parseInitialByte(d.data[d.off])
		if t != nt {
			return 0, &SyntaxError{"cbor: wrong element type " + nt.String() + " for indefinite-length " + t.String()}
		}
		if additionalInformation(ai).isIndefiniteLength() {
			return 0, &SyntaxError{"cbor: indefinite-length " + t.String() + " chunk is not definite-length"}
		}
		if depth, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
			return 0, err
		}
	}
	return depth, nil
}
|
||||||
|
|
||||||
|
// wellformedIndefiniteArrayOrMap checks indefinite length array/map's well-formedness and returns max depth and error.
// It consumes items until the "break" stop code, tracking the deepest
// nesting seen, enforcing element/pair limits as items arrive, and
// rejecting a map that ends with an odd number of items (dangling key).
func (d *decoder) wellformedIndefiniteArrayOrMap(t cborType, depth int, checkBuiltinTags bool) (int, error) {
	var err error
	maxDepth := depth
	i := 0 // number of items consumed so far (for maps, 2 per pair)
	for {
		if len(d.data) == d.off {
			return 0, io.ErrUnexpectedEOF
		}
		if isBreakFlag(d.data[d.off]) {
			d.off++
			break
		}
		var dpt int
		if dpt, err = d.wellformedInternal(depth, checkBuiltinTags); err != nil {
			return 0, err
		}
		if dpt > maxDepth {
			maxDepth = dpt
		}
		i++
		if t == cborTypeArray {
			if i > d.dm.maxArrayElements {
				return 0, &MaxArrayElementsError{d.dm.maxArrayElements}
			}
		} else {
			// For maps, check the pair limit after each completed pair.
			if i%2 == 0 && i/2 > d.dm.maxMapPairs {
				return 0, &MaxMapPairsError{d.dm.maxMapPairs}
			}
		}
	}
	if t == cborTypeMap && i%2 == 1 {
		return 0, &SyntaxError{"cbor: unexpected \"break\" code"}
	}
	return maxDepth, nil
}
|
||||||
|
|
||||||
|
// wellformedHeadWithIndefiniteLengthFlag reads the next data item head via
// wellformedHead and additionally reports whether the additional
// information indicates an indefinite-length item.
func (d *decoder) wellformedHeadWithIndefiniteLengthFlag() (
	t cborType,
	ai byte,
	val uint64,
	indefiniteLength bool,
	err error,
) {
	t, ai, val, err = d.wellformedHead()
	if err != nil {
		return
	}
	indefiniteLength = additionalInformation(ai).isIndefiniteLength()
	return
}
|
||||||
|
|
||||||
|
// wellformedHead parses and validates the head of the next data item
// (initial byte plus any argument bytes), advancing d.off past it.
// It returns the major type, the additional information ai (low 5 bits of
// the initial byte), and the decoded argument value val. When ai encodes
// the value directly (no argument bytes), val simply echoes ai.
func (d *decoder) wellformedHead() (t cborType, ai byte, val uint64, err error) {
	dataLen := len(d.data) - d.off
	if dataLen == 0 {
		return 0, 0, 0, io.ErrUnexpectedEOF
	}

	t, ai = parseInitialByte(d.data[d.off])
	val = uint64(ai)
	d.off++
	dataLen--

	// Additional information 0..23: value is encoded in the initial byte
	// itself; there is no argument to read.
	if ai <= maxAdditionalInformationWithoutArgument {
		return t, ai, val, nil
	}

	if ai == additionalInformationWith1ByteArgument {
		const argumentSize = 1
		if dataLen < argumentSize {
			return 0, 0, 0, io.ErrUnexpectedEOF
		}
		val = uint64(d.data[d.off])
		d.off++
		// Simple values 0..31 must be encoded in the initial byte, so a
		// 1-byte argument below 32 is not well-formed (RFC 8949 §3.3).
		if t == cborTypePrimitives && val < 32 {
			return 0, 0, 0, &SyntaxError{"cbor: invalid simple value " + strconv.Itoa(int(val)) + " for type " + t.String()}
		}
		return t, ai, val, nil
	}

	if ai == additionalInformationWith2ByteArgument {
		const argumentSize = 2
		if dataLen < argumentSize {
			return 0, 0, 0, io.ErrUnexpectedEOF
		}
		val = uint64(binary.BigEndian.Uint16(d.data[d.off : d.off+argumentSize]))
		d.off += argumentSize
		// For the primitives type a 2-byte argument is a half-precision
		// float; reject NaN/Inf when the decode mode forbids them.
		if t == cborTypePrimitives {
			if err := d.acceptableFloat(float64(float16.Frombits(uint16(val)).Float32())); err != nil {
				return 0, 0, 0, err
			}
		}
		return t, ai, val, nil
	}

	if ai == additionalInformationWith4ByteArgument {
		const argumentSize = 4
		if dataLen < argumentSize {
			return 0, 0, 0, io.ErrUnexpectedEOF
		}
		val = uint64(binary.BigEndian.Uint32(d.data[d.off : d.off+argumentSize]))
		d.off += argumentSize
		// Single-precision float for the primitives type.
		if t == cborTypePrimitives {
			if err := d.acceptableFloat(float64(math.Float32frombits(uint32(val)))); err != nil {
				return 0, 0, 0, err
			}
		}
		return t, ai, val, nil
	}

	if ai == additionalInformationWith8ByteArgument {
		const argumentSize = 8
		if dataLen < argumentSize {
			return 0, 0, 0, io.ErrUnexpectedEOF
		}
		val = binary.BigEndian.Uint64(d.data[d.off : d.off+argumentSize])
		d.off += argumentSize
		// Double-precision float for the primitives type.
		if t == cborTypePrimitives {
			if err := d.acceptableFloat(math.Float64frombits(val)); err != nil {
				return 0, 0, 0, err
			}
		}
		return t, ai, val, nil
	}

	if additionalInformation(ai).isIndefiniteLength() {
		switch t {
		case cborTypePositiveInt, cborTypeNegativeInt, cborTypeTag:
			// Integers and tags cannot be indefinite-length.
			return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()}
		case cborTypePrimitives: // 0xff (break code) should not be outside wellformedIndefinite().
			return 0, 0, 0, &SyntaxError{"cbor: unexpected \"break\" code"}
		}
		return t, ai, val, nil
	}

	// ai == 28, 29, 30: reserved additional information values, always ill-formed.
	return 0, 0, 0, &SyntaxError{"cbor: invalid additional information " + strconv.Itoa(int(ai)) + " for type " + t.String()}
}
|
||||||
|
|
||||||
|
// acceptableFloat returns an UnacceptableDataItemError when f is NaN or
// ±Inf and the decode mode (d.dm.nanDec / d.dm.infDec) forbids that value;
// otherwise it returns nil.
func (d *decoder) acceptableFloat(f float64) error {
	switch {
	case d.dm.nanDec == NaNDecodeForbidden && math.IsNaN(f):
		return &UnacceptableDataItemError{
			CBORType: cborTypePrimitives.String(),
			Message:  "floating-point NaN",
		}
	case d.dm.infDec == InfDecodeForbidden && math.IsInf(f, 0):
		// math.IsInf(f, 0) matches both +Inf and -Inf.
		return &UnacceptableDataItemError{
			CBORType: cborTypePrimitives.String(),
			Message:  "floating-point infinity",
		}
	}
	return nil
}
|
||||||
24
vendor/github.com/go-viper/mapstructure/v2/.editorconfig
generated
vendored
Normal file
24
vendor/github.com/go-viper/mapstructure/v2/.editorconfig
generated
vendored
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
charset = utf-8
|
||||||
|
end_of_line = lf
|
||||||
|
indent_size = 4
|
||||||
|
indent_style = space
|
||||||
|
insert_final_newline = true
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
|
||||||
|
[*.go]
|
||||||
|
indent_style = tab
|
||||||
|
|
||||||
|
[{Makefile,*.mk}]
|
||||||
|
indent_style = tab
|
||||||
|
|
||||||
|
[*.nix]
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[.golangci.yaml]
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[devenv.yaml]
|
||||||
|
indent_size = 2
|
||||||
7
vendor/github.com/go-viper/mapstructure/v2/.envrc
generated
vendored
Normal file
7
vendor/github.com/go-viper/mapstructure/v2/.envrc
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
export DIRENV_WARN_TIMEOUT=20s
|
||||||
|
|
||||||
|
eval "$(devenv direnvrc)"
|
||||||
|
|
||||||
|
use devenv
|
||||||
10
vendor/github.com/go-viper/mapstructure/v2/.gitignore
generated
vendored
Normal file
10
vendor/github.com/go-viper/mapstructure/v2/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
/bin/
|
||||||
|
/build/
|
||||||
|
/var/
|
||||||
|
|
||||||
|
# Devenv
|
||||||
|
.devenv*
|
||||||
|
devenv.local.nix
|
||||||
|
devenv.local.yaml
|
||||||
|
.direnv
|
||||||
|
.pre-commit-config.yaml
|
||||||
48
vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
generated
vendored
Normal file
48
vendor/github.com/go-viper/mapstructure/v2/.golangci.yaml
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
version: "2"
|
||||||
|
|
||||||
|
run:
|
||||||
|
timeout: 10m
|
||||||
|
|
||||||
|
linters:
|
||||||
|
enable:
|
||||||
|
- govet
|
||||||
|
- ineffassign
|
||||||
|
# - misspell
|
||||||
|
- nolintlint
|
||||||
|
# - revive
|
||||||
|
|
||||||
|
disable:
|
||||||
|
- errcheck
|
||||||
|
- staticcheck
|
||||||
|
- unused
|
||||||
|
|
||||||
|
settings:
|
||||||
|
misspell:
|
||||||
|
locale: US
|
||||||
|
nolintlint:
|
||||||
|
allow-unused: false # report any unused nolint directives
|
||||||
|
require-specific: false # don't require nolint directives to be specific about which linter is being skipped
|
||||||
|
|
||||||
|
formatters:
|
||||||
|
enable:
|
||||||
|
- gci
|
||||||
|
- gofmt
|
||||||
|
- gofumpt
|
||||||
|
- goimports
|
||||||
|
# - golines
|
||||||
|
|
||||||
|
settings:
|
||||||
|
gci:
|
||||||
|
sections:
|
||||||
|
- standard
|
||||||
|
- default
|
||||||
|
- localmodule
|
||||||
|
gofmt:
|
||||||
|
simplify: true
|
||||||
|
rewrite-rules:
|
||||||
|
- pattern: interface{}
|
||||||
|
replacement: any
|
||||||
|
|
||||||
|
exclusions:
|
||||||
|
paths:
|
||||||
|
- internal/
|
||||||
104
vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
generated
vendored
Normal file
104
vendor/github.com/go-viper/mapstructure/v2/CHANGELOG.md
generated
vendored
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
> [!WARNING]
|
||||||
|
> As of v2 of this library, change log can be found in GitHub releases.
|
||||||
|
|
||||||
|
## 1.5.1
|
||||||
|
|
||||||
|
* Wrap errors so they're compatible with `errors.Is` and `errors.As` [GH-282]
|
||||||
|
* Fix map of slices not decoding properly in certain cases. [GH-266]
|
||||||
|
|
||||||
|
## 1.5.0
|
||||||
|
|
||||||
|
* New option `IgnoreUntaggedFields` to ignore decoding to any fields
|
||||||
|
without `mapstructure` (or the configured tag name) set [GH-277]
|
||||||
|
* New option `ErrorUnset` which makes it an error if any fields
|
||||||
|
in a target struct are not set by the decoding process. [GH-225]
|
||||||
|
* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240]
|
||||||
|
* Decoding to slice from array no longer crashes [GH-265]
|
||||||
|
* Decode nested struct pointers to map [GH-271]
|
||||||
|
* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280]
|
||||||
|
* Fix issue where fields with `,omitempty` would sometimes decode
|
||||||
|
into a map with an empty string key [GH-281]
|
||||||
|
|
||||||
|
## 1.4.3
|
||||||
|
|
||||||
|
* Fix cases where `json.Number` didn't decode properly [GH-261]
|
||||||
|
|
||||||
|
## 1.4.2
|
||||||
|
|
||||||
|
* Custom name matchers to support any sort of casing, formatting, etc. for
|
||||||
|
field names. [GH-250]
|
||||||
|
* Fix possible panic in ComposeDecodeHookFunc [GH-251]
|
||||||
|
|
||||||
|
## 1.4.1
|
||||||
|
|
||||||
|
* Fix regression where `*time.Time` value would be set to empty and not be sent
|
||||||
|
to decode hooks properly [GH-232]
|
||||||
|
|
||||||
|
## 1.4.0
|
||||||
|
|
||||||
|
* A new decode hook type `DecodeHookFuncValue` has been added that has
|
||||||
|
access to the full values. [GH-183]
|
||||||
|
* Squash is now supported with embedded fields that are struct pointers [GH-205]
|
||||||
|
* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206]
|
||||||
|
|
||||||
|
## 1.3.3
|
||||||
|
|
||||||
|
* Decoding maps from maps creates a settable value for decode hooks [GH-203]
|
||||||
|
|
||||||
|
## 1.3.2
|
||||||
|
|
||||||
|
* Decode into interface type with a struct value is supported [GH-187]
|
||||||
|
|
||||||
|
## 1.3.1
|
||||||
|
|
||||||
|
* Squash should only squash embedded structs. [GH-194]
|
||||||
|
|
||||||
|
## 1.3.0
|
||||||
|
|
||||||
|
* Added `",omitempty"` support. This will ignore zero values in the source
|
||||||
|
structure when encoding. [GH-145]
|
||||||
|
|
||||||
|
## 1.2.3
|
||||||
|
|
||||||
|
* Fix duplicate entries in Keys list with pointer values. [GH-185]
|
||||||
|
|
||||||
|
## 1.2.2
|
||||||
|
|
||||||
|
* Do not add unsettable (unexported) values to the unused metadata key
|
||||||
|
or "remain" value. [GH-150]
|
||||||
|
|
||||||
|
## 1.2.1
|
||||||
|
|
||||||
|
* Go modules checksum mismatch fix
|
||||||
|
|
||||||
|
## 1.2.0
|
||||||
|
|
||||||
|
* Added support to capture unused values in a field using the `",remain"` value
|
||||||
|
in the mapstructure tag. There is an example to showcase usage.
|
||||||
|
* Added `DecoderConfig` option to always squash embedded structs
|
||||||
|
* `json.Number` can decode into `uint` types
|
||||||
|
* Empty slices are preserved and not replaced with nil slices
|
||||||
|
* Fix panic that can occur when decoding a map into a nil slice of structs
|
||||||
|
* Improved package documentation for godoc
|
||||||
|
|
||||||
|
## 1.1.2
|
||||||
|
|
||||||
|
* Fix error when decode hook decodes interface implementation into interface
|
||||||
|
type. [GH-140]
|
||||||
|
|
||||||
|
## 1.1.1
|
||||||
|
|
||||||
|
* Fix panic that can happen in `decodePtr`
|
||||||
|
|
||||||
|
## 1.1.0
|
||||||
|
|
||||||
|
* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133]
|
||||||
|
* Support struct to struct decoding [GH-137]
|
||||||
|
* If source map value is nil, then destination map value is nil (instead of empty)
|
||||||
|
* If source slice value is nil, then destination slice value is nil (instead of empty)
|
||||||
|
* If source pointer is nil, then destination pointer is set to nil (instead of
|
||||||
|
allocated zero value of type)
|
||||||
|
|
||||||
|
## 1.0.0
|
||||||
|
|
||||||
|
* Initial tagged stable release.
|
||||||
21
vendor/github.com/go-viper/mapstructure/v2/LICENSE
generated
vendored
Normal file
21
vendor/github.com/go-viper/mapstructure/v2/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2013 Mitchell Hashimoto
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
81
vendor/github.com/go-viper/mapstructure/v2/README.md
generated
vendored
Normal file
81
vendor/github.com/go-viper/mapstructure/v2/README.md
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
# mapstructure
|
||||||
|
|
||||||
|
[](https://github.com/go-viper/mapstructure/actions/workflows/ci.yaml)
|
||||||
|
[](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2)
|
||||||
|

|
||||||
|
[](https://deps.dev/go/github.com%252Fgo-viper%252Fmapstructure%252Fv2)
|
||||||
|
|
||||||
|
mapstructure is a Go library for decoding generic map values to structures
|
||||||
|
and vice versa, while providing helpful error handling.
|
||||||
|
|
||||||
|
This library is most useful when decoding values from some data stream (JSON,
|
||||||
|
Gob, etc.) where you don't _quite_ know the structure of the underlying data
|
||||||
|
until you read a part of it. You can therefore read a `map[string]interface{}`
|
||||||
|
and use this library to decode it into the proper underlying native Go
|
||||||
|
structure.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
```shell
|
||||||
|
go get github.com/go-viper/mapstructure/v2
|
||||||
|
```
|
||||||
|
|
||||||
|
## Migrating from `github.com/mitchellh/mapstructure`
|
||||||
|
|
||||||
|
[@mitchellh](https://github.com/mitchellh) announced his intent to archive some of his unmaintained projects (see [here](https://gist.github.com/mitchellh/90029601268e59a29e64e55bab1c5bdc) and [here](https://github.com/mitchellh/mapstructure/issues/349)). This repository has achieved the "blessed fork" status.
|
||||||
|
|
||||||
|
You can migrate to this package by changing your import paths in your Go files to `github.com/go-viper/mapstructure/v2`.
|
||||||
|
The API is the same, so you don't need to change anything else.
|
||||||
|
|
||||||
|
Here is a script that can help you with the migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sed -i 's|github.com/mitchellh/mapstructure|github.com/go-viper/mapstructure/v2|g' $(find . -type f -name '*.go')
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need more time to migrate your code, that is absolutely fine.
|
||||||
|
|
||||||
|
Some of the latest fixes are backported to the v1 release branch of this package, so you can use the Go modules `replace` feature until you are ready to migrate:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
replace github.com/mitchellh/mapstructure => github.com/go-viper/mapstructure v1.6.0
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage & Example
|
||||||
|
|
||||||
|
For usage and examples see the [documentation](https://pkg.go.dev/mod/github.com/go-viper/mapstructure/v2).
|
||||||
|
|
||||||
|
The `Decode` function has examples associated with it there.
|
||||||
|
|
||||||
|
## But Why?!
|
||||||
|
|
||||||
|
Go offers fantastic standard libraries for decoding formats such as JSON.
|
||||||
|
The standard method is to have a struct pre-created, and populate that struct
|
||||||
|
from the bytes of the encoded format. This is great, but the problem is if
|
||||||
|
you have configuration or an encoding that changes slightly depending on
|
||||||
|
specific fields. For example, consider this JSON:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"type": "person",
|
||||||
|
"name": "Mitchell"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Perhaps we can't populate a specific structure without first reading
|
||||||
|
the "type" field from the JSON. We could always do two passes over the
|
||||||
|
decoding of the JSON (reading the "type" first, and the rest later).
|
||||||
|
However, it is much simpler to just decode this into a `map[string]interface{}`
|
||||||
|
structure, read the "type" key, then use something like this library
|
||||||
|
to decode it into the proper structure.
|
||||||
|
|
||||||
|
## Credits
|
||||||
|
|
||||||
|
Mapstructure was originally created by [@mitchellh](https://github.com/mitchellh).
|
||||||
|
This is a maintained fork of the original library.
|
||||||
|
|
||||||
|
Read more about the reasons for the fork [here](https://github.com/mitchellh/mapstructure/issues/349).
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
The project is licensed under the [MIT License](LICENSE).
|
||||||
714
vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
generated
vendored
Normal file
714
vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
generated
vendored
Normal file
@@ -0,0 +1,714 @@
|
|||||||
|
package mapstructure
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net"
|
||||||
|
"net/netip"
|
||||||
|
"net/url"
|
||||||
|
"reflect"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// typedDecodeHook takes a raw DecodeHookFunc (an any) and turns
|
||||||
|
// it into the proper DecodeHookFunc type, such as DecodeHookFuncType.
|
||||||
|
func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc {
|
||||||
|
// Create variables here so we can reference them with the reflect pkg
|
||||||
|
var f1 DecodeHookFuncType
|
||||||
|
var f2 DecodeHookFuncKind
|
||||||
|
var f3 DecodeHookFuncValue
|
||||||
|
|
||||||
|
// Fill in the variables into this interface and the rest is done
|
||||||
|
// automatically using the reflect package.
|
||||||
|
potential := []any{f1, f2, f3}
|
||||||
|
|
||||||
|
v := reflect.ValueOf(h)
|
||||||
|
vt := v.Type()
|
||||||
|
for _, raw := range potential {
|
||||||
|
pt := reflect.ValueOf(raw).Type()
|
||||||
|
if vt.ConvertibleTo(pt) {
|
||||||
|
return v.Convert(pt).Interface()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// cachedDecodeHook takes a raw DecodeHookFunc (an any) and turns
|
||||||
|
// it into a closure to be used directly
|
||||||
|
// if the type fails to convert we return a closure always erroring to keep the previous behaviour
|
||||||
|
func cachedDecodeHook(raw DecodeHookFunc) func(from reflect.Value, to reflect.Value) (any, error) {
|
||||||
|
switch f := typedDecodeHook(raw).(type) {
|
||||||
|
case DecodeHookFuncType:
|
||||||
|
return func(from reflect.Value, to reflect.Value) (any, error) {
|
||||||
|
return f(from.Type(), to.Type(), from.Interface())
|
||||||
|
}
|
||||||
|
case DecodeHookFuncKind:
|
||||||
|
return func(from reflect.Value, to reflect.Value) (any, error) {
|
||||||
|
return f(from.Kind(), to.Kind(), from.Interface())
|
||||||
|
}
|
||||||
|
case DecodeHookFuncValue:
|
||||||
|
return func(from reflect.Value, to reflect.Value) (any, error) {
|
||||||
|
return f(from, to)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return func(from reflect.Value, to reflect.Value) (any, error) {
|
||||||
|
return nil, errors.New("invalid decode hook signature")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeHookExec executes the given decode hook. This should be used
|
||||||
|
// since it'll naturally degrade to the older backwards compatible DecodeHookFunc
|
||||||
|
// that took reflect.Kind instead of reflect.Type.
|
||||||
|
func DecodeHookExec(
|
||||||
|
raw DecodeHookFunc,
|
||||||
|
from reflect.Value, to reflect.Value,
|
||||||
|
) (any, error) {
|
||||||
|
switch f := typedDecodeHook(raw).(type) {
|
||||||
|
case DecodeHookFuncType:
|
||||||
|
return f(from.Type(), to.Type(), from.Interface())
|
||||||
|
case DecodeHookFuncKind:
|
||||||
|
return f(from.Kind(), to.Kind(), from.Interface())
|
||||||
|
case DecodeHookFuncValue:
|
||||||
|
return f(from, to)
|
||||||
|
default:
|
||||||
|
return nil, errors.New("invalid decode hook signature")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ComposeDecodeHookFunc creates a single DecodeHookFunc that
// automatically composes multiple DecodeHookFuncs.
//
// The composed funcs are called in order, with the result of the
// previous transformation.
func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
	// Resolve each hook's signature once, up front, rather than on every
	// invocation of the composed hook.
	cached := make([]func(from reflect.Value, to reflect.Value) (any, error), 0, len(fs))
	for _, f := range fs {
		cached = append(cached, cachedDecodeHook(f))
	}
	return func(f reflect.Value, t reflect.Value) (any, error) {
		var err error
		data := f.Interface()

		// Thread each hook's output into the next hook's "from" value.
		newFrom := f
		for _, c := range cached {
			data, err = c(newFrom, t)
			if err != nil {
				return nil, err
			}
			// Hooks may return either a reflect.Value or a plain value;
			// unwrap so the next hook always sees a reflect.Value.
			if v, ok := data.(reflect.Value); ok {
				newFrom = v
			} else {
				newFrom = reflect.ValueOf(data)
			}
		}

		return data, nil
	}
}
|
||||||
|
|
||||||
|
// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned.
|
||||||
|
// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages.
|
||||||
|
func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc {
|
||||||
|
cached := make([]func(from reflect.Value, to reflect.Value) (any, error), 0, len(ff))
|
||||||
|
for _, f := range ff {
|
||||||
|
cached = append(cached, cachedDecodeHook(f))
|
||||||
|
}
|
||||||
|
return func(a, b reflect.Value) (any, error) {
|
||||||
|
var allErrs string
|
||||||
|
var out any
|
||||||
|
var err error
|
||||||
|
|
||||||
|
for _, c := range cached {
|
||||||
|
out, err = c(a, b)
|
||||||
|
if err != nil {
|
||||||
|
allErrs += err.Error() + "\n"
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
return out, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errors.New(allErrs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToSliceHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// string to []string by splitting on the given sep.
|
||||||
|
func StringToSliceHookFunc(sep string) DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.SliceOf(f) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := data.(string)
|
||||||
|
if raw == "" {
|
||||||
|
return []string{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Split(raw, sep), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToWeakSliceHookFunc brings back the old (pre-v2) behavior of [StringToSliceHookFunc].
|
||||||
|
//
|
||||||
|
// As of mapstructure v2.0.0 [StringToSliceHookFunc] checks if the return type is a string slice.
|
||||||
|
// This function removes that check.
|
||||||
|
func StringToWeakSliceHookFunc(sep string) DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Slice {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
raw := data.(string)
|
||||||
|
if raw == "" {
|
||||||
|
return []string{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Split(raw, sep), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to time.Duration.
|
||||||
|
func StringToTimeDurationHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(time.Duration(5)) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
d, err := time.ParseDuration(data.(string))
|
||||||
|
|
||||||
|
return d, wrapTimeParseDurationError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToTimeLocationHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to *time.Location.
|
||||||
|
func StringToTimeLocationHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(time.Local) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
d, err := time.LoadLocation(data.(string))
|
||||||
|
|
||||||
|
return d, wrapTimeParseLocationError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToURLHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to *url.URL.
|
||||||
|
func StringToURLHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(&url.URL{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u, err := url.Parse(data.(string))
|
||||||
|
|
||||||
|
return u, wrapUrlError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToIPHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to net.IP
|
||||||
|
func StringToIPHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(net.IP{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
ip := net.ParseIP(data.(string))
|
||||||
|
if ip == nil {
|
||||||
|
return net.IP{}, fmt.Errorf("failed parsing ip")
|
||||||
|
}
|
||||||
|
|
||||||
|
return ip, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToIPNetHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to net.IPNet
|
||||||
|
func StringToIPNetHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(net.IPNet{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
_, net, err := net.ParseCIDR(data.(string))
|
||||||
|
return net, wrapNetParseError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToTimeHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to time.Time.
|
||||||
|
func StringToTimeHookFunc(layout string) DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(time.Time{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
ti, err := time.Parse(layout, data.(string))
|
||||||
|
|
||||||
|
return ti, wrapTimeParseError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to
// the decoder.
//
// Note that this is significantly different from the WeaklyTypedInput option
// of the DecoderConfig.
func WeaklyTypedHook(
	f reflect.Kind,
	t reflect.Kind,
	data any,
) (any, error) {
	dataVal := reflect.ValueOf(data)
	switch t {
	case reflect.String:
		switch f {
		case reflect.Bool:
			// Booleans become "1"/"0" rather than "true"/"false".
			if dataVal.Bool() {
				return "1", nil
			}
			return "0", nil
		case reflect.Float32:
			// NOTE(review): only Kind Float32 is matched; a float64 source
			// falls through and is returned unchanged — confirm this
			// asymmetry is intentional.
			return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil
		case reflect.Int:
			// NOTE(review): likewise only Kind Int (not Int8..Int64).
			return strconv.FormatInt(dataVal.Int(), 10), nil
		case reflect.Slice:
			// Only []uint8 ([]byte) converts to string; slices of other
			// element kinds fall through unchanged.
			dataType := dataVal.Type()
			elemKind := dataType.Elem().Kind()
			if elemKind == reflect.Uint8 {
				return string(dataVal.Interface().([]uint8)), nil
			}
		case reflect.Uint:
			// NOTE(review): likewise only Kind Uint (not Uint8..Uint64).
			return strconv.FormatUint(dataVal.Uint(), 10), nil
		}
	}

	// Conversions not handled above pass through unmodified.
	return data, nil
}
|
||||||
|
|
||||||
|
// RecursiveStructToMapHookFunc returns a DecodeHookFunc that, when the
// source value is a struct and the target is exactly the empty interface
// (any), presets the target to an empty map[string]any so nested structs
// are decoded into maps rather than copied as struct values.
func RecursiveStructToMapHookFunc() DecodeHookFunc {
	return func(f reflect.Value, t reflect.Value) (any, error) {
		if f.Kind() != reflect.Struct {
			return f.Interface(), nil
		}

		// Build the reflect.Type of the empty interface; only act when the
		// target's static type is exactly `any`.
		var i any = struct{}{}
		if t.Type() != reflect.TypeOf(&i).Elem() {
			return f.Interface(), nil
		}

		// Seed the destination with an empty map; the decoder then fills it
		// in, recursing through this hook for nested struct fields.
		m := make(map[string]any)
		t.Set(reflect.ValueOf(m))

		// The source value itself is returned unchanged.
		return f.Interface(), nil
	}
}
|
||||||
|
|
||||||
|
// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies
// strings to the UnmarshalText function, when the target type
// implements the encoding.TextUnmarshaler interface
func TextUnmarshallerHookFunc() DecodeHookFuncType {
	return func(
		f reflect.Type,
		t reflect.Type,
		data any,
	) (any, error) {
		// Only string sources are handled.
		if f.Kind() != reflect.String {
			return data, nil
		}
		// Allocate a new *t and check whether that pointer implements
		// encoding.TextUnmarshaler; if not, pass the data through.
		result := reflect.New(t).Interface()
		unmarshaller, ok := result.(encoding.TextUnmarshaler)
		if !ok {
			return data, nil
		}
		// data may be a named string type; when the direct assertion fails,
		// extract the underlying string via reflection.
		str, ok := data.(string)
		if !ok {
			str = reflect.Indirect(reflect.ValueOf(&data)).Elem().String()
		}
		if err := unmarshaller.UnmarshalText([]byte(str)); err != nil {
			return nil, err
		}
		// result is a pointer to the populated value (*t).
		return result, nil
	}
}
|
||||||
|
|
||||||
|
// StringToNetIPAddrHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to netip.Addr.
|
||||||
|
func StringToNetIPAddrHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(netip.Addr{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
addr, err := netip.ParseAddr(data.(string))
|
||||||
|
|
||||||
|
return addr, wrapNetIPParseAddrError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToNetIPAddrPortHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to netip.AddrPort.
|
||||||
|
func StringToNetIPAddrPortHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(netip.AddrPort{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
addrPort, err := netip.ParseAddrPort(data.(string))
|
||||||
|
|
||||||
|
return addrPort, wrapNetIPParseAddrPortError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToNetIPPrefixHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to netip.Prefix.
|
||||||
|
func StringToNetIPPrefixHookFunc() DecodeHookFunc {
|
||||||
|
return func(
|
||||||
|
f reflect.Type,
|
||||||
|
t reflect.Type,
|
||||||
|
data any,
|
||||||
|
) (any, error) {
|
||||||
|
if f.Kind() != reflect.String {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
if t != reflect.TypeOf(netip.Prefix{}) {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
prefix, err := netip.ParsePrefix(data.(string))
|
||||||
|
|
||||||
|
return prefix, wrapNetIPParsePrefixError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToBasicTypeHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to basic types.
|
||||||
|
// int8, uint8, int16, uint16, int32, uint32, int64, uint64, int, uint, float32, float64, bool, byte, rune, complex64, complex128
|
||||||
|
func StringToBasicTypeHookFunc() DecodeHookFunc {
|
||||||
|
return ComposeDecodeHookFunc(
|
||||||
|
StringToInt8HookFunc(),
|
||||||
|
StringToUint8HookFunc(),
|
||||||
|
StringToInt16HookFunc(),
|
||||||
|
StringToUint16HookFunc(),
|
||||||
|
StringToInt32HookFunc(),
|
||||||
|
StringToUint32HookFunc(),
|
||||||
|
StringToInt64HookFunc(),
|
||||||
|
StringToUint64HookFunc(),
|
||||||
|
StringToIntHookFunc(),
|
||||||
|
StringToUintHookFunc(),
|
||||||
|
StringToFloat32HookFunc(),
|
||||||
|
StringToFloat64HookFunc(),
|
||||||
|
StringToBoolHookFunc(),
|
||||||
|
// byte and rune are aliases for uint8 and int32 respectively
|
||||||
|
// StringToByteHookFunc(),
|
||||||
|
// StringToRuneHookFunc(),
|
||||||
|
StringToComplex64HookFunc(),
|
||||||
|
StringToComplex128HookFunc(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToInt8HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to int8.
|
||||||
|
func StringToInt8HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Int8 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
i64, err := strconv.ParseInt(data.(string), 0, 8)
|
||||||
|
return int8(i64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToUint8HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to uint8.
|
||||||
|
func StringToUint8HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Uint8 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u64, err := strconv.ParseUint(data.(string), 0, 8)
|
||||||
|
return uint8(u64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToInt16HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to int16.
|
||||||
|
func StringToInt16HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Int16 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
i64, err := strconv.ParseInt(data.(string), 0, 16)
|
||||||
|
return int16(i64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToUint16HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to uint16.
|
||||||
|
func StringToUint16HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Uint16 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u64, err := strconv.ParseUint(data.(string), 0, 16)
|
||||||
|
return uint16(u64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToInt32HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to int32.
|
||||||
|
func StringToInt32HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Int32 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
i64, err := strconv.ParseInt(data.(string), 0, 32)
|
||||||
|
return int32(i64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToUint32HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to uint32.
|
||||||
|
func StringToUint32HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Uint32 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u64, err := strconv.ParseUint(data.(string), 0, 32)
|
||||||
|
return uint32(u64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToInt64HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to int64.
|
||||||
|
func StringToInt64HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Int64 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
i64, err := strconv.ParseInt(data.(string), 0, 64)
|
||||||
|
return int64(i64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToUint64HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to uint64.
|
||||||
|
func StringToUint64HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Uint64 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u64, err := strconv.ParseUint(data.(string), 0, 64)
|
||||||
|
return uint64(u64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToIntHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to int.
|
||||||
|
func StringToIntHookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Int {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
i64, err := strconv.ParseInt(data.(string), 0, 0)
|
||||||
|
return int(i64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToUintHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to uint.
|
||||||
|
func StringToUintHookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Uint {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
u64, err := strconv.ParseUint(data.(string), 0, 0)
|
||||||
|
return uint(u64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToFloat32HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to float32.
|
||||||
|
func StringToFloat32HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Float32 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
f64, err := strconv.ParseFloat(data.(string), 32)
|
||||||
|
return float32(f64), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToFloat64HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to float64.
|
||||||
|
func StringToFloat64HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Float64 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
f64, err := strconv.ParseFloat(data.(string), 64)
|
||||||
|
return f64, wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToBoolHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to bool.
|
||||||
|
func StringToBoolHookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Bool {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
b, err := strconv.ParseBool(data.(string))
|
||||||
|
return b, wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToByteHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to byte.
|
||||||
|
func StringToByteHookFunc() DecodeHookFunc {
|
||||||
|
return StringToUint8HookFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToRuneHookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to rune.
|
||||||
|
func StringToRuneHookFunc() DecodeHookFunc {
|
||||||
|
return StringToInt32HookFunc()
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToComplex64HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to complex64.
|
||||||
|
func StringToComplex64HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Complex64 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
c128, err := strconv.ParseComplex(data.(string), 64)
|
||||||
|
return complex64(c128), wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// StringToComplex128HookFunc returns a DecodeHookFunc that converts
|
||||||
|
// strings to complex128.
|
||||||
|
func StringToComplex128HookFunc() DecodeHookFunc {
|
||||||
|
return func(f reflect.Type, t reflect.Type, data any) (any, error) {
|
||||||
|
if f.Kind() != reflect.String || t.Kind() != reflect.Complex128 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert it by parsing
|
||||||
|
c128, err := strconv.ParseComplex(data.(string), 128)
|
||||||
|
return c128, wrapStrconvNumError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
103
vendor/github.com/go-viper/mapstructure/v2/devenv.lock
generated
vendored
Normal file
103
vendor/github.com/go-viper/mapstructure/v2/devenv.lock
generated
vendored
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"devenv": {
|
||||||
|
"locked": {
|
||||||
|
"dir": "src/modules",
|
||||||
|
"lastModified": 1765288076,
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv",
|
||||||
|
"rev": "93c055af1e8fcac49251f1b2e1c57f78620ad351",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"dir": "src/modules",
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"flake-compat": {
|
||||||
|
"flake": false,
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1765121682,
|
||||||
|
"owner": "edolstra",
|
||||||
|
"repo": "flake-compat",
|
||||||
|
"rev": "65f23138d8d09a92e30f1e5c87611b23ef451bf3",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "edolstra",
|
||||||
|
"repo": "flake-compat",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"git-hooks": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-compat": "flake-compat",
|
||||||
|
"gitignore": "gitignore",
|
||||||
|
"nixpkgs": [
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1765016596,
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "git-hooks.nix",
|
||||||
|
"rev": "548fc44fca28a5e81c5d6b846e555e6b9c2a5a3c",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "git-hooks.nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"gitignore": {
|
||||||
|
"inputs": {
|
||||||
|
"nixpkgs": [
|
||||||
|
"git-hooks",
|
||||||
|
"nixpkgs"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1762808025,
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "gitignore.nix",
|
||||||
|
"rev": "cb5e3fdca1de58ccbc3ef53de65bd372b48f567c",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "hercules-ci",
|
||||||
|
"repo": "gitignore.nix",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1764580874,
|
||||||
|
"owner": "cachix",
|
||||||
|
"repo": "devenv-nixpkgs",
|
||||||
|
"rev": "dcf61356c3ab25f1362b4a4428a6d871e84f1d1d",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "cachix",
|
||||||
|
"ref": "rolling",
|
||||||
|
"repo": "devenv-nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"devenv": "devenv",
|
||||||
|
"git-hooks": "git-hooks",
|
||||||
|
"nixpkgs": "nixpkgs",
|
||||||
|
"pre-commit-hooks": [
|
||||||
|
"git-hooks"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
14
vendor/github.com/go-viper/mapstructure/v2/devenv.nix
generated
vendored
Normal file
14
vendor/github.com/go-viper/mapstructure/v2/devenv.nix
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
pkgs,
|
||||||
|
...
|
||||||
|
}:
|
||||||
|
|
||||||
|
{
|
||||||
|
languages = {
|
||||||
|
go.enable = true;
|
||||||
|
};
|
||||||
|
|
||||||
|
packages = with pkgs; [
|
||||||
|
golangci-lint
|
||||||
|
];
|
||||||
|
}
|
||||||
4
vendor/github.com/go-viper/mapstructure/v2/devenv.yaml
generated
vendored
Normal file
4
vendor/github.com/go-viper/mapstructure/v2/devenv.yaml
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
|
||||||
|
inputs:
|
||||||
|
nixpkgs:
|
||||||
|
url: github:cachix/devenv-nixpkgs/rolling
|
||||||
244
vendor/github.com/go-viper/mapstructure/v2/errors.go
generated
vendored
Normal file
244
vendor/github.com/go-viper/mapstructure/v2/errors.go
generated
vendored
Normal file
@@ -0,0 +1,244 @@
|
|||||||
|
package mapstructure
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net"
|
||||||
|
"net/url"
|
||||||
|
"reflect"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Error interface is implemented by all errors emitted by mapstructure.
|
||||||
|
//
|
||||||
|
// Use [errors.As] to check if an error implements this interface.
|
||||||
|
type Error interface {
|
||||||
|
error
|
||||||
|
|
||||||
|
mapstructure()
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeError is a generic error type that holds information about
|
||||||
|
// a decoding error together with the name of the field that caused the error.
|
||||||
|
type DecodeError struct {
|
||||||
|
name string
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func newDecodeError(name string, err error) *DecodeError {
|
||||||
|
return &DecodeError{
|
||||||
|
name: name,
|
||||||
|
err: err,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *DecodeError) Name() string {
|
||||||
|
return e.name
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *DecodeError) Unwrap() error {
|
||||||
|
return e.err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *DecodeError) Error() string {
|
||||||
|
return fmt.Sprintf("'%s' %s", e.name, e.err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (*DecodeError) mapstructure() {}
|
||||||
|
|
||||||
|
// ParseError is an error type that indicates a value could not be parsed
|
||||||
|
// into the expected type.
|
||||||
|
type ParseError struct {
|
||||||
|
Expected reflect.Value
|
||||||
|
Value any
|
||||||
|
Err error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ParseError) Error() string {
|
||||||
|
return fmt.Sprintf("cannot parse value as '%s': %s", e.Expected.Type(), e.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (*ParseError) mapstructure() {}
|
||||||
|
|
||||||
|
// UnconvertibleTypeError is an error type that indicates a value could not be
|
||||||
|
// converted to the expected type.
|
||||||
|
type UnconvertibleTypeError struct {
|
||||||
|
Expected reflect.Value
|
||||||
|
Value any
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *UnconvertibleTypeError) Error() string {
|
||||||
|
return fmt.Sprintf(
|
||||||
|
"expected type '%s', got unconvertible type '%s'",
|
||||||
|
e.Expected.Type(),
|
||||||
|
reflect.TypeOf(e.Value),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (*UnconvertibleTypeError) mapstructure() {}
|
||||||
|
|
||||||
|
func wrapStrconvNumError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err, ok := err.(*strconv.NumError); ok {
|
||||||
|
return &strconvNumError{Err: err}
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type strconvNumError struct {
|
||||||
|
Err *strconv.NumError
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *strconvNumError) Error() string {
|
||||||
|
return "strconv." + e.Err.Func + ": " + e.Err.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *strconvNumError) Unwrap() error { return e.Err }
|
||||||
|
|
||||||
|
func wrapUrlError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err, ok := err.(*url.Error); ok {
|
||||||
|
return &urlError{Err: err}
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type urlError struct {
|
||||||
|
Err *url.Error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *urlError) Error() string {
|
||||||
|
return fmt.Sprintf("%s", e.Err.Err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *urlError) Unwrap() error { return e.Err }
|
||||||
|
|
||||||
|
func wrapNetParseError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err, ok := err.(*net.ParseError); ok {
|
||||||
|
return &netParseError{Err: err}
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type netParseError struct {
|
||||||
|
Err *net.ParseError
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *netParseError) Error() string {
|
||||||
|
return "invalid " + e.Err.Type
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *netParseError) Unwrap() error { return e.Err }
|
||||||
|
|
||||||
|
func wrapTimeParseError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if err, ok := err.(*time.ParseError); ok {
|
||||||
|
return &timeParseError{Err: err}
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
type timeParseError struct {
|
||||||
|
Err *time.ParseError
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *timeParseError) Error() string {
|
||||||
|
if e.Err.Message == "" {
|
||||||
|
return fmt.Sprintf("parsing time as %q: cannot parse as %q", e.Err.Layout, e.Err.LayoutElem)
|
||||||
|
}
|
||||||
|
|
||||||
|
return "parsing time " + e.Err.Message
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *timeParseError) Unwrap() error { return e.Err }
|
||||||
|
|
||||||
|
func wrapNetIPParseAddrError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if errMsg := err.Error(); strings.HasPrefix(errMsg, "ParseAddr") {
|
||||||
|
errPieces := strings.Split(errMsg, ": ")
|
||||||
|
|
||||||
|
return fmt.Errorf("ParseAddr: %s", errPieces[len(errPieces)-1])
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapNetIPParseAddrPortError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
errMsg := err.Error()
|
||||||
|
if strings.HasPrefix(errMsg, "invalid port ") {
|
||||||
|
return errors.New("invalid port")
|
||||||
|
} else if strings.HasPrefix(errMsg, "invalid ip:port ") {
|
||||||
|
return errors.New("invalid ip:port")
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapNetIPParsePrefixError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if errMsg := err.Error(); strings.HasPrefix(errMsg, "netip.ParsePrefix") {
|
||||||
|
errPieces := strings.Split(errMsg, ": ")
|
||||||
|
|
||||||
|
return fmt.Errorf("netip.ParsePrefix: %s", errPieces[len(errPieces)-1])
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapTimeParseDurationError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
errMsg := err.Error()
|
||||||
|
if strings.HasPrefix(errMsg, "time: unknown unit ") {
|
||||||
|
return errors.New("time: unknown unit")
|
||||||
|
} else if strings.HasPrefix(errMsg, "time: ") {
|
||||||
|
idx := strings.LastIndex(errMsg, " ")
|
||||||
|
|
||||||
|
return errors.New(errMsg[:idx])
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapTimeParseLocationError(err error) error {
|
||||||
|
if err == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
errMsg := err.Error()
|
||||||
|
if strings.Contains(errMsg, "unknown time zone") || strings.HasPrefix(errMsg, "time: unknown format") {
|
||||||
|
return fmt.Errorf("invalid time zone format: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
11
vendor/github.com/go-viper/mapstructure/v2/internal/errors/errors.go
generated
vendored
Normal file
11
vendor/github.com/go-viper/mapstructure/v2/internal/errors/errors.go
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package errors
|
||||||
|
|
||||||
|
import "errors"
|
||||||
|
|
||||||
|
func New(text string) error {
|
||||||
|
return errors.New(text)
|
||||||
|
}
|
||||||
|
|
||||||
|
func As(err error, target interface{}) bool {
|
||||||
|
return errors.As(err, target)
|
||||||
|
}
|
||||||
9
vendor/github.com/go-viper/mapstructure/v2/internal/errors/join.go
generated
vendored
Normal file
9
vendor/github.com/go-viper/mapstructure/v2/internal/errors/join.go
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
//go:build go1.20
|
||||||
|
|
||||||
|
package errors
|
||||||
|
|
||||||
|
import "errors"
|
||||||
|
|
||||||
|
func Join(errs ...error) error {
|
||||||
|
return errors.Join(errs...)
|
||||||
|
}
|
||||||
61
vendor/github.com/go-viper/mapstructure/v2/internal/errors/join_go1_19.go
generated
vendored
Normal file
61
vendor/github.com/go-viper/mapstructure/v2/internal/errors/join_go1_19.go
generated
vendored
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
//go:build !go1.20
|
||||||
|
|
||||||
|
// Copyright 2022 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package errors
|
||||||
|
|
||||||
|
// Join returns an error that wraps the given errors.
|
||||||
|
// Any nil error values are discarded.
|
||||||
|
// Join returns nil if every value in errs is nil.
|
||||||
|
// The error formats as the concatenation of the strings obtained
|
||||||
|
// by calling the Error method of each element of errs, with a newline
|
||||||
|
// between each string.
|
||||||
|
//
|
||||||
|
// A non-nil error returned by Join implements the Unwrap() []error method.
|
||||||
|
func Join(errs ...error) error {
|
||||||
|
n := 0
|
||||||
|
for _, err := range errs {
|
||||||
|
if err != nil {
|
||||||
|
n++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if n == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
e := &joinError{
|
||||||
|
errs: make([]error, 0, n),
|
||||||
|
}
|
||||||
|
for _, err := range errs {
|
||||||
|
if err != nil {
|
||||||
|
e.errs = append(e.errs, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
|
||||||
|
type joinError struct {
|
||||||
|
errs []error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *joinError) Error() string {
|
||||||
|
// Since Join returns nil if every value in errs is nil,
|
||||||
|
// e.errs cannot be empty.
|
||||||
|
if len(e.errs) == 1 {
|
||||||
|
return e.errs[0].Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
b := []byte(e.errs[0].Error())
|
||||||
|
for _, err := range e.errs[1:] {
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = append(b, err.Error()...)
|
||||||
|
}
|
||||||
|
// At this point, b has at least one byte '\n'.
|
||||||
|
// return unsafe.String(&b[0], len(b))
|
||||||
|
return string(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *joinError) Unwrap() []error {
|
||||||
|
return e.errs
|
||||||
|
}
|
||||||
1948
vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
generated
vendored
Normal file
1948
vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
44
vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
generated
vendored
Normal file
44
vendor/github.com/go-viper/mapstructure/v2/reflect_go1_19.go
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
//go:build !go1.20
|
||||||
|
|
||||||
|
package mapstructure
|
||||||
|
|
||||||
|
import "reflect"
|
||||||
|
|
||||||
|
func isComparable(v reflect.Value) bool {
|
||||||
|
k := v.Kind()
|
||||||
|
switch k {
|
||||||
|
case reflect.Invalid:
|
||||||
|
return false
|
||||||
|
|
||||||
|
case reflect.Array:
|
||||||
|
switch v.Type().Elem().Kind() {
|
||||||
|
case reflect.Interface, reflect.Array, reflect.Struct:
|
||||||
|
for i := 0; i < v.Type().Len(); i++ {
|
||||||
|
// if !v.Index(i).Comparable() {
|
||||||
|
if !isComparable(v.Index(i)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return v.Type().Comparable()
|
||||||
|
|
||||||
|
case reflect.Interface:
|
||||||
|
// return v.Elem().Comparable()
|
||||||
|
return isComparable(v.Elem())
|
||||||
|
|
||||||
|
case reflect.Struct:
|
||||||
|
for i := 0; i < v.NumField(); i++ {
|
||||||
|
return false
|
||||||
|
|
||||||
|
// if !v.Field(i).Comparable() {
|
||||||
|
if !isComparable(v.Field(i)) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
|
||||||
|
default:
|
||||||
|
return v.Type().Comparable()
|
||||||
|
}
|
||||||
|
}
|
||||||
10
vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
generated
vendored
Normal file
10
vendor/github.com/go-viper/mapstructure/v2/reflect_go1_20.go
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
//go:build go1.20
|
||||||
|
|
||||||
|
package mapstructure
|
||||||
|
|
||||||
|
import "reflect"
|
||||||
|
|
||||||
|
// TODO: remove once we drop support for Go <1.20
|
||||||
|
func isComparable(v reflect.Value) bool {
|
||||||
|
return v.Comparable()
|
||||||
|
}
|
||||||
26
vendor/github.com/go-webauthn/webauthn/LICENSE
generated
vendored
Normal file
26
vendor/github.com/go-webauthn/webauthn/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
Copyright (c) 2025 github.com/go-webauthn/webauthn authors.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions
|
||||||
|
are met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
3. Neither the name of the copyright holder nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
|
||||||
|
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
|
||||||
|
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
|
||||||
|
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
41
vendor/github.com/go-webauthn/webauthn/metadata/const.go
generated
vendored
Normal file
41
vendor/github.com/go-webauthn/webauthn/metadata/const.go
generated
vendored
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
package metadata
|
||||||
|
|
||||||
|
const (
|
||||||
|
// ProductionMDSRoot is the root certificate for the MDS.
|
||||||
|
//
|
||||||
|
// See: https://secure.globalsign.com/cacert/root-r3.crt
|
||||||
|
ProductionMDSRoot = "MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsTgHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmmKPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zdQQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZXriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+oLkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZURUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMpjjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQXmcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecsMx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpHWD9f"
|
||||||
|
|
||||||
|
// ProductionMDSURL is the Production MDS URL.
|
||||||
|
ProductionMDSURL = "https://mds.fidoalliance.org"
|
||||||
|
|
||||||
|
// ConformanceMDSRoot is the root certificate for the MDS Conformance Suite.
|
||||||
|
//
|
||||||
|
// See: https://mds3.fido.tools/pki/MDS3ROOT.crt
|
||||||
|
ConformanceMDSRoot = "MIICaDCCAe6gAwIBAgIPBCqih0DiJLW7+UHXx/o1MAoGCCqGSM49BAMDMGcxCzAJBgNVBAYTAlVTMRYwFAYDVQQKDA1GSURPIEFsbGlhbmNlMScwJQYDVQQLDB5GQUtFIE1ldGFkYXRhIDMgQkxPQiBST09UIEZBS0UxFzAVBgNVBAMMDkZBS0UgUm9vdCBGQUtFMB4XDTE3MDIwMTAwMDAwMFoXDTQ1MDEzMTIzNTk1OVowZzELMAkGA1UEBhMCVVMxFjAUBgNVBAoMDUZJRE8gQWxsaWFuY2UxJzAlBgNVBAsMHkZBS0UgTWV0YWRhdGEgMyBCTE9CIFJPT1QgRkFLRTEXMBUGA1UEAwwORkFLRSBSb290IEZBS0UwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASKYiz3YltC6+lmxhPKwA1WFZlIqnX8yL5RybSLTKFAPEQeTD9O6mOz+tg8wcSdnVxHzwnXiQKJwhrav70rKc2ierQi/4QUrdsPes8TEirZOkCVJurpDFbXZOgs++pa4XmjYDBeMAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAfBgNVHSMEGDAWgBQGcfeCs0Y8D+lh6U5B2xSrR74eHTAKBggqhkjOPQQDAwNoADBlAjEA/xFsgri0xubSa3y3v5ormpPqCwfqn9s0MLBAtzCIgxQ/zkzPKctkiwoPtDzI51KnAjAmeMygX2S5Ht8+e+EQnezLJBJXtnkRWY+Zt491wgt/AwSs5PHHMv5QgjELOuMxQBc="
|
||||||
|
|
||||||
|
// ExampleMDSRoot is the example root certificate for the MDS.
|
||||||
|
//
|
||||||
|
// See: https://fidoalliance.org/specs/mds/fido-metadata-service-v3.1-ps-20250521.html#sctn-examples
|
||||||
|
ExampleMDSRoot = "MIIGGTCCBAGgAwIBAgIUdT9qLX0sVMRe8l0sLmHd3mZovQ0wDQYJKoZIhvcNAQELBQAwgZsxHzAdBgNVBAMMFkVYQU1QTEUgTURTMyBURVNUIFJPT1QxIjAgBgkqhkiG9w0BCQEWE2V4YW1wbGVAZXhhbXBsZS5jb20xFDASBgNVBAoMC0V4YW1wbGUgT1JHMRAwDgYDVQQLDAdFeGFtcGxlMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTVkxEjAQBgNVBAcMCVdha2VmaWVsZDAeFw0yMTA0MTkxMTM1MDdaFw00ODA5MDQxMTM1MDdaMIGbMR8wHQYDVQQDDBZFWEFNUExFIE1EUzMgVEVTVCBST09UMSIwIAYJKoZIhvcNAQkBFhNleGFtcGxlQGV4YW1wbGUuY29tMRQwEgYDVQQKDAtFeGFtcGxlIE9SRzEQMA4GA1UECwwHRXhhbXBsZTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk1ZMRIwEAYDVQQHDAlXYWtlZmllbGQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDDjF5wyEWuhwDHsZosGdGFTCcI677rW881vV+UfW38J+K2ioFFNeGVsxbcebK6AVOiCDPFj0974IpeD9SFOhwAHoDu/LCfXdQWp8ZgQ91ULYWoW8o7NNSp01nbN9zmaO6/xKNCa0bzjmXoGqglqnP1AtRcWYvXOSKZy1rcPeDv4Dhcpdp6W72fBw0eWIqOhsrItuY2/N8ItBPiG03EX72nACq4nZJ/nAIcUbER8STSFPPzvE97TvShsi1FD8aO6l1WkR/QkreAGjMI++GbB2Qc1nN9Y/VEDbMDhQtxXQRdpFwubTjejkN9hKOtF3B71YrwIrng3V9RoPMFdapWMzSlI+WWHog0oTj1PqwJDDg7+z1I6vSDeVWAMKr9mq1w1OGNzgBopIjd9lRWkRtt2kQSPX9XxqS4E1gDDr8MKbpM3JuubQtNCg9D7Ljvbz6vwvUrbPHH+oREvucsp0PZ5PpizloepGIcLFxDQqCulGY2n7Ahl0JOFXJqOFCaK3TWHwBvZsaY5DgBuUvdUrwtgZNg2eg2omWXEepiVFQn3Fvj43Wh2npPMgIe5P0rwncXvROxaczd4rtajKS1ucoB9b9iKqM2+M1y/FDIgVf1fWEHwK7YdzxMlgOeLdeV/kqRU5PEUlLU9a2EwdOErrPbPKZmIfbs/L4B3k4zejMDH3Y+ZwIDAQABo1MwUTAdBgNVHQ4EFgQU8sWwq1TrurK7xMTwO1dKfeJBbCMwHwYDVR0jBBgwFoAU8sWwq1TrurK7xMTwO1dKfeJBbCMwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAFw6M1PiIfCPIBQ5EBUPNmRvRFuDpolOmDofnf/+mv63LqwQZAdo/W8tzZ9kOFhq24SiLw0H7fsdG/jeREXiIZMNoW/rA6Uac8sU+FYF7Q+qp6CQLlSQbDcpVMifTQjcBk2xh+aLK9SrrXBqnTAhwS+offGtAW8DpoLuH4tAcQmIjlgMlN65jnELCuqNR/wpA+zch8LZW8saQ2cwRCwdr8mAzZoLbsDSVCHxQF3/kQjPT7Nao1q2iWcY3OYcRmKrieHDP67yeLUbVmetfZis2d6ZlkqHLB4ZW1xX4otsEFkuTJA3HWDRsNyhTwx1YoCLsYut5Zp0myqPNBq28w6qGMyyoJN0Z4RzMEO3R6i/MQNfhK55/8O2HciM6xb5t/aBSuHPKlBDrFWhpRnKYkaNtlUo35qV5IbKGKau3SdZdSRciaXUd/p81YmoF01UlhhMz/Rqr1k2gyA0a9tF8+awCeanYt5izl8YO0FlrOU1SQ5UQw4szqqZqbrf4e8fRuU2TXNx4zk+ImE7WRB44f6mSD746ZCBRogZ/SA5jUBu+OPe4/sEtERWRcQD+fXgce9ZEN0
+peyJIKAsl5Rm2Bmgyg5IoyWwSG5W+WekGyEokpslou2Yc6EjUj5ndZWz5EiHAiQ74hNfDoCZIxVVLU3Qbp8a0S1bmsoT2JOsspIbtZUg="
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
HeaderX509URI = "x5u"
|
||||||
|
HeaderX509Certificate = "x5c"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
errIntermediateCertRevoked = &Error{
|
||||||
|
Type: "intermediate_revoked",
|
||||||
|
Details: "Intermediate certificate is on issuers revocation list",
|
||||||
|
}
|
||||||
|
errLeafCertRevoked = &Error{
|
||||||
|
Type: "leaf_revoked",
|
||||||
|
Details: "Leaf certificate is on issuers revocation list",
|
||||||
|
}
|
||||||
|
errCRLUnavailable = &Error{
|
||||||
|
Type: "crl_unavailable",
|
||||||
|
Details: "Certificate revocation list is unavailable",
|
||||||
|
}
|
||||||
|
)
|
||||||
290
vendor/github.com/go-webauthn/webauthn/metadata/decode.go
generated
vendored
Normal file
290
vendor/github.com/go-webauthn/webauthn/metadata/decode.go
generated
vendored
Normal file
@@ -0,0 +1,290 @@
|
|||||||
|
package metadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/base64"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-viper/mapstructure/v2"
|
||||||
|
"github.com/golang-jwt/jwt/v5"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/x/revoke"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewDecoder returns a new metadata decoder.
|
||||||
|
func NewDecoder(opts ...DecoderOption) (decoder *Decoder, err error) {
|
||||||
|
decoder = &Decoder{
|
||||||
|
client: &http.Client{},
|
||||||
|
parser: jwt.NewParser(),
|
||||||
|
hook: mapstructure.ComposeDecodeHookFunc(),
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, opt := range opts {
|
||||||
|
if err = opt(decoder); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to apply decoder option: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if decoder.root == "" {
|
||||||
|
decoder.root = ProductionMDSRoot
|
||||||
|
}
|
||||||
|
|
||||||
|
return decoder, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decoder handles decoding and specialized parsing of the metadata blob.
|
||||||
|
type Decoder struct {
|
||||||
|
client *http.Client
|
||||||
|
parser *jwt.Parser
|
||||||
|
hook mapstructure.DecodeHookFunc
|
||||||
|
root string
|
||||||
|
ignoreEntryParsingErrors bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse handles parsing of the raw JSON values of the metadata blob. Should be used after using [Decoder.Decode] or
|
||||||
|
// [Decoder.DecodeBytes].
|
||||||
|
func (d *Decoder) Parse(payload *PayloadJSON) (metadata *Metadata, err error) {
|
||||||
|
metadata = &Metadata{
|
||||||
|
Parsed: Parsed{
|
||||||
|
LegalHeader: payload.LegalHeader,
|
||||||
|
Number: payload.Number,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
if metadata.Parsed.NextUpdate, err = time.Parse(time.DateOnly, payload.NextUpdate); err != nil {
|
||||||
|
return nil, fmt.Errorf("error occurred parsing next update value '%s': %w", payload.NextUpdate, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed Entry
|
||||||
|
|
||||||
|
for _, entry := range payload.Entries {
|
||||||
|
if parsed, err = entry.Parse(); err != nil {
|
||||||
|
metadata.Unparsed = append(metadata.Unparsed, EntryError{
|
||||||
|
Error: err,
|
||||||
|
EntryJSON: entry,
|
||||||
|
})
|
||||||
|
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata.Parsed.Entries = append(metadata.Parsed.Entries, parsed)
|
||||||
|
}
|
||||||
|
|
||||||
|
if n := len(metadata.Unparsed); n != 0 && !d.ignoreEntryParsingErrors {
|
||||||
|
return metadata, fmt.Errorf("error occurred parsing metadata: %d entries had errors during parsing", n)
|
||||||
|
}
|
||||||
|
|
||||||
|
return metadata, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode the blob from an [io.Reader]. This function will close the [io.ReadCloser] after completing.
|
||||||
|
func (d *Decoder) Decode(r io.Reader) (payload *PayloadJSON, err error) {
|
||||||
|
bytes, err := io.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.DecodeBytes(bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeBytes handles decoding raw bytes. If you have a read closer it's suggested to use [Decoder.Decode].
|
||||||
|
func (d *Decoder) DecodeBytes(bytes []byte) (payload *PayloadJSON, err error) {
|
||||||
|
var token *jwt.Token
|
||||||
|
|
||||||
|
if token, err = d.parser.Parse(string(bytes), func(token *jwt.Token) (any, error) {
|
||||||
|
// 2. If the x5u attribute is present in the JWT Header.
|
||||||
|
if _, ok := token.Header[HeaderX509URI].([]any); ok {
|
||||||
|
// Never seen an x5u here, although it is in the spec.
|
||||||
|
return nil, errors.New("x5u encountered in header of metadata TOC payload")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. If the x5u attribute is missing, the chain should be retrieved from the x5c attribute.
|
||||||
|
var (
|
||||||
|
x5c, chain []any
|
||||||
|
ok, valid bool
|
||||||
|
)
|
||||||
|
|
||||||
|
if x5c, ok = token.Header[HeaderX509Certificate].([]any); !ok {
|
||||||
|
// If that attribute is missing as well, Metadata TOC signing trust anchor is considered the TOC signing certificate chain.
|
||||||
|
chain = []any{d.root}
|
||||||
|
} else {
|
||||||
|
chain = x5c
|
||||||
|
}
|
||||||
|
|
||||||
|
// The certificate chain MUST be verified to properly chain to the metadata TOC signing trust anchor.
|
||||||
|
if valid, err = validateChain(d.root, chain); !valid || err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Chain validated, extract the TOC signing certificate from the chain. Create a buffer large enough to hold the
|
||||||
|
// certificate bytes.
|
||||||
|
o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string))))
|
||||||
|
|
||||||
|
var (
|
||||||
|
n int
|
||||||
|
cert *x509.Certificate
|
||||||
|
)
|
||||||
|
|
||||||
|
// Decode the base64 certificate into the buffer.
|
||||||
|
if n, err = base64.StdEncoding.Decode(o, []byte(chain[0].(string))); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the certificate from the buffer.
|
||||||
|
if cert, err = x509.ParseCertificate(o[:n]); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Verify the signature of the Metadata TOC object using the TOC signing certificate chain
|
||||||
|
// jwt.Parse() uses the TOC signing certificate public key internally to verify the signature.
|
||||||
|
return cert.PublicKey, err
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var decoder *mapstructure.Decoder
|
||||||
|
|
||||||
|
payload = &PayloadJSON{}
|
||||||
|
|
||||||
|
if decoder, err = mapstructure.NewDecoder(&mapstructure.DecoderConfig{
|
||||||
|
Metadata: nil,
|
||||||
|
Result: payload,
|
||||||
|
DecodeHook: d.hook,
|
||||||
|
TagName: "json",
|
||||||
|
}); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = decoder.Decode(token.Claims); err != nil {
|
||||||
|
return payload, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return payload, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecoderOption is a representation of a function that can set options within a decoder.
|
||||||
|
type DecoderOption func(decoder *Decoder) (err error)
|
||||||
|
|
||||||
|
// WithIgnoreEntryParsingErrors is a DecoderOption which ignores errors when parsing individual entries. The values for
|
||||||
|
// these entries will exist as an unparsed entry.
|
||||||
|
func WithIgnoreEntryParsingErrors() DecoderOption {
|
||||||
|
return func(decoder *Decoder) (err error) {
|
||||||
|
decoder.ignoreEntryParsingErrors = true
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// WithRootCertificate overrides the root certificate used to validate the authenticity of the metadata payload.
|
||||||
|
func WithRootCertificate(value string) DecoderOption {
|
||||||
|
return func(decoder *Decoder) (err error) {
|
||||||
|
decoder.root = value
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateChain(root string, chain []any) (bool, error) {
|
||||||
|
oRoot := make([]byte, base64.StdEncoding.DecodedLen(len(root)))
|
||||||
|
|
||||||
|
nRoot, err := base64.StdEncoding.Decode(oRoot, []byte(root))
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
rootcert, err := x509.ParseCertificate(oRoot[:nRoot])
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
roots := x509.NewCertPool()
|
||||||
|
|
||||||
|
roots.AddCert(rootcert)
|
||||||
|
|
||||||
|
o := make([]byte, base64.StdEncoding.DecodedLen(len(chain[1].(string))))
|
||||||
|
|
||||||
|
n, err := base64.StdEncoding.Decode(o, []byte(chain[1].(string)))
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
intcert, err := x509.ParseCertificate(o[:n])
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if revoked, ok := revoke.VerifyCertificate(intcert); !ok {
|
||||||
|
issuer := intcert.IssuingCertificateURL
|
||||||
|
|
||||||
|
if issuer != nil {
|
||||||
|
return false, errCRLUnavailable
|
||||||
|
}
|
||||||
|
} else if revoked {
|
||||||
|
return false, errIntermediateCertRevoked
|
||||||
|
}
|
||||||
|
|
||||||
|
ints := x509.NewCertPool()
|
||||||
|
ints.AddCert(intcert)
|
||||||
|
|
||||||
|
l := make([]byte, base64.StdEncoding.DecodedLen(len(chain[0].(string))))
|
||||||
|
|
||||||
|
n, err = base64.StdEncoding.Decode(l, []byte(chain[0].(string)))
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
leafcert, err := x509.ParseCertificate(l[:n])
|
||||||
|
if err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if revoked, ok := revoke.VerifyCertificate(leafcert); !ok {
|
||||||
|
return false, errCRLUnavailable
|
||||||
|
} else if revoked {
|
||||||
|
return false, errLeafCertRevoked
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := x509.VerifyOptions{
|
||||||
|
Roots: roots,
|
||||||
|
Intermediates: ints,
|
||||||
|
}
|
||||||
|
|
||||||
|
_, err = leafcert.Verify(opts)
|
||||||
|
|
||||||
|
return err == nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func mdsParseX509Certificate(value string) (certificate *x509.Certificate, err error) {
|
||||||
|
var n int
|
||||||
|
|
||||||
|
raw := make([]byte, base64.StdEncoding.DecodedLen(len(value)))
|
||||||
|
|
||||||
|
if n, err = base64.StdEncoding.Decode(raw, []byte(strings.TrimSpace(value))); err != nil {
|
||||||
|
return nil, fmt.Errorf("error occurred parsing *x509.certificate: error occurred decoding base64 data: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if certificate, err = x509.ParseCertificate(raw[:n]); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return certificate, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mdsParseTimePointer(format, value string) (parsed *time.Time, err error) {
|
||||||
|
if value == "" {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var p time.Time
|
||||||
|
|
||||||
|
if p, err = time.Parse(format, value); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &p, nil
|
||||||
|
}
|
||||||
2
vendor/github.com/go-webauthn/webauthn/metadata/doc.go
generated
vendored
Normal file
2
vendor/github.com/go-webauthn/webauthn/metadata/doc.go
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
// Package metadata handles metadata validation instrumentation.
|
||||||
|
package metadata
|
||||||
1120
vendor/github.com/go-webauthn/webauthn/metadata/metadata.go
generated
vendored
Normal file
1120
vendor/github.com/go-webauthn/webauthn/metadata/metadata.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
16
vendor/github.com/go-webauthn/webauthn/metadata/passkey_authenticator.go
generated
vendored
Normal file
16
vendor/github.com/go-webauthn/webauthn/metadata/passkey_authenticator.go
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
package metadata
|
||||||
|
|
||||||
|
// PasskeyAuthenticator is a type that represents the schema from the Passkey Developer AAGUID listing.
|
||||||
|
//
|
||||||
|
// See: https://github.com/passkeydeveloper/passkey-authenticator-aaguids
|
||||||
|
type PasskeyAuthenticator map[string]PassKeyAuthenticatorAAGUID
|
||||||
|
|
||||||
|
// PassKeyAuthenticatorAAGUID is a type that represents the individual schema entry from the Passkey Developer AAGUID
|
||||||
|
// listing. Used with [PasskeyAuthenticator].
|
||||||
|
//
|
||||||
|
// See: https://github.com/passkeydeveloper/passkey-authenticator-aaguids
|
||||||
|
type PassKeyAuthenticatorAAGUID struct {
|
||||||
|
Name string `json:"name"`
|
||||||
|
IconDark string `json:"icon_dark,omitempty"`
|
||||||
|
IconLight string `json:"icon_light,omitempty"`
|
||||||
|
}
|
||||||
62
vendor/github.com/go-webauthn/webauthn/metadata/status.go
generated
vendored
Normal file
62
vendor/github.com/go-webauthn/webauthn/metadata/status.go
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
package metadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ValidateStatusReports checks a list of [StatusReport] structs against a list of desired and undesired [AuthenticatorStatus]
|
||||||
|
// values. If the reports contain all of the desired and none of the undesired status reports then no error is returned
|
||||||
|
// otherwise an error describing the issue is returned.
|
||||||
|
func ValidateStatusReports(reports []StatusReport, desired, undesired []AuthenticatorStatus) (err error) {
|
||||||
|
if len(desired) == 0 && (len(undesired) == 0 || len(reports) == 0) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var present, absent []string
|
||||||
|
|
||||||
|
if len(undesired) != 0 {
|
||||||
|
for _, report := range reports {
|
||||||
|
for _, status := range undesired {
|
||||||
|
if report.Status == status {
|
||||||
|
present = append(present, string(status))
|
||||||
|
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(desired) != 0 {
|
||||||
|
desired:
|
||||||
|
for _, status := range desired {
|
||||||
|
for _, report := range reports {
|
||||||
|
if report.Status == status {
|
||||||
|
continue desired
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
absent = append(absent, string(status))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case len(present) == 0 && len(absent) == 0:
|
||||||
|
return nil
|
||||||
|
case len(present) != 0 && len(absent) == 0:
|
||||||
|
return &Error{
|
||||||
|
Type: "invalid_status",
|
||||||
|
Details: fmt.Sprintf("The following undesired status reports were present: %s", strings.Join(present, ", ")),
|
||||||
|
}
|
||||||
|
case len(present) == 0 && len(absent) != 0:
|
||||||
|
return &Error{
|
||||||
|
Type: "invalid_status",
|
||||||
|
Details: fmt.Sprintf("The following desired status reports were absent: %s", strings.Join(absent, ", ")),
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return &Error{
|
||||||
|
Type: "invalid_status",
|
||||||
|
Details: fmt.Sprintf("The following undesired status reports were present: %s; the following desired status reports were absent: %s", strings.Join(present, ", "), strings.Join(absent, ", ")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
373
vendor/github.com/go-webauthn/webauthn/metadata/types.go
generated
vendored
Normal file
373
vendor/github.com/go-webauthn/webauthn/metadata/types.go
generated
vendored
Normal file
@@ -0,0 +1,373 @@
|
|||||||
|
package metadata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"reflect"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// The Provider is an interface which describes the elements required to satisfy validation of metadata.
|
||||||
|
type Provider interface {
|
||||||
|
// GetEntry returns a MDS3 payload entry given a AAGUID.
|
||||||
|
GetEntry(ctx context.Context, aaguid uuid.UUID) (entry *Entry, err error)
|
||||||
|
|
||||||
|
// GetValidateEntry returns true if this provider requires an entry to exist with a AAGUID matching the attestation
|
||||||
|
// statement during registration.
|
||||||
|
GetValidateEntry(ctx context.Context) (validate bool)
|
||||||
|
|
||||||
|
// GetValidateEntryPermitZeroAAGUID returns true if attestation statements with zerod AAGUID should be permitted
|
||||||
|
// when considering the result from GetValidateEntry. i.e. if the AAGUID is zeroed, and GetValidateEntry returns
|
||||||
|
// true, and this implementation returns true, the attestation statement will pass validation.
|
||||||
|
GetValidateEntryPermitZeroAAGUID(ctx context.Context) (skip bool)
|
||||||
|
|
||||||
|
// GetValidateTrustAnchor returns true if trust anchor validation of attestation statements is enforced during
|
||||||
|
// registration.
|
||||||
|
GetValidateTrustAnchor(ctx context.Context) (validate bool)
|
||||||
|
|
||||||
|
// GetValidateStatus returns true if the status reports for an authenticator should be validated against desired and
|
||||||
|
// undesired statuses.
|
||||||
|
GetValidateStatus(ctx context.Context) (validate bool)
|
||||||
|
|
||||||
|
// GetValidateAttestationTypes if true will enforce checking that the provided attestation is possible with the
|
||||||
|
// given authenticator.
|
||||||
|
GetValidateAttestationTypes(ctx context.Context) (validate bool)
|
||||||
|
|
||||||
|
// ValidateStatusReports returns nil if the provided authenticator status reports are desired.
|
||||||
|
ValidateStatusReports(ctx context.Context, reports []StatusReport) (err error)
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
ErrNotInitialized = errors.New("metadata: not initialized")
|
||||||
|
)
|
||||||
|
|
||||||
|
type PublicKeyCredentialParameters struct {
|
||||||
|
Type string `json:"type"`
|
||||||
|
Alg webauthncose.COSEAlgorithmIdentifier `json:"alg"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AuthenticatorAttestationTypes []AuthenticatorAttestationType
|
||||||
|
|
||||||
|
func (t AuthenticatorAttestationTypes) HasBasicFull() bool {
|
||||||
|
for _, a := range t {
|
||||||
|
if a == BasicFull || a == AttCA {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// AuthenticatorAttestationType - The ATTESTATION constants are 16 bit long integers indicating the specific attestation that authenticator supports.
|
||||||
|
// Each constant has a case-sensitive string representation (in quotes), which is used in the authoritative metadata for FIDO authenticators.
|
||||||
|
type AuthenticatorAttestationType string
|
||||||
|
|
||||||
|
const (
|
||||||
|
// BasicFull - Indicates full basic attestation, based on an attestation private key shared among a class of authenticators (e.g. same model). Authenticators must provide its attestation signature during the registration process for the same reason. The attestation trust anchor is shared with FIDO Servers out of band (as part of the Metadata). This sharing process should be done according to [UAFMetadataService].
|
||||||
|
BasicFull AuthenticatorAttestationType = "basic_full"
|
||||||
|
|
||||||
|
// BasicSurrogate - Just syntactically a Basic Attestation. The attestation object self-signed, i.e. it is signed using the UAuth.priv key, i.e. the key corresponding to the UAuth.pub key included in the attestation object. As a consequence it does not provide a cryptographic proof of the security characteristics. But it is the best thing we can do if the authenticator is not able to have an attestation private key.
|
||||||
|
BasicSurrogate AuthenticatorAttestationType = "basic_surrogate"
|
||||||
|
|
||||||
|
// Ecdaa - Indicates use of elliptic curve based direct anonymous attestation as defined in [FIDOEcdaaAlgorithm]. Support for this attestation type is optional at this time. It might be required by FIDO Certification.
|
||||||
|
Ecdaa AuthenticatorAttestationType = "ecdaa"
|
||||||
|
|
||||||
|
// AttCA - Indicates PrivacyCA attestation as defined in [TCG-CMCProfile-AIKCertEnroll]. Support for this attestation type is optional at this time. It might be required by FIDO Certification.
|
||||||
|
AttCA AuthenticatorAttestationType = "attca"
|
||||||
|
|
||||||
|
// AnonCA In this case, the authenticator uses an Anonymization CA which dynamically generates per-credential attestation certificates such that the attestation statements presented to Relying Parties do not provide uniquely identifiable information, e.g., that might be used for tracking purposes. The applicable [WebAuthn] attestation formats "fmt" are Google SafetyNet Attestation "android-safetynet", Android Keystore Attestation "android-key", Apple Anonymous Attestation "apple", and Apple Application Attestation "apple-appattest".
|
||||||
|
AnonCA AuthenticatorAttestationType = "anonca"
|
||||||
|
|
||||||
|
// None - Indicates absence of attestation.
|
||||||
|
None AuthenticatorAttestationType = "none"
|
||||||
|
)
|
||||||
|
|
||||||
|
type KeyScope string
|
||||||
|
|
||||||
|
const (
|
||||||
|
KeyScopeNone KeyScope = ""
|
||||||
|
PublicKeyCredentialSource KeyScope = "public-key-credential-source" //nolint:gosec
|
||||||
|
DeviceSupplementalPublicKeys KeyScope = "device-spk"
|
||||||
|
ProviderSupplementalPublicKeys KeyScope = "provider-spk"
|
||||||
|
)
|
||||||
|
|
||||||
|
type MultiDeviceCredentialSupport string
|
||||||
|
|
||||||
|
const (
|
||||||
|
MultiDeviceCredentialUnsupported MultiDeviceCredentialSupport = "unsupported"
|
||||||
|
MultiDeviceCredentialExplicit MultiDeviceCredentialSupport = "explicit"
|
||||||
|
MultiDeviceCredentialImplicit MultiDeviceCredentialSupport = "implicit"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AuthenticatorStatus - This enumeration describes the status of an authenticator model as identified by its AAID and potentially some additional information (such as a specific attestation key).
|
||||||
|
// https://fidoalliance.org/specs/mds/fido-metadata-service-v3.1-ps-20250521.html#sctn-authnr-stat
|
||||||
|
type AuthenticatorStatus string
|
||||||
|
|
||||||
|
const (
|
||||||
|
// NotFidoCertified - This authenticator is not FIDO certified.
|
||||||
|
NotFidoCertified AuthenticatorStatus = "NOT_FIDO_CERTIFIED"
|
||||||
|
|
||||||
|
// FidoCertified - This authenticator has passed FIDO functional certification. This certification scheme is phased out and will be replaced by FIDO_CERTIFIED_L1.
|
||||||
|
FidoCertified AuthenticatorStatus = "FIDO_CERTIFIED"
|
||||||
|
|
||||||
|
// UserVerificationBypass - Indicates that malware is able to bypass the user verification. This means that the authenticator could be used without the user's consent and potentially even without the user's knowledge.
|
||||||
|
//nolint:gosec
|
||||||
|
UserVerificationBypass AuthenticatorStatus = "USER_VERIFICATION_BYPASS"
|
||||||
|
|
||||||
|
// AttestationKeyCompromise - Indicates that an attestation key for this authenticator is known to be compromised. Additional data should be supplied, including the key identifier and the date of compromise, if known.
|
||||||
|
AttestationKeyCompromise AuthenticatorStatus = "ATTESTATION_KEY_COMPROMISE"
|
||||||
|
|
||||||
|
// UserKeyRemoteCompromise - This authenticator has identified weaknesses that allow registered keys to be compromised and should not be trusted. This would include both, e.g. weak entropy that causes predictable keys to be generated or side channels that allow keys or signatures to be forged, guessed or extracted.
|
||||||
|
UserKeyRemoteCompromise AuthenticatorStatus = "USER_KEY_REMOTE_COMPROMISE"
|
||||||
|
|
||||||
|
// UserKeyPhysicalCompromise - This authenticator has known weaknesses in its key protection mechanism(s) that allow user keys to be extracted by an adversary in physical possession of the device.
|
||||||
|
UserKeyPhysicalCompromise AuthenticatorStatus = "USER_KEY_PHYSICAL_COMPROMISE"
|
||||||
|
|
||||||
|
// UpdateAvailable - A software or firmware update is available for the device. Additional data should be supplied including a URL where users can obtain an update and the date the update was published.
|
||||||
|
UpdateAvailable AuthenticatorStatus = "UPDATE_AVAILABLE"
|
||||||
|
|
||||||
|
// Revoked - The FIDO Alliance has determined that this authenticator should not be trusted for any reason, for example if it is known to be a fraudulent product or contain a deliberate backdoor.
|
||||||
|
Revoked AuthenticatorStatus = "REVOKED"
|
||||||
|
|
||||||
|
// SelfAssertionSubmitted - The authenticator vendor has completed and submitted the self-certification checklist to the FIDO Alliance. If this completed checklist is publicly available, the URL will be specified in StatusReportJSON.url.
|
||||||
|
SelfAssertionSubmitted AuthenticatorStatus = "SELF_ASSERTION_SUBMITTED"
|
||||||
|
|
||||||
|
// FidoCertifiedL1 - The authenticator has passed FIDO Authenticator certification at level 1. This level is the more strict successor of FIDO_CERTIFIED.
|
||||||
|
FidoCertifiedL1 AuthenticatorStatus = "FIDO_CERTIFIED_L1"
|
||||||
|
|
||||||
|
// FidoCertifiedL1plus - The authenticator has passed FIDO Authenticator certification at level 1+. This level is the more than level 1.
|
||||||
|
FidoCertifiedL1plus AuthenticatorStatus = "FIDO_CERTIFIED_L1plus"
|
||||||
|
|
||||||
|
// FidoCertifiedL2 - The authenticator has passed FIDO Authenticator certification at level 2. This level is more strict than level 1+.
|
||||||
|
FidoCertifiedL2 AuthenticatorStatus = "FIDO_CERTIFIED_L2"
|
||||||
|
|
||||||
|
// FidoCertifiedL2plus - The authenticator has passed FIDO Authenticator certification at level 2+. This level is more strict than level 2.
|
||||||
|
FidoCertifiedL2plus AuthenticatorStatus = "FIDO_CERTIFIED_L2plus"
|
||||||
|
|
||||||
|
// FidoCertifiedL3 - The authenticator has passed FIDO Authenticator certification at level 3. This level is more strict than level 2+.
|
||||||
|
FidoCertifiedL3 AuthenticatorStatus = "FIDO_CERTIFIED_L3"
|
||||||
|
|
||||||
|
// FidoCertifiedL3plus - The authenticator has passed FIDO Authenticator certification at level 3+. This level is more strict than level 3.
|
||||||
|
FidoCertifiedL3plus AuthenticatorStatus = "FIDO_CERTIFIED_L3plus"
|
||||||
|
|
||||||
|
// FIPS140CertifiedL1 - The authenticator has passed FIPS 140 certification at overall level 1.
|
||||||
|
FIPS140CertifiedL1 AuthenticatorStatus = "FIPS140_CERTIFIED_L1"
|
||||||
|
|
||||||
|
// FIPS140CertifiedL2 - The authenticator has passed FIPS 140 certification at overall level 2.
|
||||||
|
FIPS140CertifiedL2 AuthenticatorStatus = "FIPS140_CERTIFIED_L2"
|
||||||
|
|
||||||
|
// FIPS140CertifiedL3 - The authenticator has passed FIPS 140 certification at overall level 3.
|
||||||
|
FIPS140CertifiedL3 AuthenticatorStatus = "FIPS140_CERTIFIED_L3"
|
||||||
|
|
||||||
|
// FIPS140CertifiedL4 - The authenticator has passed FIPS 140 certification at overall level 4.
|
||||||
|
FIPS140CertifiedL4 AuthenticatorStatus = "FIPS140_CERTIFIED_L4"
|
||||||
|
)
|
||||||
|
|
||||||
|
// defaultUndesiredAuthenticatorStatus is an array of undesirable authenticator statuses.
|
||||||
|
var defaultUndesiredAuthenticatorStatus = [...]AuthenticatorStatus{
|
||||||
|
AttestationKeyCompromise,
|
||||||
|
UserVerificationBypass,
|
||||||
|
UserKeyRemoteCompromise,
|
||||||
|
UserKeyPhysicalCompromise,
|
||||||
|
Revoked,
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsUndesiredAuthenticatorStatus returns whether the supplied authenticator status is desirable or not.
|
||||||
|
func IsUndesiredAuthenticatorStatus(status AuthenticatorStatus) bool {
|
||||||
|
for _, s := range defaultUndesiredAuthenticatorStatus {
|
||||||
|
if s == status {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsUndesiredAuthenticatorStatusSlice returns whether the supplied authenticator status is desirable or not.
|
||||||
|
func IsUndesiredAuthenticatorStatusSlice(status AuthenticatorStatus, values []AuthenticatorStatus) bool {
|
||||||
|
for _, s := range values {
|
||||||
|
if s == status {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsUndesiredAuthenticatorStatusMap returns whether the supplied authenticator status is desirable or not.
|
||||||
|
func IsUndesiredAuthenticatorStatusMap(status AuthenticatorStatus, values map[AuthenticatorStatus]bool) bool {
|
||||||
|
_, ok := values[status]
|
||||||
|
|
||||||
|
return ok
|
||||||
|
}
|
||||||
|
|
||||||
|
// AuthenticationAlgorithm is the string form of a FIDO metadata authentication
// algorithm identifier, naming the signature scheme, hash, and signature encoding
// an authenticator uses.
type AuthenticationAlgorithm string

const (
	// ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW is an ECDSA signature on the NIST secp256r1 curve which must have raw R and
	// S buffers, encoded in big-endian order.
	ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_raw"

	// ALG_SIGN_SECP256R1_ECDSA_SHA256_DER is a DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the NIST secp256r1
	// curve.
	ALG_SIGN_SECP256R1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256r1_ecdsa_sha256_der"

	// ALG_SIGN_RSASSA_PSS_SHA256_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian
	// order RFC4055 RFC4056.
	ALG_SIGN_RSASSA_PSS_SHA256_RAW AuthenticationAlgorithm = "rsassa_pss_sha256_raw"

	// ALG_SIGN_RSASSA_PSS_SHA256_DER is a DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the
	// RSASSA-PSS RFC3447 signature RFC4055 RFC4056.
	ALG_SIGN_RSASSA_PSS_SHA256_DER AuthenticationAlgorithm = "rsassa_pss_sha256_der"

	// ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW is an ECDSA signature on the secp256k1 curve which must have raw R and S
	// buffers, encoded in big-endian order.
	ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_raw"

	// ALG_SIGN_SECP256K1_ECDSA_SHA256_DER is a DER ITU-X690-2008 encoded ECDSA signature RFC5480 on the secp256k1 curve.
	ALG_SIGN_SECP256K1_ECDSA_SHA256_DER AuthenticationAlgorithm = "secp256k1_ecdsa_sha256_der"

	// ALG_SIGN_SM2_SM3_RAW is a Chinese SM2 elliptic curve based signature algorithm combined with SM3 hash algorithm
	// OSCCA-SM2 OSCCA-SM3.
	ALG_SIGN_SM2_SM3_RAW AuthenticationAlgorithm = "sm2_sm3_raw"

	// ALG_SIGN_RSA_EMSA_PKCS1_SHA256_RAW is the EMSA-PKCS1-v1_5 signature as defined in RFC3447.
	ALG_SIGN_RSA_EMSA_PKCS1_SHA256_RAW AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_raw"

	// ALG_SIGN_RSA_EMSA_PKCS1_SHA256_DER is a DER ITU-X690-2008 encoded OCTET STRING (not BIT STRING!) containing the
	// EMSA-PKCS1-v1_5 signature as defined in RFC3447.
	ALG_SIGN_RSA_EMSA_PKCS1_SHA256_DER AuthenticationAlgorithm = "rsa_emsa_pkcs1_sha256_der"

	// ALG_SIGN_RSASSA_PSS_SHA384_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian
	// order RFC4055 RFC4056.
	ALG_SIGN_RSASSA_PSS_SHA384_RAW AuthenticationAlgorithm = "rsassa_pss_sha384_raw"

	// ALG_SIGN_RSASSA_PSS_SHA512_RAW is a RSASSA-PSS RFC3447 signature must have raw S buffers, encoded in big-endian
	// order RFC4055 RFC4056.
	ALG_SIGN_RSASSA_PSS_SHA512_RAW AuthenticationAlgorithm = "rsassa_pss_sha512_raw"

	// ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA256(aka RS256) signature must have raw
	// S buffers, encoded in big-endian order RFC8017 RFC4056.
	ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha256_raw"

	// ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA384(aka RS384) signature must have raw
	// S buffers, encoded in big-endian order RFC8017 RFC4056.
	ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha384_raw"

	// ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA512(aka RS512) signature must have raw
	// S buffers, encoded in big-endian order RFC8017 RFC4056.
	ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha512_raw"

	// ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW is a RSASSA-PKCS1-v1_5 RFC3447 with SHA1(aka RS1) signature must have raw S
	// buffers, encoded in big-endian order RFC8017 RFC4056.
	ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW AuthenticationAlgorithm = "rsassa_pkcsv15_sha1_raw"

	// ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW is an ECDSA signature on the NIST secp384r1 curve with SHA384(aka: ES384)
	// which must have raw R and S buffers, encoded in big-endian order.
	ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW AuthenticationAlgorithm = "secp384r1_ecdsa_sha384_raw"

	// ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW is an ECDSA signature on the NIST secp512r1 curve with SHA512(aka: ES512)
	// which must have raw R and S buffers, encoded in big-endian order.
	ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW AuthenticationAlgorithm = "secp521r1_ecdsa_sha512_raw"

	// ALG_SIGN_ED25519_EDDSA_SHA512_RAW is an EdDSA signature on the curve 25519, which must have raw R and S buffers,
	// encoded in big-endian order.
	ALG_SIGN_ED25519_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed25519_eddsa_sha512_raw"

	// ALG_SIGN_ED448_EDDSA_SHA512_RAW is an EdDSA signature on the curve Ed448, which must have raw R and S buffers,
	// encoded in big-endian order.
	ALG_SIGN_ED448_EDDSA_SHA512_RAW AuthenticationAlgorithm = "ed448_eddsa_sha512_raw"
)
|
||||||
|
|
||||||
|
// algKeyCose bundles the COSE key type, algorithm identifier, and elliptic curve
// that correspond to a single metadata AuthenticationAlgorithm. Used for matching
// a parsed credential public key against metadata-declared algorithms.
//
// TODO: this goes away after webauthncose.CredentialPublicKey gets implemented.
type algKeyCose struct {
	// KeyType is the COSE key type (EC2, RSA, or OKP).
	KeyType webauthncose.COSEKeyType

	// Algorithm is the COSE algorithm identifier (e.g. ES256, PS256).
	Algorithm webauthncose.COSEAlgorithmIdentifier

	// Curve is the COSE elliptic curve; zero value for RSA algorithms, which have no curve.
	Curve webauthncose.COSEEllipticCurve
}
|
||||||
|
|
||||||
|
func algKeyCoseDictionary() func(AuthenticationAlgorithm) algKeyCose {
|
||||||
|
mapping := map[AuthenticationAlgorithm]algKeyCose{
|
||||||
|
ALG_SIGN_SECP256R1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256},
|
||||||
|
ALG_SIGN_SECP256R1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256, Curve: webauthncose.P256},
|
||||||
|
ALG_SIGN_RSASSA_PSS_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256},
|
||||||
|
ALG_SIGN_RSASSA_PSS_SHA256_DER: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS256},
|
||||||
|
ALG_SIGN_SECP256K1_ECDSA_SHA256_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1},
|
||||||
|
ALG_SIGN_SECP256K1_ECDSA_SHA256_DER: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES256K, Curve: webauthncose.Secp256k1},
|
||||||
|
ALG_SIGN_RSASSA_PSS_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS384},
|
||||||
|
ALG_SIGN_RSASSA_PSS_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgPS512},
|
||||||
|
ALG_SIGN_RSASSA_PKCSV15_SHA256_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS256},
|
||||||
|
ALG_SIGN_RSASSA_PKCSV15_SHA384_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS384},
|
||||||
|
ALG_SIGN_RSASSA_PKCSV15_SHA512_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS512},
|
||||||
|
ALG_SIGN_RSASSA_PKCSV15_SHA1_RAW: {KeyType: webauthncose.RSAKey, Algorithm: webauthncose.AlgRS1},
|
||||||
|
ALG_SIGN_SECP384R1_ECDSA_SHA384_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES384, Curve: webauthncose.P384},
|
||||||
|
ALG_SIGN_SECP521R1_ECDSA_SHA512_RAW: {KeyType: webauthncose.EllipticKey, Algorithm: webauthncose.AlgES512, Curve: webauthncose.P521},
|
||||||
|
ALG_SIGN_ED25519_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed25519},
|
||||||
|
ALG_SIGN_ED448_EDDSA_SHA512_RAW: {KeyType: webauthncose.OctetKey, Algorithm: webauthncose.AlgEdDSA, Curve: webauthncose.Ed448},
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(key AuthenticationAlgorithm) algKeyCose {
|
||||||
|
return mapping[key]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func AlgKeyMatch(key algKeyCose, algs []AuthenticationAlgorithm) bool {
|
||||||
|
for _, alg := range algs {
|
||||||
|
if reflect.DeepEqual(algKeyCoseDictionary()(alg), key) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// PublicKeyAlgAndEncoding is the string form of a FIDO metadata public key
// algorithm-and-encoding identifier.
type PublicKeyAlgAndEncoding string

const (
	// ALG_KEY_ECC_X962_RAW is a raw ANSI X9.62 formatted Elliptic Curve public key.
	ALG_KEY_ECC_X962_RAW PublicKeyAlgAndEncoding = "ecc_x962_raw"

	// ALG_KEY_ECC_X962_DER is a DER ITU-X690-2008 encoded ANSI X.9.62 formatted SubjectPublicKeyInfo RFC5480 specifying an elliptic curve public key.
	ALG_KEY_ECC_X962_DER PublicKeyAlgAndEncoding = "ecc_x962_der"

	// ALG_KEY_RSA_2048_RAW is a raw encoded 2048-bit RSA public key RFC3447.
	ALG_KEY_RSA_2048_RAW PublicKeyAlgAndEncoding = "rsa_2048_raw"

	// ALG_KEY_RSA_2048_DER is a ASN.1 DER [ITU-X690-2008] encoded 2048-bit RSA RFC3447 public key RFC4055.
	ALG_KEY_RSA_2048_DER PublicKeyAlgAndEncoding = "rsa_2048_der"

	// ALG_KEY_COSE is a COSE_Key format, as defined in Section 7 of RFC8152. This encoding includes its own field for indicating the public key algorithm.
	ALG_KEY_COSE PublicKeyAlgAndEncoding = "cose"
)
|
||||||
|
|
||||||
|
// Error is the structured error type used throughout this package. It serializes
// to JSON with the keys "type", "error", and "debug".
type Error struct {
	// Short name for the type of error that has occurred.
	Type string `json:"type"`

	// Additional details about the error.
	Details string `json:"error"`

	// Information to help debug the error.
	DevInfo string `json:"debug"`
}
|
||||||
|
|
||||||
|
// Error implements the error interface, returning only the Details string; Type and
// DevInfo are not included in the message.
func (e *Error) Error() string {
	return e.Details
}
|
||||||
|
|
||||||
|
// Clock is an interface used to implement clock functionality in various metadata
// areas, allowing tests to substitute a fixed or fake time source.
type Clock interface {
	// Now returns the current time.
	Now() time.Time
}
|
||||||
|
|
||||||
|
// RealClock is just a real clock. It satisfies Clock by delegating to time.Now, and
// its zero value is ready to use.
type RealClock struct{}

// Now returns the current time.
func (RealClock) Now() time.Time {
	return time.Now()
}
|
||||||
199
vendor/github.com/go-webauthn/webauthn/protocol/assertion.go
generated
vendored
Normal file
199
vendor/github.com/go-webauthn/webauthn/protocol/assertion.go
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// The CredentialAssertionResponse is the raw response returned to the Relying Party from an authenticator when we request a
// credential for login/assertion.
type CredentialAssertionResponse struct {
	PublicKeyCredential

	// AssertionResponse carries the raw authenticator data, signature, and user handle.
	AssertionResponse AuthenticatorAssertionResponse `json:"response"`
}
|
||||||
|
|
||||||
|
// The ParsedCredentialAssertionData is the parsed [CredentialAssertionResponse] that has been marshalled into a format
// that allows us to verify the client and authenticator data inside the response.
type ParsedCredentialAssertionData struct {
	ParsedPublicKeyCredential

	// Response is the decoded assertion payload used by Verify.
	Response ParsedAssertionResponse

	// Raw retains the original response; Verify hashes Raw byte fields directly.
	Raw CredentialAssertionResponse
}
|
||||||
|
|
||||||
|
// The AuthenticatorAssertionResponse contains the raw authenticator assertion data and is parsed into
// [ParsedAssertionResponse].
type AuthenticatorAssertionResponse struct {
	AuthenticatorResponse

	// AuthenticatorData is the base64url-encoded authenticator data.
	AuthenticatorData URLEncodedBase64 `json:"authenticatorData"`

	// Signature is the assertion signature produced by the authenticator.
	Signature URLEncodedBase64 `json:"signature"`

	// UserHandle optionally identifies the user the credential was registered to.
	UserHandle URLEncodedBase64 `json:"userHandle,omitempty"`
}
|
||||||
|
|
||||||
|
// ParsedAssertionResponse is the parsed form of [AuthenticatorAssertionResponse].
type ParsedAssertionResponse struct {
	// CollectedClientData is the decoded clientDataJSON.
	CollectedClientData CollectedClientData

	// AuthenticatorData is the unmarshalled authenticator data.
	AuthenticatorData AuthenticatorData

	// Signature is the raw assertion signature bytes.
	Signature []byte

	// UserHandle is the raw user handle bytes, if any.
	UserHandle []byte
}
|
||||||
|
|
||||||
|
// ParseCredentialRequestResponse parses the credential request response into a format that is either required by the
|
||||||
|
// specification or makes the assertion verification steps easier to complete. This takes a [*http.Request] that contains
|
||||||
|
// the assertion response data in a raw, mostly base64 encoded format, and parses the data into manageable structures.
|
||||||
|
func ParseCredentialRequestResponse(response *http.Request) (*ParsedCredentialAssertionData, error) {
|
||||||
|
if response == nil || response.Body == nil {
|
||||||
|
return nil, ErrBadRequest.WithDetails("No response given")
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func(request *http.Request) {
|
||||||
|
_, _ = io.Copy(io.Discard, request.Body)
|
||||||
|
_ = request.Body.Close()
|
||||||
|
}(response)
|
||||||
|
|
||||||
|
return ParseCredentialRequestResponseBody(response.Body)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseCredentialRequestResponseBody parses the credential request response into a format that is either required by
|
||||||
|
// the specification or makes the assertion verification steps easier to complete. This takes an [io.Reader] that contains
|
||||||
|
// the assertion response data in a raw, mostly base64 encoded format, and parses the data into manageable structures.
|
||||||
|
func ParseCredentialRequestResponseBody(body io.Reader) (par *ParsedCredentialAssertionData, err error) {
|
||||||
|
var car CredentialAssertionResponse
|
||||||
|
|
||||||
|
if err = decodeBody(body, &car); err != nil {
|
||||||
|
return nil, ErrBadRequest.WithDetails("Parse error for Assertion").WithInfo(err.Error()).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return car.Parse()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseCredentialRequestResponseBytes is an alternative version of [ParseCredentialRequestResponseBody] that just takes
|
||||||
|
// a byte slice.
|
||||||
|
func ParseCredentialRequestResponseBytes(data []byte) (par *ParsedCredentialAssertionData, err error) {
|
||||||
|
var car CredentialAssertionResponse
|
||||||
|
|
||||||
|
if err = decodeBytes(data, &car); err != nil {
|
||||||
|
return nil, ErrBadRequest.WithDetails("Parse error for Assertion").WithInfo(err.Error()).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return car.Parse()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse validates and parses the [CredentialAssertionResponse] into a [ParseCredentialCreationResponseBody]. This receiver
// is unlikely to be expressly guaranteed under the versioning policy. Users looking for this guarantee should see
// [ParseCredentialRequestResponseBody] instead, and this receiver should only be used if that function is inadequate
// for their use case.
func (car CredentialAssertionResponse) Parse() (par *ParsedCredentialAssertionData, err error) {
	// The credential ID is mandatory and must be valid base64url.
	if car.ID == "" {
		return nil, ErrBadRequest.WithDetails("CredentialAssertionResponse with ID missing")
	}

	if _, err = base64.RawURLEncoding.DecodeString(car.ID); err != nil {
		return nil, ErrBadRequest.WithDetails("CredentialAssertionResponse with ID not base64url encoded").WithError(err)
	}

	// The credential type must be "public-key".
	if car.Type != string(PublicKeyCredentialType) {
		return nil, ErrBadRequest.WithDetails("CredentialAssertionResponse with bad type")
	}

	var attachment AuthenticatorAttachment

	// Only recognized attachment values are retained; anything else leaves
	// attachment as the empty string.
	switch att := AuthenticatorAttachment(car.AuthenticatorAttachment); att {
	case Platform, CrossPlatform:
		attachment = att
	}

	par = &ParsedCredentialAssertionData{
		ParsedPublicKeyCredential{
			ParsedCredential{car.ID, car.Type}, car.RawID, car.ClientExtensionResults, attachment,
		},
		ParsedAssertionResponse{
			Signature:  car.AssertionResponse.Signature,
			UserHandle: car.AssertionResponse.UserHandle,
		},
		car,
	}

	// Step 5. Let JSONtext be the result of running UTF-8 decode on the value of cData.
	// We don't call it cData but this is Step 5 in the spec.
	if err = json.Unmarshal(car.AssertionResponse.ClientDataJSON, &par.Response.CollectedClientData); err != nil {
		return nil, err
	}

	// Decode the raw authenticator data into its structured form.
	if err = par.Response.AuthenticatorData.Unmarshal(car.AssertionResponse.AuthenticatorData); err != nil {
		return nil, ErrParsingData.WithDetails("Error unmarshalling auth data").WithError(err)
	}

	return par, nil
}
|
||||||
|
|
||||||
|
// Verify the remaining elements of the assertion data by following the steps outlined in the referenced specification
// documentation. It's important to note that the credentialBytes field is the CBOR representation of the credential.
//
// Specification: §7.2 Verifying an Authentication Assertion (https://www.w3.org/TR/webauthn/#sctn-verifying-assertion)
func (p *ParsedCredentialAssertionData) Verify(storedChallenge string, relyingPartyID string, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode, appID string, verifyUser bool, verifyUserPresence bool, credentialBytes []byte) error {
	// Steps 4 through 6 in verifying the assertion data (https://www.w3.org/TR/webauthn/#verifying-assertion) are
	// "assertive" steps, i.e. "Let JSONtext be the result of running UTF-8 decode on the value of cData."
	// We handle these steps in part as we verify but also beforehand
	//
	// Handle steps 7 through 10 of assertion by verifying stored data against the Collected Client Data
	// returned by the authenticator.
	validError := p.Response.CollectedClientData.Verify(storedChallenge, AssertCeremony, rpOrigins, rpTopOrigins, rpTopOriginsVerify)
	if validError != nil {
		return validError
	}

	// Begin Step 11. Verify that the rpIdHash in authData is the SHA-256 hash of the RP ID expected by the RP.
	rpIDHash := sha256.Sum256([]byte(relyingPartyID))

	// If the FIDO AppID extension is in play, the authenticator data may instead be
	// bound to the SHA-256 of the appID; compute it so AuthenticatorData.Verify can
	// accept either hash.
	var appIDHash [32]byte
	if appID != "" {
		appIDHash = sha256.Sum256([]byte(appID))
	}

	// Handle steps 11 through 14, verifying the authenticator data.
	validError = p.Response.AuthenticatorData.Verify(rpIDHash[:], appIDHash[:], verifyUser, verifyUserPresence)
	if validError != nil {
		return validError
	}

	// Step 15. Let hash be the result of computing a hash over the cData using SHA-256.
	clientDataHash := sha256.Sum256(p.Raw.AssertionResponse.ClientDataJSON)

	// Step 16. Using the credential public key looked up in step 3, verify that sig is
	// a valid signature over the binary concatenation of authData and hash.

	sigData := append(p.Raw.AssertionResponse.AuthenticatorData, clientDataHash[:]...) //nolint:gocritic // This is intentional.

	var (
		key any
		err error
	)

	// If the Session Data does not contain the appID extension or it wasn't reported as used by the Client/RP then we
	// use the standard CTAP2 public key parser.
	if appID == "" {
		key, err = webauthncose.ParsePublicKey(credentialBytes)
	} else {
		key, err = webauthncose.ParseFIDOPublicKey(credentialBytes)
	}

	if err != nil {
		return ErrAssertionSignature.WithDetails(fmt.Sprintf("Error parsing the assertion public key: %+v", err)).WithError(err)
	}

	valid, err := webauthncose.VerifySignature(key, sigData, p.Response.Signature)
	if !valid || err != nil {
		return ErrAssertionSignature.WithDetails(fmt.Sprintf("Error validating the assertion signature: %+v", err)).WithError(err)
	}

	return nil
}
|
||||||
234
vendor/github.com/go-webauthn/webauthn/protocol/attestation.go
generated
vendored
Normal file
234
vendor/github.com/go-webauthn/webauthn/protocol/attestation.go
generated
vendored
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncbor"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// AuthenticatorAttestationResponse is the initial unpacked 'response' object received by the relying party. This
// contains the clientDataJSON object, which will be marshalled into [CollectedClientData], and the 'attestationObject',
// which contains information about the authenticator, and the newly minted public key credential. The information in
// both objects are used to verify the authenticity of the ceremony and new credential.
//
// See: https://www.w3.org/TR/webauthn/#typedefdef-publickeycredentialjson
type AuthenticatorAttestationResponse struct {
	// The byte slice of clientDataJSON, which becomes CollectedClientData.
	AuthenticatorResponse

	// Transports lists the transport hints reported by the client (e.g. remapped in Parse).
	Transports []string `json:"transports,omitempty"`

	// AuthenticatorData is the raw authenticator data as reported by the client.
	AuthenticatorData URLEncodedBase64 `json:"authenticatorData"`

	// PublicKey is the credential public key as reported by the client.
	PublicKey URLEncodedBase64 `json:"publicKey"`

	// PublicKeyAlgorithm is the COSE algorithm identifier of the public key.
	PublicKeyAlgorithm int64 `json:"publicKeyAlgorithm"`

	// AttestationObject is the byte slice version of attestationObject.
	// This attribute contains an attestation object, which is opaque to, and
	// cryptographically protected against tampering by, the client. The
	// attestation object contains both authenticator data and an attestation
	// statement. The former contains the AAGUID, a unique credential ID, and
	// the credential public key. The contents of the attestation statement are
	// determined by the attestation statement format used by the authenticator.
	// It also contains any additional information that the Relying Party's server
	// requires to validate the attestation statement, as well as to decode and
	// validate the authenticator data along with the JSON-serialized client data.
	AttestationObject URLEncodedBase64 `json:"attestationObject"`
}
|
||||||
|
|
||||||
|
// ParsedAttestationResponse is the parsed version of [AuthenticatorAttestationResponse].
type ParsedAttestationResponse struct {
	// CollectedClientData is the decoded clientDataJSON.
	CollectedClientData CollectedClientData

	// AttestationObject is the CBOR-decoded attestation object.
	AttestationObject AttestationObject

	// Transports is the normalized list of authenticator transports.
	Transports []AuthenticatorTransport
}
|
||||||
|
|
||||||
|
// AttestationObject is the raw attestationObject.
//
// Authenticators SHOULD also provide some form of attestation, if possible. If an authenticator does, the basic
// requirement is that the authenticator can produce, for each credential public key, an attestation statement
// verifiable by the WebAuthn Relying Party. Typically, this attestation statement contains a signature by an
// attestation private key over the attested credential public key and a challenge, as well as a certificate or similar
// data providing provenance information for the attestation public key, enabling the Relying Party to make a trust
// decision. However, if an attestation key pair is not available, then the authenticator MAY either perform self
// attestation of the credential public key with the corresponding credential private key, or otherwise perform no
// attestation. All this information is returned by authenticators any time a new public key credential is generated, in
// the overall form of an attestation object.
//
// Specification: §6.5. Attestation (https://www.w3.org/TR/webauthn/#sctn-attestation)
type AttestationObject struct {
	// The authenticator data, including the newly created public key. See [AuthenticatorData] for more info.
	AuthData AuthenticatorData

	// The byteform version of the authenticator data, used in part for signature validation.
	RawAuthData []byte `json:"authData"`

	// The format of the Attestation data.
	Format string `json:"fmt"`

	// The attestation statement data sent back if attestation is requested.
	AttStatement map[string]any `json:"attStmt,omitempty"`
}
|
||||||
|
|
||||||
|
// NonCompoundAttestationObject carries only the format and statement of a single
// attestation, without authenticator data fields.
type NonCompoundAttestationObject struct {
	// The format of the Attestation data.
	Format string `json:"fmt"`

	// The attestation statement data sent back if attestation is requested.
	AttStatement map[string]any `json:"attStmt,omitempty"`
}
|
||||||
|
|
||||||
|
// attestationFormatValidationHandler validates a single attestation statement format,
// returning the attestation type and the certificate chain values it extracted.
type attestationFormatValidationHandler func(att AttestationObject, clientDataHash []byte, mds metadata.Provider) (attestationType string, x5cs []any, err error)

// attestationRegistry maps attestation format identifiers to their validation handlers.
// NOTE(review): access is not synchronized; presumably all registration happens during
// package init before any verification runs — confirm before registering at runtime.
var attestationRegistry = make(map[AttestationFormat]attestationFormatValidationHandler)

// RegisterAttestationFormat is a method to register attestation formats with the library. Generally using one of the
// locally registered attestation formats is enough.
func RegisterAttestationFormat(format AttestationFormat, handler attestationFormatValidationHandler) {
	attestationRegistry[format] = handler
}
|
||||||
|
|
||||||
|
// Parse the values returned in the authenticator response and perform attestation verification
// Step 8. This returns a fully decoded struct with the data put into a format that can be
// used to verify the user and credential that was created.
func (ccr *AuthenticatorAttestationResponse) Parse() (p *ParsedAttestationResponse, err error) {
	p = &ParsedAttestationResponse{}

	// Decode the collected client data from its JSON form.
	if err = json.Unmarshal(ccr.ClientDataJSON, &p.CollectedClientData); err != nil {
		return nil, ErrParsingData.WithInfo(err.Error()).WithError(err)
	}

	// CBOR-decode the attestation object envelope.
	if err = webauthncbor.Unmarshal(ccr.AttestationObject, &p.AttestationObject); err != nil {
		return nil, ErrParsingData.WithInfo(err.Error()).WithError(err)
	}

	// Step 8. Perform CBOR decoding on the attestationObject field of the AuthenticatorAttestationResponse
	// structure to obtain the attestation statement format fmt, the authenticator data authData, and
	// the attestation statement attStmt.
	if err = p.AttestationObject.AuthData.Unmarshal(p.AttestationObject.RawAuthData); err != nil {
		return nil, err
	}

	// Registration requires attested credential data to be present in the flags.
	if !p.AttestationObject.AuthData.Flags.HasAttestedCredentialData() {
		return nil, ErrAttestationFormat.WithInfo("Attestation missing attested credential data flag")
	}

	// Normalize transport strings, remapping any internally-known aliases.
	for _, t := range ccr.Transports {
		if transport, ok := internalRemappedAuthenticatorTransport[t]; ok {
			p.Transports = append(p.Transports, transport)
		} else {
			p.Transports = append(p.Transports, AuthenticatorTransport(t))
		}
	}

	return p, nil
}
|
||||||
|
|
||||||
|
// Verify performs Steps 13 through 19 of registration verification.
//
// Steps 13 through 15 are verified against the auth data. These steps are identical to 15 through 18 for assertion so we
// handle them with AuthData.
func (a *AttestationObject) Verify(relyingPartyID string, clientDataHash []byte, userVerificationRequired bool, userPresenceRequired bool, mds metadata.Provider, credParams []CredentialParameter) (err error) {
	rpIDHash := sha256.Sum256([]byte(relyingPartyID))

	// Begin Step 13 through 15. Verify that the rpIdHash in authData is the SHA-256 hash of the RP ID expected by the RP.
	if err = a.AuthData.Verify(rpIDHash[:], nil, userVerificationRequired, userPresenceRequired); err != nil {
		return err
	}

	// Step 16. Verify that the "alg" parameter in the credential public key in
	// authData matches the alg attribute of one of the items in options.pubKeyCredParams.
	var pk webauthncose.PublicKeyData
	if err = webauthncbor.Unmarshal(a.AuthData.AttData.CredentialPublicKey, &pk); err != nil {
		return err
	}

	found := false

	for _, credParam := range credParams {
		if int(pk.Algorithm) == int(credParam.Algorithm) {
			found = true

			break
		}
	}

	if !found {
		return ErrAttestationFormat.WithInfo("Credential public key algorithm not supported")
	}

	// Steps 18 and 19 (format dispatch and statement verification) are delegated.
	return a.VerifyAttestation(clientDataHash, mds)
}
|
||||||
|
|
||||||
|
// VerifyAttestation only verifies the attestation object excluding the AuthData values. If you wish to also verify the
|
||||||
|
// AuthData values you should use [Verify].
|
||||||
|
func (a *AttestationObject) VerifyAttestation(clientDataHash []byte, mds metadata.Provider) (err error) {
|
||||||
|
// Step 18. Determine the attestation statement format by performing a
|
||||||
|
// USASCII case-sensitive match on fmt against the set of supported
|
||||||
|
// WebAuthn Attestation Statement Format Identifier values. The up-to-date
|
||||||
|
// list of registered WebAuthn Attestation Statement Format Identifier
|
||||||
|
// values is maintained in the IANA registry of the same name
|
||||||
|
// [WebAuthn-Registries] (https://www.w3.org/TR/webauthn/#biblio-webauthn-registries).
|
||||||
|
//
|
||||||
|
// Since there is not an active registry yet, we'll check it against our internal
|
||||||
|
// Supported types.
|
||||||
|
//
|
||||||
|
// But first let's make sure attestation is present. If it isn't, we don't need to handle
|
||||||
|
// any of the following steps.
|
||||||
|
if AttestationFormat(a.Format) == AttestationFormatNone {
|
||||||
|
if len(a.AttStatement) != 0 {
|
||||||
|
return ErrAttestationFormat.WithInfo("Attestation format none with attestation present")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
handler attestationFormatValidationHandler
|
||||||
|
valid bool
|
||||||
|
)
|
||||||
|
|
||||||
|
if handler, valid = attestationRegistry[AttestationFormat(a.Format)]; !valid {
|
||||||
|
return ErrAttestationFormat.WithInfo(fmt.Sprintf("Attestation format %s is unsupported", a.Format))
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
aaguid uuid.UUID
|
||||||
|
attestationType string
|
||||||
|
x5cs []any
|
||||||
|
)
|
||||||
|
|
||||||
|
// Step 19. Verify that attStmt is a correct attestation statement, conveying a valid attestation signature, by using
|
||||||
|
// the attestation statement format fmt’s verification procedure given attStmt, authData and the hash of the serialized
|
||||||
|
// client data computed in step 7.
|
||||||
|
if attestationType, x5cs, err = handler(*a, clientDataHash, mds); err != nil {
|
||||||
|
return err.(*Error).WithInfo(attestationType)
|
||||||
|
}
|
||||||
|
|
||||||
|
if attestationType == string(AttestationFormatCompound) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(a.AuthData.AttData.AAGUID) != 0 {
|
||||||
|
if aaguid, err = uuid.FromBytes(a.AuthData.AttData.AAGUID); err != nil {
|
||||||
|
return ErrInvalidAttestation.WithInfo("Error occurred parsing AAGUID during attestation validation").WithDetails(err.Error()).WithError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if mds == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if e := ValidateMetadata(context.Background(), mds, aaguid, attestationType, a.Format, x5cs); e != nil {
|
||||||
|
return ErrInvalidAttestation.WithInfo(fmt.Sprintf("Error occurred validating metadata during attestation validation: %+v", e)).WithDetails(e.DevInfo).WithError(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
260
vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go
generated
vendored
Normal file
260
vendor/github.com/go-webauthn/webauthn/protocol/attestation_androidkey.go
generated
vendored
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/asn1"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerAndroidKey is the handler for the Android Key Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// An Android key attestation statement consists simply of the Android attestation statement, which is a series of DER
|
||||||
|
// encoded X.509 certificates. See the Android developer documentation. Its syntax is defined as follows:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
//
|
||||||
|
// fmt: "android-key",
|
||||||
|
// attStmt: androidStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// androidStmtFormat = {
|
||||||
|
// alg: COSEAlgorithmIdentifier,
|
||||||
|
// sig: bytes,
|
||||||
|
// x5c: [ credCert: bytes, * (caCert: bytes) ]
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.4. Android Key Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#sctn-android-key-attestation
|
||||||
|
func attestationFormatValidationHandlerAndroidKey(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
var (
|
||||||
|
alg int64
|
||||||
|
sig []byte
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
// Given the verification procedure inputs attStmt, authenticatorData and clientDataHash, the verification procedure is as follows:
|
||||||
|
// §8.4.1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it to extract
|
||||||
|
// the contained fields.
|
||||||
|
// Get the alg value - A COSEAlgorithmIdentifier containing the identifier of the algorithm
|
||||||
|
// used to generate the attestation signature.
|
||||||
|
if alg, ok = att.AttStatement[stmtAlgorithm].(int64); !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Error retrieving alg value")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the sig value - A byte string containing the attestation signature.
|
||||||
|
if sig, ok = att.AttStatement[stmtSignature].([]byte); !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Error retrieving sig value")
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.4.2. Verify that sig is a valid signature over the concatenation of authenticatorData and clientDataHash
|
||||||
|
// using the public key in the first certificate in x5c with the algorithm specified in alg.
|
||||||
|
var (
|
||||||
|
x5c []any
|
||||||
|
certs []*x509.Certificate
|
||||||
|
)
|
||||||
|
|
||||||
|
if x5c, certs, err = attStatementParseX5CS(att.AttStatement, stmtX5C); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(certs) == 0 {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("No certificates in x5c")
|
||||||
|
}
|
||||||
|
|
||||||
|
credCert := certs[0]
|
||||||
|
|
||||||
|
if _, err = attStatementCertChainVerify(certs, attAndroidKeyHardwareRootsCertPool, true, time.Now().Add(time.Hour*8760).UTC()); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Error validating x5c cert chain").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
signatureData := append(att.RawAuthData, clientDataHash...) //nolint:gocritic // This is intentional.
|
||||||
|
|
||||||
|
if sigAlg := webauthncose.SigAlgFromCOSEAlg(webauthncose.COSEAlgorithmIdentifier(alg)); sigAlg == x509.UnknownSignatureAlgorithm {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Unsupported COSE alg: %d", alg))
|
||||||
|
} else if err = credCert.CheckSignature(sigAlg, signatureData, sig); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Signature validation error: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that the public key in the first certificate in x5c matches the credentialPublicKey in the attestedCredentialData in authenticatorData.
|
||||||
|
var attPublicKeyData webauthncose.EC2PublicKeyData
|
||||||
|
if attPublicKeyData, err = verifyAttestationECDSAPublicKeyMatch(att, credCert); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var valid bool
|
||||||
|
if valid, err = attPublicKeyData.Verify(signatureData, sig); err != nil || !valid {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error parsing public key: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.4.3. Verify that the attestationChallenge field in the attestation certificate extension data is identical to clientDataHash.
|
||||||
|
// attCert.Extensions.
|
||||||
|
// As noted in §8.4.1 (https://www.w3.org/TR/webauthn/#key-attstn-cert-requirements) the Android Key Attestation
|
||||||
|
// certificate's android key attestation certificate extension data is identified by the OID
|
||||||
|
// "1.3.6.1.4.1.11129.2.1.17".
|
||||||
|
var attExtBytes []byte
|
||||||
|
|
||||||
|
for _, ext := range credCert.Extensions {
|
||||||
|
if ext.Id.Equal(oidExtensionAndroidKeystore) {
|
||||||
|
attExtBytes = ext.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(attExtBytes) == 0 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate extensions missing 1.3.6.1.4.1.11129.2.1.17")
|
||||||
|
}
|
||||||
|
|
||||||
|
decoded := keyDescription{}
|
||||||
|
|
||||||
|
if _, err = asn1.Unmarshal(attExtBytes, &decoded); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Unable to parse Android key attestation certificate extensions").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that the attestationChallenge field in the attestation certificate extension data is identical to clientDataHash.
|
||||||
|
if !bytes.Equal(decoded.AttestationChallenge, clientDataHash) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation challenge not equal to clientDataHash")
|
||||||
|
}
|
||||||
|
|
||||||
|
// The AuthorizationList.allApplications field is not present on either authorization list (softwareEnforced nor teeEnforced), since PublicKeyCredential MUST be scoped to the RP ID.
|
||||||
|
if decoded.SoftwareEnforced.AllApplications != nil || decoded.TeeEnforced.AllApplications != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate extensions contains all applications field")
|
||||||
|
}
|
||||||
|
|
||||||
|
// For the following, use only the teeEnforced authorization list if the RP wants to accept only keys from a trusted execution environment, otherwise use the union of teeEnforced and softwareEnforced.
|
||||||
|
// The value in the AuthorizationList.origin field is equal to KM_ORIGIN_GENERATED (which == 0).
|
||||||
|
if decoded.SoftwareEnforced.Origin != KM_ORIGIN_GENERATED || decoded.TeeEnforced.Origin != KM_ORIGIN_GENERATED {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate extensions contains authorization list with origin not equal KM_ORIGIN_GENERATED")
|
||||||
|
}
|
||||||
|
|
||||||
|
// The value in the AuthorizationList.purpose field is equal to KM_PURPOSE_SIGN (which == 2).
|
||||||
|
if !contains(decoded.SoftwareEnforced.Purpose, KM_PURPOSE_SIGN) && !contains(decoded.TeeEnforced.Purpose, KM_PURPOSE_SIGN) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate extensions contains authorization list with purpose not equal KM_PURPOSE_SIGN")
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(metadata.BasicFull), x5c, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func contains(s []int, e int) bool {
|
||||||
|
for _, a := range s {
|
||||||
|
if a == e {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
type keyDescription struct {
|
||||||
|
AttestationVersion int
|
||||||
|
AttestationSecurityLevel asn1.Enumerated
|
||||||
|
KeymasterVersion int
|
||||||
|
KeymasterSecurityLevel asn1.Enumerated
|
||||||
|
AttestationChallenge []byte
|
||||||
|
UniqueID []byte
|
||||||
|
SoftwareEnforced authorizationList
|
||||||
|
TeeEnforced authorizationList
|
||||||
|
}
|
||||||
|
|
||||||
|
type authorizationList struct {
|
||||||
|
Purpose []int `asn1:"tag:1,explicit,set,optional"`
|
||||||
|
Algorithm int `asn1:"tag:2,explicit,optional"`
|
||||||
|
KeySize int `asn1:"tag:3,explicit,optional"`
|
||||||
|
Digest []int `asn1:"tag:5,explicit,set,optional"`
|
||||||
|
Padding []int `asn1:"tag:6,explicit,set,optional"`
|
||||||
|
EcCurve int `asn1:"tag:10,explicit,optional"`
|
||||||
|
RsaPublicExponent int `asn1:"tag:200,explicit,optional"`
|
||||||
|
RollbackResistance any `asn1:"tag:303,explicit,optional"`
|
||||||
|
ActiveDateTime int `asn1:"tag:400,explicit,optional"`
|
||||||
|
OriginationExpireDateTime int `asn1:"tag:401,explicit,optional"`
|
||||||
|
UsageExpireDateTime int `asn1:"tag:402,explicit,optional"`
|
||||||
|
NoAuthRequired any `asn1:"tag:503,explicit,optional"`
|
||||||
|
UserAuthType int `asn1:"tag:504,explicit,optional"`
|
||||||
|
AuthTimeout int `asn1:"tag:505,explicit,optional"`
|
||||||
|
AllowWhileOnBody any `asn1:"tag:506,explicit,optional"`
|
||||||
|
TrustedUserPresenceRequired any `asn1:"tag:507,explicit,optional"`
|
||||||
|
TrustedConfirmationRequired any `asn1:"tag:508,explicit,optional"`
|
||||||
|
UnlockedDeviceRequired any `asn1:"tag:509,explicit,optional"`
|
||||||
|
AllApplications any `asn1:"tag:600,explicit,optional"`
|
||||||
|
ApplicationID any `asn1:"tag:601,explicit,optional"`
|
||||||
|
CreationDateTime int `asn1:"tag:701,explicit,optional"`
|
||||||
|
Origin int `asn1:"tag:702,explicit,optional"`
|
||||||
|
RootOfTrust rootOfTrust `asn1:"tag:704,explicit,optional"`
|
||||||
|
OsVersion int `asn1:"tag:705,explicit,optional"`
|
||||||
|
OsPatchLevel int `asn1:"tag:706,explicit,optional"`
|
||||||
|
AttestationApplicationID []byte `asn1:"tag:709,explicit,optional"`
|
||||||
|
AttestationIDBrand []byte `asn1:"tag:710,explicit,optional"`
|
||||||
|
AttestationIDDevice []byte `asn1:"tag:711,explicit,optional"`
|
||||||
|
AttestationIDProduct []byte `asn1:"tag:712,explicit,optional"`
|
||||||
|
AttestationIDSerial []byte `asn1:"tag:713,explicit,optional"`
|
||||||
|
AttestationIDImei []byte `asn1:"tag:714,explicit,optional"`
|
||||||
|
AttestationIDMeid []byte `asn1:"tag:715,explicit,optional"`
|
||||||
|
AttestationIDManufacturer []byte `asn1:"tag:716,explicit,optional"`
|
||||||
|
AttestationIDModel []byte `asn1:"tag:717,explicit,optional"`
|
||||||
|
VendorPatchLevel int `asn1:"tag:718,explicit,optional"`
|
||||||
|
BootPatchLevel int `asn1:"tag:719,explicit,optional"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type rootOfTrust struct {
|
||||||
|
verifiedBootKey []byte //nolint:unused
|
||||||
|
deviceLocked bool //nolint:unused
|
||||||
|
verifiedBootState verifiedBootState //nolint:unused
|
||||||
|
verifiedBootHash []byte //nolint:unused
|
||||||
|
}
|
||||||
|
|
||||||
|
type verifiedBootState int
|
||||||
|
|
||||||
|
const (
|
||||||
|
Verified verifiedBootState = iota
|
||||||
|
SelfSigned
|
||||||
|
Unverified
|
||||||
|
Failed
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// KM_ORIGIN_GENERATED means generated in keymaster. Should not exist outside the TEE.
|
||||||
|
KM_ORIGIN_GENERATED = iota
|
||||||
|
|
||||||
|
// KM_ORIGIN_DERIVED means derived inside keymaster. Likely exists off-device.
|
||||||
|
KM_ORIGIN_DERIVED
|
||||||
|
|
||||||
|
// KM_ORIGIN_IMPORTED means imported into keymaster. Existed as clear text in Android.
|
||||||
|
KM_ORIGIN_IMPORTED
|
||||||
|
|
||||||
|
// KM_ORIGIN_UNKNOWN means keymaster did not record origin. This value can only be seen on keys in a keymaster0
|
||||||
|
// implementation. The keymaster0 adapter uses this value to document the fact that it is unknown whether the key
|
||||||
|
// was generated inside or imported into keymaster.
|
||||||
|
KM_ORIGIN_UNKNOWN
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// KM_PURPOSE_ENCRYPT is usable with RSA, EC and AES keys.
|
||||||
|
KM_PURPOSE_ENCRYPT = iota
|
||||||
|
|
||||||
|
// KM_PURPOSE_DECRYPT is usable with RSA, EC and AES keys.
|
||||||
|
KM_PURPOSE_DECRYPT
|
||||||
|
|
||||||
|
// KM_PURPOSE_SIGN is usable with RSA, EC and HMAC keys.
|
||||||
|
KM_PURPOSE_SIGN
|
||||||
|
|
||||||
|
// KM_PURPOSE_VERIFY is usable with RSA, EC and HMAC keys.
|
||||||
|
KM_PURPOSE_VERIFY
|
||||||
|
|
||||||
|
// KM_PURPOSE_DERIVE_KEY is usable with EC keys.
|
||||||
|
KM_PURPOSE_DERIVE_KEY
|
||||||
|
|
||||||
|
// KM_PURPOSE_WRAP is usable with wrapped keys.
|
||||||
|
KM_PURPOSE_WRAP
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
attAndroidKeyHardwareRootsCertPool *x509.CertPool
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatAndroidKey, attestationFormatValidationHandlerAndroidKey)
|
||||||
|
}
|
||||||
99
vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go
generated
vendored
Normal file
99
vendor/github.com/go-webauthn/webauthn/protocol/attestation_apple.go
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/sha256"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/asn1"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerAppleAnonymous is the handler for the Apple Anonymous Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// The syntax of an Apple attestation statement is defined as follows:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
//
|
||||||
|
// fmt: "apple",
|
||||||
|
// attStmt: appleStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// appleStmtFormat = {
|
||||||
|
// x5c: [ credCert: bytes, * (caCert: bytes) ]
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.8. Apple Anonymous Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See : https://www.w3.org/TR/webauthn/#sctn-apple-anonymous-attestation
|
||||||
|
func attestationFormatValidationHandlerAppleAnonymous(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
// Step 1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it
|
||||||
|
// to extract the contained fields.
|
||||||
|
var (
|
||||||
|
x5c []any
|
||||||
|
certs []*x509.Certificate
|
||||||
|
)
|
||||||
|
|
||||||
|
if x5c, certs, err = attStatementParseX5CS(att.AttStatement, stmtX5C); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
credCert := certs[0]
|
||||||
|
|
||||||
|
if _, err = attStatementCertChainVerify(certs, attAppleHardwareRootsCertPool, true, time.Now().Add(time.Hour*8760).UTC()); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Error validating x5c cert chain").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2. Concatenate authenticatorData and clientDataHash to form nonceToHash.
|
||||||
|
nonceToHash := append(att.RawAuthData, clientDataHash...) //nolint:gocritic // This is intentional.
|
||||||
|
|
||||||
|
// Step 3. Perform SHA-256 hash of nonceToHash to produce nonce.
|
||||||
|
nonce := sha256.Sum256(nonceToHash)
|
||||||
|
|
||||||
|
// Step 4. Verify that nonce equals the value of the extension with OID 1.2.840.113635.100.8.2 in credCert.
|
||||||
|
var attExtBytes []byte
|
||||||
|
|
||||||
|
for _, ext := range credCert.Extensions {
|
||||||
|
if ext.Id.Equal(oidExtensionAppleAnonymousAttestation) {
|
||||||
|
attExtBytes = ext.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(attExtBytes) == 0 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate extensions missing 1.2.840.113635.100.8.2")
|
||||||
|
}
|
||||||
|
|
||||||
|
decoded := AppleAnonymousAttestation{}
|
||||||
|
|
||||||
|
if _, err = asn1.Unmarshal(attExtBytes, &decoded); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Unable to parse apple attestation certificate extensions").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !bytes.Equal(decoded.Nonce, nonce[:]) {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Attestation certificate does not contain expected nonce")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 5. Verify that the credential public key equals the Subject Public Key of credCert.
|
||||||
|
if _, err = verifyAttestationECDSAPublicKeyMatch(att, credCert); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 6. If successful, return implementation-specific values representing attestation type Anonymization CA and
|
||||||
|
// attestation trust path x5c.
|
||||||
|
return string(metadata.AnonCA), x5c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// AppleAnonymousAttestation represents the attestation format for Apple, who have not yet published a schema for the
|
||||||
|
// extension (as of JULY 2021.)
|
||||||
|
type AppleAnonymousAttestation struct {
|
||||||
|
Nonce []byte `asn1:"tag:1,explicit"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
attAppleHardwareRootsCertPool *x509.CertPool
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatApple, attestationFormatValidationHandlerAppleAnonymous)
|
||||||
|
}
|
||||||
111
vendor/github.com/go-webauthn/webauthn/protocol/attestation_compound.go
generated
vendored
Normal file
111
vendor/github.com/go-webauthn/webauthn/protocol/attestation_compound.go
generated
vendored
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatCompound, attestationFormatValidationHandlerCompound)
|
||||||
|
}
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerCompound is the handler for the Compound Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// The syntax of a Compound Attestation statement is defined by the following CDDL:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
//
|
||||||
|
// fmt: "compound",
|
||||||
|
// attStmt: [2* nonCompoundAttStmt]
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// nonCompoundAttStmt = { $$attStmtType } .within { fmt: text .ne "compound", * any => any }
|
||||||
|
//
|
||||||
|
// Specification: §8.9. Compound Attestation Statement Forma
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn-3/#sctn-compound-attestation
|
||||||
|
func attestationFormatValidationHandlerCompound(att AttestationObject, clientDataHash []byte, mds metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
var (
|
||||||
|
aaguid uuid.UUID
|
||||||
|
raw any
|
||||||
|
ok bool
|
||||||
|
stmts []any
|
||||||
|
subStmt map[string]any
|
||||||
|
attStmts []NonCompoundAttestationObject
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(att.AuthData.AttData.AAGUID) != 0 {
|
||||||
|
if aaguid, err = uuid.FromBytes(att.AuthData.AttData.AAGUID); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithInfo("Error occurred parsing AAGUID during attestation validation").WithDetails(err.Error()).WithError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if raw, ok = att.AttStatement[stmtAttStmt]; !ok {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound statement missing attStmt")
|
||||||
|
}
|
||||||
|
|
||||||
|
if stmts, ok = raw.([]any); !ok {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound statement attStmt isn't an array")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(stmts) < 2 {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound statement attStmt isn't an array with at least two other statements")
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, stmt := range stmts {
|
||||||
|
if subStmt, ok = stmt.(map[string]any); !ok {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound statement attStmt contains one or more items that isn't an object")
|
||||||
|
}
|
||||||
|
|
||||||
|
var attStmt NonCompoundAttestationObject
|
||||||
|
|
||||||
|
if attStmt.Format, ok = subStmt[stmtFmt].(string); !ok {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound sub-statement does not have a format")
|
||||||
|
}
|
||||||
|
|
||||||
|
if attStmt.AttStatement, ok = subStmt[stmtAttStmt].(map[string]any); !ok {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound sub-statement does not have an attestation statement")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch AttestationFormat(attStmt.Format) {
|
||||||
|
case AttestationFormatCompound:
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound sub-statement has a format of compound which is not allowed")
|
||||||
|
case "":
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Compound sub-statement has an empty format which is not allowed")
|
||||||
|
default:
|
||||||
|
if _, ok = attestationRegistry[AttestationFormat(attStmt.Format)]; !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithInfo(fmt.Sprintf("Attestation sub-statement format %s is unsupported", attStmt.Format))
|
||||||
|
}
|
||||||
|
|
||||||
|
attStmts = append(attStmts, attStmt)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, attStmt := range attStmts {
|
||||||
|
object := AttestationObject{
|
||||||
|
Format: attStmt.Format,
|
||||||
|
AttStatement: attStmt.AttStatement,
|
||||||
|
AuthData: att.AuthData,
|
||||||
|
RawAuthData: att.RawAuthData,
|
||||||
|
}
|
||||||
|
|
||||||
|
var cx5cs []any
|
||||||
|
if _, cx5cs, err = attestationRegistry[AttestationFormat(object.Format)](object, clientDataHash, mds); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if mds == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if e := ValidateMetadata(context.Background(), mds, aaguid, attestationType, object.Format, cx5cs); e != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithInfo(fmt.Sprintf("Error occurred validating metadata during attestation validation: %+v", e)).WithDetails(e.DevInfo).WithError(e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(AttestationFormatCompound), nil, nil
|
||||||
|
}
|
||||||
159
vendor/github.com/go-webauthn/webauthn/protocol/attestation_fido_u2f.go
generated
vendored
Normal file
159
vendor/github.com/go-webauthn/webauthn/protocol/attestation_fido_u2f.go
generated
vendored
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/elliptic"
|
||||||
|
"crypto/x509"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncbor"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerFIDOU2F is the handler for the FIDO U2F Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// The syntax of a FIDO U2F attestation statement is defined as follows:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
//
|
||||||
|
// fmt: "fido-u2f",
|
||||||
|
// attStmt: u2fStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// u2fStmtFormat = {
|
||||||
|
// x5c: [ attestnCert: bytes ],
|
||||||
|
// sig: bytes
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.6. FIDO U2F Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#sctn-fido-u2f-attestation
|
||||||
|
func attestationFormatValidationHandlerFIDOU2F(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
// Non-normative verification procedure of expected requirement.
|
||||||
|
if !bytes.Equal(att.AuthData.AttData.AAGUID, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}) {
|
||||||
|
return "", nil, ErrUnsupportedAlgorithm.WithDetails("U2F attestation format AAGUID not set to 0x00")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Signing procedure. Non-normative verification procedure of expected requirement.
|
||||||
|
// If the credential public key of the attested credential is not of algorithm -7 ("ES256"), stop and return an error.
|
||||||
|
var key webauthncose.EC2PublicKeyData
|
||||||
|
if err = webauthncbor.Unmarshal(att.AuthData.AttData.CredentialPublicKey, &key); err != nil {
|
||||||
|
return "", nil, ErrAttestationCertificate.WithDetails("Error parsing public key").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if webauthncose.COSEAlgorithmIdentifier(key.Algorithm) != webauthncose.AlgES256 {
|
||||||
|
return "", nil, ErrUnsupportedAlgorithm.WithDetails("Non-ES256 Public Key algorithm used")
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
sig []byte
|
||||||
|
raw []byte
|
||||||
|
x5c []any
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
// Step 1. Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it
|
||||||
|
// to extract the contained fields.
|
||||||
|
|
||||||
|
// Check for "x5c" which is a single element array containing the attestation certificate in X.509 format.
|
||||||
|
if x5c, ok = att.AttStatement[stmtX5C].([]any); !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Missing properly formatted x5c data")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note: Packed Attestation, FIDO U2F Attestation, and Assertion Signatures require ASN.1 DER sig values, but it is
|
||||||
|
// RECOMMENDED that any new attestation formats defined not use ASN.1 encodings, but instead represent signatures as
|
||||||
|
// equivalent fixed-length byte arrays without internal structure, using the same representations as used by COSE
|
||||||
|
// signatures as defined in [RFC9053](https://www.rfc-editor.org/rfc/rfc9053.html) and
|
||||||
|
// [RFC8230](https://www.rfc-editor.org/rfc/rfc8230.html).
|
||||||
|
// This is described in §6.5.5 https://www.w3.org/TR/webauthn-3/#sctn-signature-attestation-types.
|
||||||
|
|
||||||
|
// Check for "sig" which is The attestation signature. The signature was calculated over the (raw) U2F
|
||||||
|
// registration response message https://www.w3.org/TR/webauthn/#biblio-fido-u2f-message-formats]
|
||||||
|
// received by the client from the authenticator.
|
||||||
|
if sig, ok = att.AttStatement[stmtSignature].([]byte); !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Missing sig data")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.
|
||||||
|
// 1. Check that x5c has exactly one element and let attCert be that element.
|
||||||
|
// 2. Let certificate public key be the public key conveyed by attCert.
|
||||||
|
// 3. If certificate public key is not an Elliptic Curve (EC) public key over the P-256 curve, terminate this
|
||||||
|
// algorithm and return an appropriate error.
|
||||||
|
|
||||||
|
// Step 2.1.
|
||||||
|
if len(x5c) > 1 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Received more than one element in x5c values")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.2.
|
||||||
|
if raw, ok = x5c[0].([]byte); !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Error decoding ASN.1 data from x5c")
|
||||||
|
}
|
||||||
|
|
||||||
|
attCert, err := x509.ParseCertificate(raw)
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Error parsing certificate from ASN.1 data into certificate").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.3.
|
||||||
|
if attCert.PublicKeyAlgorithm != x509.ECDSA {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate public key algorithm is not ECDSA")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3. Extract the claimed rpIdHash from authenticatorData, and the claimed credentialId and credentialPublicKey
|
||||||
|
// from authenticatorData.attestedCredentialData.
|
||||||
|
rpIdHash := att.AuthData.RPIDHash
|
||||||
|
credentialID := att.AuthData.AttData.CredentialID
|
||||||
|
|
||||||
|
// Step 4. Convert the COSE_KEY formatted credentialPublicKey (see Section 7 of RFC8152 [https://www.w3.org/TR/webauthn/#biblio-rfc8152])
|
||||||
|
// to Raw ANSI X9.62 public key format (see ALG_KEY_ECC_X962_RAW in Section 3.6.2 Public Key
|
||||||
|
// Representation Formats of
|
||||||
|
// [FIDO-Registry](https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-registry-v2.0-id-20180227.html#public-key-representation-formats)).
|
||||||
|
|
||||||
|
// Let x be the value corresponding to the "-2" key (representing x coordinate) in credentialPublicKey, and confirm
|
||||||
|
// its size to be of 32 bytes. If size differs or "-2" key is not found, terminate this algorithm and return an
|
||||||
|
// appropriate error.
|
||||||
|
|
||||||
|
// Let y be the value corresponding to the "-3" key (representing y coordinate) in credentialPublicKey, and confirm
|
||||||
|
// its size to be of 32 bytes. If size differs or "-3" key is not found, terminate this algorithm and return an
|
||||||
|
// appropriate error.
|
||||||
|
credentialPublicKey, ok := attCert.PublicKey.(*ecdsa.PublicKey)
|
||||||
|
if !ok || credentialPublicKey.Curve != elliptic.P256() {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Attestation certificate does not contain a P-256 ECDSA public key")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(key.XCoord) > 32 || len(key.YCoord) > 32 {
|
||||||
|
return "", nil, ErrAttestation.WithDetails("X or Y Coordinate for key is invalid length")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Let publicKeyU2F be the concatenation 0x04 || x || y.
|
||||||
|
publicKeyU2F := bytes.NewBuffer([]byte{0x04})
|
||||||
|
publicKeyU2F.Write(key.XCoord)
|
||||||
|
publicKeyU2F.Write(key.YCoord)
|
||||||
|
|
||||||
|
// Step 5. Let verificationData be the concatenation of (0x00 || rpIdHash || clientDataHash || credentialId || publicKeyU2F)
|
||||||
|
// (see Section 4.3 of [FIDO-U2F-Message-Formats](https://fidoalliance.org/specs/fido-u2f-v1.1-id-20160915/fido-u2f-raw-message-formats-v1.1-id-20160915.html#registration-response-message-success)).
|
||||||
|
verificationData := bytes.NewBuffer([]byte{0x00})
|
||||||
|
verificationData.Write(rpIdHash)
|
||||||
|
verificationData.Write(clientDataHash)
|
||||||
|
verificationData.Write(credentialID)
|
||||||
|
verificationData.Write(publicKeyU2F.Bytes())
|
||||||
|
|
||||||
|
// Step 6. Verify the sig using verificationData and the certificate public key per section 4.1.4 of [SEC1] with
|
||||||
|
// SHA-256 as the hash function used in step two.
|
||||||
|
if err = attCert.CheckSignature(x509.ECDSAWithSHA256, verificationData.Bytes(), sig); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: Step 7. Optionally, inspect x5c and consult externally provided knowledge to determine whether attStmt
|
||||||
|
// conveys a Basic or AttCA attestation.
|
||||||
|
|
||||||
|
// Step 8. If successful, return implementation-specific values representing attestation type Basic, AttCA or
|
||||||
|
// uncertainty, and attestation trust path x5c.
|
||||||
|
return string(metadata.BasicFull), x5c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatFIDOUniversalSecondFactor, attestationFormatValidationHandlerFIDOU2F)
|
||||||
|
}
|
||||||
255
vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go
generated
vendored
Normal file
255
vendor/github.com/go-webauthn/webauthn/protocol/attestation_packed.go
generated
vendored
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/asn1"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatPacked, attestationFormatValidationHandlerPacked)
|
||||||
|
}
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerPacked is the handler for the Packed Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// The syntax of a Packed Attestation statement is defined by the following CDDL:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
//
|
||||||
|
// fmt: "packed",
|
||||||
|
// attStmt: packedStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// packedStmtFormat = {
|
||||||
|
// alg: COSEAlgorithmIdentifier,
|
||||||
|
// sig: bytes,
|
||||||
|
// x5c: [ attestnCert: bytes, * (caCert: bytes) ]
|
||||||
|
// } //
|
||||||
|
// {
|
||||||
|
// alg: COSEAlgorithmIdentifier
|
||||||
|
// sig: bytes,
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.2. Packed Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#sctn-packed-attestation
|
||||||
|
func attestationFormatValidationHandlerPacked(att AttestationObject, clientDataHash []byte, mds metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
var (
|
||||||
|
alg int64
|
||||||
|
sig []byte
|
||||||
|
x5c []any
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
// Step 1. Verify that attStmt is valid CBOR conforming to the syntax defined
|
||||||
|
// above and perform CBOR decoding on it to extract the contained fields.
|
||||||
|
// Get the alg value - A COSEAlgorithmIdentifier containing the identifier of the algorithm
|
||||||
|
// used to generate the attestation signature.
|
||||||
|
if alg, ok = att.AttStatement[stmtAlgorithm].(int64); !ok {
|
||||||
|
return string(AttestationFormatPacked), nil, ErrAttestationFormat.WithDetails("Error retrieving alg value")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the sig value - A byte string containing the attestation signature.
|
||||||
|
if sig, ok = att.AttStatement[stmtSignature].([]byte); !ok {
|
||||||
|
return string(AttestationFormatPacked), nil, ErrAttestationFormat.WithDetails("Error retrieving sig value")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2. If x5c is present, this indicates that the attestation type is not ECDAA.
|
||||||
|
if x5c, ok = att.AttStatement[stmtX5C].([]any); ok {
|
||||||
|
// Handle Basic Attestation steps for the x509 Certificate.
|
||||||
|
return handleBasicAttestation(sig, clientDataHash, att.RawAuthData, att.AuthData.AttData.AAGUID, alg, x5c, mds)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3. If ecdaaKeyId is present, then the attestation type is ECDAA.
|
||||||
|
// Also make sure the we did not have an x509.
|
||||||
|
ecdaaKeyID, ecdaaKeyPresent := att.AttStatement[stmtECDAAKID].([]byte)
|
||||||
|
if ecdaaKeyPresent {
|
||||||
|
// Handle ECDAA Attestation steps for the x509 Certificate.
|
||||||
|
return handleECDAAAttestation(sig, clientDataHash, ecdaaKeyID, mds)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 4. If neither x5c nor ecdaaKeyId is present, self attestation is in use.
|
||||||
|
return handleSelfAttestation(alg, att.AuthData.AttData.CredentialPublicKey, att.RawAuthData, clientDataHash, sig, mds)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle the attestation steps laid out in the basic format.
|
||||||
|
func handleBasicAttestation(sig, clientDataHash, authData, aaguid []byte, alg int64, x5c []any, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
// Step 2.1. Verify that sig is a valid signature over the concatenation of authenticatorData
|
||||||
|
// and clientDataHash using the attestation public key in attestnCert with the algorithm specified in alg.
|
||||||
|
var attestnCert *x509.Certificate
|
||||||
|
|
||||||
|
for i, raw := range x5c {
|
||||||
|
rawByes, ok := raw.([]byte)
|
||||||
|
if !ok {
|
||||||
|
return "", x5c, ErrAttestation.WithDetails("Error getting certificate from x5c cert chain")
|
||||||
|
}
|
||||||
|
|
||||||
|
cert, err := x509.ParseCertificate(rawByes)
|
||||||
|
if err != nil {
|
||||||
|
return "", x5c, ErrAttestationFormat.WithDetails(fmt.Sprintf("Error parsing certificate from ASN.1 data: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if cert.NotBefore.After(time.Now()) || cert.NotAfter.Before(time.Now()) {
|
||||||
|
return "", x5c, ErrAttestationFormat.WithDetails("Cert in chain not time valid")
|
||||||
|
}
|
||||||
|
|
||||||
|
if i == 0 {
|
||||||
|
attestnCert = cert
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if attestnCert == nil {
|
||||||
|
return "", x5c, ErrAttestation.WithDetails("Error getting certificate from x5c cert chain")
|
||||||
|
}
|
||||||
|
|
||||||
|
signatureData := append(authData, clientDataHash...) //nolint:gocritic // This is intentional.
|
||||||
|
|
||||||
|
if sigAlg := webauthncose.SigAlgFromCOSEAlg(webauthncose.COSEAlgorithmIdentifier(alg)); sigAlg == x509.UnknownSignatureAlgorithm {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Unsupported COSE alg: %d", alg))
|
||||||
|
} else if err = attestnCert.CheckSignature(sigAlg, signatureData, sig); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Signature validation error: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.2 Verify that attestnCert meets the requirements in §8.2.1 Packed attestation statement certificate requirements.
|
||||||
|
// §8.2.1 can be found here https://www.w3.org/TR/webauthn/#packed-attestation-cert-requirements
|
||||||
|
|
||||||
|
// Step 2.2.1 (from §8.2.1) Version MUST be set to 3 (which is indicated by an ASN.1 INTEGER with value 2).
|
||||||
|
if attestnCert.Version != 3 {
|
||||||
|
return "", x5c, ErrAttestationCertificate.WithDetails("Attestation Certificate is incorrect version")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.2.2 (from §8.2.1) Subject field MUST be set to:
|
||||||
|
// Subject-C
|
||||||
|
// ISO 3166 code specifying the country where the Authenticator vendor is incorporated (PrintableString).
|
||||||
|
|
||||||
|
// TODO: Find a good, usable, country code library. For now, check stringy-ness
|
||||||
|
subjectString := strings.Join(attestnCert.Subject.Country, "")
|
||||||
|
if subjectString == "" {
|
||||||
|
return "", x5c, ErrAttestationCertificate.WithDetails("Attestation Certificate Country Code is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subject-O
|
||||||
|
// Legal name of the Authenticator vendor (UTF8String).
|
||||||
|
subjectString = strings.Join(attestnCert.Subject.Organization, "")
|
||||||
|
if subjectString == "" {
|
||||||
|
return "", x5c, ErrAttestationCertificate.WithDetails("Attestation Certificate Organization is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subject-OU
|
||||||
|
// Literal string “Authenticator Attestation” (UTF8String).
|
||||||
|
subjectString = strings.Join(attestnCert.Subject.OrganizationalUnit, " ")
|
||||||
|
if subjectString != "Authenticator Attestation" {
|
||||||
|
return "", x5c, ErrAttestationCertificate.WithDetails("Attestation Certificate Organizational Unit is invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Subject-CN
|
||||||
|
// A UTF8String of the vendor’s choosing.
|
||||||
|
subjectString = attestnCert.Subject.CommonName
|
||||||
|
if subjectString == "" {
|
||||||
|
return "", x5c, ErrAttestationCertificate.WithDetails("Attestation Certificate Common Name not set")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.2.3 (from §8.2.1) If the related attestation root certificate is used for multiple authenticator models,
|
||||||
|
// the Extension OID 1.3.6.1.4.1.45724.1.1.4 (id-fido-gen-ce-aaguid) MUST be present, containing the
|
||||||
|
// AAGUID as a 16-byte OCTET STRING. The extension MUST NOT be marked as critical.
|
||||||
|
var foundAAGUID []byte
|
||||||
|
|
||||||
|
for _, extension := range attestnCert.Extensions {
|
||||||
|
if extension.Id.Equal(oidFIDOGenCeAAGUID) {
|
||||||
|
if extension.Critical {
|
||||||
|
return "", x5c, ErrInvalidAttestation.WithDetails("Attestation certificate FIDO extension marked as critical")
|
||||||
|
}
|
||||||
|
|
||||||
|
foundAAGUID = extension.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// We validate the AAGUID as mentioned above
|
||||||
|
// This is not well defined in§8.2.1 but mentioned in step 2.3: we validate the AAGUID if it is present within the certificate
|
||||||
|
// and make sure it matches the auth data AAGUID
|
||||||
|
// Note that an X.509 Extension encodes the DER-encoding of the value in an OCTET STRING. Thus, the
|
||||||
|
// AAGUID MUST be wrapped in two OCTET STRINGS to be valid.
|
||||||
|
if len(foundAAGUID) > 0 {
|
||||||
|
var unMarshalledAAGUID []byte
|
||||||
|
|
||||||
|
if _, err = asn1.Unmarshal(foundAAGUID, &unMarshalledAAGUID); err != nil {
|
||||||
|
return "", x5c, ErrInvalidAttestation.WithDetails("Error unmarshalling AAGUID from certificate")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !bytes.Equal(aaguid, unMarshalledAAGUID) {
|
||||||
|
return "", x5c, ErrInvalidAttestation.WithDetails("Certificate AAGUID does not match Auth Data certificate")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2.2.4 The Basic Constraints extension MUST have the CA component set to false.
|
||||||
|
if attestnCert.IsCA {
|
||||||
|
return "", x5c, ErrInvalidAttestation.WithDetails("Attestation certificate's Basic Constraints marked as CA")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note for 2.2.5 An Authority Information Access (AIA) extension with entry id-ad-ocsp and a CRL
|
||||||
|
// Distribution Point extension [RFC5280](https://www.w3.org/TR/webauthn/#biblio-rfc5280) are
|
||||||
|
// both OPTIONAL as the status of many attestation certificates is available through authenticator
|
||||||
|
// metadata services. See, for example, the FIDO Metadata Service
|
||||||
|
// [FIDOMetadataService] (https://www.w3.org/TR/webauthn/#biblio-fidometadataservice)
|
||||||
|
|
||||||
|
// Step 2.4 If successful, return attestation type Basic and attestation trust path x5c.
|
||||||
|
// We don't handle trust paths yet but we're done.
|
||||||
|
return string(metadata.BasicFull), x5c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleECDAAAttestation(sig, clientDataHash, ecdaaKeyID []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
return "Packed (ECDAA)", nil, ErrNotSpecImplemented
|
||||||
|
}
|
||||||
|
|
||||||
|
func handleSelfAttestation(alg int64, pubKey, authData, clientDataHash, sig []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
verificationData := append(authData, clientDataHash...) //nolint:gocritic // This is intentional.
|
||||||
|
|
||||||
|
var (
|
||||||
|
key any
|
||||||
|
valid bool
|
||||||
|
)
|
||||||
|
|
||||||
|
if key, err = webauthncose.ParsePublicKey(pubKey); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails(fmt.Sprintf("Error parsing the public key: %+v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// §4.1 Validate that alg matches the algorithm of the credentialPublicKey in authenticatorData.
|
||||||
|
switch k := key.(type) {
|
||||||
|
case webauthncose.OKPPublicKeyData:
|
||||||
|
err = verifyKeyAlgorithm(k.Algorithm, alg)
|
||||||
|
case webauthncose.EC2PublicKeyData:
|
||||||
|
err = verifyKeyAlgorithm(k.Algorithm, alg)
|
||||||
|
case webauthncose.RSAPublicKeyData:
|
||||||
|
err = verifyKeyAlgorithm(k.Algorithm, alg)
|
||||||
|
default:
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Error verifying the public key data")
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// §4.2 Verify that sig is a valid signature over the concatenation of authenticatorData and
|
||||||
|
// clientDataHash using the credential public key with alg.
|
||||||
|
if valid, err = webauthncose.VerifySignature(key, verificationData, sig); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails(fmt.Sprintf("Error verifying the signature: %+v", err)).WithError(err)
|
||||||
|
} else if !valid {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Unable to verify signature")
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(metadata.BasicSurrogate), nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func verifyKeyAlgorithm(keyAlgorithm, attestedAlgorithm int64) error {
|
||||||
|
if keyAlgorithm != attestedAlgorithm {
|
||||||
|
return ErrInvalidAttestation.WithDetails("Public key algorithm does not equal att statement algorithm")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
185
vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go
generated
vendored
Normal file
185
vendor/github.com/go-webauthn/webauthn/protocol/attestation_safetynet.go
generated
vendored
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"crypto/sha256"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/base64"
|
||||||
|
"fmt"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-viper/mapstructure/v2"
|
||||||
|
"github.com/golang-jwt/jwt/v5"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerAndroidSafetyNet is the handler for the Android SafetyNet Attestation Statement
|
||||||
|
// Format.
|
||||||
|
//
|
||||||
|
// When the authenticator is a platform authenticator on certain Android platforms, the attestation statement may be
|
||||||
|
// based on the SafetyNet API. In this case the authenticator data is completely controlled by the caller of the
|
||||||
|
// SafetyNet API (typically an application running on the Android platform) and the attestation statement provides some
|
||||||
|
// statements about the health of the platform and the identity of the calling application (see SafetyNet Documentation
|
||||||
|
// for more details).
|
||||||
|
//
|
||||||
|
// The syntax of an Android Attestation statement is defined as follows:
|
||||||
|
//
|
||||||
|
// $$attStmtType //= (
|
||||||
|
// fmt: "android-safetynet",
|
||||||
|
// attStmt: safetynetStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// safetynetStmtFormat = {
|
||||||
|
// ver: text,
|
||||||
|
// response: bytes
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.5. Android SafetyNet Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#sctn-android-safetynet-attestation
|
||||||
|
func attestationFormatValidationHandlerAndroidSafetyNet(att AttestationObject, clientDataHash []byte, mds metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
// The syntax of an Android Attestation statement is defined as follows:
|
||||||
|
// $$attStmtType //= (
|
||||||
|
// fmt: "android-safetynet",
|
||||||
|
// attStmt: safetynetStmtFormat
|
||||||
|
// )
|
||||||
|
|
||||||
|
// safetynetStmtFormat = {
|
||||||
|
// ver: text,
|
||||||
|
// response: bytes
|
||||||
|
// }
|
||||||
|
|
||||||
|
// §8.5.1 Verify that attStmt is valid CBOR conforming to the syntax defined above and perform CBOR decoding on it to extract
|
||||||
|
// the contained fields.
|
||||||
|
|
||||||
|
// We have done this
|
||||||
|
// §8.5.2 Verify that response is a valid SafetyNet response of version ver.
|
||||||
|
version, present := att.AttStatement[stmtVersion].(string)
|
||||||
|
if !present {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Unable to find the version of SafetyNet")
|
||||||
|
}
|
||||||
|
|
||||||
|
if version == "" {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Not a proper version for SafetyNet")
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: provide user the ability to designate their supported versions.
|
||||||
|
|
||||||
|
response, present := att.AttStatement["response"].([]byte)
|
||||||
|
if !present {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Unable to find the SafetyNet response")
|
||||||
|
}
|
||||||
|
|
||||||
|
var token *jwt.Token
|
||||||
|
|
||||||
|
if token, err = jwt.Parse(string(response), keyFuncSafetyNetJWT, jwt.WithValidMethods([]string{jwt.SigningMethodRS256.Alg()})); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error finding cert issued to correct hostname: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// marshall the JWT payload into the safetynet response json.
|
||||||
|
var safetyNetResponse SafetyNetResponse
|
||||||
|
|
||||||
|
if err = mapstructure.Decode(token.Claims, &safetyNetResponse); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails(fmt.Sprintf("Error parsing the SafetyNet response: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.5.3 Verify that the nonce in the response is identical to the Base64 encoding of the SHA-256 hash of the concatenation
|
||||||
|
// of authenticatorData and clientDataHash.
|
||||||
|
nonceBuffer := sha256.Sum256(append(att.RawAuthData, clientDataHash...))
|
||||||
|
|
||||||
|
nonceBytes, err := base64.StdEncoding.DecodeString(safetyNetResponse.Nonce)
|
||||||
|
if !bytes.Equal(nonceBuffer[:], nonceBytes) || err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Invalid nonce for in SafetyNet response").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.5.4 Let attestationCert be the attestation certificate (https://www.w3.org/TR/webauthn/#attestation-certificate)
|
||||||
|
certChain := token.Header[stmtX5C].([]any)
|
||||||
|
l := make([]byte, base64.StdEncoding.DecodedLen(len(certChain[0].(string))))
|
||||||
|
|
||||||
|
n, err := base64.StdEncoding.Decode(l, []byte(certChain[0].(string)))
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error finding cert issued to correct hostname: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
attestationCert, err := x509.ParseCertificate(l[:n])
|
||||||
|
if err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error finding cert issued to correct hostname: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.5.5 Verify that attestationCert is issued to the hostname "attest.android.com".
|
||||||
|
if err = attestationCert.VerifyHostname(attStatementAndroidSafetyNetHostname); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error finding cert issued to correct hostname: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.5.6 Verify that the ctsProfileMatch attribute in the payload of response is true.
|
||||||
|
if !safetyNetResponse.CtsProfileMatch {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("ctsProfileMatch attribute of the JWT payload is false")
|
||||||
|
}
|
||||||
|
|
||||||
|
if t := time.Unix(safetyNetResponse.TimestampMs/1000, 0); t.After(time.Now()) {
|
||||||
|
// Zero tolerance for post-dated timestamps.
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("SafetyNet response with timestamp after current time")
|
||||||
|
} else if t.Before(time.Now().Add(-time.Minute)) {
|
||||||
|
// Small tolerance for pre-dated timestamps.
|
||||||
|
if mds != nil && mds.GetValidateEntry(context.Background()) {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("SafetyNet response with timestamp before one minute ago")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// §8.5.7 If successful, return implementation-specific values representing attestation type Basic and attestation
|
||||||
|
// trust path attestationCert.
|
||||||
|
return string(metadata.BasicFull), nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func keyFuncSafetyNetJWT(token *jwt.Token) (key any, err error) {
|
||||||
|
var (
|
||||||
|
ok bool
|
||||||
|
raw any
|
||||||
|
chain []any
|
||||||
|
first string
|
||||||
|
der []byte
|
||||||
|
cert *x509.Certificate
|
||||||
|
)
|
||||||
|
|
||||||
|
if raw, ok = token.Header[stmtX5C]; !ok {
|
||||||
|
return nil, fmt.Errorf("jwt header missing x5c")
|
||||||
|
}
|
||||||
|
|
||||||
|
if chain, ok = raw.([]any); !ok || len(chain) == 0 {
|
||||||
|
return nil, fmt.Errorf("jwt header x5c is not a non-empty array")
|
||||||
|
}
|
||||||
|
|
||||||
|
if first, ok = chain[0].(string); !ok || first == "" {
|
||||||
|
return nil, fmt.Errorf("jwt header x5c[0] not a base64 string")
|
||||||
|
}
|
||||||
|
|
||||||
|
if der, err = base64.StdEncoding.DecodeString(first); err != nil {
|
||||||
|
return nil, fmt.Errorf("decode x5c leaf: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if cert, err = x509.ParseCertificate(der); err != nil {
|
||||||
|
if cert != nil {
|
||||||
|
return cert.PublicKey, fmt.Errorf("parse x5c leaf: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, fmt.Errorf("parse x5c leaf: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return cert.PublicKey, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type SafetyNetResponse struct {
|
||||||
|
Nonce string `json:"nonce"`
|
||||||
|
TimestampMs int64 `json:"timestampMs"`
|
||||||
|
ApkPackageName string `json:"apkPackageName"`
|
||||||
|
ApkDigestSha256 string `json:"apkDigestSha256"`
|
||||||
|
CtsProfileMatch bool `json:"ctsProfileMatch"`
|
||||||
|
ApkCertificateDigestSha256 []any `json:"apkCertificateDigestSha256"`
|
||||||
|
BasicIntegrity bool `json:"basicIntegrity"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterAttestationFormat(AttestationFormatAndroidSafetyNet, attestationFormatValidationHandlerAndroidSafetyNet)
|
||||||
|
}
|
||||||
625
vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go
generated
vendored
Normal file
625
vendor/github.com/go-webauthn/webauthn/protocol/attestation_tpm.go
generated
vendored
Normal file
@@ -0,0 +1,625 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto"
|
||||||
|
"crypto/subtle"
|
||||||
|
"crypto/x509"
|
||||||
|
"crypto/x509/pkix"
|
||||||
|
"encoding/asn1"
|
||||||
|
"encoding/binary"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/google/go-tpm/tpm2"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// attestationFormatValidationHandlerTPM is the handler for the TPM Attestation Statement Format.
|
||||||
|
//
|
||||||
|
// The syntax of a TPM Attestation statement is as follows:
|
||||||
|
//
|
||||||
|
// $$attStmtType // = (
|
||||||
|
//
|
||||||
|
// fmt: "tpm",
|
||||||
|
// attStmt: tpmStmtFormat
|
||||||
|
// )
|
||||||
|
//
|
||||||
|
// tpmStmtFormat = {
|
||||||
|
// ver: "2.0",
|
||||||
|
// (
|
||||||
|
// alg: COSEAlgorithmIdentifier,
|
||||||
|
// x5c: [ aikCert: bytes, * (caCert: bytes) ]
|
||||||
|
// )
|
||||||
|
// sig: bytes,
|
||||||
|
// certInfo: bytes,
|
||||||
|
// pubArea: bytes
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// Specification: §8.3. TPM Attestation Statement Format
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#sctn-tpm-attestation
|
||||||
|
func attestationFormatValidationHandlerTPM(att AttestationObject, clientDataHash []byte, _ metadata.Provider) (attestationType string, x5cs []any, err error) {
|
||||||
|
var statement *tpm2AttStatement
|
||||||
|
|
||||||
|
if statement, err = newTPM2AttStatement(att.AttStatement); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.HasECDAAKeyID || statement.HasValidECDAAKeyID {
|
||||||
|
return "", nil, ErrNotImplemented
|
||||||
|
}
|
||||||
|
|
||||||
|
if !statement.HasX5C || !statement.HasValidX5C {
|
||||||
|
return "", nil, ErrNotImplemented
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.Version != versionTPM20 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("WebAuthn only supports TPM 2.0 currently")
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
pubArea *tpm2.TPMTPublic
|
||||||
|
key any
|
||||||
|
)
|
||||||
|
|
||||||
|
if pubArea, err = tpm2.Unmarshal[tpm2.TPMTPublic](statement.PubArea); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Unable to decode TPMT_PUBLIC in attestation statement").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if key, err = webauthncose.ParsePublicKey(att.AuthData.AttData.CredentialPublicKey); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch k := key.(type) {
|
||||||
|
case webauthncose.EC2PublicKeyData:
|
||||||
|
var (
|
||||||
|
params *tpm2.TPMSECCParms
|
||||||
|
point *tpm2.TPMSECCPoint
|
||||||
|
)
|
||||||
|
|
||||||
|
if params, err = pubArea.Parameters.ECCDetail(); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between ECCParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
if point, err = pubArea.Unique.ECC(); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between ECCParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
if params.CurveID != k.TPMCurveID() {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between ECCParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !bytes.Equal(point.X.Buffer, k.XCoord) || !bytes.Equal(point.Y.Buffer, k.YCoord) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between ECCParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
case webauthncose.RSAPublicKeyData:
|
||||||
|
var (
|
||||||
|
params *tpm2.TPMSRSAParms
|
||||||
|
modulus *tpm2.TPM2BPublicKeyRSA
|
||||||
|
)
|
||||||
|
|
||||||
|
if params, err = pubArea.Parameters.RSADetail(); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between RSAParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
if modulus, err = pubArea.Unique.RSA(); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between RSAParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !bytes.Equal(modulus.Buffer, k.Modulus) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between RSAParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
|
||||||
|
exp := uint32(k.Exponent[0]) + uint32(k.Exponent[1])<<8 + uint32(k.Exponent[2])<<16
|
||||||
|
if tpm2Exponent(params) != exp {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Mismatch between RSAParameters in pubArea and credentialPublicKey")
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return "", nil, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
|
||||||
|
// Concatenate authenticatorData and clientDataHash to form attToBeSigned.
|
||||||
|
attToBeSigned := append(att.RawAuthData, clientDataHash...) //nolint:gocritic // This is intentional.
|
||||||
|
|
||||||
|
var certInfo *tpm2.TPMSAttest
|
||||||
|
|
||||||
|
// Validate that certInfo is valid:
|
||||||
|
// 1/4 Verify that magic is set to TPM_GENERATED_VALUE, handled here.
|
||||||
|
if certInfo, err = tpm2.Unmarshal[tpm2.TPMSAttest](statement.CertInfo); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = certInfo.Magic.Check(); err != nil {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails("Magic is not set to TPM_GENERATED_VALUE")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2/4 Verify that type is set to TPM_ST_ATTEST_CERTIFY.
|
||||||
|
if certInfo.Type != tpm2.TPMSTAttestCertify {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Type is not set to TPM_ST_ATTEST_CERTIFY")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3/4 Verify that extraData is set to the hash of attToBeSigned using the hash algorithm employed in "alg".
|
||||||
|
coseAlg := webauthncose.COSEAlgorithmIdentifier(statement.Algorithm)
|
||||||
|
|
||||||
|
h := webauthncose.HasherFromCOSEAlg(coseAlg)
|
||||||
|
h.Write(attToBeSigned)
|
||||||
|
|
||||||
|
if !bytes.Equal(certInfo.ExtraData.Buffer, h.Sum(nil)) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("ExtraData is not set to hash of attToBeSigned")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Note that the remaining fields in the "Standard Attestation Structure"
|
||||||
|
// [TPMv2-Part1] section 31.2, i.e., qualifiedSigner, clockInfo and firmwareVersion
|
||||||
|
// are ignored. These fields MAY be used as an input to risk engines.
|
||||||
|
var (
|
||||||
|
aikCert *x509.Certificate
|
||||||
|
raw []byte
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
if len(statement.X5C) == 0 {
|
||||||
|
return "", nil, ErrAttestation.WithDetails("Error getting certificate from x5c cert chain")
|
||||||
|
}
|
||||||
|
|
||||||
|
// In this case:
|
||||||
|
// Verify the sig is a valid signature over certInfo using the attestation public key in aikCert with the algorithm specified in alg.
|
||||||
|
if raw, ok = statement.X5C[0].([]byte); !ok {
|
||||||
|
return "", nil, ErrAttestation.WithDetails("Error getting certificate from x5c cert chain")
|
||||||
|
}
|
||||||
|
|
||||||
|
if aikCert, err = x509.ParseCertificate(raw); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Error parsing certificate from ASN.1")
|
||||||
|
}
|
||||||
|
|
||||||
|
if sigAlg := webauthncose.SigAlgFromCOSEAlg(coseAlg); sigAlg == x509.UnknownSignatureAlgorithm {
|
||||||
|
return "", nil, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Unsupported COSE alg: %d", statement.Algorithm))
|
||||||
|
} else if err = aikCert.CheckSignature(sigAlg, statement.CertInfo, statement.Signature); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails(fmt.Sprintf("Signature validation error: %+v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify that aikCert meets the requirements in §8.3.1 TPM Attestation Statement Certificate Requirements.
|
||||||
|
|
||||||
|
// 1/6 Version MUST be set to 3.
|
||||||
|
if aikCert.Version != 3 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate version must be 3")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2/6 Subject field MUST be set to empty.
|
||||||
|
if aikCert.Subject.String() != "" {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate subject must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
manufacturer, model, version string
|
||||||
|
ekuValid = false
|
||||||
|
eku []asn1.ObjectIdentifier
|
||||||
|
constraints tpmBasicConstraints
|
||||||
|
rest []byte
|
||||||
|
)
|
||||||
|
|
||||||
|
for _, ext := range aikCert.Extensions {
|
||||||
|
switch {
|
||||||
|
case ext.Id.Equal(oidExtensionSubjectAltName):
|
||||||
|
if manufacturer, model, version, err = parseSANExtension(ext.Value); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
}
|
||||||
|
case ext.Id.Equal(oidExtensionExtendedKeyUsage):
|
||||||
|
if rest, err = asn1.Unmarshal(ext.Value, &eku); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate extended key usage malformed")
|
||||||
|
} else if len(rest) != 0 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate extended key usage contains extra data")
|
||||||
|
}
|
||||||
|
|
||||||
|
found := false
|
||||||
|
|
||||||
|
for _, oid := range eku {
|
||||||
|
if oid.Equal(oidTCGKpAIKCertificate) {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !found {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate extended key usage missing 2.23.133.8.3")
|
||||||
|
}
|
||||||
|
|
||||||
|
ekuValid = true
|
||||||
|
case ext.Id.Equal(oidExtensionBasicConstraints):
|
||||||
|
if rest, err = asn1.Unmarshal(ext.Value, &constraints); err != nil {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate basic constraints malformed")
|
||||||
|
} else if len(rest) != 0 {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate basic constraints contains extra data")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3/6 The Subject Alternative Name extension MUST be set as defined in [TPMv2-EK-Profile] section 3.2.9.
|
||||||
|
if manufacturer == "" || model == "" || version == "" {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Invalid SAN data in AIK certificate")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !isValidTPMManufacturer(manufacturer) {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Invalid TPM manufacturer")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4/6 The Extended Key Usage extension MUST contain the "joint-iso-itu-t(2) internationalorganizations(23) 133 tcg-kp(8) tcg-kp-AIKCertificate(3)" OID.
|
||||||
|
if !ekuValid {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate missing EKU")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 6/6 An Authority Information Access (AIA) extension with entry id-ad-ocsp and a CRL Distribution Point
|
||||||
|
// extension [RFC5280] are both OPTIONAL as the status of many attestation certificates is available
|
||||||
|
// through metadata services. See, for example, the FIDO Metadata Service.
|
||||||
|
if constraints.IsCA {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("AIK certificate basic constraints missing or CA is true")
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4/4 Verify that attested contains a TPMS_CERTIFY_INFO structure as specified in
|
||||||
|
// [TPMv2-Part2] section 10.12.3, whose name field contains a valid Name for pubArea,
|
||||||
|
// as computed using the algorithm in the nameAlg field of pubArea
|
||||||
|
// using the procedure specified in [TPMv2-Part1] section 16.
|
||||||
|
//
|
||||||
|
// This needs to move after the x5c check as the QualifiedSigner only gets populated when it can be verified.
|
||||||
|
if ok, err = tpm2NameMatch(certInfo, pubArea); err != nil {
|
||||||
|
return "", nil, err
|
||||||
|
} else if !ok {
|
||||||
|
return "", nil, ErrAttestationFormat.WithDetails("Hash value mismatch attested and pubArea")
|
||||||
|
}
|
||||||
|
|
||||||
|
return string(metadata.AttCA), statement.X5C, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func tpm2Exponent(params *tpm2.TPMSRSAParms) (exp uint32) {
|
||||||
|
if params.Exponent != 0 {
|
||||||
|
return params.Exponent
|
||||||
|
}
|
||||||
|
|
||||||
|
return 65537
|
||||||
|
}
|
||||||
|
|
||||||
|
func tpm2NameMatch(certInfo *tpm2.TPMSAttest, pubArea *tpm2.TPMTPublic) (match bool, err error) {
|
||||||
|
if certInfo == nil || pubArea == nil {
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
certifyInfo *tpm2.TPMSCertifyInfo
|
||||||
|
name *tpm2.TPM2BName
|
||||||
|
)
|
||||||
|
|
||||||
|
if certifyInfo, err = certInfo.Attested.Certify(); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if name, err = tpm2.ObjectName(pubArea); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, _, err = tpm2NameDigest(certInfo.QualifiedSigner); err != nil {
|
||||||
|
return false, fmt.Errorf("invalid name digest algorithm: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return subtle.ConstantTimeCompare(certifyInfo.Name.Buffer, name.Buffer) == 1, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func tpm2NameDigest(name tpm2.TPM2BName) (alg tpm2.TPMIAlgHash, digest []byte, err error) {
|
||||||
|
buf := name.Buffer
|
||||||
|
|
||||||
|
if len(buf) < 3 {
|
||||||
|
return 0, nil, fmt.Errorf("name too short")
|
||||||
|
}
|
||||||
|
|
||||||
|
alg = tpm2.TPMIAlgHash(binary.BigEndian.Uint16(buf[:2]))
|
||||||
|
|
||||||
|
var hash crypto.Hash
|
||||||
|
|
||||||
|
if hash, err = alg.Hash(); err != nil {
|
||||||
|
return 0, nil, fmt.Errorf("invalid hash algorithm: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
digest = buf[2:]
|
||||||
|
|
||||||
|
if len(digest) == 0 {
|
||||||
|
return 0, nil, fmt.Errorf("name digest is empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(digest) != hash.Size() {
|
||||||
|
return 0, nil, fmt.Errorf("invalid name digest length: %d", len(digest))
|
||||||
|
}
|
||||||
|
|
||||||
|
return alg, digest, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tpm2AttStatement holds the decoded fields of a "tpm" attestation statement.
// The required members are populated by newTPM2AttStatement; the optional x5c
// and ecdaaKeyId members are accompanied by flags recording whether the raw
// map contained them and whether they had the expected type.
type tpm2AttStatement struct {
	// Version is the "ver" member of the statement.
	Version string
	// Algorithm is the COSE algorithm identifier from the "alg" member.
	Algorithm int64
	// Signature is the raw "sig" member.
	Signature []byte
	// CertInfo is the raw TPMS_ATTEST bytes from the "certInfo" member.
	CertInfo []byte
	// PubArea is the raw TPMT_PUBLIC bytes from the "pubArea" member.
	PubArea []byte

	// X5C is the certificate chain, when present and well-typed.
	X5C []any
	// HasX5C records whether the "x5c" key existed in the raw map.
	HasX5C bool
	// HasValidX5C records whether the "x5c" value asserted to []any.
	HasValidX5C bool

	// HasECDAAKeyID records whether the "ecdaaKeyId" key existed in the raw map.
	HasECDAAKeyID bool
	// HasValidECDAAKeyID records whether the "ecdaaKeyId" value asserted to []byte.
	HasValidECDAAKeyID bool
	// ECDAAKeyID is the ECDAA key identifier, when present and well-typed.
	ECDAAKeyID []byte
}
|
||||||
|
|
||||||
|
func newTPM2AttStatement(raw map[string]any) (statement *tpm2AttStatement, err error) {
|
||||||
|
var ok bool
|
||||||
|
|
||||||
|
statement = &tpm2AttStatement{}
|
||||||
|
|
||||||
|
// Given the verification procedure inputs attStmt, authenticatorData
|
||||||
|
// and clientDataHash, the verification procedure is as follows.
|
||||||
|
|
||||||
|
// Verify that attStmt is valid CBOR conforming to the syntax defined
|
||||||
|
// above and perform CBOR decoding on it to extract the contained fields.
|
||||||
|
if statement.Version, ok = raw[stmtVersion].(string); !ok {
|
||||||
|
return nil, ErrAttestationFormat.WithDetails("Error retrieving ver value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.Algorithm, ok = raw[stmtAlgorithm].(int64); !ok {
|
||||||
|
return nil, ErrAttestationFormat.WithDetails("Error retrieving alg value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.Signature, ok = raw[stmtSignature].([]byte); !ok {
|
||||||
|
return nil, ErrAttestationFormat.WithDetails("Error retrieving sig value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.CertInfo, ok = raw[stmtCertInfo].([]byte); !ok {
|
||||||
|
return nil, ErrAttestationFormat.WithDetails("Error retrieving certInfo value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if statement.PubArea, ok = raw[stmtPubArea].([]byte); !ok {
|
||||||
|
return nil, ErrAttestationFormat.WithDetails("Error retrieving pubArea value")
|
||||||
|
}
|
||||||
|
|
||||||
|
var rawX5C, rawECDAAKeyID any
|
||||||
|
|
||||||
|
rawX5C, statement.HasX5C = raw[stmtX5C]
|
||||||
|
statement.X5C, statement.HasValidX5C = rawX5C.([]any)
|
||||||
|
|
||||||
|
rawECDAAKeyID, statement.HasECDAAKeyID = raw[stmtECDAAKID]
|
||||||
|
statement.ECDAAKeyID, statement.HasValidECDAAKeyID = rawECDAAKeyID.([]byte)
|
||||||
|
|
||||||
|
return statement, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// forEachSAN invokes callback once per GeneralName in a DER-encoded
// SubjectAltName extension value, passing the name's CHOICE tag and its
// contents. Iteration stops at the first callback error, which is returned.
//
// RFC 5280, 4.2.1.6:
//
//	SubjectAltName ::= GeneralNames
//
//	GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName
//
//	GeneralName ::= CHOICE {
//	     otherName                 [0] OtherName,
//	     rfc822Name                [1] IA5String,
//	     dNSName                   [2] IA5String,
//	     x400Address               [3] ORAddress,
//	     directoryName             [4] Name,
//	     ediPartyName              [5] EDIPartyName,
//	     uniformResourceIdentifier [6] IA5String,
//	     iPAddress                 [7] OCTET STRING,
//	     registeredID              [8] OBJECT IDENTIFIER }
func forEachSAN(extension []byte, callback func(tag int, data []byte) error) error {
	var outer asn1.RawValue

	trailing, err := asn1.Unmarshal(extension, &outer)

	switch {
	case err != nil:
		return err
	case len(trailing) != 0:
		return errors.New("x509: trailing data after X.509 extension")
	case !outer.IsCompound, outer.Tag != 16, outer.Class != 0:
		// The outer value must be a universal, constructed SEQUENCE.
		return asn1.StructuralError{Msg: "bad SAN sequence"}
	}

	for data := outer.Bytes; len(data) > 0; {
		var name asn1.RawValue

		if data, err = asn1.Unmarshal(data, &name); err != nil {
			return err
		}

		if err = callback(name.Tag, name.Bytes); err != nil {
			return err
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
const (
	// nameTypeDN is the GeneralName CHOICE tag for directoryName ([4] Name),
	// the form that carries the TPM device attributes in the SAN extension.
	nameTypeDN = 4
)
|
||||||
|
|
||||||
|
func parseSANExtension(value []byte) (manufacturer string, model string, version string, err error) {
|
||||||
|
err = forEachSAN(value, func(tag int, data []byte) error {
|
||||||
|
if tag == nameTypeDN {
|
||||||
|
tpmDeviceAttributes := pkix.RDNSequence{}
|
||||||
|
|
||||||
|
if _, err = asn1.Unmarshal(data, &tpmDeviceAttributes); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, rdn := range tpmDeviceAttributes {
|
||||||
|
if len(rdn) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, atv := range rdn {
|
||||||
|
value, ok := atv.Value.(string)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if atv.Type.Equal(oidTCGAtTpmManufacturer) {
|
||||||
|
manufacturer = strings.TrimPrefix(value, "id:")
|
||||||
|
}
|
||||||
|
|
||||||
|
if atv.Type.Equal(oidTCGAtTpmModel) {
|
||||||
|
model = value
|
||||||
|
}
|
||||||
|
|
||||||
|
if atv.Type.Equal(oidTCGAtTPMVersion) {
|
||||||
|
version = strings.TrimPrefix(value, "id:")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// See https://trustedcomputinggroup.org/resource/vendor-id-registry/ for registry contents.
//
// Each entry maps a hex-encoded 4-byte TCG vendor ID (as it appears after the
// "id:" prefix of the SAN manufacturer attribute) to the vendor's name and
// short code. Note that IBM appears twice so that both the lowercase and
// uppercase hex spellings of its ID are matched.
var tpmManufacturers = []struct {
	id   string // Hex-encoded TCG vendor ID.
	name string // Human-readable vendor name.
	code string // TCG vendor code.
}{
	{"414D4400", "AMD", "AMD"},
	{"414E5400", "Ant Group", "ANT"},
	{"41544D4C", "Atmel", "ATML"},
	{"4252434D", "Broadcom", "BRCM"},
	{"4353434F", "Cisco", "CSCO"},
	{"464C5953", "Flyslice Technologies", "FLYS"},
	{"524F4343", "Fuzhou Rockchip", "ROCC"},
	{"474F4F47", "Google", "GOOG"},
	{"48504900", "HPI", "HPI"},
	{"48504500", "HPE", "HPE"},
	{"48495349", "Huawei", "HISI"},
	{"49424d00", "IBM", "IBM"},
	{"49424D00", "IBM", "IBM"},
	{"49465800", "Infineon", "IFX"},
	{"494E5443", "Intel", "INTC"},
	{"4C454E00", "Lenovo", "LEN"},
	{"4D534654", "Microsoft", "MSFT"},
	{"4E534D20", "National Semiconductor", "NSM"},
	{"4E545A00", "Nationz", "NTZ"},
	{"4E534700", "NSING", "NSG"},
	{"4E544300", "Nuvoton Technology", "NTC"},
	{"51434F4D", "Qualcomm", "QCOM"},
	{"534D534E", "Samsung", "SECE"},
	{"53454345", "SecEdge", "SecEdge"},
	{"534E5300", "Sinosun", "SNS"},
	{"534D5343", "SMSC", "SMSC"},
	{"53544D20", "ST Microelectronics", "STM"},
	{"54584E00", "Texas Instruments", "TXN"},
	{"57454300", "Winbond", "WEC"},
	{"5345414C", "Wisekey", "SEAL"},
	{"FFFFF1D0", "FIDO Alliance Conformance Testing", "FIDO"},
}
|
||||||
|
|
||||||
|
func isValidTPMManufacturer(id string) bool {
|
||||||
|
for _, m := range tpmManufacturers {
|
||||||
|
if m.id == id {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func tpmParseAIKAttCA(x5c *x509.Certificate, x5cis []*x509.Certificate) (err *Error) {
|
||||||
|
if err = tpmParseSANExtension(x5c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = tpmRemoveEKU(x5c); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, parent := range x5cis {
|
||||||
|
if err = tpmRemoveEKU(parent); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func tpmParseSANExtension(attestation *x509.Certificate) (protoErr *Error) {
|
||||||
|
var (
|
||||||
|
manufacturer, model, version string
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
for _, ext := range attestation.Extensions {
|
||||||
|
if ext.Id.Equal(oidExtensionSubjectAltName) {
|
||||||
|
if manufacturer, model, version, err = parseSANExtension(ext.Value); err != nil {
|
||||||
|
return ErrInvalidAttestation.WithDetails("Authenticator with invalid Authenticator Identity Key SAN data encountered during attestation validation.").WithInfo(fmt.Sprintf("Error occurred parsing SAN extension: %s", err.Error())).WithError(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if manufacturer == "" || model == "" || version == "" {
|
||||||
|
return ErrAttestationFormat.WithDetails("Invalid SAN data in AIK certificate.")
|
||||||
|
}
|
||||||
|
|
||||||
|
var unhandled []asn1.ObjectIdentifier //nolint:prealloc
|
||||||
|
|
||||||
|
for _, uce := range attestation.UnhandledCriticalExtensions {
|
||||||
|
if uce.Equal(oidExtensionSubjectAltName) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
unhandled = append(unhandled, uce)
|
||||||
|
}
|
||||||
|
|
||||||
|
attestation.UnhandledCriticalExtensions = unhandled
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// tpmBasicConstraints mirrors the ASN.1 BasicConstraints structure
// (RFC 5280, 4.2.1.9) so the extension can be decoded with encoding/asn1
// directly. The zero value (IsCA false) is what a missing extension yields.
type tpmBasicConstraints struct {
	IsCA       bool `asn1:"optional"`
	MaxPathLen int  `asn1:"optional,default:-1"`
}
|
||||||
|
|
||||||
|
// Remove extension key usage to avoid ExtKeyUsage check failure.
|
||||||
|
func tpmRemoveEKU(x5c *x509.Certificate) *Error {
|
||||||
|
var (
|
||||||
|
unknown []asn1.ObjectIdentifier
|
||||||
|
hasAiK bool
|
||||||
|
)
|
||||||
|
|
||||||
|
for _, eku := range x5c.UnknownExtKeyUsage {
|
||||||
|
if eku.Equal(oidTCGKpAIKCertificate) {
|
||||||
|
hasAiK = true
|
||||||
|
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if eku.Equal(oidMicrosoftKpPrivacyCA) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
unknown = append(unknown, eku)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !hasAiK {
|
||||||
|
return ErrAttestationFormat.WithDetails("Attestation Identity Key certificate missing required Extended Key Usage.")
|
||||||
|
}
|
||||||
|
|
||||||
|
x5c.UnknownExtKeyUsage = unknown
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// init registers the TPM attestation format handler so "tpm" attestation
// statements are routed to attestationFormatValidationHandlerTPM.
func init() {
	RegisterAttestationFormat(AttestationFormatTPM, attestationFormatValidationHandlerTPM)
}
|
||||||
426
vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go
generated
vendored
Normal file
426
vendor/github.com/go-webauthn/webauthn/protocol/authenticator.go
generated
vendored
Normal file
@@ -0,0 +1,426 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/binary"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncbor"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
	// minAuthDataLength is the smallest valid authenticator data payload
	// (rpIdHash, flags and signCount) per WebAuthn §6.1.
	minAuthDataLength = 37

	// minAttestedAuthLength is the smallest authenticator data payload that
	// can additionally carry attested credential data.
	minAttestedAuthLength = 55

	// maxCredentialIDLength is the specification's upper bound on the size of
	// a credential ID.
	maxCredentialIDLength = 1023
)
|
||||||
|
|
||||||
|
// AuthenticatorResponse represents the IDL with the same name.
//
// Authenticators respond to Relying Party requests by returning an object derived from the AuthenticatorResponse
// interface.
//
// Specification: §5.2. Authenticator Responses (https://www.w3.org/TR/webauthn/#iface-authenticatorresponse)
type AuthenticatorResponse struct {
	// ClientDataJSON contains a JSON serialization of the client data passed to the authenticator by the client in
	// its call to either create() or get().
	//
	// See https://www.w3.org/TR/webauthn/#dom-authenticatorresponse-clientdatajson
	ClientDataJSON URLEncodedBase64 `json:"clientDataJSON"`
}
|
||||||
|
|
||||||
|
// AuthenticatorData represents the IDL with the same name.
//
// The authenticator data structure encodes contextual bindings made by the authenticator. These bindings are controlled
// by the authenticator itself, and derive their trust from the WebAuthn Relying Party's assessment of the security
// properties of the authenticator. In one extreme case, the authenticator may be embedded in the client, and its
// bindings may be no more trustworthy than the client data. At the other extreme, the authenticator may be a discrete
// entity with high-security hardware and software, connected to the client over a secure channel. In both cases, the
// Relying Party receives the authenticator data in the same format, and uses its knowledge of the authenticator to make
// trust decisions.
//
// The authenticator data has a compact but extensible encoding. This is desired since authenticators can be devices
// with limited capabilities and low power requirements, with much simpler software stacks than the client platform.
//
// Specification: §6.1. Authenticator Data (https://www.w3.org/TR/webauthn/#sctn-authenticator-data)
type AuthenticatorData struct {
	// RPIDHash is the hash of the Relying Party ID the credential is scoped to.
	RPIDHash []byte `json:"rpid"`

	// Flags carries the UP/UV/BE/BS/AT/ED bits described by AuthenticatorFlags.
	Flags AuthenticatorFlags `json:"flags"`

	// Counter is the signature counter (signCount) reported by the authenticator.
	Counter uint32 `json:"sign_count"`

	// AttData holds the attested credential data, meaningful when the AT flag is set.
	AttData AttestedCredentialData `json:"att_data"`

	// ExtData holds the raw extension data, meaningful when the ED flag is set.
	ExtData []byte `json:"ext_data"`
}
|
||||||
|
|
||||||
|
// AttestedCredentialData is the credential data embedded within the
// authenticator data during attestation.
type AttestedCredentialData struct {
	// AAGUID is the AAGUID of the authenticator.
	AAGUID []byte `json:"aaguid"`

	// CredentialID identifies the credential.
	CredentialID []byte `json:"credential_id"`

	// CredentialPublicKey is the raw credential public key bytes received from the attestation data. This is the
	// CBOR representation of the credentials public key.
	CredentialPublicKey []byte `json:"public_key"`
}
|
||||||
|
|
||||||
|
// CredentialMediationRequirement represents mediation requirements for clients. When making a request via get(options)
// or create(options), developers can set a case-by-case requirement for user mediation by choosing the appropriate
// CredentialMediationRequirement enum value.
//
// See https://www.w3.org/TR/credential-management-1/#mediation-requirements
type CredentialMediationRequirement string

const (
	// MediationDefault lets the browser choose the mediation flow completely as if it wasn't specified at all.
	MediationDefault CredentialMediationRequirement = ""

	// MediationSilent indicates user mediation is suppressed for the given operation. If the operation can be performed
	// without user involvement, wonderful. If user involvement is necessary, then the operation will return null rather
	// than involving the user.
	MediationSilent CredentialMediationRequirement = "silent"

	// MediationOptional indicates if credentials can be handed over for a given operation without user mediation, they
	// will be. If user mediation is required, then the user agent will involve the user in the decision.
	MediationOptional CredentialMediationRequirement = "optional"

	// MediationConditional indicates for get(), discovered credentials are presented to the user in a non-modal dialog
	// along with an indication of the origin which is requesting credentials. If the user makes a gesture outside of
	// the dialog, the dialog closes without resolving or rejecting the Promise returned by the get() method and without
	// causing a user-visible error condition. If the user makes a gesture that selects a credential, that credential is
	// returned to the caller. The prevent silent access flag is treated as being true regardless of its actual value:
	// the conditional behavior always involves user mediation of some sort if applicable credentials are discovered.
	MediationConditional CredentialMediationRequirement = "conditional"

	// MediationRequired indicates the user agent will not hand over credentials without user mediation, even if the
	// prevent silent access flag is unset for an origin.
	MediationRequired CredentialMediationRequirement = "required"
)
|
||||||
|
|
||||||
|
// AuthenticatorAttachment represents the IDL enum of the same name, and is used as part of the Authenticator Selection
// Criteria.
//
// This enumeration’s values describe authenticators' attachment modalities. Relying Parties use this to express a
// preferred authenticator attachment modality when calling navigator.credentials.create() to create a credential.
//
// If this member is present, eligible authenticators are filtered to only authenticators attached with the specified
// §5.4.5 Authenticator Attachment Enumeration (enum AuthenticatorAttachment). The value SHOULD be a member of
// AuthenticatorAttachment but client platforms MUST ignore unknown values, treating an unknown value as if the member
// does not exist.
//
// Specification: §5.4.4. Authenticator Selection Criteria (https://www.w3.org/TR/webauthn/#dom-authenticatorselectioncriteria-authenticatorattachment)
//
// Specification: §5.4.5. Authenticator Attachment Enumeration (https://www.w3.org/TR/webauthn/#enum-attachment)
type AuthenticatorAttachment string

const (
	// Platform represents a platform authenticator that is attached using a client device-specific transport, called
	// platform attachment, and is usually not removable from the client device. A public key credential bound to a
	// platform authenticator is called a platform credential.
	Platform AuthenticatorAttachment = "platform"

	// CrossPlatform represents a roaming authenticator that is attached using cross-platform transports, called
	// cross-platform attachment. Authenticators of this class are removable from, and can "roam" among, client devices.
	// A public key credential bound to a roaming authenticator is called a roaming credential.
	CrossPlatform AuthenticatorAttachment = "cross-platform"
)
|
||||||
|
|
||||||
|
// ResidentKeyRequirement represents the IDL of the same name.
//
// This enumeration’s values describe the Relying Party's requirements for client-side discoverable credentials
// (formerly known as resident credentials or resident keys).
//
// Specifies the extent to which the Relying Party desires to create a client-side discoverable credential. For
// historical reasons the naming retains the deprecated “resident” terminology. The value SHOULD be a member of
// ResidentKeyRequirement but client platforms MUST ignore unknown values, treating an unknown value as if the member
// does not exist. If no value is given then the effective value is required if requireResidentKey is true or
// discouraged if it is false or absent.
//
// Specification: §5.4.4. Authenticator Selection Criteria (https://www.w3.org/TR/webauthn/#dom-authenticatorselectioncriteria-residentkey)
//
// Specification: §5.4.6. Resident Key Requirement Enumeration (https://www.w3.org/TR/webauthn/#enumdef-residentkeyrequirement)
type ResidentKeyRequirement string

const (
	// ResidentKeyRequirementDiscouraged indicates the Relying Party prefers creating a server-side credential, but will
	// accept a client-side discoverable credential. This is the default.
	ResidentKeyRequirementDiscouraged ResidentKeyRequirement = "discouraged"

	// ResidentKeyRequirementPreferred indicates to the client we would prefer a discoverable credential.
	ResidentKeyRequirementPreferred ResidentKeyRequirement = "preferred"

	// ResidentKeyRequirementRequired indicates the Relying Party requires a client-side discoverable credential, and is
	// prepared to receive an error if a client-side discoverable credential cannot be created.
	ResidentKeyRequirementRequired ResidentKeyRequirement = "required"
)
|
||||||
|
|
||||||
|
// AuthenticatorTransport represents the IDL enum with the same name.
//
// Authenticators may implement various transports for communicating with clients. This enumeration defines hints as to
// how clients might communicate with a particular authenticator in order to obtain an assertion for a specific
// credential. Note that these hints represent the WebAuthn Relying Party's best belief as to how an authenticator may
// be reached. A Relying Party will typically learn of the supported transports for a public key credential via
// getTransports().
//
// Specification: §5.8.4. Authenticator Transport Enumeration (https://www.w3.org/TR/webauthn/#enumdef-authenticatortransport)
type AuthenticatorTransport string

const (
	// USB indicates the respective authenticator can be contacted over removable USB.
	USB AuthenticatorTransport = "usb"

	// NFC indicates the respective authenticator can be contacted over Near Field Communication (NFC).
	NFC AuthenticatorTransport = "nfc"

	// BLE indicates the respective authenticator can be contacted over Bluetooth Smart (Bluetooth Low Energy / BLE).
	BLE AuthenticatorTransport = "ble"

	// SmartCard indicates the respective authenticator can be contacted over ISO/IEC 7816 smart card with contacts.
	//
	// WebAuthn Level 3.
	SmartCard AuthenticatorTransport = "smart-card"

	// Hybrid indicates the respective authenticator can be contacted using a combination of (often separate)
	// data-transport and proximity mechanisms. This supports, for example, authentication on a desktop computer using
	// a smartphone.
	//
	// WebAuthn Level 3.
	Hybrid AuthenticatorTransport = "hybrid"

	// Internal indicates the respective authenticator is contacted using a client device-specific transport, i.e., it
	// is a platform authenticator. These authenticators are not removable from the client device.
	Internal AuthenticatorTransport = "internal"
)
|
||||||
|
|
||||||
|
// UserVerificationRequirement is a representation of the UserVerificationRequirement IDL enum.
//
// A WebAuthn Relying Party may require user verification for some of its operations but not for others,
// and may use this type to express its needs.
//
// Specification: §5.8.6. User Verification Requirement Enumeration (https://www.w3.org/TR/webauthn/#enum-userVerificationRequirement)
type UserVerificationRequirement string

const (
	// VerificationRequired indicates user verification is required to create/release a credential.
	VerificationRequired UserVerificationRequirement = "required"

	// VerificationPreferred indicates user verification is preferred to create/release a credential.
	VerificationPreferred UserVerificationRequirement = "preferred" // This is the default.

	// VerificationDiscouraged indicates the authenticator should not verify the user for the credential.
	VerificationDiscouraged UserVerificationRequirement = "discouraged"
)
|
||||||
|
|
||||||
|
// AuthenticatorFlags is the flags byte returned during ceremonies in the
// authenticatorData. Each bit reports a property of the ceremony: whether the
// user was present and/or verified, backup eligibility and state, and whether
// attested credential data or extension data follow. Bit 0 is the least
// significant bit.
//
// Specification: §6.1. Authenticator Data - Flags (https://www.w3.org/TR/webauthn/#flags)
type AuthenticatorFlags byte

// The bits that do not have flags are reserved for future use.
const (
	// FlagUserPresent Bit 00000001 in the byte sequence. Tells us if user is present. Also referred to as the UP flag.
	FlagUserPresent AuthenticatorFlags = 1 << iota // Referred to as UP.

	// FlagRFU1 is a reserved for future use flag.
	FlagRFU1

	// FlagUserVerified Bit 00000100 in the byte sequence. Tells us if user is verified
	// by the authenticator using a biometric or PIN. Also referred to as the UV flag.
	FlagUserVerified

	// FlagBackupEligible Bit 00001000 in the byte sequence. Tells us if a backup is eligible for device. Also referred
	// to as the BE flag.
	FlagBackupEligible // Referred to as BE.

	// FlagBackupState Bit 00010000 in the byte sequence. Tells us if a backup state for device. Also referred to as the
	// BS flag.
	FlagBackupState

	// FlagRFU2 is a reserved for future use flag.
	FlagRFU2

	// FlagAttestedCredentialData Bit 01000000 in the byte sequence. Indicates whether
	// the authenticator added attested credential data. Also referred to as the AT flag.
	FlagAttestedCredentialData

	// FlagHasExtensions Bit 10000000 in the byte sequence. Indicates if the authenticator data has extensions. Also
	// referred to as the ED flag.
	FlagHasExtensions
)

// has reports whether every bit of mask is set in flag.
func (flag AuthenticatorFlags) has(mask AuthenticatorFlags) bool {
	return flag&mask == mask
}

// UserPresent returns if the UP flag was set.
func (flag AuthenticatorFlags) UserPresent() bool {
	return flag.has(FlagUserPresent)
}

// UserVerified returns if the UV flag was set.
func (flag AuthenticatorFlags) UserVerified() bool {
	return flag.has(FlagUserVerified)
}

// HasUserPresent returns if the UP flag was set.
func (flag AuthenticatorFlags) HasUserPresent() bool {
	return flag.has(FlagUserPresent)
}

// HasUserVerified returns if the UV flag was set.
func (flag AuthenticatorFlags) HasUserVerified() bool {
	return flag.has(FlagUserVerified)
}

// HasAttestedCredentialData returns if the AT flag was set.
func (flag AuthenticatorFlags) HasAttestedCredentialData() bool {
	return flag.has(FlagAttestedCredentialData)
}

// HasExtensions returns if the ED flag was set.
func (flag AuthenticatorFlags) HasExtensions() bool {
	return flag.has(FlagHasExtensions)
}

// HasBackupEligible returns if the BE flag was set.
func (flag AuthenticatorFlags) HasBackupEligible() bool {
	return flag.has(FlagBackupEligible)
}

// HasBackupState returns if the BS flag was set.
func (flag AuthenticatorFlags) HasBackupState() bool {
	return flag.has(FlagBackupState)
}
|
||||||
|
|
||||||
|
// Unmarshal will take the raw Authenticator Data and marshals it into AuthenticatorData for further validation.
// The authenticator data has a compact but extensible encoding. This is desired since authenticators can be
// devices with limited capabilities and low power requirements, with much simpler software stacks than the client platform.
// The authenticator data structure is a byte array of 37 bytes or more, and is laid out in this table:
// https://www.w3.org/TR/webauthn/#table-authData
func (a *AuthenticatorData) Unmarshal(rawAuthData []byte) (err error) {
	if minAuthDataLength > len(rawAuthData) {
		return ErrBadRequest.
			WithDetails("Authenticator data length too short").
			WithInfo(fmt.Sprintf("Expected data greater than %d bytes. Got %d bytes", minAuthDataLength, len(rawAuthData)))
	}

	// Fixed-size prefix: RP ID hash (32 bytes), flags (1 byte), then the
	// signature counter (4 bytes, big-endian).
	a.RPIDHash = rawAuthData[:32]
	a.Flags = AuthenticatorFlags(rawAuthData[32])
	a.Counter = binary.BigEndian.Uint32(rawAuthData[33:37])

	// Track how many bytes beyond the fixed prefix are still unaccounted for;
	// each optional section consumed below must subtract its own length.
	remaining := len(rawAuthData) - minAuthDataLength

	if a.Flags.HasAttestedCredentialData() {
		if len(rawAuthData) > minAttestedAuthLength {
			if err = a.unmarshalAttestedData(rawAuthData); err != nil {
				return err
			}

			// AAGUID + 2-byte credential ID length prefix + credential ID + CBOR public key.
			attDataLen := len(a.AttData.AAGUID) + 2 + len(a.AttData.CredentialID) + len(a.AttData.CredentialPublicKey)
			remaining -= attDataLen
		} else {
			return ErrBadRequest.WithDetails("Attested credential flag set but data is missing")
		}
	} else {
		// Without the AT flag, only extension data may legitimately follow the
		// 37-byte prefix.
		if !a.Flags.HasExtensions() && len(rawAuthData) != 37 {
			return ErrBadRequest.WithDetails("Attested credential flag not set")
		}
	}

	if a.Flags.HasExtensions() {
		if remaining != 0 {
			// Extension data is whatever tail has not been consumed yet.
			a.ExtData = rawAuthData[len(rawAuthData)-remaining:]
			remaining -= len(a.ExtData)
		} else {
			return ErrBadRequest.WithDetails("Extensions flag set but extensions data is missing")
		}
	}

	// Any bytes left over at this point mean the structure is malformed.
	if remaining != 0 {
		return ErrBadRequest.WithDetails("Leftover bytes decoding AuthenticatorData")
	}

	return nil
}
|
||||||
|
|
||||||
|
// If Attestation Data is present, unmarshall that into the appropriate public key structure.
|
||||||
|
func (a *AuthenticatorData) unmarshalAttestedData(rawAuthData []byte) (err error) {
|
||||||
|
a.AttData.AAGUID = rawAuthData[37:53]
|
||||||
|
|
||||||
|
idLength := binary.BigEndian.Uint16(rawAuthData[53:55])
|
||||||
|
if len(rawAuthData) < int(55+idLength) {
|
||||||
|
return ErrBadRequest.WithDetails("Authenticator attestation data length too short")
|
||||||
|
}
|
||||||
|
|
||||||
|
if idLength > maxCredentialIDLength {
|
||||||
|
return ErrBadRequest.WithDetails("Authenticator attestation data credential id length too long")
|
||||||
|
}
|
||||||
|
|
||||||
|
a.AttData.CredentialID = rawAuthData[55 : 55+idLength]
|
||||||
|
|
||||||
|
a.AttData.CredentialPublicKey, err = unmarshalCredentialPublicKey(rawAuthData[55+idLength:])
|
||||||
|
if err != nil {
|
||||||
|
return ErrBadRequest.WithDetails(fmt.Sprintf("Could not unmarshal Credential Public Key: %v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unmarshall the credential's Public Key into CBOR encoding.
|
||||||
|
func unmarshalCredentialPublicKey(keyBytes []byte) (rawBytes []byte, err error) {
|
||||||
|
var m any
|
||||||
|
|
||||||
|
if err = webauthncbor.Unmarshal(keyBytes, &m); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if rawBytes, err = webauthncbor.Marshal(m); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return rawBytes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResidentKeyRequired - Require that the key be private key resident to the client device.
func ResidentKeyRequired() *bool {
	v := true

	return &v
}
|
||||||
|
|
||||||
|
// ResidentKeyNotRequired - Do not require that the private key be resident to the client device.
func ResidentKeyNotRequired() *bool {
	v := false

	return &v
}
|
||||||
|
|
||||||
|
// Verify on AuthenticatorData handles Steps 13 through 15 & 17 for Registration
// and Steps 15 through 18 for Assertion.
//
// rpIdHash is the SHA-256 of the expected RP ID; appIDHash is accepted as an
// alternative match (NOTE(review): presumably supports the legacy FIDO AppID
// extension — confirm with callers).
func (a *AuthenticatorData) Verify(rpIdHash []byte, appIDHash []byte, userVerificationRequired bool, userPresenceRequired bool) (err error) {
	// Registration Step 13 & Assertion Step 15
	// Verify that the RP ID hash in authData is indeed the SHA-256
	// hash of the RP ID expected by the RP.
	if !bytes.Equal(a.RPIDHash, rpIdHash) && !bytes.Equal(a.RPIDHash, appIDHash) {
		return ErrVerification.WithInfo(fmt.Sprintf("RP Hash mismatch. Expected %x and Received %x", a.RPIDHash, rpIdHash))
	}

	// Registration Step 15 & Assertion Step 16
	// Verify that the User Present bit of the flags in authData is set.
	if userPresenceRequired && !a.Flags.UserPresent() {
		return ErrVerification.WithInfo("User presence required but flag not set by authenticator")
	}

	// Registration Step 15 & Assertion Step 17
	// If user verification is required for this assertion, verify that
	// the User Verified bit of the flags in authData is set.
	if userVerificationRequired && !a.Flags.UserVerified() {
		return ErrVerification.WithInfo("User verification required but flag not set by authenticator")
	}

	// Registration Step 17 & Assertion Step 18
	// Verify that the values of the client extension outputs in clientExtensionResults
	// and the authenticator extension outputs in the extensions in authData are as
	// expected, considering the client extension input values that were given as the
	// extensions option in the create() call. In particular, any extension identifier
	// values in the clientExtensionResults and the extensions in authData MUST be also be
	// present as extension identifier values in the extensions member of options, i.e., no
	// extensions are present that were not requested. In the general case, the meaning
	// of "are as expected" is specific to the Relying Party and which extensions are in use.

	// This is not yet fully implemented by the spec or by browsers.

	return nil
}
|
||||||
52
vendor/github.com/go-webauthn/webauthn/protocol/base64.go
generated
vendored
Normal file
52
vendor/github.com/go-webauthn/webauthn/protocol/base64.go
generated
vendored
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/base64"
|
||||||
|
"reflect"
|
||||||
|
)
|
||||||
|
|
||||||
|
// URLEncodedBase64 represents a byte slice holding URL-encoded base64 data.
// When fields of this type are unmarshalled from JSON, the data is base64
// decoded into a byte slice.
type URLEncodedBase64 []byte

// String returns the raw (unpadded) URL-safe base64 encoding of e.
func (e URLEncodedBase64) String() string {
	return base64.RawURLEncoding.EncodeToString(e)
}

// UnmarshalJSON base64 decodes a URL-encoded value, storing the result in the
// provided byte slice. A JSON null leaves the receiver untouched.
func (e *URLEncodedBase64) UnmarshalJSON(data []byte) error {
	if bytes.Equal(data, []byte("null")) {
		return nil
	}

	// Strip the surrounding JSON quotes and any trailing '=' padding so the
	// raw (unpadded) URL-safe decoder accepts the value either way.
	trimmed := bytes.TrimRight(bytes.Trim(data, `"`), "=")

	decoded := make([]byte, base64.RawURLEncoding.DecodedLen(len(trimmed)))

	n, err := base64.RawURLEncoding.Decode(decoded, trimmed)
	if err != nil {
		return err
	}

	reflect.ValueOf(e).Elem().SetBytes(decoded[:n])

	return nil
}

// MarshalJSON base64 encodes a non URL-encoded value, storing the result in the
// provided byte slice. A nil slice encodes as JSON null.
func (e URLEncodedBase64) MarshalJSON() ([]byte, error) {
	if e == nil {
		return []byte("null"), nil
	}

	return []byte(`"` + base64.RawURLEncoding.EncodeToString(e) + `"`), nil
}
|
||||||
20
vendor/github.com/go-webauthn/webauthn/protocol/challenge.go
generated
vendored
Normal file
20
vendor/github.com/go-webauthn/webauthn/protocol/challenge.go
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ChallengeLength - Length of bytes to generate for a challenge.
|
||||||
|
const ChallengeLength = 32
|
||||||
|
|
||||||
|
// CreateChallenge creates a new challenge that should be signed and returned by the authenticator. The spec recommends
|
||||||
|
// using at least 16 bytes with 100 bits of entropy. We use 32 bytes.
|
||||||
|
func CreateChallenge() (challenge URLEncodedBase64, err error) {
|
||||||
|
challenge = make([]byte, ChallengeLength)
|
||||||
|
|
||||||
|
if _, err = rand.Read(challenge); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return challenge, nil
|
||||||
|
}
|
||||||
285
vendor/github.com/go-webauthn/webauthn/protocol/client.go
generated
vendored
Normal file
285
vendor/github.com/go-webauthn/webauthn/protocol/client.go
generated
vendored
Normal file
@@ -0,0 +1,285 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/subtle"
|
||||||
|
"fmt"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CollectedClientData represents the contextual bindings of both the WebAuthn Relying Party
// and the client. It is a key-value mapping whose keys are strings. Values can be any type
// that has a valid encoding in JSON. Its structure is defined by the following Web IDL.
//
// Specification: §5.8.1. Client Data Used in WebAuthn Signatures (https://www.w3.org/TR/webauthn/#dictdef-collectedclientdata)
type CollectedClientData struct {
	// Type the string "webauthn.create" when creating new credentials,
	// and "webauthn.get" when getting an assertion from an existing credential. The
	// purpose of this member is to prevent certain types of signature confusion attacks
	// (where an attacker substitutes one legitimate signature for another).
	Type CeremonyType `json:"type"`
	// Challenge is the challenge value echoed back by the client; Verify compares
	// it verbatim (constant-time) against the stored challenge.
	Challenge string `json:"challenge"`
	// Origin is the client origin the ceremony was performed on; Verify matches it
	// against the Relying Party's allowed origins.
	Origin string `json:"origin"`
	// TopOrigin is the top-level origin; Verify only accepts it when CrossOrigin is true.
	TopOrigin string `json:"topOrigin,omitempty"`
	// CrossOrigin indicates the ceremony happened in a cross-origin context.
	CrossOrigin bool `json:"crossOrigin,omitempty"`
	// TokenBinding, when present, describes the Token Binding state of the TLS connection.
	TokenBinding *TokenBinding `json:"tokenBinding,omitempty"`

	// Chromium (Chrome) returns a hint sometimes about how to handle clientDataJSON in a safe manner.
	Hint string `json:"new_keys_may_be_added_here,omitempty"`
}
|
||||||
|
|
||||||
|
// CeremonyType distinguishes the WebAuthn ceremony a CollectedClientData
// belongs to: credential creation vs. assertion.
type CeremonyType string

const (
	// CreateCeremony is the ceremony type for credential registration ("webauthn.create").
	CreateCeremony CeremonyType = "webauthn.create"
	// AssertCeremony is the ceremony type for authentication assertions ("webauthn.get").
	AssertCeremony CeremonyType = "webauthn.get"
)
|
||||||
|
|
||||||
|
// TokenBinding describes the state of the Token Binding protocol for the TLS
// connection the client data was collected over.
type TokenBinding struct {
	// Status reports whether token binding was used; see TokenBindingStatus.
	Status TokenBindingStatus `json:"status"`
	// ID is the base64url encoding of the Token Binding ID; required when Status is Present.
	ID string `json:"id,omitempty"`
}
|
||||||
|
|
||||||
|
// TokenBindingStatus is the status member of a TokenBinding.
type TokenBindingStatus string

const (
	// Present indicates token binding was used when communicating with the
	// Relying Party. In this case, the id member MUST be present.
	Present TokenBindingStatus = "present"

	// Supported indicates the client supports token binding but it was not
	// negotiated when communicating with the Relying Party.
	Supported TokenBindingStatus = "supported"

	// NotSupported indicates token binding not supported
	// when communicating with the Relying Party.
	NotSupported TokenBindingStatus = "not-supported"
)
|
||||||
|
|
||||||
|
// FullyQualifiedOrigin returns the origin per the HTML spec: (scheme)://(host)[:(port)].
func FullyQualifiedOrigin(rawOrigin string) (fqOrigin string, err error) {
	// Android APK key-hash origins are opaque identifiers, not URLs; pass them through untouched.
	if strings.HasPrefix(rawOrigin, "android:apk-key-hash:") {
		return rawOrigin, nil
	}

	origin, err := url.ParseRequestURI(rawOrigin)
	if err != nil {
		return "", err
	}

	if origin.Host == "" {
		return "", fmt.Errorf("url '%s' does not have a host", rawOrigin)
	}

	// Drop every component that is not part of the origin tuple.
	origin.Path = ""
	origin.RawPath = ""
	origin.RawQuery = ""
	origin.User = nil

	return origin.String(), nil
}
|
||||||
|
|
||||||
|
// Verify handles steps 3 through 6 of verifying the registering client data of a
// new credential and steps 7 through 10 of verifying an authentication assertion
// See https://www.w3.org/TR/webauthn/#registering-a-new-credential
// and https://www.w3.org/TR/webauthn/#verifying-assertion
//
// Note: the rpTopOriginsVerify parameter does not accept the TopOriginVerificationMode value of
// TopOriginDefaultVerificationMode as it's expected this value is updated by the config validation process.
func (c *CollectedClientData) Verify(storedChallenge string, ceremony CeremonyType, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode) (err error) {
	// Registration Step 3. Verify that the value of C.type is webauthn.create.

	// Assertion Step 7. Verify that the value of C.type is the string webauthn.get.
	if c.Type != ceremony {
		return ErrVerification.WithDetails("Error validating ceremony type").WithInfo(fmt.Sprintf("Expected Value: %s, Received: %s", ceremony, c.Type))
	}

	// Registration Step 4. Verify that the value of C.challenge matches the challenge
	// that was sent to the authenticator in the create() call.

	// Assertion Step 8. Verify that the value of C.challenge matches the challenge
	// that was sent to the authenticator in the PublicKeyCredentialRequestOptions
	// passed to the get() call.

	// Constant-time comparison avoids leaking challenge contents via timing.
	challenge := c.Challenge
	if subtle.ConstantTimeCompare([]byte(storedChallenge), []byte(challenge)) != 1 {
		return ErrVerification.
			WithDetails("Error validating challenge").
			WithInfo(fmt.Sprintf("Expected b Value: %#v\nReceived b: %#v\n", storedChallenge, challenge))
	}

	// Registration Step 5 & Assertion Step 9. Verify that the value of C.origin matches
	// the Relying Party's origin.

	if !IsOriginInHaystack(c.Origin, rpOrigins) {
		return ErrVerification.
			WithDetails("Error validating origin").
			WithInfo(fmt.Sprintf("Expected Values: %s, Received: %s", rpOrigins, c.Origin))
	}

	// Top-origin checks only apply when verification is not ignored and the
	// client actually reported a topOrigin value.
	if rpTopOriginsVerify != TopOriginIgnoreVerificationMode {
		switch len(c.TopOrigin) {
		case 0:
			break
		default:
			// A non-empty topOrigin is only valid in a cross-origin ceremony.
			if !c.CrossOrigin {
				return ErrVerification.
					WithDetails("Error validating topOrigin").
					WithInfo("The topOrigin can't have values unless crossOrigin is true.")
			}

			// NOTE(review): fqTopOrigin is declared but never assigned, so the
			// "Received" value in the error below is always empty; it likely
			// should carry c.TopOrigin.
			var (
				fqTopOrigin        string
				possibleTopOrigins []string
			)

			// Select the haystack of acceptable top origins per the configured mode.
			switch rpTopOriginsVerify {
			case TopOriginExplicitVerificationMode:
				possibleTopOrigins = rpTopOrigins
			case TopOriginAutoVerificationMode:
				possibleTopOrigins = append(rpTopOrigins, rpOrigins...) //nolint:gocritic // This is intentional.
			case TopOriginImplicitVerificationMode:
				possibleTopOrigins = rpOrigins
			default:
				return ErrNotImplemented.WithDetails("Error handling unknown Top Origin verification mode")
			}

			if !IsOriginInHaystack(c.TopOrigin, possibleTopOrigins) {
				return ErrVerification.
					WithDetails("Error validating top origin").
					WithInfo(fmt.Sprintf("Expected Values: %s, Received: %s", possibleTopOrigins, fqTopOrigin))
			}
		}
	}

	// Registration Step 6 and Assertion Step 10. Verify that the value of C.tokenBinding.status
	// matches the state of Token Binding for the TLS connection over which the assertion was
	// obtained. If Token Binding was used on that TLS connection, also verify that C.tokenBinding.id
	// matches the base64url encoding of the Token Binding ID for the connection.
	if c.TokenBinding != nil {
		if c.TokenBinding.Status == "" {
			return ErrParsingData.WithDetails("Error decoding clientData, token binding present without status")
		}

		if c.TokenBinding.Status != Present && c.TokenBinding.Status != Supported && c.TokenBinding.Status != NotSupported {
			return ErrParsingData.
				WithDetails("Error decoding clientData, token binding present with invalid status").
				WithInfo(fmt.Sprintf("Got: %s", c.TokenBinding.Status))
		}
	}
	// Not yet fully implemented by the spec, browsers, and me.

	return nil
}
|
||||||
|
|
||||||
|
// TopOriginVerificationMode selects how CollectedClientData.Verify checks the
// client-reported topOrigin value.
type TopOriginVerificationMode int

const (
	// TopOriginDefaultVerificationMode represents the default verification mode for the Top Origin. At this time this
	// mode is the same as TopOriginIgnoreVerificationMode until such a time as the specification becomes stable. This
	// value is intended as a fallback value and implementers should very intentionally pick another option if they want
	// stability.
	TopOriginDefaultVerificationMode TopOriginVerificationMode = iota

	// TopOriginIgnoreVerificationMode ignores verification entirely.
	TopOriginIgnoreVerificationMode

	// TopOriginAutoVerificationMode represents the automatic verification mode for the Top Origin. In this mode,
	// if the Top Origins parameter has values it checks against these as well as the Origins parameter.
	TopOriginAutoVerificationMode

	// TopOriginImplicitVerificationMode represents the implicit verification mode for the Top Origin. In this mode the
	// Top Origin is verified against the allowed Origins values.
	TopOriginImplicitVerificationMode

	// TopOriginExplicitVerificationMode represents the explicit verification mode for the Top Origin. In this mode the
	// Top Origin is verified against the allowed Top Origins values.
	TopOriginExplicitVerificationMode
)
|
||||||
|
|
||||||
|
// IsOriginInHaystack checks if the needle is in the haystack using the mechanism to determine origin equality defined
// in HTML5 Section 5.3 and RFC3986 Section 6.2.1.
//
// If the needle has an 'http://' or 'https://' prefix (case-insensitive) and parses as a URL, each haystack entry that
// also parses that way is compared by scheme and host (with default ports normalized away), case-insensitively.
// Otherwise equality falls back to simple string comparison.
//
// This function deliberately ignores Apple Associated Domains entirely as Apple is using an unassigned Well-Known URI
// in breach of Well-Known Uniform Resource Identifiers (RFC8615).
//
// See (Origin Definition): https://www.w3.org/TR/2011/WD-html5-20110525/origin-0.html
//
// See (Simple String Comparison Definition): https://datatracker.ietf.org/doc/html/rfc3986#section-6.2.1
//
// See (Apple Associated Domains): https://developer.apple.com/documentation/xcode/supporting-associated-domains
//
// See (IANA Well Known URI Assignments): https://www.iana.org/assignments/well-known-uris/well-known-uris.xhtml
//
// See (Well-Known Uniform Resource Identifiers): https://datatracker.ietf.org/doc/html/rfc8615
func IsOriginInHaystack(needle string, haystack []string) bool {
	needleURI := parseOriginURI(needle)

	// Non-URL needles (e.g. android:apk-key-hash:...) use plain string equality.
	if needleURI == nil {
		for _, candidate := range haystack {
			if candidate == needle {
				return true
			}
		}

		return false
	}

	for _, candidate := range haystack {
		candidateURI := parseOriginURI(candidate)
		if candidateURI == nil {
			continue
		}

		if isOriginEqual(needleURI, candidateURI) {
			return true
		}
	}

	return false
}

// isOriginEqual reports whether two parsed origins share a scheme and host,
// compared case-insensitively.
func isOriginEqual(a *url.URL, b *url.URL) bool {
	return strings.EqualFold(a.Scheme, b.Scheme) && strings.EqualFold(a.Host, b.Host)
}

// parseOriginURI parses raw as an http(s) URL with the scheme's default port
// normalized away, returning nil when raw is not an http(s) URL.
func parseOriginURI(raw string) *url.URL {
	if !isPossibleFQDN(raw) {
		return nil
	}

	// The parse error is irrelevant: an unparseable value is simply not an FQDN origin.
	uri, _ := url.Parse(raw)

	if uri == nil {
		return nil
	}

	// Strip an explicit port when it is the scheme's default.
	if (uri.Scheme == "http" && uri.Port() == "80") || (uri.Scheme == "https" && uri.Port() == "443") {
		uri.Host = uri.Hostname()
	}

	return uri
}

// isPossibleFQDN reports whether raw starts with an 'http://' or 'https://'
// prefix, case-insensitively.
func isPossibleFQDN(raw string) bool {
	lowered := strings.ToLower(raw)

	return strings.HasPrefix(lowered, "http://") || strings.HasPrefix(lowered, "https://")
}
|
||||||
226
vendor/github.com/go-webauthn/webauthn/protocol/const.go
generated
vendored
Normal file
226
vendor/github.com/go-webauthn/webauthn/protocol/const.go
generated
vendored
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import "encoding/asn1"
|
||||||
|
|
||||||
|
// String keys for attestation statement members; the names mirror the WebAuthn
// attestation statement fields they index.
const (
	stmtAttStmt   = "attStmt"
	stmtFmt       = "fmt"
	stmtX5C       = "x5c"
	stmtSignature = "sig"
	stmtAlgorithm = "alg"
	stmtVersion   = "ver"
	stmtECDAAKID  = "ecdaaKeyId"
	stmtCertInfo  = "certInfo"
	stmtPubArea   = "pubArea"
)

const (
	// versionTPM20 is the TPM specification version string "2.0".
	// NOTE(review): presumably compared against the "ver" member of TPM
	// attestation statements — confirm at call sites.
	versionTPM20 = "2.0"
)

const (
	// attStatementAndroidSafetyNetHostname is the attest.android.com hostname.
	// NOTE(review): presumably validated against the SafetyNet attestation
	// certificate — confirm at call sites.
	attStatementAndroidSafetyNetHostname = "attest.android.com"
)
|
||||||
|
|
||||||
|
var (
	// internalRemappedAuthenticatorTransport handles remapping of AuthenticatorTransport values. It intentionally
	// remaps only transports that never made recommendation but are being used in the wild. It
	// should not be used to handle transports that were ratified.
	internalRemappedAuthenticatorTransport = map[string]AuthenticatorTransport{
		// The Authenticator Transport 'hybrid' was previously named 'cable', even if only for a short period.
		"cable": Hybrid,
	}
)
|
||||||
|
|
||||||
|
const (
	/*
		Apple Anonymous Attestation Root 1 in PEM form.

		Source: https://www.apple.com/certificateauthority/Apple_WebAuthn_Root_CA.pem
		SHA256 Fingerprints:
		Root 1: 09:15:DD:5C:07:A2:8D:B5:49:D1:F6:77:BB:5A:75:D4:BF:BE:95:61:A7:73:42:43:27:76:2E:9E:02:F9:BB:29
	*/

	// certificateAppleRoot1 must remain byte-for-byte identical to the
	// published root certificate; verify any change against the fingerprint
	// above. NOTE(review): presumably used as the trust anchor for Apple
	// attestation verification — confirm at call sites.
	certificateAppleRoot1 = `-----BEGIN CERTIFICATE-----
MIICEjCCAZmgAwIBAgIQaB0BbHo84wIlpQGUKEdXcTAKBggqhkjOPQQDAzBLMR8w
HQYDVQQDDBZBcHBsZSBXZWJBdXRobiBSb290IENBMRMwEQYDVQQKDApBcHBsZSBJ
bmMuMRMwEQYDVQQIDApDYWxpZm9ybmlhMB4XDTIwMDMxODE4MjEzMloXDTQ1MDMx
NTAwMDAwMFowSzEfMB0GA1UEAwwWQXBwbGUgV2ViQXV0aG4gUm9vdCBDQTETMBEG
A1UECgwKQXBwbGUgSW5jLjETMBEGA1UECAwKQ2FsaWZvcm5pYTB2MBAGByqGSM49
AgEGBSuBBAAiA2IABCJCQ2pTVhzjl4Wo6IhHtMSAzO2cv+H9DQKev3//fG59G11k
xu9eI0/7o6V5uShBpe1u6l6mS19S1FEh6yGljnZAJ+2GNP1mi/YK2kSXIuTHjxA/
pcoRf7XkOtO4o1qlcaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUJtdk
2cV4wlpn0afeaxLQG2PxxtcwDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
MGQCMFrZ+9DsJ1PW9hfNdBywZDsWDbWFp28it1d/5w2RPkRX3Bbn/UbDTNLx7Jr3
jAGGiQIwHFj+dJZYUJR786osByBelJYsVZd2GbHQu209b5RCmGQ21gpSAk9QZW4B
1bWeT0vT
-----END CERTIFICATE-----`
)
|
||||||
|
|
||||||
|
const (
	/*
		Google Hardware Attestation Root 1 through Root 5 in PEM form.

		Used as trust anchors when verifying the certificate chain of an 'android-key' format attestation statement.

		Source: https://developer.android.com/training/articles/security-key-attestation#root_certificate
		SHA256 Fingerprints:
			Root 1: CE:DB:1C:B6:DC:89:6A:E5:EC:79:73:48:BC:E9:28:67:53:C2:B3:8E:E7:1C:E0:FB:E3:4A:9A:12:48:80:0D:FC
			Root 2: 6D:9D:B4:CE:6C:5C:0B:29:31:66:D0:89:86:E0:57:74:A8:77:6C:EB:52:5D:9E:43:29:52:0D:E1:2B:A4:BC:C0
			Root 3: C1:98:4A:3E:F4:5C:1E:2A:91:85:51:DE:10:60:3C:86:F7:05:1B:22:49:C4:89:1C:AE:32:30:EA:BD:0C:97:D5
			Root 4: 1E:F1:A0:4B:8B:A5:8A:B9:45:89:AC:49:8C:89:82:A7:83:F2:4E:A7:30:7E:01:59:A0:C3:A7:3B:37:7D:87:CC
			Root 5: AB:66:41:17:8A:36:E1:79:AA:0C:1C:DD:DF:9A:16:EB:45:FA:20:94:3E:2B:8C:D7:C7:C0:5C:26:CF:8B:48:7A
	*/

	// Root 1 (see fingerprint table above).
	certificateAndroidKeyRoot1 = `-----BEGIN CERTIFICATE-----
MIIFHDCCAwSgAwIBAgIJAPHBcqaZ6vUdMA0GCSqGSIb3DQEBCwUAMBsxGTAXBgNV
BAUTEGY5MjAwOWU4NTNiNmIwNDUwHhcNMjIwMzIwMTgwNzQ4WhcNNDIwMzE1MTgw
NzQ4WjAbMRkwFwYDVQQFExBmOTIwMDllODUzYjZiMDQ1MIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAr7bHgiuxpwHsK7Qui8xUFmOr75gvMsd/dTEDDJdS
Sxtf6An7xyqpRR90PL2abxM1dEqlXnf2tqw1Ne4Xwl5jlRfdnJLmN0pTy/4lj4/7
tv0Sk3iiKkypnEUtR6WfMgH0QZfKHM1+di+y9TFRtv6y//0rb+T+W8a9nsNL/ggj
nar86461qO0rOs2cXjp3kOG1FEJ5MVmFmBGtnrKpa73XpXyTqRxB/M0n1n/W9nGq
C4FSYa04T6N5RIZGBN2z2MT5IKGbFlbC8UrW0DxW7AYImQQcHtGl/m00QLVWutHQ
oVJYnFPlXTcHYvASLu+RhhsbDmxMgJJ0mcDpvsC4PjvB+TxywElgS70vE0XmLD+O
JtvsBslHZvPBKCOdT0MS+tgSOIfga+z1Z1g7+DVagf7quvmag8jfPioyKvxnK/Eg
sTUVi2ghzq8wm27ud/mIM7AY2qEORR8Go3TVB4HzWQgpZrt3i5MIlCaY504LzSRi
igHCzAPlHws+W0rB5N+er5/2pJKnfBSDiCiFAVtCLOZ7gLiMm0jhO2B6tUXHI/+M
RPjy02i59lINMRRev56GKtcd9qO/0kUJWdZTdA2XoS82ixPvZtXQpUpuL12ab+9E
aDK8Z4RHJYYfCT3Q5vNAXaiWQ+8PTWm2QgBR/bkwSWc+NpUFgNPN9PvQi8WEg5Um
AGMCAwEAAaNjMGEwHQYDVR0OBBYEFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMB8GA1Ud
IwQYMBaAFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgIEMA0GCSqGSIb3DQEBCwUAA4ICAQB8cMqTllHc8U+qCrOlg3H7
174lmaCsbo/bJ0C17JEgMLb4kvrqsXZs01U3mB/qABg/1t5Pd5AORHARs1hhqGIC
W/nKMav574f9rZN4PC2ZlufGXb7sIdJpGiO9ctRhiLuYuly10JccUZGEHpHSYM2G
tkgYbZba6lsCPYAAP83cyDV+1aOkTf1RCp/lM0PKvmxYN10RYsK631jrleGdcdkx
oSK//mSQbgcWnmAEZrzHoF1/0gso1HZgIn0YLzVhLSA/iXCX4QT2h3J5z3znluKG
1nv8NQdxei2DIIhASWfu804CA96cQKTTlaae2fweqXjdN1/v2nqOhngNyz1361mF
mr4XmaKH/ItTwOe72NI9ZcwS1lVaCvsIkTDCEXdm9rCNPAY10iTunIHFXRh+7KPz
lHGewCq/8TOohBRn0/NNfh7uRslOSZ/xKbN9tMBtw37Z8d2vvnXq/YWdsm1+JLVw
n6yYD/yacNJBlwpddla8eaVMjsF6nBnIgQOf9zKSe06nSTqvgwUHosgOECZJZ1Eu
zbH4yswbt02tKtKEFhx+v+OTge/06V+jGsqTWLsfrOCNLuA8H++z+pUENmpqnnHo
vaI47gC+TNpkgYGkkBT6B/m/U01BuOBBTzhIlMEZq9qkDWuM2cA5kW5V3FJUcfHn
w1IdYIg2Wxg7yHcQZemFQg==
-----END CERTIFICATE-----`

	// Root 2 (see fingerprint table above).
	certificateAndroidKeyRoot2 = `-----BEGIN CERTIFICATE-----
MIICIjCCAaigAwIBAgIRAISp0Cl7DrWK5/8OgN52BgUwCgYIKoZIzj0EAwMwUjEc
MBoGA1UEAwwTS2V5IEF0dGVzdGF0aW9uIENBMTEQMA4GA1UECwwHQW5kcm9pZDET
MBEGA1UECgwKR29vZ2xlIExMQzELMAkGA1UEBhMCVVMwHhcNMjUwNzE3MjIzMjE4
WhcNMzUwNzE1MjIzMjE4WjBSMRwwGgYDVQQDDBNLZXkgQXR0ZXN0YXRpb24gQ0Ex
MRAwDgYDVQQLDAdBbmRyb2lkMRMwEQYDVQQKDApHb29nbGUgTExDMQswCQYDVQQG
EwJVUzB2MBAGByqGSM49AgEGBSuBBAAiA2IABCPaI3FO3z5bBQo8cuiEas4HjqCt
G/mLFfRT0MsIssPBEEU5Cfbt6sH5yOAxqEi5QagpU1yX4HwnGb7OtBYpDTB57uH5
Eczm34A5FNijV3s0/f0UPl7zbJcTx6xwqMIRq6NCMEAwDwYDVR0TAQH/BAUwAwEB
/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFFIyuyz7RkOb3NaBqQ5lZuA0QepA
MAoGCCqGSM49BAMDA2gAMGUCMETfjPO/HwqReR2CS7p0ZWoD/LHs6hDi422opifH
EUaYLxwGlT9SLdjkVpz0UUOR5wIxAIoGyxGKRHVTpqpGRFiJtQEOOTp/+s1GcxeY
uR2zh/80lQyu9vAFCj6E4AXc+osmRg==
-----END CERTIFICATE-----`

	// Root 3 (see fingerprint table above).
	certificateAndroidKeyRoot3 = `-----BEGIN CERTIFICATE-----
MIIFYDCCA0igAwIBAgIJAOj6GWMU0voYMA0GCSqGSIb3DQEBCwUAMBsxGTAXBgNV
BAUTEGY5MjAwOWU4NTNiNmIwNDUwHhcNMTYwNTI2MTYyODUyWhcNMjYwNTI0MTYy
ODUyWjAbMRkwFwYDVQQFExBmOTIwMDllODUzYjZiMDQ1MIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAr7bHgiuxpwHsK7Qui8xUFmOr75gvMsd/dTEDDJdS
Sxtf6An7xyqpRR90PL2abxM1dEqlXnf2tqw1Ne4Xwl5jlRfdnJLmN0pTy/4lj4/7
tv0Sk3iiKkypnEUtR6WfMgH0QZfKHM1+di+y9TFRtv6y//0rb+T+W8a9nsNL/ggj
nar86461qO0rOs2cXjp3kOG1FEJ5MVmFmBGtnrKpa73XpXyTqRxB/M0n1n/W9nGq
C4FSYa04T6N5RIZGBN2z2MT5IKGbFlbC8UrW0DxW7AYImQQcHtGl/m00QLVWutHQ
oVJYnFPlXTcHYvASLu+RhhsbDmxMgJJ0mcDpvsC4PjvB+TxywElgS70vE0XmLD+O
JtvsBslHZvPBKCOdT0MS+tgSOIfga+z1Z1g7+DVagf7quvmag8jfPioyKvxnK/Eg
sTUVi2ghzq8wm27ud/mIM7AY2qEORR8Go3TVB4HzWQgpZrt3i5MIlCaY504LzSRi
igHCzAPlHws+W0rB5N+er5/2pJKnfBSDiCiFAVtCLOZ7gLiMm0jhO2B6tUXHI/+M
RPjy02i59lINMRRev56GKtcd9qO/0kUJWdZTdA2XoS82ixPvZtXQpUpuL12ab+9E
aDK8Z4RHJYYfCT3Q5vNAXaiWQ+8PTWm2QgBR/bkwSWc+NpUFgNPN9PvQi8WEg5Um
AGMCAwEAAaOBpjCBozAdBgNVHQ4EFgQUNmHhAHyIBQlRi0RsR/8aTMnqTxIwHwYD
VR0jBBgwFoAUNmHhAHyIBQlRi0RsR/8aTMnqTxIwDwYDVR0TAQH/BAUwAwEB/zAO
BgNVHQ8BAf8EBAMCAYYwQAYDVR0fBDkwNzA1oDOgMYYvaHR0cHM6Ly9hbmRyb2lk
Lmdvb2dsZWFwaXMuY29tL2F0dGVzdGF0aW9uL2NybC8wDQYJKoZIhvcNAQELBQAD
ggIBACDIw41L3KlXG0aMiS//cqrG+EShHUGo8HNsw30W1kJtjn6UBwRM6jnmiwfB
Pb8VA91chb2vssAtX2zbTvqBJ9+LBPGCdw/E53Rbf86qhxKaiAHOjpvAy5Y3m00m
qC0w/Zwvju1twb4vhLaJ5NkUJYsUS7rmJKHHBnETLi8GFqiEsqTWpG/6ibYCv7rY
DBJDcR9W62BW9jfIoBQcxUCUJouMPH25lLNcDc1ssqvC2v7iUgI9LeoM1sNovqPm
QUiG9rHli1vXxzCyaMTjwftkJLkf6724DFhuKug2jITV0QkXvaJWF4nUaHOTNA4u
JU9WDvZLI1j83A+/xnAJUucIv/zGJ1AMH2boHqF8CY16LpsYgBt6tKxxWH00XcyD
CdW2KlBCeqbQPcsFmWyWugxdcekhYsAWyoSf818NUsZdBWBaR/OukXrNLfkQ79Iy
ZohZbvabO/X+MVT3rriAoKc8oE2Uws6DF+60PV7/WIPjNvXySdqspImSN78mflxD
qwLqRBYkA3I75qppLGG9rp7UCdRjxMl8ZDBld+7yvHVgt1cVzJx9xnyGCC23Uaic
MDSXYrB4I4WHXPGjxhZuCuPBLTdOLU8YRvMYdEvYebWHMpvwGCF6bAx3JBpIeOQ1
wDB5y0USicV3YgYGmi+NZfhA4URSh77Yd6uuJOJENRaNVTzk
-----END CERTIFICATE-----`

	// Root 4 (see fingerprint table above).
	certificateAndroidKeyRoot4 = `-----BEGIN CERTIFICATE-----
MIIFHDCCAwSgAwIBAgIJANUP8luj8tazMA0GCSqGSIb3DQEBCwUAMBsxGTAXBgNV
BAUTEGY5MjAwOWU4NTNiNmIwNDUwHhcNMTkxMTIyMjAzNzU4WhcNMzQxMTE4MjAz
NzU4WjAbMRkwFwYDVQQFExBmOTIwMDllODUzYjZiMDQ1MIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAr7bHgiuxpwHsK7Qui8xUFmOr75gvMsd/dTEDDJdS
Sxtf6An7xyqpRR90PL2abxM1dEqlXnf2tqw1Ne4Xwl5jlRfdnJLmN0pTy/4lj4/7
tv0Sk3iiKkypnEUtR6WfMgH0QZfKHM1+di+y9TFRtv6y//0rb+T+W8a9nsNL/ggj
nar86461qO0rOs2cXjp3kOG1FEJ5MVmFmBGtnrKpa73XpXyTqRxB/M0n1n/W9nGq
C4FSYa04T6N5RIZGBN2z2MT5IKGbFlbC8UrW0DxW7AYImQQcHtGl/m00QLVWutHQ
oVJYnFPlXTcHYvASLu+RhhsbDmxMgJJ0mcDpvsC4PjvB+TxywElgS70vE0XmLD+O
JtvsBslHZvPBKCOdT0MS+tgSOIfga+z1Z1g7+DVagf7quvmag8jfPioyKvxnK/Eg
sTUVi2ghzq8wm27ud/mIM7AY2qEORR8Go3TVB4HzWQgpZrt3i5MIlCaY504LzSRi
igHCzAPlHws+W0rB5N+er5/2pJKnfBSDiCiFAVtCLOZ7gLiMm0jhO2B6tUXHI/+M
RPjy02i59lINMRRev56GKtcd9qO/0kUJWdZTdA2XoS82ixPvZtXQpUpuL12ab+9E
aDK8Z4RHJYYfCT3Q5vNAXaiWQ+8PTWm2QgBR/bkwSWc+NpUFgNPN9PvQi8WEg5Um
AGMCAwEAAaNjMGEwHQYDVR0OBBYEFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMB8GA1Ud
IwQYMBaAFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgIEMA0GCSqGSIb3DQEBCwUAA4ICAQBOMaBc8oumXb2voc7XCWnu
XKhBBK3e2KMGz39t7lA3XXRe2ZLLAkLM5y3J7tURkf5a1SutfdOyXAmeE6SRo83U
h6WszodmMkxK5GM4JGrnt4pBisu5igXEydaW7qq2CdC6DOGjG+mEkN8/TA6p3cno
L/sPyz6evdjLlSeJ8rFBH6xWyIZCbrcpYEJzXaUOEaxxXxgYz5/cTiVKN2M1G2ok
QBUIYSY6bjEL4aUN5cfo7ogP3UvliEo3Eo0YgwuzR2v0KR6C1cZqZJSTnghIC/vA
D32KdNQ+c3N+vl2OTsUVMC1GiWkngNx1OO1+kXW+YTnnTUOtOIswUP/Vqd5SYgAI
mMAfY8U9/iIgkQj6T2W6FsScy94IN9fFhE1UtzmLoBIuUFsVXJMTz+Jucth+IqoW
Fua9v1R93/k98p41pjtFX+H8DslVgfP097vju4KDlqN64xV1grw3ZLl4CiOe/A91
oeLm2UHOq6wn3esB4r2EIQKb6jTVGu5sYCcdWpXr0AUVqcABPdgL+H7qJguBw09o
jm6xNIrw2OocrDKsudk/okr/AwqEyPKw9WnMlQgLIKw1rODG2NvU9oR3GVGdMkUB
ZutL8VuFkERQGt6vQ2OCw0sV47VMkuYbacK/xyZFiRcrPJPb41zgbQj9XAEyLKCH
ex0SdDrx+tWUDqG8At2JHA==
-----END CERTIFICATE-----`

	// Root 5 (see fingerprint table above).
	certificateAndroidKeyRoot5 = `-----BEGIN CERTIFICATE-----
MIIFHDCCAwSgAwIBAgIJAMNrfES5rhgxMA0GCSqGSIb3DQEBCwUAMBsxGTAXBgNV
BAUTEGY5MjAwOWU4NTNiNmIwNDUwHhcNMjExMTE3MjMxMDQyWhcNMzYxMTEzMjMx
MDQyWjAbMRkwFwYDVQQFExBmOTIwMDllODUzYjZiMDQ1MIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAr7bHgiuxpwHsK7Qui8xUFmOr75gvMsd/dTEDDJdS
Sxtf6An7xyqpRR90PL2abxM1dEqlXnf2tqw1Ne4Xwl5jlRfdnJLmN0pTy/4lj4/7
tv0Sk3iiKkypnEUtR6WfMgH0QZfKHM1+di+y9TFRtv6y//0rb+T+W8a9nsNL/ggj
nar86461qO0rOs2cXjp3kOG1FEJ5MVmFmBGtnrKpa73XpXyTqRxB/M0n1n/W9nGq
C4FSYa04T6N5RIZGBN2z2MT5IKGbFlbC8UrW0DxW7AYImQQcHtGl/m00QLVWutHQ
oVJYnFPlXTcHYvASLu+RhhsbDmxMgJJ0mcDpvsC4PjvB+TxywElgS70vE0XmLD+O
JtvsBslHZvPBKCOdT0MS+tgSOIfga+z1Z1g7+DVagf7quvmag8jfPioyKvxnK/Eg
sTUVi2ghzq8wm27ud/mIM7AY2qEORR8Go3TVB4HzWQgpZrt3i5MIlCaY504LzSRi
igHCzAPlHws+W0rB5N+er5/2pJKnfBSDiCiFAVtCLOZ7gLiMm0jhO2B6tUXHI/+M
RPjy02i59lINMRRev56GKtcd9qO/0kUJWdZTdA2XoS82ixPvZtXQpUpuL12ab+9E
aDK8Z4RHJYYfCT3Q5vNAXaiWQ+8PTWm2QgBR/bkwSWc+NpUFgNPN9PvQi8WEg5Um
AGMCAwEAAaNjMGEwHQYDVR0OBBYEFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMB8GA1Ud
IwQYMBaAFDZh4QB8iAUJUYtEbEf/GkzJ6k8SMA8GA1UdEwEB/wQFMAMBAf8wDgYD
VR0PAQH/BAQDAgIEMA0GCSqGSIb3DQEBCwUAA4ICAQBTNNZe5cuf8oiq+jV0itTG
zWVhSTjOBEk2FQvh11J3o3lna0o7rd8RFHnN00q4hi6TapFhh4qaw/iG6Xg+xOan
63niLWIC5GOPFgPeYXM9+nBb3zZzC8ABypYuCusWCmt6Tn3+Pjbz3MTVhRGXuT/T
QH4KGFY4PhvzAyXwdjTOCXID+aHud4RLcSySr0Fq/L+R8TWalvM1wJJPhyRjqRCJ
erGtfBagiALzvhnmY7U1qFcS0NCnKjoO7oFedKdWlZz0YAfu3aGCJd4KHT0MsGiL
Zez9WP81xYSrKMNEsDK+zK5fVzw6jA7cxmpXcARTnmAuGUeI7VVDhDzKeVOctf3a
0qQLwC+d0+xrETZ4r2fRGNw2YEs2W8Qj6oDcfPvq9JySe7pJ6wcHnl5EZ0lwc4xH
7Y4Dx9RA1JlfooLMw3tOdJZH0enxPXaydfAD3YifeZpFaUzicHeLzVJLt9dvGB0b
HQLE4+EqKFgOZv2EoP686DQqbVS1u+9k0p2xbMA105TBIk7npraa8VM0fnrRKi7w
lZKwdH+aNAyhbXRW9xsnODJ+g8eF452zvbiKKngEKirK5LGieoXBX7tZ9D1GNBH2
Ob3bKOwwIWdEFle/YF/h6zWgdeoaNGDqVBrLr2+0DtWoiB1aDEjLWl9FmyIUyUm7
mD/vFDkzF+wm7cyWpQpCVQ==
-----END CERTIFICATE-----`
)
|
||||||
|
|
||||||
|
var (
	// Object identifiers referenced while parsing and verifying attestation certificates.
	oidExtensionAppleAnonymousAttestation = asn1.ObjectIdentifier{1, 2, 840, 113635, 100, 8, 2}       // 1.2.840.113635.100.8.2 — Apple Anonymous Attestation extension.
	oidExtensionAndroidKeystore           = asn1.ObjectIdentifier{1, 3, 6, 1, 4, 1, 11129, 2, 1, 17}  // 1.3.6.1.4.1.11129.2.1.17 — Android Keystore attestation extension.
	oidExtensionSubjectAltName            = asn1.ObjectIdentifier{2, 5, 29, 17}                       // 2.5.29.17 — X.509 Subject Alternative Name.
	oidExtensionExtendedKeyUsage          = asn1.ObjectIdentifier{2, 5, 29, 37}                       // 2.5.29.37 — X.509 Extended Key Usage.
	oidExtensionBasicConstraints          = asn1.ObjectIdentifier{2, 5, 29, 19}                       // 2.5.29.19 — X.509 Basic Constraints.
	oidFIDOGenCeAAGUID                    = asn1.ObjectIdentifier{1, 3, 6, 1, 4, 1, 45724, 1, 1, 4}   // 1.3.6.1.4.1.45724.1.1.4 — FIDO AAGUID certificate extension.
	oidMicrosoftKpPrivacyCA               = asn1.ObjectIdentifier{1, 3, 6, 1, 4, 1, 311, 21, 36}      // 1.3.6.1.4.1.311.21.36 — Microsoft key purpose, Privacy CA.
	oidTCGKpAIKCertificate                = asn1.ObjectIdentifier{2, 23, 133, 8, 3}                   // 2.23.133.8.3 — TCG key purpose, AIK certificate.
	oidTCGAtTpmManufacturer               = asn1.ObjectIdentifier{2, 23, 133, 2, 1}                   // 2.23.133.2.1 — TCG attribute, TPM manufacturer.
	oidTCGAtTpmModel                      = asn1.ObjectIdentifier{2, 23, 133, 2, 2}                   // 2.23.133.2.2 — TCG attribute, TPM model.
	oidTCGAtTPMVersion                    = asn1.ObjectIdentifier{2, 23, 133, 2, 3}                   // 2.23.133.2.3 — TCG attribute, TPM firmware version.
)
|
||||||
265
vendor/github.com/go-webauthn/webauthn/protocol/credential.go
generated
vendored
Normal file
265
vendor/github.com/go-webauthn/webauthn/protocol/credential.go
generated
vendored
Normal file
@@ -0,0 +1,265 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha256"
|
||||||
|
"encoding/base64"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Credential is the basic credential type from the Credential Management specification that is inherited by WebAuthn's
// PublicKeyCredential type.
//
// Specification: Credential Management §2.2. The Credential Interface (https://www.w3.org/TR/credential-management/#credential)
type Credential struct {
	// ID is The credential’s identifier. The requirements for the
	// identifier are distinct for each type of credential. It might
	// represent a username for username/password tuples, for example.
	ID string `json:"id"`

	// Type is the value of the object’s interface object's [[type]] slot,
	// which specifies the credential type represented by this object.
	// This should be type "public-key" for Webauthn credentials.
	Type string `json:"type"`
}

// ParsedCredential is the parsed PublicKeyCredential interface, inherits from Credential, and contains
// the attributes that are returned to the caller when a new credential is created, or a new assertion is requested.
type ParsedCredential struct {
	ID   string `cbor:"id"`
	Type string `cbor:"type"`
}

// PublicKeyCredential is the undecoded (wire) form of a WebAuthn PublicKeyCredential: the base Credential
// fields plus the raw credential ID, the client extension outputs, and the reported authenticator attachment.
type PublicKeyCredential struct {
	Credential

	RawID                   URLEncodedBase64                      `json:"rawId"`
	ClientExtensionResults  AuthenticationExtensionsClientOutputs `json:"clientExtensionResults,omitempty"`
	AuthenticatorAttachment string                                `json:"authenticatorAttachment,omitempty"`
}

// ParsedPublicKeyCredential is the decoded counterpart of PublicKeyCredential: the RawID is raw bytes and
// the authenticator attachment has been mapped onto the AuthenticatorAttachment type.
type ParsedPublicKeyCredential struct {
	ParsedCredential

	RawID                   []byte                                `json:"rawId"`
	ClientExtensionResults  AuthenticationExtensionsClientOutputs `json:"clientExtensionResults,omitempty"`
	AuthenticatorAttachment AuthenticatorAttachment               `json:"authenticatorAttachment,omitempty"`
}

// CredentialCreationResponse is the JSON body a client sends back after a registration (create) ceremony,
// pairing the PublicKeyCredential fields with the authenticator's attestation response.
type CredentialCreationResponse struct {
	PublicKeyCredential

	AttestationResponse AuthenticatorAttestationResponse `json:"response"`
}

// ParsedCredentialCreationData is the fully parsed form of a CredentialCreationResponse; Raw retains the
// original response so verification can re-hash the exact client data bytes.
type ParsedCredentialCreationData struct {
	ParsedPublicKeyCredential

	Response ParsedAttestationResponse
	Raw      CredentialCreationResponse
}
|
||||||
|
|
||||||
|
// ParseCredentialCreationResponse is a non-agnostic function for parsing a registration response from the http library
// from stdlib. It handles some standard cleanup operations.
func ParseCredentialCreationResponse(request *http.Request) (*ParsedCredentialCreationData, error) {
	if request == nil || request.Body == nil {
		return nil, ErrBadRequest.WithDetails("No response given")
	}

	// Drain and close the body after parsing so the underlying connection can be reused.
	defer func() {
		_, _ = io.Copy(io.Discard, request.Body)
		_ = request.Body.Close()
	}()

	return ParseCredentialCreationResponseBody(request.Body)
}

// ParseCredentialCreationResponseBody is an agnostic version of ParseCredentialCreationResponse. Implementers are
// therefore responsible for managing cleanup.
func ParseCredentialCreationResponseBody(body io.Reader) (pcc *ParsedCredentialCreationData, err error) {
	var ccr CredentialCreationResponse

	// decodeBody rejects bodies with trailing data after the JSON value.
	if err = decodeBody(body, &ccr); err != nil {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo(err.Error()).WithError(err)
	}

	return ccr.Parse()
}

// ParseCredentialCreationResponseBytes is an alternative version of ParseCredentialCreationResponseBody that just takes
// a byte slice.
func ParseCredentialCreationResponseBytes(data []byte) (pcc *ParsedCredentialCreationData, err error) {
	var ccr CredentialCreationResponse

	// decodeBytes rejects input with trailing data after the JSON value.
	if err = decodeBytes(data, &ccr); err != nil {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo(err.Error()).WithError(err)
	}

	return ccr.Parse()
}
|
||||||
|
|
||||||
|
// Parse validates and parses the CredentialCreationResponse into a ParsedCredentialCreationData. This receiver
// is unlikely to be expressly guaranteed under the versioning policy. Users looking for this guarantee should see
// ParseCredentialCreationResponseBody instead, and this receiver should only be used if that function is inadequate
// for their use case.
func (ccr CredentialCreationResponse) Parse() (pcc *ParsedCredentialCreationData, err error) {
	if ccr.ID == "" {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo("Missing ID")
	}

	// The credential ID must be valid, non-empty base64url without padding.
	testB64, err := base64.RawURLEncoding.DecodeString(ccr.ID)
	if err != nil || len(testB64) == 0 {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo("ID not base64.RawURLEncoded")
	}

	if ccr.Type == "" {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo("Missing type")
	}

	if ccr.Type != string(PublicKeyCredentialType) {
		return nil, ErrBadRequest.WithDetails("Parse error for Registration").WithInfo("Type not public-key")
	}

	response, err := ccr.AttestationResponse.Parse()
	if err != nil {
		return nil, ErrParsingData.WithDetails("Error parsing attestation response")
	}

	// Map the client-reported attachment string onto the typed value; unknown or
	// empty values deliberately leave attachment as the zero value.
	var attachment AuthenticatorAttachment

	switch ccr.AuthenticatorAttachment {
	case "platform":
		attachment = Platform
	case "cross-platform":
		attachment = CrossPlatform
	}

	return &ParsedCredentialCreationData{
		ParsedPublicKeyCredential{
			ParsedCredential{ccr.ID, ccr.Type}, ccr.RawID, ccr.ClientExtensionResults, attachment,
		},
		*response,
		ccr,
	}, nil
}
|
||||||
|
|
||||||
|
// Verify the Client and Attestation data. On success it returns the SHA-256 hash of the raw clientDataJSON so the
// caller can reuse it; on an attestation-object failure the hash is returned alongside the error.
//
// Specification: §7.1. Registering a New Credential (https://www.w3.org/TR/webauthn/#sctn-registering-a-new-credential)
func (pcc *ParsedCredentialCreationData) Verify(storedChallenge string, verifyUser bool, verifyUserPresence bool, relyingPartyID string, rpOrigins, rpTopOrigins []string, rpTopOriginsVerify TopOriginVerificationMode, mds metadata.Provider, credParams []CredentialParameter) (clientDataHash []byte, err error) {
	// Handles steps 3 through 6 - Verifying the Client Data against the Relying Party's stored data.
	if err = pcc.Response.CollectedClientData.Verify(storedChallenge, CreateCeremony, rpOrigins, rpTopOrigins, rpTopOriginsVerify); err != nil {
		return nil, err
	}

	// Step 7. Compute the hash of response.clientDataJSON using SHA-256.
	// Note: the hash is taken over the raw bytes from the original response, not the re-serialized parsed form.
	sum := sha256.Sum256(pcc.Raw.AttestationResponse.ClientDataJSON)
	clientDataHash = sum[:]

	// Step 8. Perform CBOR decoding on the attestationObject field of the AuthenticatorAttestationResponse
	// structure to obtain the attestation statement format fmt, the authenticator data authData, and the
	// attestation statement attStmt.

	// We do the above step while parsing and decoding the CredentialCreationResponse
	// Handle steps 9 through 14 - This verifies the attestation object.
	if err = pcc.Response.AttestationObject.Verify(relyingPartyID, clientDataHash, verifyUser, verifyUserPresence, mds, credParams); err != nil {
		return clientDataHash, err
	}

	// Step 15. If validation is successful, obtain a list of acceptable trust anchors (attestation root
	// certificates or ECDAA-Issuer public keys) for that attestation type and attestation statement
	// format fmt, from a trusted source or from policy. For example, the FIDO Metadata Service provides
	// one way to obtain such information, using the AAGUID in the attestedCredentialData in authData.
	// [https://fidoalliance.org/specs/fido-v2.0-id-20180227/fido-metadata-service-v2.0-id-20180227.html]

	// TODO: There are no valid AAGUIDs yet or trust sources supported. We could implement policy for the RP in
	// the future, however.

	// Step 16. Assess the attestation trustworthiness using outputs of the verification procedure in step 14, as follows:
	// - If self attestation was used, check if self attestation is acceptable under Relying Party policy.
	// - If ECDAA was used, verify that the identifier of the ECDAA-Issuer public key used is included in
	//   the set of acceptable trust anchors obtained in step 15.
	// - Otherwise, use the X.509 certificates returned by the verification procedure to verify that the
	//   attestation public key correctly chains up to an acceptable root certificate.

	// TODO: We're not supporting trust anchors, self-attestation policy, or acceptable root certs yet.

	// Step 17. Check that the credentialId is not yet registered to any other user. If registration is
	// requested for a credential that is already registered to a different user, the Relying Party SHOULD
	// fail this registration ceremony, or it MAY decide to accept the registration, e.g. while deleting
	// the older registration.

	// TODO: We can't support this in the code's current form, the Relying Party would need to check for this
	// against their database.

	// Step 18 If the attestation statement attStmt verified successfully and is found to be trustworthy, then
	// register the new credential with the account that was denoted in the options.user passed to create(), by
	// associating it with the credentialId and credentialPublicKey in the attestedCredentialData in authData, as
	// appropriate for the Relying Party's system.

	// Step 19. If the attestation statement attStmt successfully verified but is not trustworthy per step 16 above,
	// the Relying Party SHOULD fail the registration ceremony.

	// TODO: Not implemented for the reasons mentioned under Step 16.

	return clientDataHash, nil
}
|
||||||
|
|
||||||
|
// GetAppID takes a AuthenticationExtensions object or nil. It then performs the following checks in order:
//
// 1. Check that the Session Data's AuthenticationExtensions has been provided and if it hasn't return an error.
// 2. Check that the AuthenticationExtensionsClientOutputs contains the extensions output and return an empty string if it doesn't.
// 3. Check that the Credential AttestationType is `fido-u2f` and return an empty string if it isn't.
// 4. Check that the AuthenticationExtensionsClientOutputs contains the appid key and if it doesn't return an empty string.
// 5. Check that the AuthenticationExtensionsClientOutputs appid is a bool and if it isn't return an error.
// 6. Check that the appid output is true and if it isn't return an empty string.
// 7. Check that the Session Data has an appid extension defined and if it doesn't return an error.
// 8. Check that the appid extension in Session Data is a string and if it isn't return an error.
// 9. Return the appid extension value from the Session data.
func (ppkc ParsedPublicKeyCredential) GetAppID(authExt AuthenticationExtensions, credentialAttestationType string) (appID string, err error) {
	var (
		value, clientValue interface{}
		enableAppID, ok    bool
	)

	// Step 1: no session extensions means the appid extension cannot apply.
	if authExt == nil {
		return "", nil
	}

	// Step 2: no client extension outputs at all.
	if ppkc.ClientExtensionResults == nil {
		return "", nil
	}

	// If the credential does not have the correct attestation type it is assumed to NOT be a fido-u2f credential.
	// https://www.w3.org/TR/webauthn/#sctn-fido-u2f-attestation
	if credentialAttestationType != CredentialTypeFIDOU2F {
		return "", nil
	}

	// Step 4: the client did not emit an appid output.
	if clientValue, ok = ppkc.ClientExtensionResults[ExtensionAppID]; !ok {
		return "", nil
	}

	// Step 5: the appid output must be a boolean.
	if enableAppID, ok = clientValue.(bool); !ok {
		return "", ErrBadRequest.WithDetails("Client Output appid did not have the expected type")
	}

	// Step 6: the client explicitly declined the appid extension.
	if !enableAppID {
		return "", nil
	}

	// Step 7: the client claims appid was used, so the session must have requested it.
	if value, ok = authExt[ExtensionAppID]; !ok {
		return "", ErrBadRequest.WithDetails("Session Data does not have an appid but Client Output indicates it should be set")
	}

	// Step 8: the session's appid extension value must be a string.
	if appID, ok = value.(string); !ok {
		return "", ErrBadRequest.WithDetails("Session Data appid did not have the expected type")
	}

	return appID, nil
}

const (
	// CredentialTypeFIDOU2F is the attestation format identifier for FIDO U2F credentials.
	CredentialTypeFIDOU2F = "fido-u2f"
)
|
||||||
40
vendor/github.com/go-webauthn/webauthn/protocol/decoder.go
generated
vendored
Normal file
40
vendor/github.com/go-webauthn/webauthn/protocol/decoder.go
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
|
||||||
|
// decodeBody decodes exactly one JSON value from body into v. Decode errors are
// returned unmodified; if any JSON content follows the first value the function
// returns a trailing-data error instead.
func decodeBody(body io.Reader, v any) (err error) {
	decoder := json.NewDecoder(body)

	if err = decoder.Decode(v); err != nil {
		return err
	}

	// After a complete value, the only acceptable outcome of reading another
	// token is io.EOF; a nil error means extra JSON followed the decoded value.
	_, err = decoder.Token()

	if !errors.Is(err, io.EOF) {
		return errors.New("body contains trailing data")
	}

	return nil
}
|
||||||
|
|
||||||
|
// decodeBytes decodes exactly one JSON value from data into v, rejecting any
// JSON content that follows the first value. The decode error, if any, is
// returned unmodified.
func decodeBytes(data []byte, v any) (err error) {
	dec := json.NewDecoder(bytes.NewReader(data))

	if err = dec.Decode(v); err != nil {
		return err
	}

	// A complete input yields io.EOF on the next token read; anything else
	// (another token, or a syntax error) means extra content followed the value.
	if _, err = dec.Token(); !errors.Is(err, io.EOF) {
		return errors.New("body contains trailing data")
	}

	return nil
}
|
||||||
8
vendor/github.com/go-webauthn/webauthn/protocol/doc.go
generated
vendored
Normal file
8
vendor/github.com/go-webauthn/webauthn/protocol/doc.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
// Package protocol contains data structures and validation functionality
|
||||||
|
// outlined in the Web Authentication specification (https://www.w3.org/TR/webauthn).
|
||||||
|
// The data structures here attempt to conform as much as possible to their definitions,
|
||||||
|
// but some structs (like those that are used as part of validation steps) contain
|
||||||
|
// additional fields that help us unpack and validate the data we unmarshall.
|
||||||
|
// When implementing this library, most developers will primarily be using the API
|
||||||
|
// outlined in the webauthn package.
|
||||||
|
package protocol
|
||||||
46
vendor/github.com/go-webauthn/webauthn/protocol/entities.go
generated
vendored
Normal file
46
vendor/github.com/go-webauthn/webauthn/protocol/entities.go
generated
vendored
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
// CredentialEntity represents the PublicKeyCredentialEntity IDL and it describes a user account, or a WebAuthn Relying
// Party with which a public key credential is associated.
//
// Specification: §5.4.1. Public Key Entity Description (https://www.w3.org/TR/webauthn/#dictionary-pkcredentialentity)
type CredentialEntity struct {
	// A human-palatable name for the entity. Its function depends on what the PublicKeyCredentialEntity represents:
	//
	// When inherited by PublicKeyCredentialRpEntity it is a human-palatable identifier for the Relying Party,
	// intended only for display. For example, "ACME Corporation", "Wonderful Widgets, Inc." or "ОАО Примертех".
	//
	// When inherited by PublicKeyCredentialUserEntity, it is a human-palatable identifier for a user account. It is
	// intended only for display, i.e., aiding the user in determining the difference between user accounts with similar
	// displayNames. For example, "alexm", "alex.p.mueller@example.com" or "+14255551234".
	Name string `json:"name"`
}

// The RelyingPartyEntity represents the PublicKeyCredentialRpEntity IDL and is used to supply additional Relying Party
// attributes when creating a new credential.
//
// Specification: §5.4.2. Relying Party Parameters for Credential Generation (https://www.w3.org/TR/webauthn/#dictionary-rp-credential-params)
type RelyingPartyEntity struct {
	CredentialEntity

	// A unique identifier for the Relying Party entity, which sets the RP ID.
	ID string `json:"id"`
}

// The UserEntity represents the PublicKeyCredentialUserEntity IDL and is used to supply additional user account
// attributes when creating a new credential.
//
// Specification: §5.4.3 User Account Parameters for Credential Generation (https://www.w3.org/TR/webauthn/#dictdef-publickeycredentialuserentity)
type UserEntity struct {
	CredentialEntity
	// A human-palatable name for the user account, intended only for display.
	// For example, "Alex P. Müller" or "田中 倫". The Relying Party SHOULD let
	// the user choose this, and SHOULD NOT restrict the choice more than necessary.
	DisplayName string `json:"displayName"`

	// ID is the user handle of the user account entity. To ensure secure operation,
	// authentication and authorization decisions MUST be made on the basis of this id
	// member, not the displayName nor name members. See Section 6.1 of
	// [RFC8266](https://www.w3.org/TR/webauthn/#biblio-rfc8266).
	ID any `json:"id"`
}
|
||||||
150
vendor/github.com/go-webauthn/webauthn/protocol/errors.go
generated
vendored
Normal file
150
vendor/github.com/go-webauthn/webauthn/protocol/errors.go
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
// Error describes a specific error condition in a structured format: a short
// machine-readable type, human-readable details, developer debug information,
// and an optional wrapped inner error.
type Error struct {
	// Short name for the type of error that has occurred.
	Type string `json:"type"`

	// Additional details about the error.
	Details string `json:"error"`

	// Information to help debug the error.
	DevInfo string `json:"debug"`

	// Inner error.
	Err error `json:"-"`
}

// Error implements the error interface, returning the Details field.
func (e *Error) Error() string {
	return e.Details
}

// Unwrap exposes the inner error for errors.Is and errors.As.
func (e *Error) Unwrap() error {
	return e.Err
}

// WithDetails returns a shallow copy of the error with Details replaced; the
// receiver is never mutated.
func (e *Error) WithDetails(details string) *Error {
	c := *e
	c.Details = details

	return &c
}

// WithInfo returns a shallow copy of the error with DevInfo replaced; the
// receiver is never mutated.
func (e *Error) WithInfo(info string) *Error {
	c := *e
	c.DevInfo = info

	return &c
}

// WithError returns a shallow copy of the error with the inner error replaced;
// the receiver is never mutated.
func (e *Error) WithError(inner error) *Error {
	c := *e
	c.Err = inner

	return &c
}
|
||||||
|
|
||||||
|
// ErrorUnknownCredential is a special Error which signals the fact the provided credential is unknown. The reason this
|
||||||
|
// specific error type is useful is so that the relying-party can send a signal to the Authenticator that the
|
||||||
|
// credential has been removed.
|
||||||
|
type ErrorUnknownCredential struct {
|
||||||
|
Err *Error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) Error() string {
|
||||||
|
return e.Err.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) copy() ErrorUnknownCredential {
|
||||||
|
err := *e.Err
|
||||||
|
|
||||||
|
return ErrorUnknownCredential{Err: &err}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) WithDetails(details string) *ErrorUnknownCredential {
|
||||||
|
err := e.copy()
|
||||||
|
err.Err.Details = details
|
||||||
|
|
||||||
|
return &err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) WithInfo(info string) *ErrorUnknownCredential {
|
||||||
|
err := e.copy()
|
||||||
|
err.Err.DevInfo = info
|
||||||
|
|
||||||
|
return &err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *ErrorUnknownCredential) WithError(err error) *ErrorUnknownCredential {
|
||||||
|
errCopy := e.copy()
|
||||||
|
errCopy.Err.Err = err
|
||||||
|
|
||||||
|
return &errCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
	// ErrBadRequest indicates the request data could not be read.
	ErrBadRequest = &Error{
		Type:    "invalid_request",
		Details: "Error reading the request data",
	}

	// ErrChallengeMismatch indicates the stored and received challenges differ.
	ErrChallengeMismatch = &Error{
		Type:    "challenge_mismatch",
		Details: "Stored challenge and received challenge do not match",
	}

	// ErrParsingData indicates the authenticator response could not be parsed.
	ErrParsingData = &Error{
		Type:    "parse_error",
		Details: "Error parsing the authenticator response",
	}

	// ErrAuthData indicates the authenticator data failed verification.
	ErrAuthData = &Error{
		Type:    "auth_data",
		Details: "Error verifying the authenticator data",
	}

	// ErrVerification indicates the authenticator response failed validation.
	ErrVerification = &Error{
		Type:    "verification_error",
		Details: "Error validating the authenticator response",
	}

	// ErrAttestation indicates the provided attestation data failed validation.
	ErrAttestation = &Error{
		Type:    "attestation_error",
		Details: "Error validating the attestation data provided",
	}

	// ErrInvalidAttestation indicates the attestation data itself is invalid.
	ErrInvalidAttestation = &Error{
		Type:    "invalid_attestation",
		Details: "Invalid attestation data",
	}

	// ErrMetadata indicates a metadata service validation failure.
	// NOTE(review): Details is intentionally left empty here — callers appear to
	// attach context via WithDetails/WithInfo; confirm before populating.
	ErrMetadata = &Error{
		Type:    "invalid_metadata",
		Details: "",
	}

	// ErrAttestationFormat indicates an invalid attestation statement format.
	ErrAttestationFormat = &Error{
		Type:    "invalid_attestation",
		Details: "Invalid attestation format",
	}

	// ErrAttestationCertificate indicates an invalid attestation certificate.
	ErrAttestationCertificate = &Error{
		Type:    "invalid_certificate",
		Details: "Invalid attestation certificate",
	}

	// ErrAssertionSignature indicates the assertion signature did not verify.
	ErrAssertionSignature = &Error{
		Type:    "invalid_signature",
		Details: "Assertion Signature against auth data and client hash is not valid",
	}

	// ErrUnsupportedKey indicates an unsupported public key type.
	ErrUnsupportedKey = &Error{
		Type:    "invalid_key_type",
		Details: "Unsupported Public Key Type",
	}

	// ErrUnsupportedAlgorithm indicates an unsupported public key algorithm.
	ErrUnsupportedAlgorithm = &Error{
		Type:    "unsupported_key_algorithm",
		Details: "Unsupported public key algorithm",
	}

	// ErrNotSpecImplemented indicates a field not yet supported by the WebAuthn spec.
	ErrNotSpecImplemented = &Error{
		Type:    "spec_unimplemented",
		Details: "This field is not yet supported by the WebAuthn spec",
	}

	// ErrNotImplemented indicates a field not yet supported by this library.
	ErrNotImplemented = &Error{
		Type:    "not_implemented",
		Details: "This field is not yet supported by this library",
	}
)
|
||||||
13
vendor/github.com/go-webauthn/webauthn/protocol/extensions.go
generated
vendored
Normal file
13
vendor/github.com/go-webauthn/webauthn/protocol/extensions.go
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
// Extensions are discussed in §9. WebAuthn Extensions (https://www.w3.org/TR/webauthn/#extensions).
//
// For a list of commonly supported extensions, see §10. Defined Extensions
// (https://www.w3.org/TR/webauthn/#sctn-defined-extensions).

// AuthenticationExtensionsClientOutputs is the map of extension identifiers to the
// (loosely typed) output values produced by the client for those extensions.
type AuthenticationExtensionsClientOutputs map[string]any

const (
	// ExtensionAppID is the map key for the FIDO AppID extension ("appid").
	ExtensionAppID = "appid"

	// ExtensionAppIDExclude is the map key for the AppID exclusion extension ("appidExclude").
	ExtensionAppIDExclude = "appidExclude"
)
|
||||||
30
vendor/github.com/go-webauthn/webauthn/protocol/init.go
generated
vendored
Normal file
30
vendor/github.com/go-webauthn/webauthn/protocol/init.go
generated
vendored
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/x509"
|
||||||
|
)
|
||||||
|
|
||||||
|
// init eagerly loads the embedded hardware attestation root certificates into
// their certificate pools so they are available before any verification runs.
func init() {
	initAndroidKeyHardwareRoots()
	initAppleHardwareRoots()
}
|
||||||
|
|
||||||
|
func initAndroidKeyHardwareRoots() {
|
||||||
|
if attAndroidKeyHardwareRootsCertPool == nil {
|
||||||
|
attAndroidKeyHardwareRootsCertPool = x509.NewCertPool()
|
||||||
|
}
|
||||||
|
|
||||||
|
attAndroidKeyHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAndroidKeyRoot1)))
|
||||||
|
attAndroidKeyHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAndroidKeyRoot2)))
|
||||||
|
attAndroidKeyHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAndroidKeyRoot3)))
|
||||||
|
attAndroidKeyHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAndroidKeyRoot4)))
|
||||||
|
attAndroidKeyHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAndroidKeyRoot5)))
|
||||||
|
}
|
||||||
|
|
||||||
|
// initAppleHardwareRoots populates the Apple hardware attestation root pool
// with the single embedded Apple root certificate, creating the pool first if
// it has not been initialized elsewhere.
func initAppleHardwareRoots() {
	if attAppleHardwareRootsCertPool == nil {
		attAppleHardwareRootsCertPool = x509.NewCertPool()
	}

	attAppleHardwareRootsCertPool.AddCert(mustParseX509CertificatePEM([]byte(certificateAppleRoot1)))
}
|
||||||
129
vendor/github.com/go-webauthn/webauthn/protocol/metadata.go
generated
vendored
Normal file
129
vendor/github.com/go-webauthn/webauthn/protocol/metadata.go
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"crypto/x509"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ValidateMetadata checks an authenticator's attestation against a metadata
// provider. It validates (as enabled on the provider) the metadata entry's
// existence, the attestation type, the authenticator's status reports, and the
// attestation certificate chain against the metadata trust anchors. A nil
// provider or a "none" attestation format skips validation entirely. Returns
// nil on success, or an *Error describing the first failed check.
func ValidateMetadata(ctx context.Context, mds metadata.Provider, aaguid uuid.UUID, attestationType, attestationFormat string, x5cs []any) (protoErr *Error) {
	// No provider configured: metadata validation is opt-in.
	if mds == nil {
		return nil
	}

	// "none" attestation carries nothing to validate.
	if AttestationFormat(attestationFormat) == AttestationFormatNone {
		return nil
	}

	var (
		entry *metadata.Entry
		err   error
	)

	if entry, err = mds.GetEntry(ctx, aaguid); err != nil {
		return ErrMetadata.WithInfo(fmt.Sprintf("Failed to validate authenticator metadata for Authenticator Attestation GUID '%s'. Error occurred retreiving the metadata entry: %+v", aaguid, err))
	}

	// No entry found for this AAGUID: acceptability depends on provider policy.
	if entry == nil {
		if aaguid == uuid.Nil && mds.GetValidateEntryPermitZeroAAGUID(ctx) {
			return nil
		}

		if mds.GetValidateEntry(ctx) {
			return ErrMetadata.WithInfo(fmt.Sprintf("Failed to validate authenticator metadata for Authenticator Attestation GUID '%s'. The authenticator has no registered metadata.", aaguid))
		}

		return nil
	}

	// The attestation type used must be one the metadata statement declares.
	if attestationType != "" && mds.GetValidateAttestationTypes(ctx) {
		found := false

		for _, atype := range entry.MetadataStatement.AttestationTypes {
			if string(atype) == attestationType {
				found = true

				break
			}
		}

		if !found {
			return ErrMetadata.WithInfo(fmt.Sprintf("Failed to validate authenticator metadata for Authenticator Attestation GUID '%s'. The attestation type '%s' is not known to be used by this authenticator.", aaguid.String(), attestationType))
		}
	}

	// Check the authenticator's status reports (e.g. revocation/compromise notices).
	if mds.GetValidateStatus(ctx) {
		if err = mds.ValidateStatusReports(ctx, entry.StatusReports); err != nil {
			return ErrMetadata.WithInfo(fmt.Sprintf("Failed to validate authenticator metadata for Authenticator Attestation GUID '%s'. Error occurred validating the authenticator status: %+v", aaguid, err))
		}
	}

	// Verify the attestation certificate chain against the metadata trust anchors.
	if mds.GetValidateTrustAnchor(ctx) {
		if len(x5cs) == 0 {
			return nil
		}

		var (
			x5c, parsed *x509.Certificate
			x5cis       []*x509.Certificate
			raw         []byte
			ok          bool
		)

		// Each x5c element must be DER bytes; the first is the leaf, the rest
		// are treated as intermediates.
		for i, x5cAny := range x5cs {
			if raw, ok = x5cAny.([]byte); !ok {
				return ErrMetadata.WithDetails(fmt.Sprintf("Failed to parse attestation certificate from x5c during attestation validation for Authenticator Attestation GUID '%s'.", aaguid)).WithInfo(fmt.Sprintf("The %s certificate in the attestation was type '%T' but '[]byte' was expected", loopOrdinalNumber(i), x5cAny))
			}

			if parsed, err = x509.ParseCertificate(raw); err != nil {
				return ErrMetadata.WithDetails(fmt.Sprintf("Failed to parse attestation certificate from x5c during attestation validation for Authenticator Attestation GUID '%s'.", aaguid)).WithInfo(fmt.Sprintf("Error returned from x509.ParseCertificate: %+v", err)).WithError(err)
			}

			if x5c == nil {
				x5c = parsed
			} else {
				x5cis = append(x5cis, parsed)
			}
		}

		// AttCA attestations additionally require the AIK certificate checks.
		if attestationType == string(metadata.AttCA) {
			if protoErr = tpmParseAIKAttCA(x5c, x5cis); protoErr != nil {
				return ErrMetadata.WithDetails(protoErr.Details).WithInfo(protoErr.DevInfo).WithError(protoErr)
			}
		}

		// A non-self-signed leaf (subject != issuer) means full attestation:
		// the statement must support it and the chain must verify against the
		// metadata service's trust anchors.
		if x5c != nil && x5c.Subject.CommonName != x5c.Issuer.CommonName {
			if !entry.MetadataStatement.AttestationTypes.HasBasicFull() {
				return ErrMetadata.WithDetails(fmt.Sprintf("Failed to validate attestation statement signature during attestation validation for Authenticator Attestation GUID '%s'. Attestation was provided in the full format but the authenticator doesn't support the full attestation format.", aaguid))
			}

			if _, err = x5c.Verify(entry.MetadataStatement.Verifier(x5cis)); err != nil {
				return ErrMetadata.WithDetails(fmt.Sprintf("Failed to validate attestation statement signature during attestation validation for Authenticator Attestation GUID '%s'. The attestation certificate could not be verified due to an error validating the trust chain against the Metadata Service.", aaguid)).WithError(err)
			}
		}
	}

	return nil
}
|
||||||
|
|
||||||
|
// loopOrdinalNumber renders the zero-based loop index n as a one-based English
// ordinal ("1st", "2nd", "13th", ...) for use in error messages.
func loopOrdinalNumber(n int) string {
	n++

	suffix := "th"

	// 11th–13th (and the rest of the teens) always take "th"; everything else
	// is chosen by the final digit.
	if n < 10 || n > 19 {
		switch n % 10 {
		case 1:
			suffix = "st"
		case 2:
			suffix = "nd"
		case 3:
			suffix = "rd"
		}
	}

	return fmt.Sprintf("%d%s", n, suffix)
}
|
||||||
288
vendor/github.com/go-webauthn/webauthn/protocol/options.go
generated
vendored
Normal file
288
vendor/github.com/go-webauthn/webauthn/protocol/options.go
generated
vendored
Normal file
@@ -0,0 +1,288 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CredentialCreation wraps the creation options sent to the client; Response
// carries the publicKey options consumed by navigator.credentials.create().
type CredentialCreation struct {
	Response  PublicKeyCredentialCreationOptions `json:"publicKey"`
	Mediation CredentialMediationRequirement     `json:"mediation,omitempty"`
}

// CredentialAssertion wraps the request options sent to the client; Response
// carries the publicKey options consumed by navigator.credentials.get().
type CredentialAssertion struct {
	Response  PublicKeyCredentialRequestOptions `json:"publicKey"`
	Mediation CredentialMediationRequirement    `json:"mediation,omitempty"`
}
|
||||||
|
|
||||||
|
// PublicKeyCredentialCreationOptions represents the IDL of the same name.
//
// In order to create a Credential via create(), the caller specifies a few parameters in a
// PublicKeyCredentialCreationOptions object.
//
// WebAuthn Level 3: hints,attestationFormats.
//
// Specification: §5.4. Options for Credential Creation (https://www.w3.org/TR/webauthn/#dictionary-makecredentialoptions)
type PublicKeyCredentialCreationOptions struct {
	RelyingParty RelyingPartyEntity `json:"rp"`
	User         UserEntity         `json:"user"`

	// Challenge is the server-generated random value the authenticator signs over.
	Challenge URLEncodedBase64 `json:"challenge"`

	// Parameters lists the acceptable credential types and COSE algorithms, most preferred first.
	Parameters []CredentialParameter `json:"pubKeyCredParams,omitempty"`

	// Timeout is the client-side ceremony timeout; per spec this is in milliseconds — TODO confirm callers pass ms.
	Timeout int `json:"timeout,omitempty"`

	// CredentialExcludeList names credentials the authenticator must not re-register.
	CredentialExcludeList []CredentialDescriptor `json:"excludeCredentials,omitempty"`

	AuthenticatorSelection AuthenticatorSelection     `json:"authenticatorSelection,omitempty"`
	Hints                  []PublicKeyCredentialHints `json:"hints,omitempty"`
	Attestation            ConveyancePreference       `json:"attestation,omitempty"`
	AttestationFormats     []AttestationFormat        `json:"attestationFormats,omitempty"`
	Extensions             AuthenticationExtensions   `json:"extensions,omitempty"`
}
|
||||||
|
|
||||||
|
// The PublicKeyCredentialRequestOptions dictionary supplies get() with the data it needs to generate an assertion.
// Its challenge member MUST be present, while its other members are OPTIONAL.
//
// WebAuthn Level 3: hints.
//
// Specification: §5.5. Options for Assertion Generation (https://www.w3.org/TR/webauthn/#dictionary-assertion-options)
type PublicKeyCredentialRequestOptions struct {
	// Challenge is the server-generated random value the authenticator signs over. Required.
	Challenge URLEncodedBase64 `json:"challenge"`

	// Timeout is the client-side ceremony timeout; per spec this is in milliseconds — TODO confirm callers pass ms.
	Timeout int `json:"timeout,omitempty"`

	RelyingPartyID string `json:"rpId,omitempty"`

	// AllowedCredentials restricts the assertion to these credentials, most preferred first.
	AllowedCredentials []CredentialDescriptor `json:"allowCredentials,omitempty"`

	UserVerification UserVerificationRequirement `json:"userVerification,omitempty"`
	Hints            []PublicKeyCredentialHints  `json:"hints,omitempty"`
	Extensions       AuthenticationExtensions    `json:"extensions,omitempty"`
}
|
||||||
|
|
||||||
|
// CredentialDescriptor represents the PublicKeyCredentialDescriptor IDL.
//
// This dictionary contains the attributes that are specified by a caller when referring to a public key credential as
// an input parameter to the create() or get() methods. It mirrors the fields of the PublicKeyCredential object returned
// by the latter methods.
//
// Specification: §5.10.3. Credential Descriptor (https://www.w3.org/TR/webauthn/#credential-dictionary)
type CredentialDescriptor struct {
	// The valid credential types.
	Type CredentialType `json:"type"`

	// CredentialID The ID of a credential to allow/disallow.
	CredentialID URLEncodedBase64 `json:"id"`

	// The authenticator transports that can be used.
	Transport []AuthenticatorTransport `json:"transports,omitempty"`

	// The AttestationType from the Credential. Used internally only.
	AttestationType string `json:"-"`
}

// SignalUnknownCredential builds the SignalUnknownCredential payload for this
// descriptor's credential ID, scoped to the given Relying Party ID.
func (c CredentialDescriptor) SignalUnknownCredential(rpid string) *SignalUnknownCredential {
	return &SignalUnknownCredential{
		CredentialID: c.CredentialID,
		RPID:         rpid,
	}
}
|
||||||
|
|
||||||
|
// CredentialParameter is the credential type and algorithm
// that the relying party wants the authenticator to create.
type CredentialParameter struct {
	Type      CredentialType                       `json:"type"`
	Algorithm webauthncose.COSEAlgorithmIdentifier `json:"alg"`
}

// CredentialType represents the PublicKeyCredentialType IDL and is used with the CredentialDescriptor IDL.
//
// This enumeration defines the valid credential types. It is an extension point; values can be added to it in the
// future, as more credential types are defined. The values of this enumeration are used for versioning the
// Authentication Assertion and attestation structures according to the type of the authenticator.
//
// Currently one credential type is defined, namely "public-key".
//
// Specification: §5.8.2. Credential Type Enumeration (https://www.w3.org/TR/webauthn/#enumdef-publickeycredentialtype)
//
// Specification: §5.8.3. Credential Descriptor (https://www.w3.org/TR/webauthn/#dictionary-credential-descriptor)
type CredentialType string

const (
	// PublicKeyCredentialType - Currently one credential type is defined, namely "public-key".
	PublicKeyCredentialType CredentialType = "public-key"
)

// AuthenticationExtensions represents the AuthenticationExtensionsClientInputs IDL. This member contains additional
// parameters requesting additional processing by the client and authenticator.
//
// Specification: §5.7.1. Authentication Extensions Client Inputs (https://www.w3.org/TR/webauthn/#iface-authentication-extensions-client-inputs)
type AuthenticationExtensions map[string]any
|
||||||
|
|
||||||
|
// AuthenticatorSelection represents the AuthenticatorSelectionCriteria IDL.
//
// WebAuthn Relying Parties may use the AuthenticatorSelectionCriteria dictionary to specify their requirements
// regarding authenticator attributes.
//
// Specification: §5.4.4. Authenticator Selection Criteria (https://www.w3.org/TR/webauthn/#dictionary-authenticatorSelection)
type AuthenticatorSelection struct {
	// AuthenticatorAttachment If this member is present, eligible authenticators are filtered to only
	// authenticators attached with the specified AuthenticatorAttachment enum.
	AuthenticatorAttachment AuthenticatorAttachment `json:"authenticatorAttachment,omitempty"`

	// RequireResidentKey this member describes the Relying Party's requirements regarding resident
	// credentials. If the parameter is set to true, the authenticator MUST create a client-side-resident
	// public key credential source when creating a public key credential.
	// Pointer so that "unset" is distinguishable from an explicit false.
	RequireResidentKey *bool `json:"requireResidentKey,omitempty"`

	// ResidentKey this member describes the Relying Party's requirements regarding resident
	// credentials per Webauthn Level 2.
	ResidentKey ResidentKeyRequirement `json:"residentKey,omitempty"`

	// UserVerification This member describes the Relying Party's requirements regarding user verification for
	// the create() operation. Eligible authenticators are filtered to only those capable of satisfying this
	// requirement.
	UserVerification UserVerificationRequirement `json:"userVerification,omitempty"`
}
|
||||||
|
|
||||||
|
// ConveyancePreference is the type representing the AttestationConveyancePreference IDL.
//
// WebAuthn Relying Parties may use AttestationConveyancePreference to specify their preference regarding attestation
// conveyance during credential generation.
//
// Specification: §5.4.7. Attestation Conveyance Preference Enumeration (https://www.w3.org/TR/webauthn/#enum-attestation-convey)
type ConveyancePreference string

const (
	// PreferNoAttestation is a ConveyancePreference value.
	//
	// This value indicates that the Relying Party is not interested in authenticator attestation. For example, in order
	// to potentially avoid having to obtain user consent to relay identifying information to the Relying Party, or to
	// save a round trip to an Attestation CA or Anonymization CA.
	//
	// This is the default value.
	//
	// Specification: §5.4.7. Attestation Conveyance Preference Enumeration (https://www.w3.org/TR/webauthn/#dom-attestationconveyancepreference-none)
	PreferNoAttestation ConveyancePreference = "none"

	// PreferIndirectAttestation is a ConveyancePreference value.
	//
	// This value indicates that the Relying Party prefers an attestation conveyance yielding verifiable attestation
	// statements, but allows the client to decide how to obtain such attestation statements. The client MAY replace the
	// authenticator-generated attestation statements with attestation statements generated by an Anonymization CA, in
	// order to protect the user’s privacy, or to assist Relying Parties with attestation verification in a
	// heterogeneous ecosystem.
	//
	// Note: There is no guarantee that the Relying Party will obtain a verifiable attestation statement in this case.
	// For example, in the case that the authenticator employs self attestation.
	//
	// Specification: §5.4.7. Attestation Conveyance Preference Enumeration (https://www.w3.org/TR/webauthn/#dom-attestationconveyancepreference-indirect)
	PreferIndirectAttestation ConveyancePreference = "indirect"

	// PreferDirectAttestation is a ConveyancePreference value.
	//
	// This value indicates that the Relying Party wants to receive the attestation statement as generated by the
	// authenticator.
	//
	// Specification: §5.4.7. Attestation Conveyance Preference Enumeration (https://www.w3.org/TR/webauthn/#dom-attestationconveyancepreference-direct)
	PreferDirectAttestation ConveyancePreference = "direct"

	// PreferEnterpriseAttestation is a ConveyancePreference value.
	//
	// This value indicates that the Relying Party wants to receive an attestation statement that may include uniquely
	// identifying information. This is intended for controlled deployments within an enterprise where the organization
	// wishes to tie registrations to specific authenticators. User agents MUST NOT provide such an attestation unless
	// the user agent or authenticator configuration permits it for the requested RP ID.
	//
	// If permitted, the user agent SHOULD signal to the authenticator (at invocation time) that enterprise
	// attestation is requested, and convey the resulting AAGUID and attestation statement, unaltered, to the Relying
	// Party.
	//
	// Specification: §5.4.7. Attestation Conveyance Preference Enumeration (https://www.w3.org/TR/webauthn/#dom-attestationconveyancepreference-enterprise)
	PreferEnterpriseAttestation ConveyancePreference = "enterprise"
)
|
||||||
|
|
||||||
|
// AttestationFormat is an internal representation of the relevant inputs for registration.
//
// Specification: §5.4 Options for Credential Creation (https://w3c.github.io/webauthn/#dom-publickeycredentialcreationoptions-attestationformats)
// Registry: https://www.iana.org/assignments/webauthn/webauthn.xhtml
type AttestationFormat string

const (
	// AttestationFormatPacked is the "packed" attestation statement format is a WebAuthn-optimized format for
	// attestation. It uses a very compact but still extensible encoding method. This format is implementable by
	// authenticators with limited resources (e.g., secure elements).
	AttestationFormatPacked AttestationFormat = "packed"

	// AttestationFormatTPM is the TPM attestation statement format returns an attestation statement in the same format
	// as the packed attestation statement format, although the rawData and signature fields are computed differently.
	AttestationFormatTPM AttestationFormat = "tpm"

	// AttestationFormatAndroidKey is the attestation statement format for platform authenticators on versions "N", and
	// later, which may provide this proprietary "hardware attestation" statement.
	AttestationFormatAndroidKey AttestationFormat = "android-key"

	// AttestationFormatAndroidSafetyNet is the attestation statement format that Android-based platform authenticators
	// MAY produce an attestation statement based on the Android SafetyNet API.
	AttestationFormatAndroidSafetyNet AttestationFormat = "android-safetynet"

	// AttestationFormatFIDOUniversalSecondFactor is the attestation statement format that is used with FIDO U2F
	// authenticators.
	AttestationFormatFIDOUniversalSecondFactor AttestationFormat = "fido-u2f"

	// AttestationFormatApple is the attestation statement format that is used with Apple devices' platform
	// authenticators.
	AttestationFormatApple AttestationFormat = "apple"

	// AttestationFormatCompound is used to pass multiple, self-contained attestation statements in a single ceremony.
	AttestationFormatCompound AttestationFormat = "compound"

	// AttestationFormatNone is the attestation statement format that is used to replace any authenticator-provided
	// attestation statement when a WebAuthn Relying Party indicates it does not wish to receive attestation information.
	AttestationFormatNone AttestationFormat = "none"
)
|
||||||
|
|
||||||
|
// PublicKeyCredentialHints is the enumeration of hints a Relying Party can send the client about which kind of
// authenticator it expects the user to employ (WebAuthn Level 3).
type PublicKeyCredentialHints string

const (
	// PublicKeyCredentialHintSecurityKey is a PublicKeyCredentialHint that indicates that the Relying Party believes
	// that users will satisfy this request with a physical security key. For example, an enterprise Relying Party may
	// set this hint if they have issued security keys to their employees and will only accept those authenticators for
	// registration and authentication.
	//
	// For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the
	// authenticatorAttachment SHOULD be set to cross-platform.
	PublicKeyCredentialHintSecurityKey PublicKeyCredentialHints = "security-key"

	// PublicKeyCredentialHintClientDevice is a PublicKeyCredentialHint that indicates that the Relying Party believes
	// that users will satisfy this request with a platform authenticator attached to the client device.
	//
	// For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the
	// authenticatorAttachment SHOULD be set to platform.
	PublicKeyCredentialHintClientDevice PublicKeyCredentialHints = "client-device"

	// PublicKeyCredentialHintHybrid is a PublicKeyCredentialHint that indicates that the Relying Party believes that
	// users will satisfy this request with general-purpose authenticators such as smartphones. For example, a consumer
	// Relying Party may believe that only a small fraction of their customers possesses dedicated security keys. This
	// option also implies that the local platform authenticator should not be promoted in the UI.
	//
	// For compatibility with older user agents, when this hint is used in PublicKeyCredentialCreationOptions, the
	// authenticatorAttachment SHOULD be set to cross-platform.
	PublicKeyCredentialHintHybrid PublicKeyCredentialHints = "hybrid"
)
|
||||||
|
|
||||||
|
func (a *PublicKeyCredentialRequestOptions) GetAllowedCredentialIDs() [][]byte {
|
||||||
|
var allowedCredentialIDs = make([][]byte, len(a.AllowedCredentials))
|
||||||
|
|
||||||
|
for i, credential := range a.AllowedCredentials {
|
||||||
|
allowedCredentialIDs[i] = credential.CredentialID
|
||||||
|
}
|
||||||
|
|
||||||
|
return allowedCredentialIDs
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extensions is a loosely-typed alias for opaque extension data.
type Extensions any

// ServerResponse is the status envelope returned to clients; on failure the
// errorMessage field carries the details.
type ServerResponse struct {
	Status  ServerResponseStatus `json:"status"`
	Message string               `json:"errorMessage"`
}

// ServerResponseStatus enumerates the values of ServerResponse.Status.
type ServerResponseStatus string

const (
	// StatusOk indicates the operation succeeded.
	StatusOk ServerResponseStatus = "ok"

	// StatusFailed indicates the operation failed.
	StatusFailed ServerResponseStatus = "failed"
)
|
||||||
51
vendor/github.com/go-webauthn/webauthn/protocol/signals.go
generated
vendored
Normal file
51
vendor/github.com/go-webauthn/webauthn/protocol/signals.go
generated
vendored
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
// NewSignalAllAcceptedCredentials creates a new SignalAllAcceptedCredentials struct that can simply be encoded with
|
||||||
|
// json.Marshal.
|
||||||
|
func NewSignalAllAcceptedCredentials(rpid string, user AllAcceptedCredentialsUser) *SignalAllAcceptedCredentials {
|
||||||
|
if user == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
credentials := user.WebAuthnCredentialIDs()
|
||||||
|
|
||||||
|
ids := make([]URLEncodedBase64, len(credentials))
|
||||||
|
|
||||||
|
for i, id := range credentials {
|
||||||
|
ids[i] = id
|
||||||
|
}
|
||||||
|
|
||||||
|
return &SignalAllAcceptedCredentials{
|
||||||
|
AllAcceptedCredentialIDs: ids,
|
||||||
|
RPID: rpid,
|
||||||
|
UserID: user.WebAuthnID(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SignalAllAcceptedCredentials is a struct which represents the CDDL of the same name.
type SignalAllAcceptedCredentials struct {
	AllAcceptedCredentialIDs []URLEncodedBase64 `json:"allAcceptedCredentialIds"`
	RPID                     string             `json:"rpId"`
	UserID                   URLEncodedBase64   `json:"userId"`
}

// SignalCurrentUserDetails is a struct which represents the CDDL of the same name.
type SignalCurrentUserDetails struct {
	DisplayName string           `json:"displayName"`
	Name        string           `json:"name"`
	RPID        string           `json:"rpId"`
	UserID      URLEncodedBase64 `json:"userId"`
}

// SignalUnknownCredential is a struct which represents the CDDL of the same name.
type SignalUnknownCredential struct {
	CredentialID URLEncodedBase64 `json:"credentialId"`
	RPID         string           `json:"rpId"`
}
|
||||||
|
|
||||||
|
// AllAcceptedCredentialsUser is an interface that can be implemented by a user to provide information about their
|
||||||
|
// accepted credentials.
|
||||||
|
type AllAcceptedCredentialsUser interface {
|
||||||
|
WebAuthnID() []byte
|
||||||
|
WebAuthnCredentialIDs() [][]byte
|
||||||
|
}
|
||||||
264
vendor/github.com/go-webauthn/webauthn/protocol/utils.go
generated
vendored
Normal file
264
vendor/github.com/go-webauthn/webauthn/protocol/utils.go
generated
vendored
Normal file
@@ -0,0 +1,264 @@
|
|||||||
|
package protocol
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/pem"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net"
|
||||||
|
"net/url"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncose"
|
||||||
|
)
|
||||||
|
|
||||||
|
func mustParseX509Certificate(der []byte) *x509.Certificate {
|
||||||
|
cert, err := x509.ParseCertificate(der)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return cert
|
||||||
|
}
|
||||||
|
|
||||||
|
func mustParseX509CertificatePEM(raw []byte) *x509.Certificate {
|
||||||
|
block, rest := pem.Decode(raw)
|
||||||
|
if len(rest) > 0 || block == nil || block.Type != "CERTIFICATE" {
|
||||||
|
panic("Invalid PEM Certificate")
|
||||||
|
}
|
||||||
|
|
||||||
|
return mustParseX509Certificate(block.Bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
func attStatementParseX5CS(attStatement map[string]any, key string) (x5c []any, x5cs []*x509.Certificate, err error) {
|
||||||
|
var ok bool
|
||||||
|
if x5c, ok = attStatement[key].([]any); !ok {
|
||||||
|
return nil, nil, ErrAttestationFormat.WithDetails("Error retrieving x5c value")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(x5c) == 0 {
|
||||||
|
return nil, nil, ErrAttestationFormat.WithDetails("Error retrieving x5c value: empty array")
|
||||||
|
}
|
||||||
|
|
||||||
|
if x5cs, err = parseX5C(x5c); err != nil {
|
||||||
|
return nil, nil, ErrAttestationFormat.WithDetails("Error retrieving x5c value: error occurred parsing values").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return x5c, x5cs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseX5C(x5c []any) (x5cs []*x509.Certificate, err error) {
|
||||||
|
x5cs = make([]*x509.Certificate, len(x5c))
|
||||||
|
|
||||||
|
var (
|
||||||
|
raw []byte
|
||||||
|
ok bool
|
||||||
|
)
|
||||||
|
|
||||||
|
for i, t := range x5c {
|
||||||
|
if raw, ok = t.([]byte); !ok {
|
||||||
|
return nil, fmt.Errorf("x5c[%d] is not a byte array", i)
|
||||||
|
}
|
||||||
|
|
||||||
|
if x5cs[i], err = x509.ParseCertificate(raw); err != nil {
|
||||||
|
return nil, fmt.Errorf("x5c[%d] is not a valid certificate: %w", i, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return x5cs, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// attStatementCertChainVerify allows verifying an attestation statement certificate chain and optionally allows
|
||||||
|
// mangling the not after value for purpose of just validating the attestation lineage. If you set mangleNotAfter to
|
||||||
|
// true this function should only be considered safe for determining lineage, and not hte validity of a chain in
|
||||||
|
// general.
|
||||||
|
//
|
||||||
|
// WARNING: Setting mangleNotAfter=true weakens security by accepting expired certificates.
|
||||||
|
func attStatementCertChainVerify(certs []*x509.Certificate, roots *x509.CertPool, mangleNotAfter bool, mangleNotAfterSafeTime time.Time) (chains [][]*x509.Certificate, err error) {
|
||||||
|
if len(certs) == 0 {
|
||||||
|
return nil, errors.New("empty chain")
|
||||||
|
}
|
||||||
|
|
||||||
|
leaf := certs[0]
|
||||||
|
|
||||||
|
for _, cert := range certs {
|
||||||
|
if !cert.IsCA {
|
||||||
|
leaf = certInsecureConditionalNotAfterMangle(cert, mangleNotAfter, mangleNotAfterSafeTime)
|
||||||
|
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
intermediates *x509.CertPool
|
||||||
|
)
|
||||||
|
|
||||||
|
staticRoots := roots != nil
|
||||||
|
|
||||||
|
intermediates = x509.NewCertPool()
|
||||||
|
|
||||||
|
if roots == nil {
|
||||||
|
if roots, err = x509.SystemCertPool(); err != nil || roots == nil {
|
||||||
|
roots = x509.NewCertPool()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, cert := range certs {
|
||||||
|
if cert == leaf {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if isSelfSigned(cert) && !staticRoots {
|
||||||
|
roots.AddCert(certInsecureConditionalNotAfterMangle(cert, mangleNotAfter, mangleNotAfterSafeTime))
|
||||||
|
} else {
|
||||||
|
intermediates.AddCert(certInsecureConditionalNotAfterMangle(cert, mangleNotAfter, mangleNotAfterSafeTime))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
opts := x509.VerifyOptions{
|
||||||
|
Roots: roots,
|
||||||
|
Intermediates: intermediates,
|
||||||
|
}
|
||||||
|
|
||||||
|
return leaf.Verify(opts)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSelfSigned(c *x509.Certificate) bool {
|
||||||
|
if !c.IsCA {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.CheckSignatureFrom(c) == nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function is used to intentionally but conditionally mangle the certificate not after value to exclude it from
|
||||||
|
// the verification process. This should only be used in instances where all you care about is which certificates
|
||||||
|
// performed the signing.
|
||||||
|
//
|
||||||
|
// WARNING: Setting mangle=true weakens security by accepting expired certificates.
|
||||||
|
func certInsecureConditionalNotAfterMangle(cert *x509.Certificate, mangle bool, safe time.Time) (out *x509.Certificate) {
|
||||||
|
if !mangle || cert.NotAfter.After(safe) {
|
||||||
|
return cert
|
||||||
|
}
|
||||||
|
|
||||||
|
out = &x509.Certificate{}
|
||||||
|
|
||||||
|
*out = *cert
|
||||||
|
|
||||||
|
out.NotAfter = safe
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// This function is used to intentionally mangle the certificate not after value to exclude it from
|
||||||
|
// the verification process. This should only be used in instances where all you care about is which certificates
|
||||||
|
// performed the signing.
|
||||||
|
func certInsecureNotAfterMangle(cert *x509.Certificate, safe time.Time) (out *x509.Certificate) {
|
||||||
|
c := *cert
|
||||||
|
|
||||||
|
out = &c
|
||||||
|
|
||||||
|
if out.NotAfter.Before(safe) {
|
||||||
|
out.NotAfter = safe
|
||||||
|
}
|
||||||
|
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func verifyAttestationECDSAPublicKeyMatch(att AttestationObject, cert *x509.Certificate) (attPublicKeyData webauthncose.EC2PublicKeyData, err error) {
|
||||||
|
var (
|
||||||
|
key any
|
||||||
|
ok bool
|
||||||
|
|
||||||
|
publicKey, attPublicKey *ecdsa.PublicKey
|
||||||
|
)
|
||||||
|
|
||||||
|
if key, err = webauthncose.ParsePublicKey(att.AuthData.AttData.CredentialPublicKey); err != nil {
|
||||||
|
return attPublicKeyData, ErrInvalidAttestation.WithDetails(fmt.Sprintf("Error parsing public key: %+v", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if attPublicKeyData, ok = key.(webauthncose.EC2PublicKeyData); !ok {
|
||||||
|
return attPublicKeyData, ErrInvalidAttestation.WithDetails("Attestation public key is not ECDSA")
|
||||||
|
}
|
||||||
|
|
||||||
|
if publicKey, ok = cert.PublicKey.(*ecdsa.PublicKey); !ok {
|
||||||
|
return attPublicKeyData, ErrInvalidAttestation.WithDetails("Credential public key is not ECDSA")
|
||||||
|
}
|
||||||
|
|
||||||
|
if attPublicKey, err = attPublicKeyData.ToECDSA(); err != nil {
|
||||||
|
return attPublicKeyData, ErrInvalidAttestation.WithDetails("Error converting public key to ECDSA").WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if !attPublicKey.Equal(publicKey) {
|
||||||
|
return attPublicKeyData, ErrInvalidAttestation.WithDetails("Certificate public key does not match public key in authData")
|
||||||
|
}
|
||||||
|
|
||||||
|
return attPublicKeyData, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateRPID performs non-exhaustive checks to ensure the string is most likely a domain string as
|
||||||
|
// relying-party ID's are required to be. Effectively this can be an IP, localhost, or a string that contains a period.
|
||||||
|
// The relying-party ID must not contain scheme, port, path, query, or fragment components.
|
||||||
|
//
|
||||||
|
// See: https://www.w3.org/TR/webauthn/#rp-id
|
||||||
|
func ValidateRPID(value string) (err error) {
|
||||||
|
if len(value) == 0 {
|
||||||
|
return errors.New("empty value provided")
|
||||||
|
}
|
||||||
|
|
||||||
|
if ip := net.ParseIP(value); ip != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var rpid *url.URL
|
||||||
|
|
||||||
|
if rpid, err = url.Parse(value); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Scheme != "" && rpid.Opaque != "" && rpid.Path == "" {
|
||||||
|
return errors.New("the port component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Scheme != "" {
|
||||||
|
if rpid.Host != "" && rpid.Path != "" {
|
||||||
|
return errors.New("the path component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Host != "" && rpid.RawQuery != "" {
|
||||||
|
return errors.New("the query component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Host != "" && rpid.Fragment != "" {
|
||||||
|
return errors.New("the fragment component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Host != "" && rpid.Port() != "" {
|
||||||
|
return errors.New("the port component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
return errors.New("the scheme component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.RawQuery != "" {
|
||||||
|
return errors.New("the query component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.RawFragment != "" || rpid.Fragment != "" {
|
||||||
|
return errors.New("the fragment component must be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if rpid.Host == "" {
|
||||||
|
if strings.Contains(rpid.Path, "/") {
|
||||||
|
return errors.New("the path component must be empty")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if value != "localhost" && !strings.Contains(rpid.Path, ".") {
|
||||||
|
return errors.New("the domain component must actually be a domain")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
33
vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go
generated
vendored
Normal file
33
vendor/github.com/go-webauthn/webauthn/protocol/webauthncbor/webauthncbor.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
package webauthncbor
|
||||||
|
|
||||||
|
import "github.com/fxamacker/cbor/v2"
|
||||||
|
|
||||||
|
const nestedLevelsAllowed = 4
|
||||||
|
|
||||||
|
// ctap2CBORDecMode is the cbor.DecMode following the CTAP2 canonical CBOR encoding form
|
||||||
|
// (https://fidoalliance.org/specs/fido-v2.0-ps-20190130/fido-client-to-authenticator-protocol-v2.0-ps-20190130.html#message-encoding)
|
||||||
|
var ctap2CBORDecMode, _ = cbor.DecOptions{
|
||||||
|
DupMapKey: cbor.DupMapKeyEnforcedAPF,
|
||||||
|
MaxNestedLevels: nestedLevelsAllowed,
|
||||||
|
IndefLength: cbor.IndefLengthForbidden,
|
||||||
|
TagsMd: cbor.TagsForbidden,
|
||||||
|
}.DecMode()
|
||||||
|
|
||||||
|
var ctap2CBOREncMode, _ = cbor.CTAP2EncOptions().EncMode()
|
||||||
|
|
||||||
|
// Unmarshal parses the CBOR-encoded data into the value pointed to by v
|
||||||
|
// following the CTAP2 canonical CBOR encoding form.
|
||||||
|
// (https://fidoalliance.org/specs/fido-v2.0-ps-20190130/fido-client-to-authenticator-protocol-v2.0-ps-20190130.html#message-encoding)
|
||||||
|
func Unmarshal(data []byte, v any) error {
|
||||||
|
// TODO (james-d-elliott): investigate the specific use case for Unmarshal vs UnmarshalFirst to determine the edge cases where this may be useful.
|
||||||
|
_, err := ctap2CBORDecMode.UnmarshalFirst(data, v)
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshal encodes the value pointed to by v
|
||||||
|
// following the CTAP2 canonical CBOR encoding form.
|
||||||
|
// (https://fidoalliance.org/specs/fido-v2.0-ps-20190130/fido-client-to-authenticator-protocol-v2.0-ps-20190130.html#message-encoding)
|
||||||
|
func Marshal(v any) ([]byte, error) {
|
||||||
|
return ctap2CBOREncMode.Marshal(v)
|
||||||
|
}
|
||||||
5
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/const.go
generated
vendored
Normal file
5
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/const.go
generated
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
package webauthncose
|
||||||
|
|
||||||
|
const (
|
||||||
|
keyCannotDisplay = "Cannot display key"
|
||||||
|
)
|
||||||
10
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/ed25519.go
generated
vendored
Normal file
10
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/ed25519.go
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
package webauthncose
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/ed25519"
|
||||||
|
"crypto/x509"
|
||||||
|
)
|
||||||
|
|
||||||
|
func marshalEd25519PublicKey(pub ed25519.PublicKey) ([]byte, error) {
|
||||||
|
return x509.MarshalPKIXPublicKey(pub)
|
||||||
|
}
|
||||||
7
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/types.go
generated
vendored
Normal file
7
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/types.go
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
package webauthncose
|
||||||
|
|
||||||
|
import "math/big"
|
||||||
|
|
||||||
|
type ECDSASignature struct {
|
||||||
|
R, S *big.Int
|
||||||
|
}
|
||||||
13
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/var.go
generated
vendored
Normal file
13
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/var.go
generated
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package webauthncose
|
||||||
|
|
||||||
|
import "sync/atomic"
|
||||||
|
|
||||||
|
var allowBERIntegers atomic.Bool
|
||||||
|
|
||||||
|
// SetExperimentalInsecureAllowBERIntegers allows credentials which have BER integer encoding for their signatures
|
||||||
|
// which do not conform to the specification. This is an experimental option that may be removed without any notice
|
||||||
|
// and could potentially lead to zero-day exploits due to the ambiguity of encoding practices. This is not a recommended
|
||||||
|
// option.
|
||||||
|
func SetExperimentalInsecureAllowBERIntegers(value bool) {
|
||||||
|
allowBERIntegers.Store(value)
|
||||||
|
}
|
||||||
589
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go
generated
vendored
Normal file
589
vendor/github.com/go-webauthn/webauthn/protocol/webauthncose/webauthncose.go
generated
vendored
Normal file
@@ -0,0 +1,589 @@
|
|||||||
|
package webauthncose
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto"
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/ed25519"
|
||||||
|
"crypto/elliptic"
|
||||||
|
"crypto/rsa"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/pem"
|
||||||
|
"fmt"
|
||||||
|
"hash"
|
||||||
|
"math"
|
||||||
|
"math/big"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/x/encoding/asn1"
|
||||||
|
|
||||||
|
"github.com/google/go-tpm/tpm2"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol/webauthncbor"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PublicKeyData The public key portion of a Relying Party-specific credential key pair, generated
|
||||||
|
// by an authenticator and returned to a Relying Party at registration time. We unpack this object
|
||||||
|
// using fxamacker's cbor library ("github.com/fxamacker/cbor/v2") which is why there are cbor tags
|
||||||
|
// included. The tag field values correspond to the IANA COSE keys that give their respective
|
||||||
|
// values.
|
||||||
|
//
|
||||||
|
// Specification: §6.4.1.1. Examples of credentialPublicKey Values Encoded in COSE_Key Format (https://www.w3.org/TR/webauthn/#sctn-encoded-credPubKey-examples)
|
||||||
|
type PublicKeyData struct {
|
||||||
|
// Decode the results to int by default.
|
||||||
|
_struct bool `cbor:",keyasint" json:"public_key"` //nolint:govet
|
||||||
|
|
||||||
|
// The type of key created. Should be OKP, EC2, or RSA.
|
||||||
|
KeyType int64 `cbor:"1,keyasint" json:"kty"`
|
||||||
|
|
||||||
|
// A COSEAlgorithmIdentifier for the algorithm used to derive the key signature.
|
||||||
|
Algorithm int64 `cbor:"3,keyasint" json:"alg"`
|
||||||
|
}
|
||||||
|
|
||||||
|
const ecCoordSize = 32
|
||||||
|
|
||||||
|
type EC2PublicKeyData struct {
|
||||||
|
PublicKeyData
|
||||||
|
|
||||||
|
// If the key type is EC2, the curve on which we derive the signature from.
|
||||||
|
Curve int64 `cbor:"-1,keyasint,omitempty" json:"crv"`
|
||||||
|
|
||||||
|
// A byte string 32 bytes in length that holds the x coordinate of the key.
|
||||||
|
XCoord []byte `cbor:"-2,keyasint,omitempty" json:"x"`
|
||||||
|
|
||||||
|
// A byte string 32 bytes in length that holds the y coordinate of the key.
|
||||||
|
YCoord []byte `cbor:"-3,keyasint,omitempty" json:"y"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RSAPublicKeyData struct {
|
||||||
|
PublicKeyData
|
||||||
|
|
||||||
|
// Represents the modulus parameter for the RSA algorithm.
|
||||||
|
Modulus []byte `cbor:"-1,keyasint,omitempty" json:"n"`
|
||||||
|
|
||||||
|
// Represents the exponent parameter for the RSA algorithm.
|
||||||
|
Exponent []byte `cbor:"-2,keyasint,omitempty" json:"e"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type OKPPublicKeyData struct {
|
||||||
|
PublicKeyData
|
||||||
|
|
||||||
|
Curve int64
|
||||||
|
|
||||||
|
// A byte string that holds the x coordinate of the key.
|
||||||
|
XCoord []byte `cbor:"-2,keyasint,omitempty" json:"x"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify Octet Key Pair (OKP) Public Key Signature.
|
||||||
|
func (k *OKPPublicKeyData) Verify(data []byte, sig []byte) (bool, error) {
|
||||||
|
if err := validateOKPPublicKey(k); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var key ed25519.PublicKey = make([]byte, ed25519.PublicKeySize)
|
||||||
|
|
||||||
|
copy(key, k.XCoord)
|
||||||
|
|
||||||
|
return ed25519.Verify(key, data, sig), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify Elliptic Curve Public Key Signature.
|
||||||
|
func (k *EC2PublicKeyData) Verify(data []byte, sig []byte) (valid bool, err error) {
|
||||||
|
if err = validateEC2PublicKey(k); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
pubkey := &ecdsa.PublicKey{
|
||||||
|
Curve: ec2AlgCurve(k.Algorithm),
|
||||||
|
X: big.NewInt(0).SetBytes(k.XCoord),
|
||||||
|
Y: big.NewInt(0).SetBytes(k.YCoord),
|
||||||
|
}
|
||||||
|
|
||||||
|
h := HasherFromCOSEAlg(COSEAlgorithmIdentifier(k.Algorithm))
|
||||||
|
h.Write(data)
|
||||||
|
|
||||||
|
e := &ECDSASignature{}
|
||||||
|
|
||||||
|
var opts []asn1.UnmarshalOpt
|
||||||
|
|
||||||
|
if allowBERIntegers.Load() {
|
||||||
|
opts = append(opts, asn1.WithUnmarshalAllowBERIntegers(true))
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err = asn1.Unmarshal(sig, e, opts...); err != nil {
|
||||||
|
return false, ErrSigNotProvidedOrInvalid
|
||||||
|
}
|
||||||
|
|
||||||
|
return ecdsa.Verify(pubkey, h.Sum(nil), e.R, e.S), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToECDSA converts the EC2PublicKeyData to an ecdsa.PublicKey.
|
||||||
|
func (k *EC2PublicKeyData) ToECDSA() (key *ecdsa.PublicKey, err error) {
|
||||||
|
if err = validateEC2PublicKey(k); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ecdsa.PublicKey{
|
||||||
|
Curve: ec2AlgCurve(k.Algorithm),
|
||||||
|
X: big.NewInt(0).SetBytes(k.XCoord),
|
||||||
|
Y: big.NewInt(0).SetBytes(k.YCoord),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify RSA Public Key Signature.
|
||||||
|
func (k *RSAPublicKeyData) Verify(data []byte, sig []byte) (valid bool, err error) {
|
||||||
|
if err = validateRSAPublicKey(k); err != nil {
|
||||||
|
return false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
e, _ := parseRSAPublicKeyDataExponent(k)
|
||||||
|
|
||||||
|
pubkey := &rsa.PublicKey{
|
||||||
|
N: big.NewInt(0).SetBytes(k.Modulus),
|
||||||
|
E: e,
|
||||||
|
}
|
||||||
|
|
||||||
|
coseAlg := COSEAlgorithmIdentifier(k.Algorithm)
|
||||||
|
|
||||||
|
algDetail, ok := COSESignatureAlgorithmDetails[coseAlg]
|
||||||
|
if !ok {
|
||||||
|
return false, ErrUnsupportedAlgorithm
|
||||||
|
}
|
||||||
|
|
||||||
|
hash := algDetail.hash
|
||||||
|
h := hash.New()
|
||||||
|
h.Write(data)
|
||||||
|
|
||||||
|
switch coseAlg {
|
||||||
|
case AlgPS256, AlgPS384, AlgPS512:
|
||||||
|
err = rsa.VerifyPSS(pubkey, hash, h.Sum(nil), sig, nil)
|
||||||
|
|
||||||
|
return err == nil, err
|
||||||
|
case AlgRS1, AlgRS256, AlgRS384, AlgRS512:
|
||||||
|
err = rsa.VerifyPKCS1v15(pubkey, hash, h.Sum(nil), sig)
|
||||||
|
|
||||||
|
return err == nil, err
|
||||||
|
default:
|
||||||
|
return false, ErrUnsupportedAlgorithm
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParsePublicKey figures out what kind of COSE material was provided and create the data for the new key.
|
||||||
|
func ParsePublicKey(keyBytes []byte) (publicKey any, err error) {
|
||||||
|
pk := PublicKeyData{}
|
||||||
|
|
||||||
|
if err = webauthncbor.Unmarshal(keyBytes, &pk); err != nil {
|
||||||
|
return nil, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
|
||||||
|
switch COSEKeyType(pk.KeyType) {
|
||||||
|
case OctetKey:
|
||||||
|
var o OKPPublicKeyData
|
||||||
|
|
||||||
|
if err = webauthncbor.Unmarshal(keyBytes, &o); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
o.PublicKeyData = pk
|
||||||
|
|
||||||
|
if err = validateOKPPublicKey(&o); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return o, nil
|
||||||
|
case EllipticKey:
|
||||||
|
var e EC2PublicKeyData
|
||||||
|
|
||||||
|
if err = webauthncbor.Unmarshal(keyBytes, &e); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
e.PublicKeyData = pk
|
||||||
|
|
||||||
|
if err = validateEC2PublicKey(&e); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return e, nil
|
||||||
|
case RSAKey:
|
||||||
|
var r RSAPublicKeyData
|
||||||
|
|
||||||
|
if err = webauthncbor.Unmarshal(keyBytes, &r); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
r.PublicKeyData = pk
|
||||||
|
|
||||||
|
if err = validateRSAPublicKey(&r); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return r, nil
|
||||||
|
default:
|
||||||
|
return nil, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseFIDOPublicKey is only used when the appID extension is configured by the assertion response.
|
||||||
|
func ParseFIDOPublicKey(keyBytes []byte) (data EC2PublicKeyData, err error) {
|
||||||
|
x, y := elliptic.Unmarshal(elliptic.P256(), keyBytes)
|
||||||
|
|
||||||
|
if x == nil || y == nil {
|
||||||
|
return data, fmt.Errorf("elliptic unmarshall returned a nil value")
|
||||||
|
}
|
||||||
|
|
||||||
|
return EC2PublicKeyData{
|
||||||
|
PublicKeyData: PublicKeyData{
|
||||||
|
KeyType: int64(EllipticKey),
|
||||||
|
Algorithm: int64(AlgES256),
|
||||||
|
},
|
||||||
|
Curve: int64(P256),
|
||||||
|
XCoord: x.FillBytes(make([]byte, ecCoordSize)),
|
||||||
|
YCoord: y.FillBytes(make([]byte, ecCoordSize)),
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func VerifySignature(key any, data []byte, sig []byte) (bool, error) {
|
||||||
|
switch k := key.(type) {
|
||||||
|
case OKPPublicKeyData:
|
||||||
|
return k.Verify(data, sig)
|
||||||
|
case EC2PublicKeyData:
|
||||||
|
return k.Verify(data, sig)
|
||||||
|
case RSAPublicKeyData:
|
||||||
|
return k.Verify(data, sig)
|
||||||
|
default:
|
||||||
|
return false, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func DisplayPublicKey(cpk []byte) string {
|
||||||
|
parsedKey, err := ParsePublicKey(cpk)
|
||||||
|
if err != nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
|
||||||
|
var data []byte
|
||||||
|
|
||||||
|
switch k := parsedKey.(type) {
|
||||||
|
case RSAPublicKeyData:
|
||||||
|
var e int
|
||||||
|
|
||||||
|
if e, err = parseRSAPublicKeyDataExponent(&k); err != nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
|
||||||
|
rKey := &rsa.PublicKey{
|
||||||
|
N: big.NewInt(0).SetBytes(k.Modulus),
|
||||||
|
E: e,
|
||||||
|
}
|
||||||
|
|
||||||
|
if data, err = x509.MarshalPKIXPublicKey(rKey); err != nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
case EC2PublicKeyData:
|
||||||
|
curve := ec2AlgCurve(k.Algorithm)
|
||||||
|
if curve == nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
|
||||||
|
eKey := &ecdsa.PublicKey{
|
||||||
|
Curve: curve,
|
||||||
|
X: big.NewInt(0).SetBytes(k.XCoord),
|
||||||
|
Y: big.NewInt(0).SetBytes(k.YCoord),
|
||||||
|
}
|
||||||
|
|
||||||
|
if data, err = x509.MarshalPKIXPublicKey(eKey); err != nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
case OKPPublicKeyData:
|
||||||
|
if len(k.XCoord) != ed25519.PublicKeySize {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
|
||||||
|
var oKey ed25519.PublicKey = make([]byte, ed25519.PublicKeySize)
|
||||||
|
|
||||||
|
copy(oKey, k.XCoord)
|
||||||
|
|
||||||
|
if data, err = marshalEd25519PublicKey(oKey); err != nil {
|
||||||
|
return keyCannotDisplay
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return "Cannot display key of this type"
|
||||||
|
}
|
||||||
|
|
||||||
|
pemBytes := pem.EncodeToMemory(&pem.Block{
|
||||||
|
Type: "PUBLIC KEY",
|
||||||
|
Bytes: data,
|
||||||
|
})
|
||||||
|
|
||||||
|
return string(pemBytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
// COSEAlgorithmIdentifier is a number identifying a cryptographic algorithm. The algorithm identifiers SHOULD be values
|
||||||
|
// registered in the IANA COSE Algorithms registry [https://www.w3.org/TR/webauthn/#biblio-iana-cose-algs-reg], for
|
||||||
|
// instance, -7 for "ES256" and -257 for "RS256".
|
||||||
|
//
|
||||||
|
// Specification: §5.8.5. Cryptographic Algorithm Identifier (https://www.w3.org/TR/webauthn/#sctn-alg-identifier)
|
||||||
|
type COSEAlgorithmIdentifier int
|
||||||
|
|
||||||
|
const (
|
||||||
|
// AlgES256 ECDSA with SHA-256.
|
||||||
|
AlgES256 COSEAlgorithmIdentifier = -7
|
||||||
|
|
||||||
|
// AlgEdDSA EdDSA.
|
||||||
|
AlgEdDSA COSEAlgorithmIdentifier = -8
|
||||||
|
|
||||||
|
// AlgES384 ECDSA with SHA-384.
|
||||||
|
AlgES384 COSEAlgorithmIdentifier = -35
|
||||||
|
|
||||||
|
// AlgES512 ECDSA with SHA-512.
|
||||||
|
AlgES512 COSEAlgorithmIdentifier = -36
|
||||||
|
|
||||||
|
// AlgPS256 RSASSA-PSS with SHA-256.
|
||||||
|
AlgPS256 COSEAlgorithmIdentifier = -37
|
||||||
|
|
||||||
|
// AlgPS384 RSASSA-PSS with SHA-384.
|
||||||
|
AlgPS384 COSEAlgorithmIdentifier = -38
|
||||||
|
|
||||||
|
// AlgPS512 RSASSA-PSS with SHA-512.
|
||||||
|
AlgPS512 COSEAlgorithmIdentifier = -39
|
||||||
|
|
||||||
|
// AlgES256K is ECDSA using secp256k1 curve and SHA-256.
|
||||||
|
AlgES256K COSEAlgorithmIdentifier = -47
|
||||||
|
|
||||||
|
// AlgRS256 RSASSA-PKCS1-v1_5 with SHA-256.
|
||||||
|
AlgRS256 COSEAlgorithmIdentifier = -257
|
||||||
|
|
||||||
|
// AlgRS384 RSASSA-PKCS1-v1_5 with SHA-384.
|
||||||
|
AlgRS384 COSEAlgorithmIdentifier = -258
|
||||||
|
|
||||||
|
// AlgRS512 RSASSA-PKCS1-v1_5 with SHA-512.
|
||||||
|
AlgRS512 COSEAlgorithmIdentifier = -259
|
||||||
|
|
||||||
|
// AlgRS1 RSASSA-PKCS1-v1_5 with SHA-1.
|
||||||
|
AlgRS1 COSEAlgorithmIdentifier = -65535
|
||||||
|
)
|
||||||
|
|
||||||
|
// COSEKeyType is The Key type derived from the IANA COSE AuthData.
|
||||||
|
type COSEKeyType int
|
||||||
|
|
||||||
|
const (
|
||||||
|
// KeyTypeReserved is a reserved value.
|
||||||
|
KeyTypeReserved COSEKeyType = iota
|
||||||
|
|
||||||
|
// OctetKey is an Octet Key.
|
||||||
|
OctetKey
|
||||||
|
|
||||||
|
// EllipticKey is an Elliptic Curve Public Key.
|
||||||
|
EllipticKey
|
||||||
|
|
||||||
|
// RSAKey is an RSA Public Key.
|
||||||
|
RSAKey
|
||||||
|
|
||||||
|
// Symmetric Keys.
|
||||||
|
Symmetric
|
||||||
|
|
||||||
|
// HSSLMS is the public key for HSS/LMS hash-based digital signature.
|
||||||
|
HSSLMS
|
||||||
|
)
|
||||||
|
|
||||||
|
// COSEEllipticCurve is an enumerator that represents the COSE Elliptic Curves.
|
||||||
|
//
|
||||||
|
// Specification: https://www.iana.org/assignments/cose/cose.xhtml#elliptic-curves
|
||||||
|
type COSEEllipticCurve int
|
||||||
|
|
||||||
|
const (
|
||||||
|
// EllipticCurveReserved is the COSE EC Reserved value.
|
||||||
|
EllipticCurveReserved COSEEllipticCurve = iota
|
||||||
|
|
||||||
|
// P256 represents NIST P-256 also known as secp256r1.
|
||||||
|
P256
|
||||||
|
|
||||||
|
// P384 represents NIST P-384 also known as secp384r1.
|
||||||
|
P384
|
||||||
|
|
||||||
|
// P521 represents NIST P-521 also known as secp521r1.
|
||||||
|
P521
|
||||||
|
|
||||||
|
// X25519 for use w/ ECDH only.
|
||||||
|
X25519
|
||||||
|
|
||||||
|
// X448 for use w/ ECDH only.
|
||||||
|
X448
|
||||||
|
|
||||||
|
// Ed25519 for use w/ EdDSA only.
|
||||||
|
Ed25519
|
||||||
|
|
||||||
|
// Ed448 for use w/ EdDSA only.
|
||||||
|
Ed448
|
||||||
|
|
||||||
|
// Secp256k1 is the SECG secp256k1 curve.
|
||||||
|
Secp256k1
|
||||||
|
)
|
||||||
|
|
||||||
|
func (k *EC2PublicKeyData) TPMCurveID() tpm2.TPMECCCurve {
|
||||||
|
switch COSEEllipticCurve(k.Curve) {
|
||||||
|
case P256:
|
||||||
|
return tpm2.TPMECCNistP256 // TPM_ECC_NIST_P256.
|
||||||
|
case P384:
|
||||||
|
return tpm2.TPMECCNistP384 // TPM_ECC_NIST_P384.
|
||||||
|
case P521:
|
||||||
|
return tpm2.TPMECCNistP521 // TPM_ECC_NIST_P521.
|
||||||
|
default:
|
||||||
|
return tpm2.TPMECCNone // TPM_ECC_NONE.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func ec2AlgCurve(coseAlg int64) elliptic.Curve {
|
||||||
|
switch COSEAlgorithmIdentifier(coseAlg) {
|
||||||
|
case AlgES512: // IANA COSE code for ECDSA w/ SHA-512.
|
||||||
|
return elliptic.P521()
|
||||||
|
case AlgES384: // IANA COSE code for ECDSA w/ SHA-384.
|
||||||
|
return elliptic.P384()
|
||||||
|
case AlgES256: // IANA COSE code for ECDSA w/ SHA-256.
|
||||||
|
return elliptic.P256()
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// SigAlgFromCOSEAlg return which signature algorithm is being used from the COSE Key.
|
||||||
|
func SigAlgFromCOSEAlg(coseAlg COSEAlgorithmIdentifier) x509.SignatureAlgorithm {
|
||||||
|
d, ok := COSESignatureAlgorithmDetails[coseAlg]
|
||||||
|
if !ok {
|
||||||
|
return x509.UnknownSignatureAlgorithm
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.sigAlg
|
||||||
|
}
|
||||||
|
|
||||||
|
// HasherFromCOSEAlg returns the Hashing interface to be used for a given COSE Algorithm.
|
||||||
|
func HasherFromCOSEAlg(coseAlg COSEAlgorithmIdentifier) hash.Hash {
|
||||||
|
d, ok := COSESignatureAlgorithmDetails[coseAlg]
|
||||||
|
if !ok {
|
||||||
|
// default to SHA256? Why not.
|
||||||
|
return crypto.SHA256.New()
|
||||||
|
}
|
||||||
|
|
||||||
|
return d.hash.New()
|
||||||
|
}
|
||||||
|
|
||||||
|
var COSESignatureAlgorithmDetails = map[COSEAlgorithmIdentifier]struct {
|
||||||
|
name string
|
||||||
|
hash crypto.Hash
|
||||||
|
sigAlg x509.SignatureAlgorithm
|
||||||
|
}{
|
||||||
|
AlgRS1: {"SHA1-RSA", crypto.SHA1, x509.SHA1WithRSA},
|
||||||
|
AlgRS256: {"SHA256-RSA", crypto.SHA256, x509.SHA256WithRSA},
|
||||||
|
AlgRS384: {"SHA384-RSA", crypto.SHA384, x509.SHA384WithRSA},
|
||||||
|
AlgRS512: {"SHA512-RSA", crypto.SHA512, x509.SHA512WithRSA},
|
||||||
|
AlgPS256: {"SHA256-RSAPSS", crypto.SHA256, x509.SHA256WithRSAPSS},
|
||||||
|
AlgPS384: {"SHA384-RSAPSS", crypto.SHA384, x509.SHA384WithRSAPSS},
|
||||||
|
AlgPS512: {"SHA512-RSAPSS", crypto.SHA512, x509.SHA512WithRSAPSS},
|
||||||
|
AlgES256: {"ECDSA-SHA256", crypto.SHA256, x509.ECDSAWithSHA256},
|
||||||
|
AlgES384: {"ECDSA-SHA384", crypto.SHA384, x509.ECDSAWithSHA384},
|
||||||
|
AlgES512: {"ECDSA-SHA512", crypto.SHA512, x509.ECDSAWithSHA512},
|
||||||
|
AlgEdDSA: {"EdDSA", crypto.SHA512, x509.PureEd25519},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error is the JSON-serializable error type used by the key parsing and
// validation routines in this package. It implements the error interface by
// returning Details.
type Error struct {
	// Short name for the type of error that has occurred.
	Type string `json:"type"`

	// Additional details about the error.
	Details string `json:"error"`

	// Information to help debug the error.
	DevInfo string `json:"debug"`
}
|
||||||
|
|
||||||
|
// Sentinel errors returned from public key parsing and validation. These are
// shared package-level values; use WithDetails to attach context via a copy
// rather than mutating them in place.
var (
	ErrUnsupportedKey = &Error{
		Type:    "invalid_key_type",
		Details: "Unsupported Public Key Type",
	}
	ErrUnsupportedAlgorithm = &Error{
		Type:    "unsupported_key_algorithm",
		Details: "Unsupported public key algorithm",
	}
	ErrSigNotProvidedOrInvalid = &Error{
		Type:    "signature_not_provided_or_invalid",
		Details: "Signature invalid or not provided",
	}
)
|
||||||
|
|
||||||
|
func (err *Error) Error() string {
|
||||||
|
return err.Details
|
||||||
|
}
|
||||||
|
|
||||||
|
func (passedError *Error) WithDetails(details string) *Error {
|
||||||
|
err := *passedError
|
||||||
|
err.Details = details
|
||||||
|
|
||||||
|
return &err
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateOKPPublicKey(k *OKPPublicKeyData) error {
|
||||||
|
if len(k.XCoord) != ed25519.PublicKeySize {
|
||||||
|
return ErrUnsupportedKey.WithDetails(fmt.Sprintf("OKP key x coordinate has invalid length %d, expected %d", len(k.XCoord), ed25519.PublicKeySize))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateEC2PublicKey(k *EC2PublicKeyData) error {
|
||||||
|
curve := ec2AlgCurve(k.Algorithm)
|
||||||
|
if curve == nil {
|
||||||
|
return ErrUnsupportedAlgorithm.WithDetails("Unsupported EC2 algorithm")
|
||||||
|
}
|
||||||
|
|
||||||
|
byteLen := (curve.Params().BitSize + 7) / 8
|
||||||
|
|
||||||
|
if len(k.XCoord) != byteLen || len(k.YCoord) != byteLen {
|
||||||
|
return ErrUnsupportedKey.WithDetails("EC2 key x or y coordinate has invalid length")
|
||||||
|
}
|
||||||
|
|
||||||
|
x := new(big.Int).SetBytes(k.XCoord)
|
||||||
|
y := new(big.Int).SetBytes(k.YCoord)
|
||||||
|
|
||||||
|
if !curve.IsOnCurve(x, y) {
|
||||||
|
return ErrUnsupportedKey.WithDetails("EC2 key point is not on curve")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateRSAPublicKey(k *RSAPublicKeyData) error {
|
||||||
|
n := new(big.Int).SetBytes(k.Modulus)
|
||||||
|
if n.Sign() <= 0 {
|
||||||
|
return ErrUnsupportedKey.WithDetails("RSA key contains zero or empty modulus")
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, err := parseRSAPublicKeyDataExponent(k); err != nil {
|
||||||
|
return ErrUnsupportedKey.WithDetails(fmt.Sprintf("RSA key contains invalid exponent: %v", err))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseRSAPublicKeyDataExponent(k *RSAPublicKeyData) (exp int, err error) {
|
||||||
|
if k == nil {
|
||||||
|
return 0, fmt.Errorf("invalid key")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(k.Exponent) == 0 {
|
||||||
|
return 0, fmt.Errorf("invalid exponent length")
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, b := range k.Exponent {
|
||||||
|
if exp > (math.MaxInt >> 8) {
|
||||||
|
return 0, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
|
||||||
|
exp = (exp << 8) | int(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
if exp <= 0 {
|
||||||
|
return 0, ErrUnsupportedKey
|
||||||
|
}
|
||||||
|
|
||||||
|
return exp, nil
|
||||||
|
}
|
||||||
60
vendor/github.com/go-webauthn/webauthn/webauthn/authenticator.go
generated
vendored
Normal file
60
vendor/github.com/go-webauthn/webauthn/webauthn/authenticator.go
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
package webauthn
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/go-webauthn/webauthn/protocol"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Authenticator represents a specific authenticator in the context of a [Credential].
// See UpdateCounter for how SignCount and CloneWarning are maintained together.
type Authenticator struct {
	// The AAGUID of the authenticator. An AAGUID is defined as an array containing the globally unique
	// identifier of the authenticator model being sought.
	AAGUID []byte `json:"AAGUID"`

	// SignCount is a representation of the number of times the Authenticator or Credential have been used to login.
	// Upon a new login operation, the Relying Party compares the stored signature counter value with the new SignCount
	// value returned in the assertion’s authenticator data. If this new SignCount value is less than or equal to the
	// stored value, a cloned authenticator may exist, or the authenticator may be malfunctioning.
	SignCount uint32 `json:"signCount"`

	// CloneWarning is a signal that the authenticator may be cloned, i.e. at least two copies of the
	// credential private key may exist and are being used in parallel. Relying Parties should incorporate
	// this information into their risk scoring. Whether the Relying Party updates the stored signature
	// counter value in this case, or not, or fails the authentication ceremony or not, is Relying Party-specific.
	CloneWarning bool `json:"cloneWarning"`

	// Attachment is the authenticatorAttachment value returned by the request.
	Attachment protocol.AuthenticatorAttachment `json:"attachment"`
}
|
||||||
|
|
||||||
|
// SelectAuthenticator allow for easy marshaling of authenticator options that are provided to the user.
|
||||||
|
func SelectAuthenticator(att string, rrk *bool, uv string) protocol.AuthenticatorSelection {
|
||||||
|
return protocol.AuthenticatorSelection{
|
||||||
|
AuthenticatorAttachment: protocol.AuthenticatorAttachment(att),
|
||||||
|
RequireResidentKey: rrk,
|
||||||
|
UserVerification: protocol.UserVerificationRequirement(uv),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// UpdateCounter updates the authenticator and either sets the clone warning value or the sign count.
|
||||||
|
//
|
||||||
|
// Step 17 of §7.2. about verifying attestation. If the signature counter value authData.signCount
|
||||||
|
// is nonzero or the value stored in conjunction with credential’s id attribute is nonzero, then
|
||||||
|
// run the following sub-step:
|
||||||
|
//
|
||||||
|
// If the signature counter value authData.signCount is
|
||||||
|
//
|
||||||
|
// → Greater than the signature counter value stored in conjunction with credential’s id attribute.
|
||||||
|
// Update the stored signature counter value, associated with credential’s id attribute, to be the value of
|
||||||
|
// authData.signCount.
|
||||||
|
//
|
||||||
|
// → Less than or equal to the signature counter value stored in conjunction with credential’s id attribute.
|
||||||
|
// This is a signal that the authenticator may be cloned, see CloneWarning above for more information.
|
||||||
|
func (a *Authenticator) UpdateCounter(authDataCount uint32) {
|
||||||
|
if authDataCount <= a.SignCount && (authDataCount != 0 || a.SignCount != 0) {
|
||||||
|
a.CloneWarning = true
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
a.SignCount = authDataCount
|
||||||
|
}
|
||||||
15
vendor/github.com/go-webauthn/webauthn/webauthn/const.go
generated
vendored
Normal file
15
vendor/github.com/go-webauthn/webauthn/webauthn/const.go
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
package webauthn
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Error format strings shared across the package; both expect a wrapped error
// as their final %w argument.
const (
	errFmtFieldNotValidDomainString = "field '%s' is not a valid domain string: %w"
	errFmtConfigValidate            = "error occurred validating the configuration: %w"
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
defaultTimeoutUVD = time.Millisecond * 120000
|
||||||
|
defaultTimeout = time.Millisecond * 300000
|
||||||
|
)
|
||||||
195
vendor/github.com/go-webauthn/webauthn/webauthn/credential.go
generated
vendored
Normal file
195
vendor/github.com/go-webauthn/webauthn/webauthn/credential.go
generated
vendored
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
package webauthn
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/sha256"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/metadata"
|
||||||
|
"github.com/go-webauthn/webauthn/protocol"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Credential contains all needed information about a WebAuthn credential for storage. This struct is effectively the
// Credential Record as described in the specification.
//
// Provided this data structure is preserved properly this Credential can be properly verified using the
// [Credential.Verify] function when provided a [metadata.Provider].
//
// It is strongly recommended for the best security that a [Credential] is encrypted at rest with the exception of the
// ID and the value you use to lookup the user. This prevents a person with access to the database being able to
// compromise privacy by being able to view this data, as well as prevents them being able to compromise security by
// adding or modifying a Credential without them also having access to the encryption key.
//
// See: §4. Terminology: Credential Record (https://www.w3.org/TR/webauthn-3/#credential-record)
type Credential struct {
	// The ID is the ID of the public key credential source. Described by the Credential Record 'id' field.
	ID []byte `json:"id"`

	// The credential public key of the public key credential source. Described by the Credential Record 'publicKey'
	// field.
	PublicKey []byte `json:"publicKey"`

	// The AttestationType stores the attestation format used (if any) by the authenticator when creating the
	// Credential.
	//
	// Important Note: This field is named attestationType but this is actually the attestation format.
	AttestationType string `json:"attestationType"`

	// Transport types the authenticator supports. Described by the Credential Record 'transports' field.
	Transport []protocol.AuthenticatorTransport `json:"transport"`

	// Flags represent the commonly stored flags.
	Flags CredentialFlags `json:"flags"`

	// The Authenticator information for a given Credential.
	Authenticator Authenticator `json:"authenticator"`

	// The attestation values that can be used to validate this Credential via the MDS3 at a later date.
	Attestation CredentialAttestation `json:"attestation"`
}
|
||||||
|
|
||||||
|
// SignalUnknownCredential creates a struct that can easily be marshaled to JSON which indicates this is an unknown
|
||||||
|
// Credential.
|
||||||
|
func (c Credential) SignalUnknownCredential(rpid string) *protocol.SignalUnknownCredential {
|
||||||
|
return c.Descriptor().SignalUnknownCredential(rpid)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Credentials is a decorator type which allows easily converting a [Credential] slice into a
// [protocol.CredentialDescriptor] slice by utilizing the [Credentials.CredentialDescriptors] method. This will be the
// type used globally for the library in a future release.
type Credentials []Credential
|
||||||
|
|
||||||
|
// CredentialDescriptors returns the [protocol.CredentialDescriptor] slice for this [Credentials] type.
|
||||||
|
func (c Credentials) CredentialDescriptors() (descriptors []protocol.CredentialDescriptor) {
|
||||||
|
descriptors = make([]protocol.CredentialDescriptor, len(c))
|
||||||
|
|
||||||
|
for i, credential := range c {
|
||||||
|
descriptors[i] = credential.Descriptor()
|
||||||
|
}
|
||||||
|
|
||||||
|
return descriptors
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewCredentialFlags is a utility function that is used to derive the [Credential]'s Flags field given a
|
||||||
|
// [protocol.AuthenticatorFlags]. This allows implementers to solely save the Raw field of the [CredentialFlags] to
|
||||||
|
// restore them appropriately for appropriate processing without concern that changes forced upon implementers by the
|
||||||
|
// W3C will introduce breaking changes.
|
||||||
|
func NewCredentialFlags(flags protocol.AuthenticatorFlags) CredentialFlags {
|
||||||
|
return CredentialFlags{
|
||||||
|
UserPresent: flags.HasUserPresent(),
|
||||||
|
UserVerified: flags.HasUserVerified(),
|
||||||
|
BackupEligible: flags.HasBackupEligible(),
|
||||||
|
BackupState: flags.HasBackupState(),
|
||||||
|
raw: flags,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// CredentialFlags is a JSON representation of the flags.
type CredentialFlags struct {
	// Flag UP indicates the users presence.
	UserPresent bool `json:"userPresent"`

	// Flag UV indicates the user performed verification.
	UserVerified bool `json:"userVerified"`

	// Flag BE indicates the credential is able to be backed up and/or sync'd between devices. This should NEVER change.
	BackupEligible bool `json:"backupEligible"`

	// Flag BS indicates the credential has been backed up and/or sync'd. This value can change but it's recommended
	// that RP's keep track of this value.
	BackupState bool `json:"backupState"`

	// raw holds the original protocol flags; populated by NewCredentialFlags
	// and exposed via ProtocolValue. Not serialized to JSON.
	raw protocol.AuthenticatorFlags
}
|
||||||
|
|
||||||
|
// ProtocolValue returns the underlying [protocol.AuthenticatorFlags] provided this [CredentialFlags] was created using
// NewCredentialFlags; otherwise it returns the zero value.
func (f CredentialFlags) ProtocolValue() protocol.AuthenticatorFlags {
	return f.raw
}
|
||||||
|
|
||||||
|
// CredentialAttestation is a decoded representation of the [protocol.AuthenticatorAttestationResponse] in a format that
// can easily be serialized. Credential.Verify reconstructs the raw response
// from these fields when re-validating against metadata.
type CredentialAttestation struct {
	// ClientDataJSON is the raw client data JSON from the registration ceremony.
	ClientDataJSON []byte `json:"clientDataJSON"`
	// ClientDataHash is the hash of ClientDataJSON; recomputed with SHA-256 during Verify if empty.
	ClientDataHash []byte `json:"clientDataHash"`
	// AuthenticatorData is the raw authenticator data from the registration response.
	AuthenticatorData []byte `json:"authenticatorData"`
	// PublicKeyAlgorithm is the COSE algorithm identifier reported for the public key.
	PublicKeyAlgorithm int64 `json:"publicKeyAlgorithm"`
	// Object is the raw attestation object from the registration response.
	Object []byte `json:"object"`
}
|
||||||
|
|
||||||
|
// Descriptor converts a [Credential] into a [protocol.CredentialDescriptor].
|
||||||
|
func (c Credential) Descriptor() (descriptor protocol.CredentialDescriptor) {
|
||||||
|
return protocol.CredentialDescriptor{
|
||||||
|
Type: protocol.PublicKeyCredentialType,
|
||||||
|
CredentialID: c.ID,
|
||||||
|
Transport: c.Transport,
|
||||||
|
AttestationType: c.AttestationType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewCredential will return a credential pointer on successful validation of a registration response.
|
||||||
|
func NewCredential(clientDataHash []byte, c *protocol.ParsedCredentialCreationData) (credential *Credential, err error) {
|
||||||
|
credential = &Credential{
|
||||||
|
ID: c.Response.AttestationObject.AuthData.AttData.CredentialID,
|
||||||
|
PublicKey: c.Response.AttestationObject.AuthData.AttData.CredentialPublicKey,
|
||||||
|
AttestationType: c.Response.AttestationObject.Format,
|
||||||
|
Transport: c.Response.Transports,
|
||||||
|
Flags: NewCredentialFlags(c.Response.AttestationObject.AuthData.Flags),
|
||||||
|
Authenticator: Authenticator{
|
||||||
|
AAGUID: c.Response.AttestationObject.AuthData.AttData.AAGUID,
|
||||||
|
SignCount: c.Response.AttestationObject.AuthData.Counter,
|
||||||
|
Attachment: c.AuthenticatorAttachment,
|
||||||
|
},
|
||||||
|
Attestation: CredentialAttestation{
|
||||||
|
ClientDataJSON: c.Raw.AttestationResponse.ClientDataJSON,
|
||||||
|
ClientDataHash: clientDataHash,
|
||||||
|
AuthenticatorData: c.Raw.AttestationResponse.AuthenticatorData,
|
||||||
|
PublicKeyAlgorithm: c.Raw.AttestationResponse.PublicKeyAlgorithm,
|
||||||
|
Object: c.Raw.AttestationResponse.AttestationObject,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return credential, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify re-validates this credential's stored attestation against the given
// [metadata.Provider]. It reconstructs the original attestation response from
// the stored [CredentialAttestation] fields, parses it, and verifies the
// attestation object. Returns an error if the provider is nil, parsing fails,
// or attestation verification fails.
func (c Credential) Verify(mds metadata.Provider) (err error) {
	if mds == nil {
		return fmt.Errorf("error verifying credential: the metadata provider must be provided but it's nil")
	}

	// Rebuild the raw wire-format response from the persisted fields.
	raw := &protocol.AuthenticatorAttestationResponse{
		AuthenticatorResponse: protocol.AuthenticatorResponse{
			ClientDataJSON: c.Attestation.ClientDataJSON,
		},
		Transports:         make([]string, len(c.Transport)),
		AuthenticatorData:  c.Attestation.AuthenticatorData,
		PublicKey:          c.PublicKey,
		PublicKeyAlgorithm: c.Attestation.PublicKeyAlgorithm,
		AttestationObject:  c.Attestation.Object,
	}

	for i, transport := range c.Transport {
		raw.Transports[i] = string(transport)
	}

	var attestation *protocol.ParsedAttestationResponse

	if attestation, err = raw.Parse(); err != nil {
		return fmt.Errorf("error verifying credential: error parsing attestation: %w", err)
	}

	clientDataHash := c.Attestation.ClientDataHash

	// Older stored records may lack the hash; recompute it from the client
	// data JSON so verification can still proceed.
	if len(clientDataHash) == 0 {
		sum := sha256.Sum256(c.Attestation.ClientDataJSON)

		clientDataHash = sum[:]
	}

	if err = attestation.AttestationObject.VerifyAttestation(clientDataHash, mds); err != nil {
		return fmt.Errorf("error verifying credential: error verifying attestation: %w", err)
	}

	return nil
}
|
||||||
50
vendor/github.com/go-webauthn/webauthn/webauthn/doc.go
generated
vendored
Normal file
50
vendor/github.com/go-webauthn/webauthn/webauthn/doc.go
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
// Package webauthn contains the API functionality of the library. After creating and configuring a webauthn object,
|
||||||
|
// users can call the object to create and validate web authentication credentials.
|
||||||
|
//
|
||||||
|
// This documentation section highlights key functions within the library which are recommended and often have
|
||||||
|
// examples attached. Functions which are discouraged due to their lack of functionality are expressly not documented
|
||||||
|
// here, and you're on your own with these functions. Generally speaking, if the function is not documented here, it is
|
||||||
|
// either used by another function documented here, and it hides one of the arguments or return values, or it is lower
|
||||||
|
// level logic only intended for advanced use cases.
|
||||||
|
//
|
||||||
|
// The [New] function is a key function in creating a new instance of a WebAuthn Relying Party which is required to
|
||||||
|
// perform most actions.
|
||||||
|
//
|
||||||
|
// To start the credential creation ceremony, the [WebAuthn.BeginMediatedRegistration] or [WebAuthn.BeginRegistration]
|
||||||
|
// functions are used which returns [*SessionData] and a [*protocol.CredentialCreation] struct which can be easily
|
||||||
|
// serialized as JSON for the frontend library/logic. The [*SessionData] must be saved in a way which allows the
|
||||||
|
// implementer to restore it later. This [*SessionData] should be safely anchored to a user agent without allowing the
|
||||||
|
// user agent to modify the contents (i.e. opaque session cookie).
|
||||||
|
//
|
||||||
|
// To finish the credential creation ceremony, the [WebAuthn.FinishRegistration] function can be used. This function
|
||||||
|
// requires a [*http.Request] and performs all the necessary and requested validations. If you have other requirements,
|
||||||
|
// you can use [protocol.ParseCredentialCreationResponseBody] or [protocol.ParseCredentialCreationResponseBytes] which
|
||||||
|
// require an [io.Reader] or byte array respectively, then use [WebAuthn.CreateCredential] to
|
||||||
|
// perform validations against the [*protocol.ParsedCredentialCreationData] and saved [*SessionData] and finalize the
|
||||||
|
// process. For complete customizability, just produce the [*protocol.ParsedCredentialCreationData] with a custom parser
|
||||||
|
// and provide it to [WebAuthn.CreateCredential].
|
||||||
|
//
|
||||||
|
// To start a Passkey login ceremony, the [WebAuthn.BeginDiscoverableMediatedLogin] or [WebAuthn.BeginDiscoverableLogin]
|
||||||
|
// functions are used which returns [*SessionData] and a [*protocol.CredentialAssertion] struct which can easily be
|
||||||
|
// serialized as JSON for the frontend library/logic. The [*SessionData] should be safely handled as previously described.
|
||||||
|
//
|
||||||
|
// To finish a Passkey login ceremony, the [WebAuthn.FinishPasskeyLogin] function can be used. This function requires a
|
||||||
|
// [*http.Request] and performs all the necessary validations. If you have other requirements, you can use the
|
||||||
|
// [protocol.ParseCredentialRequestResponseBody] or [protocol.ParseCredentialRequestResponseBytes] which require an
|
||||||
|
// [io.Reader] or byte array respectively, then use [WebAuthn.ValidatePasskeyLogin] to perform validations against the
|
||||||
|
// [*protocol.ParsedCredentialAssertionData] and saved [*SessionData] and finalize the process. For complete customizability,
|
||||||
|
// just produce the [protocol.ParsedCredentialAssertionData] with a custom parser and provide it to
|
||||||
|
// [WebAuthn.ValidatePasskeyLogin].
|
||||||
|
//
|
||||||
|
// To start a Multi-Factor login ceremony, the [WebAuthn.BeginMediatedLogin] or [WebAuthn.BeginLogin]
|
||||||
|
// functions are used which returns [SessionData] and a [*protocol.CredentialAssertion] struct which can easily be
|
||||||
|
// serialized as JSON for the frontend library/logic. The [*SessionData] should be safely handled as previously described.
|
||||||
|
//
|
||||||
|
// To finish a Multi-Factor login ceremony, the [WebAuthn.FinishLogin] function can be used. This function requires a
|
||||||
|
// [*http.Request] and performs all the necessary validations. If you have other requirements, you can use the
|
||||||
|
// [protocol.ParseCredentialRequestResponseBody] or [protocol.ParseCredentialRequestResponseBytes] which require an
|
||||||
|
// [io.Reader] or byte array respectively, then use [WebAuthn.ValidateLogin] to perform validations against the
|
||||||
|
// [*protocol.ParsedCredentialAssertionData] and saved [*SessionData] and finalize the process. For complete customizability,
|
||||||
|
// just produce the [protocol.ParsedCredentialAssertionData] with a custom parser and provide it to
|
||||||
|
// [WebAuthn.ValidateLogin].
|
||||||
|
package webauthn
|
||||||
384
vendor/github.com/go-webauthn/webauthn/webauthn/login.go
generated
vendored
Normal file
384
vendor/github.com/go-webauthn/webauthn/webauthn/login.go
generated
vendored
Normal file
@@ -0,0 +1,384 @@
|
|||||||
|
package webauthn
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
|
"github.com/go-webauthn/webauthn/protocol"
|
||||||
|
)
|
||||||
|
|
||||||
|
// LoginOption is used to provide parameters that modify the default [Credential] Assertion Payload that is sent to the
// user. Options are applied in order inside beginLogin after the defaults are set.
type LoginOption func(*protocol.PublicKeyCredentialRequestOptions)
|
||||||
|
|
||||||
|
// DiscoverableUserHandler returns a [User] given the provided rawID and userHandle, allowing the library to look up
// the account that owns a discoverable credential during a usernameless login.
type DiscoverableUserHandler func(rawID, userHandle []byte) (user User, err error)
|
||||||
|
|
||||||
|
// BeginLogin creates the [*protocol.CredentialAssertion] data payload that should be sent to the user agent for beginning
// the login/assertion process. This function is used to perform a login when the identity of the user is known such as
// multifactor authentications, to specify a conditional mediation requirement use [WebAuthn.BeginMediatedLogin], to
// perform a login when the identity of the user is not known see [WebAuthn.BeginDiscoverableLogin] and
// [WebAuthn.BeginDiscoverableMediatedLogin] instead. The format of this data can be seen in §5.5 of the WebAuthn
// specification. These default values can be amended by providing additional [LoginOption] parameters. This function
// also returns [SessionData], that must be stored by the RP in a secure manner and then provided to the
// [WebAuthn.FinishLogin] function. This data helps us verify the ownership of the credential being retrieved.
//
// It is a thin wrapper around [WebAuthn.BeginMediatedLogin] with [protocol.MediationDefault].
//
// Specification: §5.5. Options for Assertion Generation (https://www.w3.org/TR/webauthn/#dictionary-assertion-options)
func (webauthn *WebAuthn) BeginLogin(user User, opts ...LoginOption) (*protocol.CredentialAssertion, *SessionData, error) {
	return webauthn.BeginMediatedLogin(user, protocol.MediationDefault, opts...)
}
|
||||||
|
|
||||||
|
// BeginDiscoverableLogin creates the [*protocol.CredentialAssertion] data payload that should be sent to the user agent
// for beginning the login/assertion process. This function is used to perform a client-side discoverable login when the
// identity of the user is not known such as passwordless or usernameless authentication, to specify a conditional
// mediation requirement use [WebAuthn.BeginDiscoverableMediatedLogin], to perform logins where the identity of the user
// is known such as multifactor authentication see [WebAuthn.BeginLogin] and [WebAuthn.BeginMediatedLogin] instead.
// The format of this data can be seen in §5.5 of the WebAuthn specification. These default values can be amended by
// providing additional [LoginOption] parameters. This function also returns [SessionData], that
// must be stored by the RP in a secure manner and then provided to the [WebAuthn.FinishLogin] function. This data helps
// us verify the ownership of the credential being retrieved.
//
// The session carries no user ID and no allowed credential list; the user is resolved later via a handler.
//
// Specification: §5.5. Options for Assertion Generation (https://www.w3.org/TR/webauthn/#dictionary-assertion-options)
func (webauthn *WebAuthn) BeginDiscoverableLogin(opts ...LoginOption) (*protocol.CredentialAssertion, *SessionData, error) {
	return webauthn.beginLogin(nil, nil, protocol.MediationDefault, opts...)
}
|
||||||
|
|
||||||
|
// BeginMediatedLogin is similar to [WebAuthn.BeginLogin] however it also allows specifying a credential mediation
|
||||||
|
// requirement.
|
||||||
|
func (webauthn *WebAuthn) BeginMediatedLogin(user User, mediation protocol.CredentialMediationRequirement, opts ...LoginOption) (*protocol.CredentialAssertion, *SessionData, error) {
|
||||||
|
credentials := user.WebAuthnCredentials()
|
||||||
|
|
||||||
|
if len(credentials) == 0 { // If the user does not have any credentials, we cannot perform an assertion.
|
||||||
|
return nil, nil, protocol.ErrBadRequest.WithDetails("Found no credentials for user")
|
||||||
|
}
|
||||||
|
|
||||||
|
var allowedCredentials = make([]protocol.CredentialDescriptor, len(credentials))
|
||||||
|
|
||||||
|
for i, credential := range credentials {
|
||||||
|
allowedCredentials[i] = credential.Descriptor()
|
||||||
|
}
|
||||||
|
|
||||||
|
return webauthn.beginLogin(user.WebAuthnID(), allowedCredentials, mediation, opts...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// BeginDiscoverableMediatedLogin is similar to [WebAuthn.BeginDiscoverableLogin] however it also allows specifying a
// credential mediation requirement.
func (webauthn *WebAuthn) BeginDiscoverableMediatedLogin(mediation protocol.CredentialMediationRequirement, opts ...LoginOption) (*protocol.CredentialAssertion, *SessionData, error) {
	return webauthn.beginLogin(nil, nil, mediation, opts...)
}
|
||||||
|
|
||||||
|
// beginLogin is the shared implementation behind all Begin*Login variants. It
// validates the configuration, builds the assertion options (applying the
// configured defaults, then the caller's LoginOption functions), ensures a
// valid challenge and RP ID, applies the appropriate timeout, and produces the
// SessionData the RP must persist for the Finish/Validate step. A nil userID
// and nil allowedCredentials produce a discoverable (usernameless) assertion.
func (webauthn *WebAuthn) beginLogin(userID []byte, allowedCredentials []protocol.CredentialDescriptor, mediation protocol.CredentialMediationRequirement, opts ...LoginOption) (assertion *protocol.CredentialAssertion, session *SessionData, err error) {
	if err = webauthn.Config.validate(); err != nil {
		return nil, nil, fmt.Errorf(errFmtConfigValidate, err)
	}

	assertion = &protocol.CredentialAssertion{
		Response: protocol.PublicKeyCredentialRequestOptions{
			RelyingPartyID:     webauthn.Config.RPID,
			UserVerification:   webauthn.Config.AuthenticatorSelection.UserVerification,
			AllowedCredentials: allowedCredentials,
		},
		Mediation: mediation,
	}

	// Options may override any of the defaults above, including the challenge.
	for _, opt := range opts {
		opt(&assertion.Response)
	}

	// Generate a challenge only if no option supplied one.
	if len(assertion.Response.Challenge) == 0 {
		var challenge protocol.URLEncodedBase64
		if challenge, err = protocol.CreateChallenge(); err != nil {
			return nil, nil, err
		}

		assertion.Response.Challenge = challenge
	}

	// The spec requires challenges of at least 16 bytes; this also guards
	// against an option having set a too-short challenge.
	if len(assertion.Response.Challenge) < 16 {
		return nil, nil, fmt.Errorf("error generating assertion: the challenge must be at least 16 bytes")
	}

	if len(assertion.Response.RelyingPartyID) == 0 {
		return nil, nil, fmt.Errorf("error generating assertion: the relying party id must be provided via the configuration or a functional option for a login")
	} else if err = protocol.ValidateRPID(assertion.Response.RelyingPartyID); err != nil {
		return nil, nil, fmt.Errorf("error generating assertion: the relying party id failed to validate as it's not a valid domain string with error: %w", err)
	}

	// Apply the configured timeout unless an option already set one. The UVD
	// timeout applies when user verification is discouraged.
	if assertion.Response.Timeout == 0 {
		switch assertion.Response.UserVerification {
		case protocol.VerificationDiscouraged:
			assertion.Response.Timeout = int(webauthn.Config.Timeouts.Login.TimeoutUVD.Milliseconds())
		default:
			assertion.Response.Timeout = int(webauthn.Config.Timeouts.Login.Timeout.Milliseconds())
		}
	}

	// The session mirrors the assertion so the Finish step can verify that the
	// response matches what was requested.
	session = &SessionData{
		Challenge:            assertion.Response.Challenge.String(),
		RelyingPartyID:       assertion.Response.RelyingPartyID,
		UserID:               userID,
		AllowedCredentialIDs: assertion.Response.GetAllowedCredentialIDs(),
		UserVerification:     assertion.Response.UserVerification,
		Extensions:           assertion.Response.Extensions,
	}

	if webauthn.Config.Timeouts.Login.Enforce {
		session.Expires = time.Now().Add(time.Millisecond * time.Duration(assertion.Response.Timeout))
	}

	return assertion, session, nil
}
|
||||||
|
|
||||||
|
// FinishLogin takes the response from the client and validates it against the user credentials and stored session data.
|
||||||
|
//
|
||||||
|
// As with all Finish functions, this function requires a [*http.Request] but you can perform the same steps with the
|
||||||
|
// [protocol.ParseCredentialRequestResponseBody] or [protocol.ParseCredentialRequestResponseBytes] which require an
|
||||||
|
// [io.Reader] or byte array respectively, you can also use an arbitrary [*protocol.ParsedCredentialAssertionData] which is
|
||||||
|
// returned from all of these functions i.e. by implementing a custom parser. The [*SessionData],
|
||||||
|
// and [*protocol.ParsedCredentialAssertionData] can then be used with the [WebAuthn.ValidateLogin] function.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] provided does not contain
|
||||||
|
// a [Credential] with the same ID byte array provided all [Credential]'s in the [SessionData] exist in the [User]'s
|
||||||
|
// [Credential] list.
|
||||||
|
func (webauthn *WebAuthn) FinishLogin(user User, session SessionData, response *http.Request) (credential *Credential, err error) {
|
||||||
|
var parsedResponse *protocol.ParsedCredentialAssertionData
|
||||||
|
|
||||||
|
if parsedResponse, err = protocol.ParseCredentialRequestResponse(response); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return webauthn.ValidateLogin(user, session, parsedResponse)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FinishDiscoverableLogin takes the response from the client and validates it against the handler and stored session data.
|
||||||
|
// The handler helps to find out which user must be used to validate the response. This is a function defined in your
|
||||||
|
// business code that will retrieve the user from your persistent data.
|
||||||
|
//
|
||||||
|
// As with all Finish functions, this function requires a [*http.Request] but you can perform the same steps with the
|
||||||
|
// [protocol.ParseCredentialRequestResponseBody] or [protocol.ParseCredentialRequestResponseBytes] which require an
|
||||||
|
// [io.Reader] or byte array respectively, you can also use an arbitrary [*protocol.ParsedCredentialAssertionData] which is
|
||||||
|
// returned from all of these functions i.e. by implementing a custom parser. The [DiscoverableUserHandler], [*SessionData],
|
||||||
|
// and [*protocol.ParsedCredentialAssertionData] can then be used with the [WebAuthn.ValidatePasskeyLogin] function.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] returned by the
|
||||||
|
// handler does not contain a [Credential] with the same ID byte array provided all [Credential]'s
|
||||||
|
// in the [SessionData] exist in the [User]'s [Credential] list.
|
||||||
|
func (webauthn *WebAuthn) FinishDiscoverableLogin(handler DiscoverableUserHandler, session SessionData, response *http.Request) (credential *Credential, err error) {
|
||||||
|
var parsedResponse *protocol.ParsedCredentialAssertionData
|
||||||
|
|
||||||
|
if parsedResponse, err = protocol.ParseCredentialRequestResponse(response); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return webauthn.ValidateDiscoverableLogin(handler, session, parsedResponse)
|
||||||
|
}
|
||||||
|
|
||||||
|
// FinishPasskeyLogin takes the response from the client and validate it against the handler and stored session data.
|
||||||
|
// The handler helps to find out which user must be used to validate the response. This is a function defined in your
|
||||||
|
// business code that will retrieve the user from your persistent data.
|
||||||
|
//
|
||||||
|
// As with all Finish functions this function requires a [*http.Request] but you can perform the same steps with the
|
||||||
|
// [protocol.ParseCredentialRequestResponseBody] or [protocol.ParseCredentialRequestResponseBytes] which require an
|
||||||
|
// io.Reader or byte array respectively, you can also use an arbitrary [*protocol.ParsedCredentialAssertionData] which is
|
||||||
|
// returned from all of these functions i.e. by implementing a custom parser. The [DiscoverableUserHandler], [*SessionData],
|
||||||
|
// and [*protocol.ParsedCredentialAssertionData] can then be used with the [WebAuthn.ValidatePasskeyLogin] function.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] returned by the
|
||||||
|
// handler does not contain a [Credential] with the same ID byte array provided all [Credential]'s
|
||||||
|
// in the [SessionData] exist in the [User]'s [Credential] list.
|
||||||
|
func (webauthn *WebAuthn) FinishPasskeyLogin(handler DiscoverableUserHandler, session SessionData, response *http.Request) (user User, credential *Credential, err error) {
|
||||||
|
var parsedResponse *protocol.ParsedCredentialAssertionData
|
||||||
|
|
||||||
|
if parsedResponse, err = protocol.ParseCredentialRequestResponse(response); err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return webauthn.ValidatePasskeyLogin(handler, session, parsedResponse)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateLogin takes a parsed response and validates it against the user credentials and session data.
|
||||||
|
//
|
||||||
|
// If you wish to skip performing the step required to parse the *protocol.ParsedCredentialAssertionData and
|
||||||
|
// you're using net/http then you can use [WebAuthn.FinishLogin] instead.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] provided does not contain
|
||||||
|
// a [Credential] with the same ID byte array provided all [Credential]'s in the [SessionData] exist in
|
||||||
|
// the [User]'s [Credential] list.
|
||||||
|
func (webauthn *WebAuthn) ValidateLogin(user User, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (credential *Credential, err error) {
|
||||||
|
if !bytes.Equal(user.WebAuthnID(), session.UserID) {
|
||||||
|
return nil, protocol.ErrBadRequest.WithDetails("ID mismatch for User and Session")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !session.Expires.IsZero() && session.Expires.Before(time.Now()) {
|
||||||
|
return nil, protocol.ErrBadRequest.WithDetails("Session has Expired")
|
||||||
|
}
|
||||||
|
|
||||||
|
return webauthn.validateLogin(user, session, parsedResponse)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidateDiscoverableLogin is similar to [WebAuthn.ValidateLogin] that allows for discoverable credentials. It's
|
||||||
|
// recommended that [WebAuthn.ValidatePasskeyLogin] is used instead.
|
||||||
|
//
|
||||||
|
// If you wish to skip performing the step required to parse the [*protocol.ParsedCredentialAssertionData] and
|
||||||
|
// you're using net/http then you can use [WebAuthn.FinishDiscoverableLogin] instead.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] returned by the
|
||||||
|
// handler does not contain a [Credential] with the same ID byte array provided all [Credential]'s
|
||||||
|
// in the [SessionData] exist in the [User]'s [Credential] list.
|
||||||
|
//
|
||||||
|
// Note: this is just a backwards compatibility layer over [WebAuthn.ValidatePasskeyLogin] which returns more information.
|
||||||
|
func (webauthn *WebAuthn) ValidateDiscoverableLogin(handler DiscoverableUserHandler, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (credential *Credential, err error) {
|
||||||
|
_, credential, err = webauthn.ValidatePasskeyLogin(handler, session, parsedResponse)
|
||||||
|
|
||||||
|
return credential, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// ValidatePasskeyLogin is similar to [WebAuthn.ValidateLogin] that allows for discoverable credentials.
|
||||||
|
//
|
||||||
|
// If you wish to skip performing the step required to parse the [*protocol.ParsedCredentialAssertionData] and
|
||||||
|
// you're using net/http then you can use [WebAuthn.FinishPasskeyLogin] instead.
|
||||||
|
//
|
||||||
|
// This function will return the [protocol.ErrorUnknownCredential] error type when the [User] returned by the
|
||||||
|
// handler does not contain a [Credential] with the same ID byte array provided all [Credential]'s
|
||||||
|
// in the [SessionData] exist in the [User]'s [Credential] list.
|
||||||
|
func (webauthn *WebAuthn) ValidatePasskeyLogin(handler DiscoverableUserHandler, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (user User, credential *Credential, err error) {
|
||||||
|
if len(session.UserID) != 0 {
|
||||||
|
return nil, nil, protocol.ErrBadRequest.WithDetails("Session was not initiated as a client-side discoverable login")
|
||||||
|
}
|
||||||
|
|
||||||
|
if !session.Expires.IsZero() && session.Expires.Before(time.Now()) {
|
||||||
|
return nil, nil, protocol.ErrBadRequest.WithDetails("Session has Expired")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(parsedResponse.Response.UserHandle) == 0 {
|
||||||
|
return nil, nil, protocol.ErrBadRequest.WithDetails("Client-side Discoverable Assertion was attempted with a blank User Handle")
|
||||||
|
}
|
||||||
|
|
||||||
|
if user, err = handler(parsedResponse.RawID, parsedResponse.Response.UserHandle); err != nil {
|
||||||
|
return nil, nil, protocol.ErrBadRequest.WithDetails(fmt.Sprintf("Failed to lookup Client-side Discoverable Credential: %s", err)).WithError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if credential, err = webauthn.validateLogin(user, session, parsedResponse); err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return user, credential, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// validateLogin takes a parsed response and validates it against the user credentials and session data.
//
// It implements the core of the WebAuthn authentication ceremony verification
// (steps 1 through 17), returning a copy of the matched [Credential] with its
// signature counter and flags refreshed from the response. The caller is
// responsible for persisting the returned credential. Note the returned value
// is a copy taken while ranging over user.WebAuthnCredentials(); the user's
// stored credential is not mutated here.
func (webauthn *WebAuthn) validateLogin(user User, session SessionData, parsedResponse *protocol.ParsedCredentialAssertionData) (*Credential, error) {
	// Step 1. If the allowCredentials option was given when this authentication ceremony was initiated,
	// verify that credential.id identifies one of the public key credentials that were listed in
	// allowCredentials.

	// NON-NORMATIVE Prior Step: Verify that the allowCredentials for the session are owned by the user provided.
	credentials := user.WebAuthnCredentials()

	if len(session.AllowedCredentialIDs) > 0 {
		// Every credential in the session allow-list must belong to this user.
		if !isCredentialsAllowedMatchingOwned(session.AllowedCredentialIDs, credentials) {
			return nil, protocol.ErrBadRequest.WithDetails("User does not own all credentials from the allowed credential list")
		}

		// The responding credential must be one the user owns. This is the
		// typed error callers can match with errors.As to detect a stale or
		// foreign credential ID.
		if !isCredentialIDInCredentials(parsedResponse.RawID, credentials) {
			return nil, &protocol.ErrorUnknownCredential{Err: protocol.ErrBadRequest.WithDetails("The credential ID provided is not owned by the user")}
		}

		// The responding credential must also be in the session's allow-list.
		if !isByteArrayInSlice(parsedResponse.RawID, session.AllowedCredentialIDs...) {
			return nil, protocol.ErrBadRequest.WithDetails("The credential ID provided is not in the sessions allowed credential list")
		}
	}

	// Step 2. If credential.response.userHandle is present, verify that the user identified by this value is
	// the owner of the public key credential identified by credential.id.

	// This is in part handled by our Step 1.

	userHandle := parsedResponse.Response.UserHandle
	if len(userHandle) > 0 {
		if !bytes.Equal(userHandle, user.WebAuthnID()) {
			return nil, protocol.ErrBadRequest.WithDetails("User handle and User ID do not match")
		}
	}

	var (
		found      bool
		credential Credential
	)

	// Step 3. Using credential’s id attribute (or the corresponding rawId, if base64url encoding is inappropriate
	// for your use case), look up the corresponding credential public key.
	// Note: `credential` retains the last-visited element after the loop; `found`
	// distinguishes a genuine match from loop exhaustion.
	for _, credential = range credentials {
		if bytes.Equal(credential.ID, parsedResponse.RawID) {
			found = true

			break
		}

		found = false
	}

	if !found {
		return nil, protocol.ErrBadRequest.WithDetails("Unable to find the credential for the returned credential ID")
	}

	var (
		appID string
		err   error
	)

	// Ensure authenticators with a bad status are not used.
	// Only performed when a Metadata Service is configured.
	if webauthn.Config.MDS != nil {
		var aaguid uuid.UUID

		// An absent AAGUID is treated as the nil UUID rather than an error.
		if len(credential.Authenticator.AAGUID) == 0 {
			aaguid = uuid.Nil
		} else if aaguid, err = uuid.FromBytes(credential.Authenticator.AAGUID); err != nil {
			return nil, protocol.ErrBadRequest.WithDetails("Failed to decode AAGUID").WithInfo(fmt.Sprintf("Error occurred decoding AAGUID from the credential record: %s", err)).WithError(err)
		}

		if e := protocol.ValidateMetadata(context.Background(), webauthn.Config.MDS, aaguid, "", credential.AttestationType, nil); e != nil {
			return nil, protocol.ErrBadRequest.WithDetails("Failed to validate credential record metadata").WithInfo(e.DevInfo).WithError(e)
		}
	}

	// User verification is only enforced when the session demanded it;
	// user presence is always required for an assertion.
	shouldVerifyUser := session.UserVerification == protocol.VerificationRequired
	shouldVerifyUserPresence := true

	rpID := webauthn.Config.RPID
	rpOrigins := webauthn.Config.RPOrigins
	rpTopOrigins := webauthn.Config.RPTopOrigins

	// Resolve the FIDO AppID extension (legacy U2F compatibility) from the
	// session extensions and the credential's attestation type.
	if appID, err = parsedResponse.GetAppID(session.Extensions, credential.AttestationType); err != nil {
		return nil, err
	}

	// Handle steps 4 through 16.
	if err = parsedResponse.Verify(session.Challenge, rpID, rpOrigins, rpTopOrigins, webauthn.Config.RPTopOriginVerificationMode, appID, shouldVerifyUser, shouldVerifyUserPresence, credential.PublicKey); err != nil {
		return nil, err
	}

	// Check if the BackupEligible flag has changed.
	// BE is set at registration time and must never change for a credential.
	if credential.Flags.BackupEligible != parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible() {
		return nil, protocol.ErrBadRequest.WithDetails("Backup Eligible flag inconsistency detected during login validation")
	}

	// Check for the invalid combination BE=0 and BS=1.
	// A credential cannot be backed up if it was never eligible for backup.
	if !parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible() && parsedResponse.Response.AuthenticatorData.Flags.HasBackupState() {
		return nil, protocol.ErrBadRequest.WithDetails("Backup State Flag is true but Backup Eligible flag is false which is invalid")
	}

	// Handle step 17.
	// Update the stored signature counter (clone-detection signal).
	credential.Authenticator.UpdateCounter(parsedResponse.Response.AuthenticatorData.Counter)

	// Update flags from response data.
	credential.Flags.UserPresent = parsedResponse.Response.AuthenticatorData.Flags.HasUserPresent()
	credential.Flags.UserVerified = parsedResponse.Response.AuthenticatorData.Flags.HasUserVerified()
	credential.Flags.BackupEligible = parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible()
	credential.Flags.BackupState = parsedResponse.Response.AuthenticatorData.Flags.HasBackupState()

	return &credential, nil
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user