Compare commits


26 Commits

SHA1 Message Date
3d9625b40b Fix calls to die.With. 2025-11-15 16:10:14 -08:00
547a0d8f32 disable linting until cleanups are finished 2025-11-15 16:00:58 -08:00
876a0a2c2b fileutil: linter fixes. 2025-11-15 15:58:51 -08:00
a37d28e3d7 die: linter feedback fixes. 2025-11-15 15:55:17 -08:00
ddf26e00af dbg: linter feedback updates. 2025-11-15 15:53:57 -08:00
e4db163efe Cleaning up. 2025-11-15 15:48:18 -08:00
571443c282 config: apply linting feedback. 2025-11-15 15:47:29 -08:00
aba5e519a4 First round of linter cleanups. 2025-11-15 15:11:07 -08:00
5fcba0e814 Trying a different config. 2025-11-15 13:34:18 -08:00
928c643d8d Fix linter config. 2025-11-15 13:16:30 -08:00
fd9f9f6d66 Fix linting. 2025-11-15 13:08:38 -08:00
a5b7727c8f Add linting stage. 2025-11-15 13:05:00 -08:00
3135c18d95 ignore goland directory 2025-11-15 01:53:40 -08:00
1d32a64dc0 add cert-revcheck 2025-11-14 22:56:10 -08:00
d70ca5ee87 adding golangci-lint 2025-11-14 22:54:06 -08:00
eca3a229a4 config: golangci-lint cleanups. 2025-11-14 22:53:02 -08:00
4c1eb03671 Cleaning up golangci-lint warnings. 2025-11-14 22:49:54 -08:00
f463eeed88 minor updates 2025-11-14 22:01:12 -08:00
289c9d2343 cmd: add cert-bundler
Builds certificate bundles.
2025-11-14 21:59:03 -08:00
e375963243 cmd: add tlsinfo. 2025-11-14 15:21:38 -08:00
31baa10b3b Update README. 2025-11-14 14:55:16 -08:00
0556c7c56d ca-signed: note self-signed certs. 2025-11-14 14:49:46 -08:00
83c95d9db8 cmd: add ca-signed tool.
Verify certificates are signed by a CA.
2025-11-14 14:48:34 -08:00
beccb551e2 add minmax 2025-04-10 01:17:11 -07:00
c761d98b82 additional debugging for basic constraints 2024-08-22 18:06:09 -07:00
e68d22337b cmd: add die roller 2024-06-14 20:27:00 -07:00
56 changed files with 2844 additions and 401 deletions


@@ -5,6 +5,30 @@ version: 2.1
# Define a job to be invoked later in a workflow.
# See: https://circleci.com/docs/2.0/configuration-reference/#jobs
jobs:
lint:
working_directory: ~/repo
docker:
- image: cimg/go:1.22.2
steps:
- checkout
- restore_cache:
keys:
- go-mod-v4-{{ checksum "go.sum" }}
- run:
name: Install Dependencies
command: go mod download
- save_cache:
key: go-mod-v4-{{ checksum "go.sum" }}
paths:
- "/go/pkg/mod"
- run:
name: Install golangci-lint
command: |
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin
- run:
name: Run golangci-lint
command: golangci-lint run --timeout=5m
testbuild:
working_directory: ~/repo
# Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub.
@@ -27,16 +51,17 @@ jobs:
- "/go/pkg/mod"
- run:
name: Run tests
command: go test ./...
command: go test -race ./...
- run:
name: Run build
command: go build ./...
- store_test_results:
path: /tmp/test-reports
# Invoke jobs via workflows
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
# Linting is disabled while cleanups are ongoing.
workflows:
testbuild:
jobs:
- testbuild
# - lint

.gitignore

@@ -1,4 +1 @@
bazel-bin
bazel-goutils
bazel-out
bazel-testlogs
.idea

.golangci.yml

@@ -0,0 +1,473 @@
# This file is licensed under the terms of the MIT license https://opensource.org/license/mit
# Copyright (c) 2021-2025 Marat Reymers
## Golden config for golangci-lint v2.6.2
#
# This is the best config for golangci-lint based on my experience and opinion.
# It is very strict, but not extremely strict.
# Feel free to adapt it to suit your needs.
# If this config helps you, please consider keeping a link to this file (see the next comment).
# Based on https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322
version: "2"
issues:
# Maximum count of issues with the same text.
# Set to 0 to disable.
# Default: 3
max-same-issues: 50
formatters:
enable:
- goimports # checks if the code and import statements are formatted according to the 'goimports' command
- golines # checks if code is formatted, and fixes long lines
## you may want to enable
#- gci # checks if code and import statements are formatted, with additional rules
#- gofmt # checks if the code is formatted according to 'gofmt' command
#- gofumpt # enforces a stricter format than 'gofmt', while being backwards compatible
#- swaggo # formats swaggo comments
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
goimports:
# A list of prefixes, which, if set, checks import paths
# with the given prefixes are grouped after 3rd-party packages.
# Default: []
local-prefixes:
- github.com/my/project
golines:
# Target maximum line length.
# Default: 100
max-len: 120
linters:
enable:
- asasalint # checks for pass []any as any in variadic func(...any)
- asciicheck # checks that your code does not contain non-ASCII identifiers
- bidichk # checks for dangerous unicode character sequences
- bodyclose # checks whether HTTP response body is closed successfully
- canonicalheader # checks whether net/http.Header uses canonical header
- copyloopvar # detects places where loop variables are copied (Go 1.22+)
- cyclop # checks function and package cyclomatic complexity
- depguard # checks if package imports are in a list of acceptable packages
- dupl # tool for code clone detection
- durationcheck # checks for two durations multiplied together
- errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases
- errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error
- errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13
- exhaustive # checks exhaustiveness of enum switch statements
- exptostd # detects functions from golang.org/x/exp/ that can be replaced by std functions
- fatcontext # detects nested contexts in loops
- forbidigo # forbids identifiers
- funcorder # checks the order of functions, methods, and constructors
- funlen # tool for detection of long functions
- gocheckcompilerdirectives # validates go compiler directive comments (//go:)
- gochecksumtype # checks exhaustiveness on Go "sum types"
- gocognit # computes and checks the cognitive complexity of functions
- goconst # finds repeated strings that could be replaced by a constant
- gocritic # provides diagnostics that check for bugs, performance and style issues
- gocyclo # computes and checks the cyclomatic complexity of functions
- godoclint # checks Golang's documentation practice
- godot # checks if comments end in a period
- gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod
- goprintffuncname # checks that printf-like functions are named with f at the end
- gosec # inspects source code for security problems
- govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
- iface # checks the incorrect use of interfaces, helping developers avoid interface pollution
- ineffassign # detects when assignments to existing variables are not used
- intrange # finds places where for loops could make use of an integer range
- iotamixing # checks if iotas are being used in const blocks with other non-iota declarations
- loggercheck # checks key value pairs for common logger libraries (kitlog,klog,logr,zap)
- makezero # finds slice declarations with non-zero initial length
- mirror # reports wrong mirror patterns of bytes/strings usage
- mnd # detects magic numbers
- modernize # suggests simplifications to Go code, using modern language and library features
- musttag # enforces field tags in (un)marshaled structs
- nakedret # finds naked returns in functions greater than a specified function length
- nestif # reports deeply nested if statements
- nilerr # finds the code that returns nil even if it checks that the error is not nil
- nilnesserr # reports that it checks for err != nil, but it returns a different nil value error (powered by nilness and nilerr)
- nilnil # checks that there is no simultaneous return of nil error and an invalid value
- noctx # finds sending http request without context.Context
- nolintlint # reports ill-formed or insufficient nolint directives
- nonamedreturns # reports all named returns
- nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL
- perfsprint # checks that fmt.Sprintf can be replaced with a faster alternative
- predeclared # finds code that shadows one of Go's predeclared identifiers
- promlinter # checks Prometheus metrics naming via promlint
- protogetter # reports direct reads from proto message fields when getters should be used
- reassign # checks that package variables are not reassigned
- recvcheck # checks for receiver type consistency
- revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint
- rowserrcheck # checks whether Err of rows is checked successfully
- sloglint # ensure consistent code style when using log/slog
- spancheck # checks for mistakes with OpenTelemetry/Census spans
- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed
- staticcheck # is a go vet on steroids, applying a ton of static analysis checks
- testableexamples # checks if examples are testable (have an expected output)
- testifylint # checks usage of github.com/stretchr/testify
- testpackage # makes you use a separate _test package
- tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes
- unconvert # removes unnecessary type conversions
- unparam # reports unused function parameters
- unqueryvet # detects SELECT * in SQL queries and SQL builders, encouraging explicit column selection
- unused # checks for unused constants, variables, functions and types
- usestdlibvars # detects the possibility to use variables/constants from the Go standard library
- usetesting # reports uses of functions with replacement inside the testing package
- wastedassign # finds wasted assignment statements
- whitespace # detects leading and trailing whitespace
## you may want to enable
#- arangolint # opinionated best practices for arangodb client
#- decorder # checks declaration order and count of types, constants, variables and functions
#- exhaustruct # [highly recommend to enable] checks if all structure fields are initialized
#- ginkgolinter # [if you use ginkgo/gomega] enforces standards of using ginkgo and gomega
#- godox # detects usage of FIXME, TODO and other keywords inside comments
#- goheader # checks is file header matches to pattern
#- inamedparam # [great idea, but too strict, need to ignore a lot of cases by default] reports interfaces with unnamed method parameters
#- interfacebloat # checks the number of methods inside an interface
#- ireturn # accept interfaces, return concrete types
#- noinlineerr # disallows inline error handling `if err := ...; err != nil {`
#- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated
#- tagalign # checks that struct tags are well aligned
#- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope
#- wrapcheck # checks that errors returned from external packages are wrapped
#- zerologlint # detects the wrong usage of zerolog that a user forgets to dispatch zerolog.Event
## disabled
#- containedctx # detects struct contained context.Context field
#- contextcheck # [too many false positives] checks the function whether use a non-inherited context
#- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
#- dupword # [useless without config] checks for duplicate words in the source code
#- err113 # [too strict] checks the errors handling expressions
#- errchkjson # [don't see profit + I'm against of omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted
#- forcetypeassert # [replaced by errcheck] finds forced type assertions
#- gomodguard # [use more powerful depguard] allow and block lists linter for direct Go module dependencies
#- gosmopolitan # reports certain i18n/l10n anti-patterns in your Go codebase
#- grouper # analyzes expression groups
#- importas # enforces consistent import aliases
#- lll # [replaced by golines] reports long lines
#- maintidx # measures the maintainability index of each function
#- misspell # [useless] finds commonly misspelled English words in comments
#- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity
#- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test
#- tagliatelle # checks the struct tags
#- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
#- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines
#- wsl_v5 # [too strict and mostly code is not more readable] add or remove empty lines
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
cyclop:
# The maximal code complexity to report.
# Default: 10
max-complexity: 30
# The maximal average package complexity.
# If it's higher than 0.0 (float) the check is enabled.
# Default: 0.0
package-average: 10.0
depguard:
# Rules to apply.
#
# Variables:
# - File Variables
# Use an exclamation mark `!` to negate a variable.
# Example: `!$test` matches any file that is not a go test file.
#
# `$all` - matches all go files
# `$test` - matches all go test files
#
# - Package Variables
#
# `$gostd` - matches all of go's standard library (Pulled from `GOROOT`)
#
# Default (applies if no custom rules are defined): Only allow $gostd in all files.
rules:
"deprecated":
# List of file globs that will match this list of settings to compare against.
# By default, if a path is relative, it is relative to the directory where the golangci-lint command is executed.
# The placeholder '${base-path}' is substituted with a path relative to the mode defined with `run.relative-path-mode`.
# The placeholder '${config-path}' is substituted with a path relative to the configuration file.
# Default: $all
files:
- "$all"
# List of packages that are not allowed.
# Entries can be a variable (starting with $), a string prefix, or an exact match (if ending with $).
# Default: []
deny:
- pkg: github.com/golang/protobuf
desc: Use google.golang.org/protobuf instead, see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules
- pkg: github.com/satori/go.uuid
desc: Use github.com/google/uuid instead, satori's package is not maintained
- pkg: github.com/gofrs/uuid$
desc: Use github.com/gofrs/uuid/v5 or later, it was not a go module before v5
"non-test files":
files:
- "!$test"
deny:
- pkg: math/rand$
desc: Use math/rand/v2 instead, see https://go.dev/blog/randv2
"non-main files":
files:
- "!**/main.go"
deny:
- pkg: log$
desc: Use log/slog instead, see https://go.dev/blog/slog
embeddedstructfieldcheck:
# Checks that sync.Mutex and sync.RWMutex are not used as embedded fields.
# Default: false
forbid-mutex: true
errcheck:
# Report about not checking of errors in type assertions: `a := b.(MyStruct)`.
# Such cases aren't reported by default.
# Default: false
check-type-assertions: true
exhaustive:
# Program elements to check for exhaustiveness.
# Default: [ switch ]
check:
- switch
- map
exhaustruct:
# List of regular expressions to match type names that should be excluded from processing.
# Anonymous structs can be matched by '<anonymous>' alias.
# Has precedence over `include`.
# Each regular expression must match the full type name, including package path.
# For example, to match type `net/http.Cookie` regular expression should be `.*/http\.Cookie`,
# but not `http\.Cookie`.
# Default: []
exclude:
# std libs
- ^net/http.Client$
- ^net/http.Cookie$
- ^net/http.Request$
- ^net/http.Response$
- ^net/http.Server$
- ^net/http.Transport$
- ^net/url.URL$
- ^os/exec.Cmd$
- ^reflect.StructField$
# public libs
- ^github.com/Shopify/sarama.Config$
- ^github.com/Shopify/sarama.ProducerMessage$
- ^github.com/mitchellh/mapstructure.DecoderConfig$
- ^github.com/prometheus/client_golang/.+Opts$
- ^github.com/spf13/cobra.Command$
- ^github.com/spf13/cobra.CompletionOptions$
- ^github.com/stretchr/testify/mock.Mock$
- ^github.com/testcontainers/testcontainers-go.+Request$
- ^github.com/testcontainers/testcontainers-go.FromDockerfile$
- ^golang.org/x/tools/go/analysis.Analyzer$
- ^google.golang.org/protobuf/.+Options$
- ^gopkg.in/yaml.v3.Node$
# Allows empty structures in return statements.
# Default: false
allow-empty-returns: true
funcorder:
# Checks if the exported methods of a structure are placed before the non-exported ones.
# Default: true
struct-method: false
funlen:
# Checks the number of lines in a function.
# If lower than 0, disable the check.
# Default: 60
lines: 100
# Checks the number of statements in a function.
# If lower than 0, disable the check.
# Default: 40
statements: 50
gochecksumtype:
# Presence of `default` case in switch statements satisfies exhaustiveness, if all members are not listed.
# Default: true
default-signifies-exhaustive: false
gocognit:
# Minimal code complexity to report.
# Default: 30 (but we recommend 10-20)
min-complexity: 20
gocritic:
# Settings passed to gocritic.
# The settings key is the name of a supported gocritic checker.
# The list of supported checkers can be found at https://go-critic.com/overview.
settings:
captLocal:
# Whether to restrict checker to params only.
# Default: true
paramsOnly: false
underef:
# Whether to skip (*x).method() calls where x is a pointer receiver.
# Default: true
skipRecvDeref: false
godoclint:
# List of rules to enable in addition to the default set.
# Default: empty
enable:
# Assert no unused link in godocs.
# https://github.com/godoc-lint/godoc-lint?tab=readme-ov-file#no-unused-link
- no-unused-link
govet:
# Enable all analyzers.
# Default: false
enable-all: true
# Disable analyzers by name.
# Run `GL_DEBUG=govet golangci-lint run --enable=govet` to see default, all available analyzers, and enabled analyzers.
# Default: []
disable:
- fieldalignment # too strict
# Settings per analyzer.
settings:
shadow:
# Whether to be strict about shadowing; can be noisy.
# Default: false
strict: true
inamedparam:
# Skips check for interface methods with only a single parameter.
# Default: false
skip-single-param: true
mnd:
# List of function patterns to exclude from analysis.
# Values always ignored: `time.Date`,
# `strconv.FormatInt`, `strconv.FormatUint`, `strconv.FormatFloat`,
# `strconv.ParseInt`, `strconv.ParseUint`, `strconv.ParseFloat`.
# Default: []
ignored-functions:
- args.Error
- flag.Arg
- flag.Duration.*
- flag.Float.*
- flag.Int.*
- flag.Uint.*
- os.Chmod
- os.Mkdir.*
- os.OpenFile
- os.WriteFile
- prometheus.ExponentialBuckets.*
- prometheus.LinearBuckets
nakedret:
# Make an issue if func has more lines of code than this setting, and it has naked returns.
# Default: 30
max-func-lines: 0
nolintlint:
# Exclude the following linters from requiring an explanation.
# Default: []
allow-no-explanation: [ funlen, gocognit, golines ]
# Enable to require an explanation of nonzero length after each nolint directive.
# Default: false
require-explanation: true
# Enable to require nolint directives to mention the specific linter being suppressed.
# Default: false
require-specific: true
perfsprint:
# Optimizes into strings concatenation.
# Default: true
strconcat: false
reassign:
# Patterns for global variable names that are checked for reassignment.
# See https://github.com/curioswitch/go-reassign#usage
# Default: ["EOF", "Err.*"]
patterns:
- ".*"
rowserrcheck:
# database/sql is always checked.
# Default: []
packages:
- github.com/jmoiron/sqlx
sloglint:
# Enforce not using global loggers.
# Values:
# - "": disabled
# - "all": report all global loggers
# - "default": report only the default slog logger
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#no-global
# Default: ""
no-global: all
# Enforce using methods that accept a context.
# Values:
# - "": disabled
# - "all": report all contextless calls
# - "scope": report only if a context exists in the scope of the outermost function
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#context-only
# Default: ""
context: scope
staticcheck:
# SAxxxx checks in https://staticcheck.dev/docs/configuration/options/#checks
# Example (to disable some checks): [ "all", "-SA1000", "-SA1001"]
# Default: ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"]
checks:
- all
# Incorrect or missing package comment.
# https://staticcheck.dev/docs/checks/#ST1000
- -ST1000
# Use consistent method receiver names.
# https://staticcheck.dev/docs/checks/#ST1016
- -ST1016
# Omit embedded fields from selector expression.
# https://staticcheck.dev/docs/checks/#QF1008
- -QF1008
usetesting:
# Enable/disable `os.TempDir()` detections.
# Default: false
os-temp-dir: true
exclusions:
# Log a warning if an exclusion rule is unused.
# Default: false
warn-unused: true
# Predefined exclusion rules.
# Default: []
presets:
- std-error-handling
- common-false-positives
rules:
- path: 'ahash/ahash.go'
linters: [ staticcheck, gosec ]
- path: 'backoff/backoff_test.go'
linters: [ testpackage ]
- path: 'dbg/dbg_test.go'
linters: [ testpackage ]
- source: 'TODO'
linters: [ godot ]
- text: 'should have a package comment'
linters: [ revive ]
- text: 'exported \S+ \S+ should have comment( \(or a comment on this block\))? or be unexported'
linters: [ revive ]
- text: 'package comment should be of the form ".+"'
source: '// ?(nolint|TODO)'
linters: [ revive ]
- text: 'comment on exported \S+ \S+ should be of the form ".+"'
source: '// ?(nolint|TODO)'
linters: [ revive, staticcheck ]
- path: '_test\.go'
linters:
- bodyclose
- dupl
- errcheck
- funlen
- goconst
- gosec
- noctx
- wrapcheck
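
With nolintlint configured as above (require-specific and require-explanation both enabled), an in-source suppression has to name the linter and justify itself, except for the linters listed in allow-no-explanation. A minimal sketch of a compliant directive; the function and the gosec finding are purely illustrative:

package example

import "os/exec"

// RunTool demonstrates the directive format this config accepts: the specific
// linter is named after the colon, and an explanation follows a second "//".
func RunTool(tool string) ([]byte, error) {
	//nolint:gosec // G204: tool comes from a fixed internal allowlist, not user input
	return exec.Command(tool).Output()
}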


@@ -1,26 +0,0 @@
arch:
- amd64
- ppc64le
sudo: false
language: go
go:
- tip
- 1.9
jobs:
exclude:
- go: 1.9
arch: amd64
- go: 1.9
arch: ppc64le
script:
- go get golang.org/x/lint/golint
- go get golang.org/x/tools/cmd/cover
- go get github.com/kisom/goutils/...
- go test -cover github.com/kisom/goutils/...
- golint github.com/kisom/goutils/...
notifications:
email:
recipients:
- coder@kyleisom.net
on_success: change
on_failure: change


@@ -4,8 +4,8 @@
package ahash
import (
"crypto/md5"
"crypto/sha1"
"crypto/md5" // #nosec G505
"crypto/sha1" // #nosec G501
"crypto/sha256"
"crypto/sha512"
"errors"
@@ -17,34 +17,15 @@ import (
"io"
"sort"
"git.wntrmute.dev/kyle/goutils/assert"
"golang.org/x/crypto/blake2b"
"golang.org/x/crypto/blake2s"
"golang.org/x/crypto/md4"
"golang.org/x/crypto/ripemd160"
"golang.org/x/crypto/md4" // #nosec G506
"golang.org/x/crypto/ripemd160" // #nosec G507
"golang.org/x/crypto/sha3"
"git.wntrmute.dev/kyle/goutils/assert"
)
func sha224Slicer(bs []byte) []byte {
sum := sha256.Sum224(bs)
return sum[:]
}
func sha256Slicer(bs []byte) []byte {
sum := sha256.Sum256(bs)
return sum[:]
}
func sha384Slicer(bs []byte) []byte {
sum := sha512.Sum384(bs)
return sum[:]
}
func sha512Slicer(bs []byte) []byte {
sum := sha512.Sum512(bs)
return sum[:]
}
// Hash represents a generic hash function that may or may not be secure. It
// satisfies the hash.Hash interface.
type Hash struct {
@@ -247,17 +228,17 @@ func init() {
// HashList returns a sorted list of all the hash algorithms supported by the
// package.
func HashList() []string {
return hashList[:]
return hashList
}
// SecureHashList returns a sorted list of all the secure (cryptographic) hash
// algorithms supported by the package.
func SecureHashList() []string {
return secureHashList[:]
return secureHashList
}
// InsecureHashList returns a sorted list of all the insecure hash algorithms
// supported by the package.
func InsecureHashList() []string {
return insecureHashList[:]
return insecureHashList
}


@@ -1,16 +1,18 @@
package ahash
package ahash_test
import (
"bytes"
"encoding/hex"
"fmt"
"testing"
"git.wntrmute.dev/kyle/goutils/ahash"
"git.wntrmute.dev/kyle/goutils/assert"
)
func TestSecureHash(t *testing.T) {
algo := "sha256"
h, err := New(algo)
h, err := ahash.New(algo)
assert.NoErrorT(t, err)
assert.BoolT(t, h.IsSecure(), algo+" should be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -19,28 +21,28 @@ func TestSecureHash(t *testing.T) {
var data []byte
var expected = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
sum, err := Sum(algo, data)
sum, err := ahash.Sum(algo, data)
assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello, world")
buf := bytes.NewBuffer(data)
expected = "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
sum, err = SumReader(algo, buf)
sum, err = ahash.SumReader(algo, buf)
assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello world")
_, err = h.Write(data)
assert.NoErrorT(t, err)
unExpected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
sum = h.Sum(nil)
assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
assert.BoolT(t, hex.EncodeToString(sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
}
func TestInsecureHash(t *testing.T) {
algo := "md5"
h, err := New(algo)
h, err := ahash.New(algo)
assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -49,28 +51,28 @@ func TestInsecureHash(t *testing.T) {
var data []byte
var expected = "d41d8cd98f00b204e9800998ecf8427e"
sum, err := Sum(algo, data)
sum, err := ahash.Sum(algo, data)
assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello, world")
buf := bytes.NewBuffer(data)
expected = "e4d7f1b4ed2e42d15898f4b27b019da4"
sum, err = SumReader(algo, buf)
sum, err = ahash.SumReader(algo, buf)
assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello world")
_, err = h.Write(data)
assert.NoErrorT(t, err)
unExpected := "e4d7f1b4ed2e42d15898f4b27b019da4"
sum = h.Sum(nil)
assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
assert.BoolT(t, hex.EncodeToString(sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
}
func TestHash32(t *testing.T) {
algo := "crc32-ieee"
h, err := New(algo)
h, err := ahash.New(algo)
assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -102,7 +104,7 @@ func TestHash32(t *testing.T) {
func TestHash64(t *testing.T) {
algo := "crc64"
h, err := New(algo)
h, err := ahash.New(algo)
assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -133,9 +135,9 @@ func TestHash64(t *testing.T) {
}
func TestListLengthSanity(t *testing.T) {
all := HashList()
secure := SecureHashList()
insecure := InsecureHashList()
all := ahash.HashList()
secure := ahash.SecureHashList()
insecure := ahash.InsecureHashList()
assert.BoolT(t, len(all) == len(secure)+len(insecure))
}
@@ -146,11 +148,11 @@ func TestSumLimitedReader(t *testing.T) {
extendedData := bytes.NewBufferString("hello, world! this is an extended message")
expected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
hash, err := SumReader("sha256", data)
hash, err := ahash.SumReader("sha256", data)
assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", hash) == expected, fmt.Sprintf("have hash %x, want %s", hash, expected))
assert.BoolT(t, hex.EncodeToString(hash) == expected, fmt.Sprintf("have hash %x, want %s", hash, expected))
extendedHash, err := SumLimitedReader("sha256", extendedData, int64(dataLen))
extendedHash, err := ahash.SumLimitedReader("sha256", extendedData, int64(dataLen))
assert.NoErrorT(t, err)
assert.BoolT(t, bytes.Equal(hash, extendedHash), fmt.Sprintf("have hash %x, want %x", extendedHash, hash))


@@ -9,6 +9,7 @@
package assert
import (
"errors"
"fmt"
"os"
"runtime"
@@ -16,11 +17,13 @@ import (
"testing"
)
const callerSkip = 2
// NoDebug can be set to true to cause all asserts to be ignored.
var NoDebug bool
func die(what string, a ...string) {
_, file, line, ok := runtime.Caller(2)
_, file, line, ok := runtime.Caller(callerSkip)
if !ok {
panic(what)
}
@@ -31,7 +34,8 @@ func die(what string, a ...string) {
s = ": " + s
}
panic(what + s)
} else {
}
fmt.Fprintf(os.Stderr, "%s", what)
if len(a) > 0 {
s := strings.Join(a, ", ")
@@ -44,16 +48,17 @@ func die(what string, a ...string) {
os.Exit(1)
}
}
// Bool asserts that cond is false.
//
// For example, this would replace
//
// if x < 0 {
// log.Fatal("x is subzero")
// }
//
// The same assertion would be
//
// assert.Bool(x, "x is subzero")
func Bool(cond bool, s ...string) {
if NoDebug {
@@ -68,6 +73,7 @@ func Bool(cond bool, s ...string) {
// Error asserts that err is not nil, e.g. that an error has occurred.
//
// For example,
//
// if err == nil {
// log.Fatal("call to <something> should have failed")
// }
@@ -100,7 +106,7 @@ func NoError(err error, s ...string) {
// ErrorEq asserts that the actual error is the expected error.
func ErrorEq(expected, actual error) {
if NoDebug || (expected == actual) {
if NoDebug || (errors.Is(expected, actual)) {
return
}
@@ -155,7 +161,7 @@ func NoErrorT(t *testing.T, err error) {
// ErrorEqT compares a pair of errors, calling Fatal on it if they
// don't match.
func ErrorEqT(t *testing.T, expected, actual error) {
if NoDebug || (expected == actual) {
if NoDebug || (errors.Is(expected, actual)) {
return
}


@@ -10,29 +10,21 @@
// backoff is configured with a maximum duration that will not be
// exceeded.
//
// The `New` function will attempt to use the system's cryptographic
// random number generator to seed a Go math/rand random number
// source. If this fails, the package will panic on startup.
// This package uses math/rand/v2 for jitter, which is automatically
// seeded from a cryptographically secure source.
package backoff
import (
"crypto/rand"
"encoding/binary"
"io"
"math"
mrand "math/rand"
"sync"
"math/rand/v2"
"time"
)
var prngMu sync.Mutex
var prng *mrand.Rand
// DefaultInterval is used when a Backoff is initialised with a
// zero-value Interval.
var DefaultInterval = 5 * time.Minute
// DefaultMaxDuration is maximum amount of time that the backoff will
// DefaultMaxDuration is the maximum amount of time that the backoff will
// delay for.
var DefaultMaxDuration = 6 * time.Hour
@@ -50,10 +42,9 @@ type Backoff struct {
// interval controls the time step for backing off.
interval time.Duration
// noJitter controls whether to use the "Full Jitter"
// improvement to attempt to smooth out spikes in a high
// contention scenario. If noJitter is set to true, no
// jitter will be introduced.
// noJitter controls whether to use the "Full Jitter" improvement to attempt
// to smooth out spikes in a high-contention scenario. If noJitter is set to
// true, no jitter will be introduced.
noJitter bool
// decay controls the decay of n. If it is non-zero, n is
@@ -65,17 +56,17 @@ type Backoff struct {
lastTry time.Time
}
// New creates a new backoff with the specified max duration and
// New creates a new backoff with the specified maximum duration and
// interval. Zero values may be used to use the default values.
//
// Panics if either max or interval is negative.
func New(max time.Duration, interval time.Duration) *Backoff {
if max < 0 || interval < 0 {
panic("backoff: max or interval is negative")
// Panics if either dMax or interval is negative.
func New(dMax time.Duration, interval time.Duration) *Backoff {
if dMax < 0 || interval < 0 {
panic("backoff: dMax or interval is negative")
}
b := &Backoff{
maxDuration: max,
maxDuration: dMax,
interval: interval,
}
b.setup()
@@ -84,27 +75,12 @@ func New(max time.Duration, interval time.Duration) *Backoff {
// NewWithoutJitter works similarly to New, except that the created
// Backoff will not use jitter.
func NewWithoutJitter(max time.Duration, interval time.Duration) *Backoff {
b := New(max, interval)
func NewWithoutJitter(dMax time.Duration, interval time.Duration) *Backoff {
b := New(dMax, interval)
b.noJitter = true
return b
}
func init() {
var buf [8]byte
var n int64
_, err := io.ReadFull(rand.Reader, buf[:])
if err != nil {
panic(err.Error())
}
n = int64(binary.LittleEndian.Uint64(buf[:]))
src := mrand.NewSource(n)
prng = mrand.New(src)
}
func (b *Backoff) setup() {
if b.interval == 0 {
b.interval = DefaultInterval
@@ -122,35 +98,44 @@ func (b *Backoff) Duration() time.Duration {
b.decayN()
t := b.duration(b.n)
d := b.duration(b.n)
if b.n < math.MaxUint64 {
b.n++
}
if !b.noJitter {
prngMu.Lock()
t = time.Duration(prng.Int63n(int64(t)))
prngMu.Unlock()
d = time.Duration(rand.Int64N(int64(d))) // #nosec G404
}
return t
return d
}
const maxN uint64 = 63
// requires b to be locked.
func (b *Backoff) duration(n uint64) (t time.Duration) {
// Saturate pow
pow := time.Duration(math.MaxInt64)
if n < 63 {
pow = 1 << n
func (b *Backoff) duration(n uint64) time.Duration {
// Use left shift on the underlying integer representation to avoid
// multiplying time.Duration by time.Duration (which is semantically
// incorrect and flagged by linters).
if n >= maxN {
// Saturate when n would overflow a 64-bit shift or exceed maxDuration.
return b.maxDuration
}
t = b.interval * pow
if t/pow != b.interval || t > b.maxDuration {
t = b.maxDuration
// Calculate 2^n * interval using a shift. Detect overflow by checking
// for sign change or monotonicity loss and clamp to maxDuration.
shifted := b.interval << n
if shifted < 0 || shifted < b.interval {
// Overflow occurred during the shift; clamp to maxDuration.
return b.maxDuration
}
return
if shifted > b.maxDuration {
return b.maxDuration
}
return shifted
}
// Reset resets the attempt counter of a backoff.
@@ -174,7 +159,7 @@ func (b *Backoff) SetDecay(decay time.Duration) {
b.decay = decay
}
// requires b to be locked
// requires b to be locked.
func (b *Backoff) decayN() {
if b.decay == 0 {
return
@@ -186,7 +171,9 @@ func (b *Backoff) decayN() {
}
lastDuration := b.duration(b.n - 1)
decayed := time.Since(b.lastTry) > lastDuration+b.decay
// Reset when the elapsed time is at least the previous backoff plus decay.
// Using ">=" avoids boundary flakiness in tests and real usage.
decayed := time.Since(b.lastTry) >= lastDuration+b.decay
b.lastTry = time.Now()
if !decayed {
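
The rewritten package keeps the New / NewWithoutJitter / Duration / Reset surface; a minimal usage sketch, assuming the import path git.wntrmute.dev/kyle/goutils/backoff (it follows the module path used elsewhere in this change):

package main

import (
	"fmt"
	"time"

	"git.wntrmute.dev/kyle/goutils/backoff"
)

func main() {
	// Zero values would fall back to DefaultMaxDuration (6h) and DefaultInterval (5m);
	// here the delay is capped at 30s with a 1s base interval.
	b := backoff.New(30*time.Second, time.Second)
	for attempt := 1; attempt <= 5; attempt++ {
		d := b.Duration() // jittered and clamped to the configured maximum
		fmt.Printf("attempt %d: backing off %s\n", attempt, d)
		time.Sleep(d)
	}
	b.Reset() // start over once a try succeeds
}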


@@ -9,7 +9,7 @@ import (
// If given New with 0's and no jitter, ensure that certain invariants are met:
//
// - the default max duration and interval should be used
// - the default maximum duration and interval should be used
// - noJitter should be true
// - the RNG should not be initialised
// - the first duration should be equal to the default interval
@@ -17,7 +17,11 @@ func TestDefaults(t *testing.T) {
b := NewWithoutJitter(0, 0)
if b.maxDuration != DefaultMaxDuration {
t.Fatalf("expected new backoff to use the default max duration (%s), but have %s", DefaultMaxDuration, b.maxDuration)
t.Fatalf(
"expected new backoff to use the default maxDuration duration (%s), but have %s",
DefaultMaxDuration,
b.maxDuration,
)
}
if b.interval != DefaultInterval {
@@ -44,11 +48,11 @@ func TestSetup(t *testing.T) {
}
}
// Ensure that tries incremenets as expected.
// Ensure that tries increments as expected.
func TestTries(t *testing.T) {
b := NewWithoutJitter(5, 1)
for i := uint64(0); i < 3; i++ {
for i := range uint64(3) {
if b.n != i {
t.Fatalf("want tries=%d, have tries=%d", i, b.n)
}
@@ -73,7 +77,7 @@ func TestTries(t *testing.T) {
func TestReset(t *testing.T) {
const iter = 10
b := New(1000, 1)
for i := 0; i < iter; i++ {
for range iter {
_ = b.Duration()
}
@@ -88,17 +92,17 @@ func TestReset(t *testing.T) {
}
const decay = 5 * time.Millisecond
const max = 10 * time.Millisecond
const maxDuration = 10 * time.Millisecond
const interval = time.Millisecond
func TestDecay(t *testing.T) {
const iter = 10
b := NewWithoutJitter(max, 1)
b := NewWithoutJitter(maxDuration, 1)
b.SetDecay(decay)
var backoff time.Duration
for i := 0; i < iter; i++ {
for range iter {
backoff = b.Duration()
}
@@ -127,7 +131,7 @@ func TestDecaySaturation(t *testing.T) {
b.SetDecay(decay)
var duration time.Duration
for i := 0; i <= 2; i++ {
for range 3 {
duration = b.Duration()
}
@@ -145,7 +149,7 @@ func TestDecaySaturation(t *testing.T) {
}
func ExampleBackoff_SetDecay() {
b := NewWithoutJitter(max, interval)
b := NewWithoutJitter(maxDuration, interval)
b.SetDecay(decay)
// try 0

certlib/hosts/hosts.go

@@ -0,0 +1,82 @@
package hosts
import (
"errors"
"fmt"
"net"
"net/url"
"strconv"
"strings"
)
type Target struct {
Host string
Port int
}
func (t *Target) String() string {
return fmt.Sprintf("%s:%d", t.Host, t.Port)
}
func parseURL(host string) (string, int, error) {
url, err := url.Parse(host)
if err != nil {
return "", 0, fmt.Errorf("certlib/hosts: invalid host: %s", host)
}
if strings.ToLower(url.Scheme) != "https" {
return "", 0, errors.New("certlib/hosts: only https scheme supported")
}
if url.Port() == "" {
return url.Hostname(), 443, nil
}
port, err := strconv.ParseInt(url.Port(), 10, 16)
if err != nil {
return "", 0, fmt.Errorf("certlib/hosts: invalid port: %s", url.Port())
}
return url.Hostname(), int(port), nil
}
func parseHostPort(host string) (string, int, error) {
host, sport, err := net.SplitHostPort(host)
if err == nil {
port, err := strconv.ParseInt(sport, 10, 16)
if err != nil {
return "", 0, fmt.Errorf("certlib/hosts: invalid port: %s", sport)
}
return host, int(port), nil
}
return host, 443, nil
}
func ParseHost(host string) (*Target, error) {
host, port, err := parseURL(host)
if err == nil {
return &Target{Host: host, Port: port}, nil
}
host, port, err = parseHostPort(host)
if err == nil {
return &Target{Host: host, Port: port}, nil
}
return nil, fmt.Errorf("certlib/hosts: invalid host: %s", host)
}
func ParseHosts(hosts ...string) ([]*Target, error) {
targets := make([]*Target, 0, len(hosts))
for _, host := range hosts {
target, err := ParseHost(host)
if err != nil {
return nil, err
}
targets = append(targets, target)
}
return targets, nil
}
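
ParseHost accepts either an https URL or a bare host[:port] and defaults the port to 443; a short usage sketch, assuming the import path git.wntrmute.dev/kyle/goutils/certlib/hosts (matching the module layout):

package main

import (
	"fmt"
	"log"

	"git.wntrmute.dev/kyle/goutils/certlib/hosts"
)

func main() {
	targets, err := hosts.ParseHosts("https://example.net", "example.org:8443", "example.com")
	if err != nil {
		log.Fatal(err)
	}
	for _, t := range targets {
		fmt.Println(t) // prints host:port, e.g. example.net:443
	}
}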


@@ -158,59 +158,87 @@ type EncryptedContentInfo struct {
EncryptedContent []byte `asn1:"tag:0,optional"`
}
func unmarshalInit(raw []byte) (init initPKCS7, err error) {
_, err = asn1.Unmarshal(raw, &init)
if err != nil {
return initPKCS7{}, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
return init, nil
}
func populateData(msg *PKCS7, content asn1.RawValue) error {
msg.ContentInfo = "Data"
_, err := asn1.Unmarshal(content.Bytes, &msg.Content.Data)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
return nil
}
func populateSignedData(msg *PKCS7, contentBytes []byte) error {
msg.ContentInfo = "SignedData"
var sd signedData
if _, err := asn1.Unmarshal(contentBytes, &sd); err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if len(sd.Certificates.Bytes) != 0 {
certs, err := x509.ParseCertificates(sd.Certificates.Bytes)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
msg.Content.SignedData.Certificates = certs
}
if len(sd.Crls.Bytes) != 0 {
crl, err := x509.ParseRevocationList(sd.Crls.Bytes)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
msg.Content.SignedData.Crl = crl
}
msg.Content.SignedData.Version = sd.Version
msg.Content.SignedData.Raw = contentBytes
return nil
}
func populateEncryptedData(msg *PKCS7, contentBytes []byte) error {
msg.ContentInfo = "EncryptedData"
var ed EncryptedData
if _, err := asn1.Unmarshal(contentBytes, &ed); err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if ed.Version != 0 {
return certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS #7 encryptedData version 0 is supported"))
}
msg.Content.EncryptedData = ed
return nil
}
// ParsePKCS7 attempts to parse the DER encoded bytes of a
// PKCS7 structure.
func ParsePKCS7(raw []byte) (msg *PKCS7, err error) {
var pkcs7 initPKCS7
_, err = asn1.Unmarshal(raw, &pkcs7)
pkcs7, err := unmarshalInit(raw)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
return nil, err
}
msg = new(PKCS7)
msg.Raw = pkcs7.Raw
msg.ContentInfo = pkcs7.ContentType.String()
switch {
case msg.ContentInfo == ObjIDData:
msg.ContentInfo = "Data"
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &msg.Content.Data)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
case msg.ContentInfo == ObjIDSignedData:
msg.ContentInfo = "SignedData"
var signedData signedData
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &signedData)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if len(signedData.Certificates.Bytes) != 0 {
msg.Content.SignedData.Certificates, err = x509.ParseCertificates(signedData.Certificates.Bytes)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
}
if len(signedData.Crls.Bytes) != 0 {
msg.Content.SignedData.Crl, err = x509.ParseRevocationList(signedData.Crls.Bytes)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
}
msg.Content.SignedData.Version = signedData.Version
msg.Content.SignedData.Raw = pkcs7.Content.Bytes
case msg.ContentInfo == ObjIDEncryptedData:
msg.ContentInfo = "EncryptedData"
var encryptedData EncryptedData
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &encryptedData)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if encryptedData.Version != 0 {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS #7 encryptedData version 0 is supported"))
}
msg.Content.EncryptedData = encryptedData
switch msg.ContentInfo {
case ObjIDData:
if err := populateData(msg, pkcs7.Content); err != nil {
return nil, err
}
case ObjIDSignedData:
if err := populateSignedData(msg, pkcs7.Content.Bytes); err != nil {
return nil, err
}
case ObjIDEncryptedData:
if err := populateEncryptedData(msg, pkcs7.Content.Bytes); err != nil {
return nil, err
}
default:
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS# 7 content of type data, signed data or encrypted data can be parsed"))
}


@@ -222,11 +222,11 @@ func VerifyCertificateError(cert *x509.Certificate) (revoked, ok bool, err error
if !time.Now().Before(cert.NotAfter) {
msg := fmt.Sprintf("Certificate expired %s\n", cert.NotAfter)
log.Info(msg)
return true, true, fmt.Errorf(msg)
return true, true, errors.New(msg)
} else if !time.Now().After(cert.NotBefore) {
msg := fmt.Sprintf("Certificate isn't valid until %s\n", cert.NotBefore)
log.Info(msg)
return true, true, fmt.Errorf(msg)
return true, true, errors.New(msg)
}
return revCheck(cert)
}

cmd/ca-signed/README.txt

@@ -0,0 +1,40 @@
ca-signed: verify certificates against a CA
-------------------------------------------
Description
ca-signed verifies whether one or more certificates are signed by a given
Certificate Authority (CA). It prints a concise status per input certificate
along with the certificate's expiration date when validation succeeds.
Usage
ca-signed CA.pem cert1.pem [cert2.pem ...]
- CA.pem: A file containing one or more CA certificates in PEM, DER, or PKCS#7/PKCS#12 formats.
- certN.pem: A file containing the end-entity (leaf) certificate to verify. If the file contains a chain,
the first certificate is treated as the leaf and the remaining ones are used as intermediates.
Output format
For each input certificate file, one line is printed:
<filename>: OK (expires YYYY-MM-DD)
<filename>: INVALID
Special self-test mode
ca-signed selftest
Runs a built-in test suite using embedded certificates. This mode requires no
external files or network access. The program exits with code 0 if all tests
pass, or a non-zero exit code if any test fails. Example output lines include
whether validation succeeds and the leaf's expiration when applicable.
Examples
# Verify a server certificate against a root CA
ca-signed isrg-root-x1.pem le-e7.pem
# Run the embedded self-test suite
ca-signed selftest
Notes
- The tool attempts to parse certificates in PEM first, then falls back to
DER/PKCS#7/PKCS#12 (with an empty password) where applicable.
- Expiration is shown for the leaf certificate only.
- In selftest mode, test certificates are compiled into the binary using go:embed.

cmd/ca-signed/main.go

@@ -0,0 +1,287 @@
package main
import (
"crypto/x509"
"embed"
"fmt"
"os"
"path/filepath"
"time"
"git.wntrmute.dev/kyle/goutils/certlib"
)
// loadCertsFromFile attempts to parse certificates from a file that may be in
// PEM or DER/PKCS#7 format. Returns the parsed certificates or an error.
func loadCertsFromFile(path string) ([]*x509.Certificate, error) {
data, err := os.ReadFile(path)
if err != nil {
return nil, err
}
// Try PEM first
if certs, err := certlib.ParseCertificatesPEM(data); err == nil {
return certs, nil
}
// Try DER/PKCS7/PKCS12 (with no password)
if certs, _, err := certlib.ParseCertificatesDER(data, ""); err == nil {
return certs, nil
} else {
return nil, err
}
}
func makePoolFromFile(path string) (*x509.CertPool, error) {
// Try PEM via helper (it builds a pool)
if pool, err := certlib.LoadPEMCertPool(path); err == nil && pool != nil {
return pool, nil
}
// Fallback: read as DER(s), add to a new pool
certs, err := loadCertsFromFile(path)
if err != nil || len(certs) == 0 {
return nil, fmt.Errorf("failed to load CA certificates from %s", path)
}
pool := x509.NewCertPool()
for _, c := range certs {
pool.AddCert(c)
}
return pool, nil
}
//go:embed testdata/*.pem
var embeddedTestdata embed.FS
// loadCertsFromBytes attempts to parse certificates from bytes that may be in
// PEM or DER/PKCS#7 format.
func loadCertsFromBytes(data []byte) ([]*x509.Certificate, error) {
// Try PEM first
if certs, err := certlib.ParseCertificatesPEM(data); err == nil {
return certs, nil
}
// Try DER/PKCS7/PKCS12 (with no password)
if certs, _, err := certlib.ParseCertificatesDER(data, ""); err == nil {
return certs, nil
} else {
return nil, err
}
}
func makePoolFromBytes(data []byte) (*x509.CertPool, error) {
certs, err := loadCertsFromBytes(data)
if err != nil || len(certs) == 0 {
return nil, fmt.Errorf("failed to load CA certificates from embedded bytes")
}
pool := x509.NewCertPool()
for _, c := range certs {
pool.AddCert(c)
}
return pool, nil
}
// isSelfSigned returns true if the given certificate is self-signed.
// It checks that the subject and issuer match and that the certificate's
// signature verifies against its own public key.
func isSelfSigned(cert *x509.Certificate) bool {
if cert == nil {
return false
}
// Quick check: subject and issuer match
if cert.Subject.String() != cert.Issuer.String() {
return false
}
// Cryptographic check: the certificate is signed by itself
if err := cert.CheckSignatureFrom(cert); err != nil {
return false
}
return true
}
func verifyAgainstCA(caPool *x509.CertPool, path string) (ok bool, expiry string) {
certs, err := loadCertsFromFile(path)
if err != nil || len(certs) == 0 {
return false, ""
}
leaf := certs[0]
ints := x509.NewCertPool()
if len(certs) > 1 {
for _, ic := range certs[1:] {
ints.AddCert(ic)
}
}
opts := x509.VerifyOptions{
Roots: caPool,
Intermediates: ints,
KeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageAny},
}
if _, err := leaf.Verify(opts); err != nil {
return false, ""
}
return true, leaf.NotAfter.Format("2006-01-02")
}
func verifyAgainstCABytes(caPool *x509.CertPool, certData []byte) (ok bool, expiry string) {
certs, err := loadCertsFromBytes(certData)
if err != nil || len(certs) == 0 {
return false, ""
}
leaf := certs[0]
ints := x509.NewCertPool()
if len(certs) > 1 {
for _, ic := range certs[1:] {
ints.AddCert(ic)
}
}
opts := x509.VerifyOptions{
Roots: caPool,
Intermediates: ints,
KeyUsages: []x509.ExtKeyUsage{x509.ExtKeyUsageAny},
}
if _, err := leaf.Verify(opts); err != nil {
return false, ""
}
return true, leaf.NotAfter.Format("2006-01-02")
}
// selftest runs built-in validation using embedded certificates.
func selftest() int {
type testCase struct {
name string
caFile string
certFile string
expectOK bool
}
cases := []testCase{
{name: "ISRG Root X1 validates LE E7", caFile: "testdata/isrg-root-x1.pem", certFile: "testdata/le-e7.pem", expectOK: true},
{name: "ISRG Root X1 does NOT validate Google WR2", caFile: "testdata/isrg-root-x1.pem", certFile: "testdata/goog-wr2.pem", expectOK: false},
{name: "GTS R1 validates Google WR2", caFile: "testdata/gts-r1.pem", certFile: "testdata/goog-wr2.pem", expectOK: true},
{name: "GTS R1 does NOT validate LE E7", caFile: "testdata/gts-r1.pem", certFile: "testdata/le-e7.pem", expectOK: false},
}
failures := 0
for _, tc := range cases {
caBytes, err := embeddedTestdata.ReadFile(tc.caFile)
if err != nil {
fmt.Fprintf(os.Stderr, "selftest: failed to read embedded %s: %v\n", tc.caFile, err)
failures++
continue
}
certBytes, err := embeddedTestdata.ReadFile(tc.certFile)
if err != nil {
fmt.Fprintf(os.Stderr, "selftest: failed to read embedded %s: %v\n", tc.certFile, err)
failures++
continue
}
pool, err := makePoolFromBytes(caBytes)
if err != nil || pool == nil {
fmt.Fprintf(os.Stderr, "selftest: failed to build CA pool for %s: %v\n", tc.caFile, err)
failures++
continue
}
ok, exp := verifyAgainstCABytes(pool, certBytes)
if ok != tc.expectOK {
fmt.Printf("%s: unexpected result: got %v, want %v\n", tc.name, ok, tc.expectOK)
failures++
} else {
if ok {
fmt.Printf("%s: OK (expires %s)\n", tc.name, exp)
} else {
fmt.Printf("%s: INVALID (as expected)\n", tc.name)
}
}
}
// Verify that both embedded root CAs are detected as self-signed
roots := []string{"testdata/gts-r1.pem", "testdata/isrg-root-x1.pem"}
for _, root := range roots {
b, err := embeddedTestdata.ReadFile(root)
if err != nil {
fmt.Fprintf(os.Stderr, "selftest: failed to read embedded %s: %v\n", root, err)
failures++
continue
}
certs, err := loadCertsFromBytes(b)
if err != nil || len(certs) == 0 {
fmt.Fprintf(os.Stderr, "selftest: failed to parse cert(s) from %s: %v\n", root, err)
failures++
continue
}
leaf := certs[0]
if isSelfSigned(leaf) {
fmt.Printf("%s: SELF-SIGNED (as expected)\n", root)
} else {
fmt.Printf("%s: expected SELF-SIGNED, but was not detected as such\n", root)
failures++
}
}
if failures == 0 {
fmt.Println("selftest: PASS")
return 0
}
fmt.Fprintf(os.Stderr, "selftest: FAIL (%d failure(s))\n", failures)
return 1
}
func main() {
// Special selftest mode: single argument "selftest"
if len(os.Args) == 2 && os.Args[1] == "selftest" {
os.Exit(selftest())
}
if len(os.Args) < 3 {
prog := filepath.Base(os.Args[0])
fmt.Fprintf(os.Stderr, "Usage:\n %s ca.pem cert1.pem cert2.pem ...\n %s selftest\n", prog, prog)
os.Exit(2)
}
caPath := os.Args[1]
caPool, err := makePoolFromFile(caPath)
if err != nil || caPool == nil {
fmt.Fprintf(os.Stderr, "failed to load CA certificate(s): %v\n", err)
os.Exit(1)
}
for _, certPath := range os.Args[2:] {
ok, exp := verifyAgainstCA(caPool, certPath)
name := filepath.Base(certPath)
// Load the leaf once for self-signed detection and potential expiry fallback
var leaf *x509.Certificate
if certs, err := loadCertsFromFile(certPath); err == nil && len(certs) > 0 {
leaf = certs[0]
}
// If the certificate is self-signed, prefer the SELF-SIGNED label
if isSelfSigned(leaf) {
fmt.Printf("%s: SELF-SIGNED\n", name)
continue
}
if ok {
// Display with the requested format
// Example: file: OK (expires 2031-01-01)
// Ensure deterministic date formatting
// Note: no timezone displayed; date only as per example
// If exp ended up empty for some reason, recompute safely
if exp == "" {
if leaf != nil {
exp = leaf.NotAfter.Format("2006-01-02")
} else {
// fallback to the current date to avoid empty; though shouldn't happen
exp = time.Now().Format("2006-01-02")
}
}
fmt.Printf("%s: OK (expires %s)\n", name, exp)
} else {
fmt.Printf("%s: INVALID\n", name)
}
}
}

cmd/ca-signed/testdata/goog-wr2.pem

@@ -0,0 +1,29 @@
-----BEGIN CERTIFICATE-----
MIIFCzCCAvOgAwIBAgIQf/AFoHxM3tEArZ1mpRB7mDANBgkqhkiG9w0BAQsFADBH
MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMjMxMjEzMDkwMDAwWhcNMjkwMjIw
MTQwMDAwWjA7MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVR29vZ2xlIFRydXN0IFNl
cnZpY2VzMQwwCgYDVQQDEwNXUjIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQCp/5x/RR5wqFOfytnlDd5GV1d9vI+aWqxG8YSau5HbyfsvAfuSCQAWXqAc
+MGr+XgvSszYhaLYWTwO0xj7sfUkDSbutltkdnwUxy96zqhMt/TZCPzfhyM1IKji
aeKMTj+xWfpgoh6zySBTGYLKNlNtYE3pAJH8do1cCA8Kwtzxc2vFE24KT3rC8gIc
LrRjg9ox9i11MLL7q8Ju26nADrn5Z9TDJVd06wW06Y613ijNzHoU5HEDy01hLmFX
xRmpC5iEGuh5KdmyjS//V2pm4M6rlagplmNwEmceOuHbsCFx13ye/aoXbv4r+zgX
FNFmp6+atXDMyGOBOozAKql2N87jAgMBAAGjgf4wgfswDgYDVR0PAQH/BAQDAgGG
MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/
AgEAMB0GA1UdDgQWBBTeGx7teRXUPjckwyG77DQ5bUKyMDAfBgNVHSMEGDAWgBTk
rysmcRorSCeFL1JmLO/wiRNxPjA0BggrBgEFBQcBAQQoMCYwJAYIKwYBBQUHMAKG
GGh0dHA6Ly9pLnBraS5nb29nL3IxLmNydDArBgNVHR8EJDAiMCCgHqAchhpodHRw
Oi8vYy5wa2kuZ29vZy9yL3IxLmNybDATBgNVHSAEDDAKMAgGBmeBDAECATANBgkq
hkiG9w0BAQsFAAOCAgEARXWL5R87RBOWGqtY8TXJbz3S0DNKhjO6V1FP7sQ02hYS
TL8Tnw3UVOlIecAwPJQl8hr0ujKUtjNyC4XuCRElNJThb0Lbgpt7fyqaqf9/qdLe
SiDLs/sDA7j4BwXaWZIvGEaYzq9yviQmsR4ATb0IrZNBRAq7x9UBhb+TV+PfdBJT
DhEl05vc3ssnbrPCuTNiOcLgNeFbpwkuGcuRKnZc8d/KI4RApW//mkHgte8y0YWu
ryUJ8GLFbsLIbjL9uNrizkqRSvOFVU6xddZIMy9vhNkSXJ/UcZhjJY1pXAprffJB
vei7j+Qi151lRehMCofa6WBmiA4fx+FOVsV2/7R6V2nyAiIJJkEd2nSi5SnzxJrl
Xdaqev3htytmOPvoKWa676ATL/hzfvDaQBEcXd2Ppvy+275W+DKcH0FBbX62xevG
iza3F4ydzxl6NJ8hk8R+dDXSqv1MbRT1ybB5W0k8878XSOjvmiYTDIfyc9acxVJr
Y/cykHipa+te1pOhv7wYPYtZ9orGBV5SGOJm4NrB3K1aJar0RfzxC3ikr7Dyc6Qw
qDTBU39CluVIQeuQRgwG3MuSxl7zRERDRilGoKb8uY45JzmxWuKxrfwT/478JuHU
/oTxUFqOl2stKnn7QGTq8z29W+GgBLCXSBxC9epaHM0myFH/FJlniXJfHeytWt0=
-----END CERTIFICATE-----

cmd/ca-signed/testdata/gts-r1.pem

@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
-----END CERTIFICATE-----

31
cmd/ca-signed/testdata/isrg-root-x1.pem vendored Normal file
View File

@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
-----END CERTIFICATE-----

26
cmd/ca-signed/testdata/le-e7.pem vendored Normal file
View File

@@ -0,0 +1,26 @@
-----BEGIN CERTIFICATE-----
MIIEVzCCAj+gAwIBAgIRAKp18eYrjwoiCWbTi7/UuqEwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMjQwMzEzMDAwMDAw
WhcNMjcwMzEyMjM1OTU5WjAyMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg
RW5jcnlwdDELMAkGA1UEAxMCRTcwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARB6AST
CFh/vjcwDMCgQer+VtqEkz7JANurZxLP+U9TCeioL6sp5Z8VRvRbYk4P1INBmbef
QHJFHCxcSjKmwtvGBWpl/9ra8HW0QDsUaJW2qOJqceJ0ZVFT3hbUHifBM/2jgfgw
gfUwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMCBggrBgEFBQcD
ATASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSuSJ7chx1EoG/aouVgdAR4
wpwAgDAfBgNVHSMEGDAWgBR5tFnme7bl5AFzgAiIyBpY9umbbjAyBggrBgEFBQcB
AQQmMCQwIgYIKwYBBQUHMAKGFmh0dHA6Ly94MS5pLmxlbmNyLm9yZy8wEwYDVR0g
BAwwCjAIBgZngQwBAgEwJwYDVR0fBCAwHjAcoBqgGIYWaHR0cDovL3gxLmMubGVu
Y3Iub3JnLzANBgkqhkiG9w0BAQsFAAOCAgEAjx66fDdLk5ywFn3CzA1w1qfylHUD
aEf0QZpXcJseddJGSfbUUOvbNR9N/QQ16K1lXl4VFyhmGXDT5Kdfcr0RvIIVrNxF
h4lqHtRRCP6RBRstqbZ2zURgqakn/Xip0iaQL0IdfHBZr396FgknniRYFckKORPG
yM3QKnd66gtMst8I5nkRQlAg/Jb+Gc3egIvuGKWboE1G89NTsN9LTDD3PLj0dUMr
OIuqVjLB8pEC6yk9enrlrqjXQgkLEYhXzq7dLafv5Vkig6Gl0nuuqjqfp0Q1bi1o
yVNAlXe6aUXw92CcghC9bNsKEO1+M52YY5+ofIXlS/SEQbvVYYBLZ5yeiglV6t3S
M6H+vTG0aP9YHzLn/KVOHzGQfXDP7qM5tkf+7diZe7o2fw6O7IvN6fsQXEQQj8TJ
UXJxv2/uJhcuy/tSDgXwHM8Uk34WNbRT7zGTGkQRX0gsbjAea/jYAoWv0ZvQRwpq
Pe79D/i7Cep8qWnA+7AE/3B3S/3dEEYmc0lpe1366A/6GEgk3ktr9PEoQrLChs6I
tu3wnNLB2euC8IKGLQFpGtOO/2/hiAKjyajaBP25w1jF0Wl8Bbqne3uZ2q1GyPFJ
YRmT7/OXpmOH/FVLtwS+8ng1cAmpCujPwteJZNcDG0sF2n/sc0+SQf49fdyUK0ty
+VUwFj9tmWxyR/M=
-----END CERTIFICATE-----

148
cmd/cert-bundler/README.txt Normal file
View File

@@ -0,0 +1,148 @@
cert-bundler: create certificate chain archives
------------------------------------------------
Description
cert-bundler creates archives of certificate chains from a YAML configuration
file. It validates certificates, checks expiration dates, and generates
archives in multiple formats (zip, tar.gz) with optional manifest files
containing SHA256 checksums.
Usage
cert-bundler [options]
Options:
-c <file> Path to YAML configuration file (default: bundle.yaml)
-o <dir> Output directory for archives (default: pkg)
YAML Configuration Format
The configuration file uses the following structure:
config:
hashes: <filename>
expiry: <duration>
chains:
<group_name>:
certs:
- root: <path>
intermediates:
- <path>
- <path>
- root: <path>
intermediates:
- <path>
outputs:
include_single: <bool>
include_individual: <bool>
manifest: <bool>
encoding: <encoding>
formats:
- <format>
- <format>
Configuration Fields
config:
hashes: (optional) Name of the file to write SHA256 checksums of all
generated archives. If omitted, no hash file is created.
expiry: (optional) Expiration warning threshold. Certificates expiring
within this period will trigger a warning. Supports formats like
"1y" (year), "6m" (month), "30d" (day). Default: 1y
chains:
Each key under "chains" defines a named certificate group. All certificates
in a group are bundled together into archives with the group name.
certs:
List of certificate chains. Each chain has:
root: Path to root CA certificate (PEM or DER format)
intermediates: List of paths to intermediate certificates
All intermediates are validated against their root CA. An error is
reported if signature verification fails.
outputs:
Defines output formats and content for the group's archives:
include_single: (bool) If true, all certificates in the group are
concatenated into a single file named "bundle.pem"
(or "bundle.crt" for DER encoding).
include_individual: (bool) If true, each certificate is included as
a separate file in the archive, named after the
original file (e.g., "int/cca2.pem" becomes
"cca2.pem").
manifest: (bool) If true, a MANIFEST file is included containing
SHA256 checksums of all files in the archive.
encoding: Specifies certificate encoding in the archive:
- "pem": PEM format with .pem extension (default)
- "der": DER format with .crt extension
- "both": Both PEM and DER versions are included
formats: List of archive formats to generate:
- "zip": Creates a .zip archive
- "tgz": Creates a .tar.gz archive
Output Files
For each group and format combination, an archive is created:
<group_name>.zip or <group_name>.tar.gz
If config.hashes is specified, a hash file is created in the output directory
containing SHA256 checksums of all generated archives.
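For illustration, each line of the hash file pairs a hex-encoded SHA256
digest with the base name of one archive (the digests below are
placeholders):
    <sha256 hex digest>  core_certs.zip
    <sha256 hex digest>  core_certs.tar.gz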
Example Configuration
config:
hashes: bundle.sha256
expiry: 1y
chains:
core_certs:
certs:
- root: roots/core-ca.pem
intermediates:
- int/cca1.pem
- int/cca2.pem
- int/cca3.pem
- root: roots/ssh-ca.pem
intermediates:
- ssh/ssh_dmz1.pem
- ssh/ssh_internal.pem
outputs:
include_single: true
include_individual: true
manifest: true
encoding: pem
formats:
- zip
- tgz
This configuration:
- Creates core_certs.zip and core_certs.tar.gz in the output directory
- Each archive contains bundle.pem (all certificates concatenated)
- Each archive contains individual certificates (core-ca.pem, cca1.pem, etc.)
- Each archive includes a MANIFEST file with SHA256 checksums
- Creates bundle.sha256 with checksums of the two archives
- Warns if any certificate expires within 1 year
Examples
# Create bundles using default configuration (bundle.yaml -> pkg/)
cert-bundler
# Use custom configuration and output directory
cert-bundler -c myconfig.yaml -o output
# Create bundles from testdata configuration
cert-bundler -c testdata/bundle.yaml -o testdata/pkg
Notes
- Certificate paths in the YAML are relative to the current working directory
- All intermediates must be properly signed by their specified root CA
- Certificates are checked for expiration; warnings are printed to stderr
- Expired certificates do not prevent archive creation but generate warnings
- Both PEM and DER certificate formats are supported as input
- Archive filenames use the group name, not individual chain names
- If both include_single and include_individual are true, archives contain both
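As a minimal sketch of consuming that hash file, the following standalone
program recomputes the SHA256 of each listed archive; the file name
bundle.sha256 and the "<digest> <archive>" layout are assumptions taken from
the example configuration above, not part of cert-bundler itself:

```go
// verify-hashes is an illustrative companion sketch, not part of cert-bundler.
package main

import (
	"bufio"
	"crypto/sha256"
	"fmt"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("bundle.sha256") // assumed name, taken from config.hashes
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		// Each line is "<hex digest> <archive name>".
		fields := strings.Fields(scanner.Text())
		if len(fields) != 2 {
			continue
		}
		data, err := os.ReadFile(fields[1])
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		sum := fmt.Sprintf("%x", sha256.Sum256(data))
		fmt.Printf("%s: match=%v\n", fields[1], sum == fields[0])
	}
}
```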

528
cmd/cert-bundler/main.go Normal file
View File

@@ -0,0 +1,528 @@
package main
import (
"archive/tar"
"archive/zip"
"compress/gzip"
"crypto/sha256"
"crypto/x509"
_ "embed"
"encoding/pem"
"flag"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"git.wntrmute.dev/kyle/goutils/certlib"
"gopkg.in/yaml.v2"
)
// Config represents the top-level YAML configuration
type Config struct {
Config struct {
Hashes string `yaml:"hashes"`
Expiry string `yaml:"expiry"`
} `yaml:"config"`
Chains map[string]ChainGroup `yaml:"chains"`
}
// ChainGroup represents a named group of certificate chains
type ChainGroup struct {
Certs []CertChain `yaml:"certs"`
Outputs Outputs `yaml:"outputs"`
}
// CertChain represents a root certificate and its intermediates
type CertChain struct {
Root string `yaml:"root"`
Intermediates []string `yaml:"intermediates"`
}
// Outputs defines output format options
type Outputs struct {
IncludeSingle bool `yaml:"include_single"`
IncludeIndividual bool `yaml:"include_individual"`
Manifest bool `yaml:"manifest"`
Formats []string `yaml:"formats"`
Encoding string `yaml:"encoding"`
}
var (
configFile string
outputDir string
)
var formatExtensions = map[string]string{
"zip": ".zip",
"tgz": ".tar.gz",
}
//go:embed README.txt
var readmeContent string
func usage() {
fmt.Fprint(os.Stderr, readmeContent)
}
func main() {
flag.Usage = usage
flag.StringVar(&configFile, "c", "bundle.yaml", "path to YAML configuration file")
flag.StringVar(&outputDir, "o", "pkg", "output directory for archives")
flag.Parse()
if configFile == "" {
fmt.Fprintf(os.Stderr, "Error: configuration file required (-c flag)\n")
os.Exit(1)
}
// Load and parse configuration
cfg, err := loadConfig(configFile)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading config: %v\n", err)
os.Exit(1)
}
// Parse expiry duration (default 1 year)
expiryDuration := 365 * 24 * time.Hour
if cfg.Config.Expiry != "" {
expiryDuration, err = parseDuration(cfg.Config.Expiry)
if err != nil {
fmt.Fprintf(os.Stderr, "Error parsing expiry: %v\n", err)
os.Exit(1)
}
}
// Create output directory if it doesn't exist
if err := os.MkdirAll(outputDir, 0755); err != nil {
fmt.Fprintf(os.Stderr, "Error creating output directory: %v\n", err)
os.Exit(1)
}
// Process each chain group
// Pre-allocate createdFiles based on total number of formats across all groups
totalFormats := 0
for _, group := range cfg.Chains {
totalFormats += len(group.Outputs.Formats)
}
createdFiles := make([]string, 0, totalFormats)
for groupName, group := range cfg.Chains {
files, err := processChainGroup(groupName, group, expiryDuration)
if err != nil {
fmt.Fprintf(os.Stderr, "Error processing chain group %s: %v\n", groupName, err)
os.Exit(1)
}
createdFiles = append(createdFiles, files...)
}
// Generate hash file for all created archives
if cfg.Config.Hashes != "" {
hashFile := filepath.Join(outputDir, cfg.Config.Hashes)
if err := generateHashFile(hashFile, createdFiles); err != nil {
fmt.Fprintf(os.Stderr, "Error generating hash file: %v\n", err)
os.Exit(1)
}
}
fmt.Println("Certificate bundling completed successfully")
}
func loadConfig(path string) (*Config, error) {
data, err := os.ReadFile(path)
if err != nil {
return nil, err
}
var cfg Config
if err := yaml.Unmarshal(data, &cfg); err != nil {
return nil, err
}
return &cfg, nil
}
func parseDuration(s string) (time.Duration, error) {
// Support simple formats like "1y", "6m", "30d"
if len(s) < 2 {
return 0, fmt.Errorf("invalid duration format: %s", s)
}
unit := s[len(s)-1]
value := s[:len(s)-1]
var multiplier time.Duration
switch unit {
case 'y', 'Y':
multiplier = 365 * 24 * time.Hour
case 'm', 'M':
multiplier = 30 * 24 * time.Hour
case 'd', 'D':
multiplier = 24 * time.Hour
default:
return time.ParseDuration(s)
}
var num int
_, err := fmt.Sscanf(value, "%d", &num)
if err != nil {
return 0, fmt.Errorf("invalid duration value: %s", s)
}
return time.Duration(num) * multiplier, nil
}
func processChainGroup(groupName string, group ChainGroup, expiryDuration time.Duration) ([]string, error) {
// Default encoding to "pem" if not specified
encoding := group.Outputs.Encoding
if encoding == "" {
encoding = "pem"
}
// Collect certificates from all chains in the group
singleFileCerts, individualCerts, err := loadAndCollectCerts(group.Certs, group.Outputs, expiryDuration)
if err != nil {
return nil, err
}
// Prepare files for inclusion in archives
archiveFiles, err := prepareArchiveFiles(singleFileCerts, individualCerts, group.Outputs, encoding)
if err != nil {
return nil, err
}
// Create archives for the entire group
createdFiles, err := createArchiveFiles(groupName, group.Outputs.Formats, archiveFiles)
if err != nil {
return nil, err
}
return createdFiles, nil
}
// loadAndCollectCerts loads all certificates from chains and collects them for processing
func loadAndCollectCerts(chains []CertChain, outputs Outputs, expiryDuration time.Duration) ([]*x509.Certificate, []certWithPath, error) {
var singleFileCerts []*x509.Certificate
var individualCerts []certWithPath
for _, chain := range chains {
// Load root certificate
rootCert, err := certlib.LoadCertificate(chain.Root)
if err != nil {
return nil, nil, fmt.Errorf("failed to load root certificate %s: %v", chain.Root, err)
}
// Check expiry for root
checkExpiry(chain.Root, rootCert, expiryDuration)
// Add root to collections if needed
if outputs.IncludeSingle {
singleFileCerts = append(singleFileCerts, rootCert)
}
if outputs.IncludeIndividual {
individualCerts = append(individualCerts, certWithPath{
cert: rootCert,
path: chain.Root,
})
}
// Load and validate intermediates
for _, intPath := range chain.Intermediates {
intCert, err := certlib.LoadCertificate(intPath)
if err != nil {
return nil, nil, fmt.Errorf("failed to load intermediate certificate %s: %v", intPath, err)
}
// Validate that intermediate is signed by root
if err := intCert.CheckSignatureFrom(rootCert); err != nil {
return nil, nil, fmt.Errorf("intermediate %s is not properly signed by root %s: %v", intPath, chain.Root, err)
}
// Check expiry for intermediate
checkExpiry(intPath, intCert, expiryDuration)
// Add intermediate to collections if needed
if outputs.IncludeSingle {
singleFileCerts = append(singleFileCerts, intCert)
}
if outputs.IncludeIndividual {
individualCerts = append(individualCerts, certWithPath{
cert: intCert,
path: intPath,
})
}
}
}
return singleFileCerts, individualCerts, nil
}
// prepareArchiveFiles prepares all files to be included in archives
func prepareArchiveFiles(singleFileCerts []*x509.Certificate, individualCerts []certWithPath, outputs Outputs, encoding string) ([]fileEntry, error) {
var archiveFiles []fileEntry
// Handle a single bundle file
if outputs.IncludeSingle && len(singleFileCerts) > 0 {
files, err := encodeCertsToFiles(singleFileCerts, "bundle", encoding, true)
if err != nil {
return nil, fmt.Errorf("failed to encode single bundle: %v", err)
}
archiveFiles = append(archiveFiles, files...)
}
// Handle individual files
if outputs.IncludeIndividual {
for _, cp := range individualCerts {
baseName := strings.TrimSuffix(filepath.Base(cp.path), filepath.Ext(cp.path))
files, err := encodeCertsToFiles([]*x509.Certificate{cp.cert}, baseName, encoding, false)
if err != nil {
return nil, fmt.Errorf("failed to encode individual cert %s: %v", cp.path, err)
}
archiveFiles = append(archiveFiles, files...)
}
}
// Generate manifest if requested
if outputs.Manifest {
manifestContent := generateManifest(archiveFiles)
archiveFiles = append(archiveFiles, fileEntry{
name: "MANIFEST",
content: manifestContent,
})
}
return archiveFiles, nil
}
// createArchiveFiles creates archive files in the specified formats
func createArchiveFiles(groupName string, formats []string, archiveFiles []fileEntry) ([]string, error) {
createdFiles := make([]string, 0, len(formats))
for _, format := range formats {
ext, ok := formatExtensions[format]
if !ok {
return nil, fmt.Errorf("unsupported format: %s", format)
}
archivePath := filepath.Join(outputDir, groupName+ext)
switch format {
case "zip":
if err := createZipArchive(archivePath, archiveFiles); err != nil {
return nil, fmt.Errorf("failed to create zip archive: %v", err)
}
case "tgz":
if err := createTarGzArchive(archivePath, archiveFiles); err != nil {
return nil, fmt.Errorf("failed to create tar.gz archive: %v", err)
}
default:
return nil, fmt.Errorf("unsupported format: %s", format)
}
createdFiles = append(createdFiles, archivePath)
}
return createdFiles, nil
}
func checkExpiry(path string, cert *x509.Certificate, expiryDuration time.Duration) {
now := time.Now()
expiryThreshold := now.Add(expiryDuration)
if cert.NotAfter.Before(expiryThreshold) {
daysUntilExpiry := int(cert.NotAfter.Sub(now).Hours() / 24)
if daysUntilExpiry < 0 {
fmt.Fprintf(os.Stderr, "WARNING: Certificate %s has EXPIRED (expired %d days ago)\n", path, -daysUntilExpiry)
} else {
fmt.Fprintf(os.Stderr, "WARNING: Certificate %s will expire in %d days (on %s)\n", path, daysUntilExpiry, cert.NotAfter.Format("2006-01-02"))
}
}
}
type fileEntry struct {
name string
content []byte
}
type certWithPath struct {
cert *x509.Certificate
path string
}
// encodeCertsToFiles converts certificates to file entries based on encoding type
// If isSingle is true, certs are concatenated into a single file; otherwise one cert per file
func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding string, isSingle bool) ([]fileEntry, error) {
var files []fileEntry
switch encoding {
case "pem":
pemContent := encodeCertsToPEM(certs)
files = append(files, fileEntry{
name: baseName + ".pem",
content: pemContent,
})
case "der":
if isSingle {
// For single file in DER, concatenate all cert DER bytes
var derContent []byte
for _, cert := range certs {
derContent = append(derContent, cert.Raw...)
}
files = append(files, fileEntry{
name: baseName + ".crt",
content: derContent,
})
} else {
// Individual DER file (should only have one cert)
if len(certs) > 0 {
files = append(files, fileEntry{
name: baseName + ".crt",
content: certs[0].Raw,
})
}
}
case "both":
// Add PEM version
pemContent := encodeCertsToPEM(certs)
files = append(files, fileEntry{
name: baseName + ".pem",
content: pemContent,
})
// Add DER version
if isSingle {
var derContent []byte
for _, cert := range certs {
derContent = append(derContent, cert.Raw...)
}
files = append(files, fileEntry{
name: baseName + ".crt",
content: derContent,
})
} else {
if len(certs) > 0 {
files = append(files, fileEntry{
name: baseName + ".crt",
content: certs[0].Raw,
})
}
}
default:
return nil, fmt.Errorf("unsupported encoding: %s (must be 'pem', 'der', or 'both')", encoding)
}
return files, nil
}
// encodeCertsToPEM encodes certificates to PEM format
func encodeCertsToPEM(certs []*x509.Certificate) []byte {
var pemContent []byte
for _, cert := range certs {
pemBlock := &pem.Block{
Type: "CERTIFICATE",
Bytes: cert.Raw,
}
pemContent = append(pemContent, pem.EncodeToMemory(pemBlock)...)
}
return pemContent
}
func generateManifest(files []fileEntry) []byte {
var manifest strings.Builder
for _, file := range files {
if file.name == "MANIFEST" {
continue
}
hash := sha256.Sum256(file.content)
manifest.WriteString(fmt.Sprintf("%x %s\n", hash, file.name))
}
return []byte(manifest.String())
}
func createZipArchive(path string, files []fileEntry) error {
f, err := os.Create(path)
if err != nil {
return err
}
w := zip.NewWriter(f)
for _, file := range files {
fw, err := w.Create(file.name)
if err != nil {
w.Close()
f.Close()
return err
}
if _, err := fw.Write(file.content); err != nil {
w.Close()
f.Close()
return err
}
}
// Check errors on close operations
if err := w.Close(); err != nil {
f.Close()
return err
}
return f.Close()
}
func createTarGzArchive(path string, files []fileEntry) error {
f, err := os.Create(path)
if err != nil {
return err
}
gw := gzip.NewWriter(f)
tw := tar.NewWriter(gw)
for _, file := range files {
hdr := &tar.Header{
Name: file.name,
Mode: 0644,
Size: int64(len(file.content)),
}
if err := tw.WriteHeader(hdr); err != nil {
tw.Close()
gw.Close()
f.Close()
return err
}
if _, err := tw.Write(file.content); err != nil {
tw.Close()
gw.Close()
f.Close()
return err
}
}
// Check errors on close operations in the correct order
if err := tw.Close(); err != nil {
gw.Close()
f.Close()
return err
}
if err := gw.Close(); err != nil {
f.Close()
return err
}
return f.Close()
}
func generateHashFile(path string, files []string) error {
f, err := os.Create(path)
if err != nil {
return err
}
defer f.Close()
for _, file := range files {
data, err := os.ReadFile(file)
if err != nil {
return err
}
hash := sha256.Sum256(data)
fmt.Fprintf(f, "%x %s\n", hash, filepath.Base(file))
}
return nil
}

197
cmd/cert-bundler/prompt.txt Normal file
View File

@@ -0,0 +1,197 @@
This project is an exploration into the utility of JetBrains' Junie
to write smaller but tedious programs.
Task: build a certificate bundling tool in cmd/cert-bundler. It
creates archives of certificate chains.
A YAML file for this looks something like:
``` yaml
config:
hashes: bundle.sha256
expiry: 1y
chains:
core_certs:
certs:
- root: roots/core-ca.pem
intermediates:
- int/cca1.pem
- int/cca2.pem
- int/cca3.pem
- root: roots/ssh-ca.pem
intermediates:
- ssh/ssh_dmz1.pem
- ssh/ssh_internal.pem
outputs:
include_single: true
include_individual: true
manifest: true
formats:
- zip
- tgz
```
Some requirements:
1. First, all the certificates should be loaded.
2. For each root, each of the individual intermediates should be
checked to make sure they are properly signed by the root CA.
3. The program should optionally take an expiration period (defaulting
to one year), specified in config.expiration, and if any certificate
is within that expiration period, a warning should be printed.
4. If outputs.include_single is true, all certificates under chains
should be concatenated into a single file.
5. If outputs.include_individual is true, all certificates under
chains should be included at the root level (e.g. int/cca2.pem
would be cca2.pem in the archive).
6. If bundle.manifest is true, a "MANIFEST" file is created with
SHA256 sums of each file included in the archive.
7. For each of the formats, create an archive file in the output
directory (specified with `-o`) with that format.
- If zip is included, create a .zip file.
- If tgz is included, create a .tar.gz file with default compression
levels.
- All archive files should include any generated files (single
and/or individual) in the top-level directory.
8. In the output directory, create a file with the same name as
config.hashes that contains the SHA256 sum of all files created.
-----
The outputs.include_single and outputs.include_individual describe
what should go in the final archive. If both are specified, the output
archive should include both a single bundle.pem and each individual
certificate, for example.
-----
As it stands, given the following `bundle.yaml`:
``` yaml
config:
hashes: bundle.sha256
expiry: 1y
chains:
core_certs:
certs:
- root: pems/gts-r1.pem
intermediates:
- pems/goog-wr2.pem
outputs:
include_single: true
include_individual: true
manifest: true
formats:
- zip
- tgz
- root: pems/isrg-root-x1.pem
intermediates:
- pems/le-e7.pem
outputs:
include_single: true
include_individual: false
manifest: true
formats:
- zip
- tgz
google_certs:
certs:
- root: pems/gts-r1.pem
intermediates:
- pems/goog-wr2.pem
outputs:
include_single: true
include_individual: false
manifest: true
formats:
- tgz
lets_encrypt:
certs:
- root: pems/isrg-root-x1.pem
intermediates:
- pems/le-e7.pem
outputs:
include_single: false
include_individual: true
manifest: false
formats:
- zip
```
The program outputs the following files:
- bundle.sha256
- core_certs_0.tgz (contains individual certs)
- core_certs_0.zip (contains individual certs)
- core_certs_1.tgz (contains core_certs.pem)
- core_certs_1.zip (contains core_certs.pem)
- google_certs_0.tgz
- lets_encrypt_0.zip
It should output
- bundle.sha256
- core_certs.tgz
- core_certs.zip
- google_certs.tgz
- lets_encrypt.zip
core_certs.* should contain `bundle.pem` and all the individual
certs. There should be no _$n$ variants of archives.
-----
Add an additional field to outputs: encoding. It should accept one of
`der`, `pem`, or `both`. If `der`, certificates should be output as a
`.crt` file containing a DER-encoded certificate. If `pem`, certificates
should be output as a `.pem` file containing a PEM-encoded certificate.
If both, both the `.crt` and `.pem` certificate should be included.
For example, given the previous config, if `encoding` is der, the
google_certs.tgz archive should contain
- bundle.crt
- MANIFEST
Or with lets_encrypt.zip:
- isrg-root-x1.crt
- le-e7.crt
However, if `encoding` is pem, the lets_encrypt.zip archive should contain:
- isrg-root-x1.pem
- le-e7.pem
And if `encoding` is both, the lets_encrypt.zip archive should contain:
- isrg-root-x1.crt
- isrg-root-x1.pem
- le-e7.crt
- le-e7.pem
-----
The tgz format should output a `.tar.gz` file instead of a `.tgz` file.
-----
Move the format extensions to a global variable.
-----
Write a README.txt with a description of the bundle.yaml format.
Additionally, update the help text for the program (e.g. with `-h`)
to provide the same detailed information.
-----
It may be easier to embed the README.txt in the program on build.
-----
For the archive (tar.gz and zip) writers, make sure errors are
checked at the end, and don't just defer the close operations.

43
cmd/cert-bundler/testdata/bundle.yaml vendored Normal file
View File

@@ -0,0 +1,43 @@
config:
hashes: bundle.sha256
expiry: 1y
chains:
core_certs:
certs:
- root: pems/gts-r1.pem
intermediates:
- pems/goog-wr2.pem
- root: pems/isrg-root-x1.pem
intermediates:
- pems/le-e7.pem
outputs:
include_single: true
include_individual: true
manifest: true
formats:
- zip
- tgz
google_certs:
certs:
- root: pems/gts-r1.pem
intermediates:
- pems/goog-wr2.pem
outputs:
include_single: true
include_individual: false
manifest: true
encoding: der
formats:
- tgz
lets_encrypt:
certs:
- root: pems/isrg-root-x1.pem
intermediates:
- pems/le-e7.pem
outputs:
include_single: false
include_individual: true
manifest: false
encoding: both
formats:
- zip

View File

@@ -0,0 +1,29 @@
-----BEGIN CERTIFICATE-----
MIIFCzCCAvOgAwIBAgIQf/AFoHxM3tEArZ1mpRB7mDANBgkqhkiG9w0BAQsFADBH
MQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExM
QzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMjMxMjEzMDkwMDAwWhcNMjkwMjIw
MTQwMDAwWjA7MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVR29vZ2xlIFRydXN0IFNl
cnZpY2VzMQwwCgYDVQQDEwNXUjIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
AoIBAQCp/5x/RR5wqFOfytnlDd5GV1d9vI+aWqxG8YSau5HbyfsvAfuSCQAWXqAc
+MGr+XgvSszYhaLYWTwO0xj7sfUkDSbutltkdnwUxy96zqhMt/TZCPzfhyM1IKji
aeKMTj+xWfpgoh6zySBTGYLKNlNtYE3pAJH8do1cCA8Kwtzxc2vFE24KT3rC8gIc
LrRjg9ox9i11MLL7q8Ju26nADrn5Z9TDJVd06wW06Y613ijNzHoU5HEDy01hLmFX
xRmpC5iEGuh5KdmyjS//V2pm4M6rlagplmNwEmceOuHbsCFx13ye/aoXbv4r+zgX
FNFmp6+atXDMyGOBOozAKql2N87jAgMBAAGjgf4wgfswDgYDVR0PAQH/BAQDAgGG
MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/
AgEAMB0GA1UdDgQWBBTeGx7teRXUPjckwyG77DQ5bUKyMDAfBgNVHSMEGDAWgBTk
rysmcRorSCeFL1JmLO/wiRNxPjA0BggrBgEFBQcBAQQoMCYwJAYIKwYBBQUHMAKG
GGh0dHA6Ly9pLnBraS5nb29nL3IxLmNydDArBgNVHR8EJDAiMCCgHqAchhpodHRw
Oi8vYy5wa2kuZ29vZy9yL3IxLmNybDATBgNVHSAEDDAKMAgGBmeBDAECATANBgkq
hkiG9w0BAQsFAAOCAgEARXWL5R87RBOWGqtY8TXJbz3S0DNKhjO6V1FP7sQ02hYS
TL8Tnw3UVOlIecAwPJQl8hr0ujKUtjNyC4XuCRElNJThb0Lbgpt7fyqaqf9/qdLe
SiDLs/sDA7j4BwXaWZIvGEaYzq9yviQmsR4ATb0IrZNBRAq7x9UBhb+TV+PfdBJT
DhEl05vc3ssnbrPCuTNiOcLgNeFbpwkuGcuRKnZc8d/KI4RApW//mkHgte8y0YWu
ryUJ8GLFbsLIbjL9uNrizkqRSvOFVU6xddZIMy9vhNkSXJ/UcZhjJY1pXAprffJB
vei7j+Qi151lRehMCofa6WBmiA4fx+FOVsV2/7R6V2nyAiIJJkEd2nSi5SnzxJrl
Xdaqev3htytmOPvoKWa676ATL/hzfvDaQBEcXd2Ppvy+275W+DKcH0FBbX62xevG
iza3F4ydzxl6NJ8hk8R+dDXSqv1MbRT1ybB5W0k8878XSOjvmiYTDIfyc9acxVJr
Y/cykHipa+te1pOhv7wYPYtZ9orGBV5SGOJm4NrB3K1aJar0RfzxC3ikr7Dyc6Qw
qDTBU39CluVIQeuQRgwG3MuSxl7zRERDRilGoKb8uY45JzmxWuKxrfwT/478JuHU
/oTxUFqOl2stKnn7QGTq8z29W+GgBLCXSBxC9epaHM0myFH/FJlniXJfHeytWt0=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
-----END CERTIFICATE-----

View File

@@ -0,0 +1,31 @@
-----BEGIN CERTIFICATE-----
MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,26 @@
-----BEGIN CERTIFICATE-----
MIIEVzCCAj+gAwIBAgIRAKp18eYrjwoiCWbTi7/UuqEwDQYJKoZIhvcNAQELBQAw
TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMjQwMzEzMDAwMDAw
WhcNMjcwMzEyMjM1OTU5WjAyMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg
RW5jcnlwdDELMAkGA1UEAxMCRTcwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARB6AST
CFh/vjcwDMCgQer+VtqEkz7JANurZxLP+U9TCeioL6sp5Z8VRvRbYk4P1INBmbef
QHJFHCxcSjKmwtvGBWpl/9ra8HW0QDsUaJW2qOJqceJ0ZVFT3hbUHifBM/2jgfgw
gfUwDgYDVR0PAQH/BAQDAgGGMB0GA1UdJQQWMBQGCCsGAQUFBwMCBggrBgEFBQcD
ATASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSuSJ7chx1EoG/aouVgdAR4
wpwAgDAfBgNVHSMEGDAWgBR5tFnme7bl5AFzgAiIyBpY9umbbjAyBggrBgEFBQcB
AQQmMCQwIgYIKwYBBQUHMAKGFmh0dHA6Ly94MS5pLmxlbmNyLm9yZy8wEwYDVR0g
BAwwCjAIBgZngQwBAgEwJwYDVR0fBCAwHjAcoBqgGIYWaHR0cDovL3gxLmMubGVu
Y3Iub3JnLzANBgkqhkiG9w0BAQsFAAOCAgEAjx66fDdLk5ywFn3CzA1w1qfylHUD
aEf0QZpXcJseddJGSfbUUOvbNR9N/QQ16K1lXl4VFyhmGXDT5Kdfcr0RvIIVrNxF
h4lqHtRRCP6RBRstqbZ2zURgqakn/Xip0iaQL0IdfHBZr396FgknniRYFckKORPG
yM3QKnd66gtMst8I5nkRQlAg/Jb+Gc3egIvuGKWboE1G89NTsN9LTDD3PLj0dUMr
OIuqVjLB8pEC6yk9enrlrqjXQgkLEYhXzq7dLafv5Vkig6Gl0nuuqjqfp0Q1bi1o
yVNAlXe6aUXw92CcghC9bNsKEO1+M52YY5+ofIXlS/SEQbvVYYBLZ5yeiglV6t3S
M6H+vTG0aP9YHzLn/KVOHzGQfXDP7qM5tkf+7diZe7o2fw6O7IvN6fsQXEQQj8TJ
UXJxv2/uJhcuy/tSDgXwHM8Uk34WNbRT7zGTGkQRX0gsbjAea/jYAoWv0ZvQRwpq
Pe79D/i7Cep8qWnA+7AE/3B3S/3dEEYmc0lpe1366A/6GEgk3ktr9PEoQrLChs6I
tu3wnNLB2euC8IKGLQFpGtOO/2/hiAKjyajaBP25w1jF0Wl8Bbqne3uZ2q1GyPFJ
YRmT7/OXpmOH/FVLtwS+8ng1cAmpCujPwteJZNcDG0sF2n/sc0+SQf49fdyUK0ty
+VUwFj9tmWxyR/M=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,4 @@
5ed8bf9ed693045faa8a5cb0edc4a870052e56aef6291ce8b1604565affbc2a4 core_certs.zip
e59eddc590d2f7b790a87c5b56e81697088ab54be382c0e2c51b82034006d308 core_certs.tgz
51b9b63b1335118079e90700a3a5b847c363808e9116e576ca84f301bc433289 google_certs.tgz
3d1910ca8835c3ded1755a8c7d6c48083c2f3ff68b2bfbf932aaf27e29d0a232 lets_encrypt.zip

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,36 @@
cert-revcheck: check certificate expiry and revocation
-----------------------------------------------------
Description
cert-revcheck accepts a list of certificate files (PEM or DER) or
site addresses (host[:port]) and checks whether the leaf certificate
is expired or revoked. Revocation checks use CRL and OCSP via the
certlib/revoke package.
Usage
cert-revcheck [options] <target> [<target>...]
Options
-hardfail treat revocation check failures as fatal (default: false)
-timeout dur HTTP/OCSP/CRL timeout for network operations (default: 10s)
-v verbose output
Targets
- File paths to certificates in PEM or DER format.
- Site addresses in the form host or host:port. If no port is
provided, 443 is assumed.
Examples
# Check a PEM file
cert-revcheck ./server.pem
# Check a DER (single) certificate
cert-revcheck ./server.der
# Check a live site (leaf certificate)
cert-revcheck example.com:443
Notes
- For sites, only the leaf certificate is checked.
- When -hardfail is set, network issues during OCSP/CRL fetch will
cause the check to fail (treated as revoked).
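Sample output (illustrative targets; the exact messages depend on the
certificate being checked):
    ./server.pem: OK
    example.com:443: EXPIRED: expired at 2024-05-01 00:00:00 +0000 UTC
    ./old.pem: REVOKED
The exit code is non-zero if any target is expired or revoked, or, with
-hardfail, if a revocation status cannot be determined.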

140
cmd/cert-revcheck/main.go Normal file
View File

@@ -0,0 +1,140 @@
package main
import (
"crypto/tls"
"crypto/x509"
"flag"
"errors"
"fmt"
"io/ioutil"
"net"
"os"
"time"
"git.wntrmute.dev/kyle/goutils/certlib"
hosts "git.wntrmute.dev/kyle/goutils/certlib/hosts"
"git.wntrmute.dev/kyle/goutils/certlib/revoke"
"git.wntrmute.dev/kyle/goutils/fileutil"
)
var (
hardfail bool
timeout time.Duration
verbose bool
)
func main() {
flag.BoolVar(&hardfail, "hardfail", false, "treat revocation check failures as fatal")
flag.DurationVar(&timeout, "timeout", 10*time.Second, "network timeout for OCSP/CRL fetches and TLS site connects")
flag.BoolVar(&verbose, "v", false, "verbose output")
flag.Parse()
revoke.HardFail = hardfail
// Set HTTP client timeout for revocation library
revoke.HTTPClient.Timeout = timeout
if flag.NArg() == 0 {
fmt.Fprintf(os.Stderr, "Usage: %s [options] <target> [<target>...]\n", os.Args[0])
os.Exit(2)
}
exitCode := 0
for _, target := range flag.Args() {
status, err := processTarget(target)
switch status {
case "OK":
fmt.Printf("%s: OK\n", target)
case "EXPIRED":
fmt.Printf("%s: EXPIRED: %v\n", target, err)
exitCode = 1
case "REVOKED":
fmt.Printf("%s: REVOKED\n", target)
exitCode = 1
case "UNKNOWN":
fmt.Printf("%s: UNKNOWN: %v\n", target, err)
if hardfail {
// In hardfail, treat unknown as failure
exitCode = 1
}
}
}
os.Exit(exitCode)
}
func processTarget(target string) (string, error) {
if fileutil.FileDoesExist(target) {
return checkFile(target)
}
// Not a file; treat as site
return checkSite(target)
}
func checkFile(path string) (string, error) {
in, err := os.ReadFile(path)
if err != nil {
return "UNKNOWN", err
}
// Try PEM first; if that fails, try single DER cert
certs, err := certlib.ReadCertificates(in)
if err != nil || len(certs) == 0 {
cert, _, derr := certlib.ReadCertificate(in)
if derr != nil || cert == nil {
if err == nil {
err = derr
}
return "UNKNOWN", err
}
return evaluateCert(cert)
}
// Evaluate the first certificate (leaf) by default
return evaluateCert(certs[0])
}
func checkSite(hostport string) (string, error) {
// Use certlib/hosts to parse host/port (supports https URLs and host:port)
target, err := hosts.ParseHost(hostport)
if err != nil {
return "UNKNOWN", err
}
d := &net.Dialer{Timeout: timeout}
conn, err := tls.DialWithDialer(d, "tcp", target.String(), &tls.Config{InsecureSkipVerify: true, ServerName: target.Host})
if err != nil {
return "UNKNOWN", err
}
defer conn.Close()
state := conn.ConnectionState()
if len(state.PeerCertificates) == 0 {
return "UNKNOWN", errors.New("no peer certificates presented")
}
return evaluateCert(state.PeerCertificates[0])
}
func evaluateCert(cert *x509.Certificate) (string, error) {
// Expiry check
now := time.Now()
if !now.Before(cert.NotAfter) {
return "EXPIRED", fmt.Errorf("expired at %s", cert.NotAfter)
}
if !now.After(cert.NotBefore) {
return "EXPIRED", fmt.Errorf("not valid until %s", cert.NotBefore)
}
// Revocation check using certlib/revoke
revoked, ok, err := revoke.VerifyCertificateError(cert)
if revoked {
// If revoked is true, ok will be true per implementation, err may describe why
return "REVOKED", err
}
if !ok {
// Revocation status could not be determined
return "UNKNOWN", err
}
return "OK", nil
}

View File

@@ -110,6 +110,14 @@ func showBasicConstraints(cert *x509.Certificate) {
if cert.IsCA {
fmt.Printf(", is a CA certificate")
if !cert.BasicConstraintsValid {
fmt.Printf(" (basic constraint failure)")
}
} else {
fmt.Printf("is not a CA certificate")
if cert.KeyUsage&x509.KeyUsageKeyEncipherment != 0 {
fmt.Printf(" (key encipherment usage enabled!)")
}
}
if (cert.MaxPathLen == 0 && cert.MaxPathLenZero) || (cert.MaxPathLen > 0) {

View File

@@ -39,10 +39,6 @@ func compress(path, target string, level int) error {
return errors.Wrap(err, "compressing file")
}
if err != nil {
return errors.Wrap(err, "stat(2)ing destination file")
}
return nil
}

3
cmd/minmax/README Normal file
View File

@@ -0,0 +1,3 @@
minmax
A quick tool to calculate minmax codes if needed for uLisp.

53
cmd/minmax/minmax.go Normal file
View File

@@ -0,0 +1,53 @@
package main
import (
"flag"
"fmt"
"os"
"strconv"
)
var kinds = map[string]int{
"sym": 0,
"tf": 1,
"fn": 2,
"sp": 3,
}
func dieIf(err error) {
if err == nil {
return
}
fmt.Fprintf(os.Stderr, "[!] %s\n", err)
os.Exit(1)
}
func usage() {
fmt.Fprintf(os.Stderr, "usage: minmax type min max\n")
fmt.Fprintf(os.Stderr, " type is one of fn, sp, sym, tf\n")
os.Exit(1)
}
func main() {
flag.Parse()
if flag.NArg() != 3 {
usage()
}
kind, ok := kinds[flag.Arg(0)]
if !ok {
usage()
}
min, err := strconv.Atoi(flag.Arg(1))
dieIf(err)
max, err := strconv.Atoi(flag.Arg(2))
dieIf(err)
code := kind << 6
code += (min << 3)
code += max
fmt.Printf("%0o\n", code)
}
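As a worked example of the encoding above (the kind in the top two bits, min in the next three, max in the low three), a hypothetical run of `minmax fn 2 3` computes 2<<6 + 2<<3 + 3 = 128 + 16 + 3 = 147, which prints as 223 in octal.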

48
cmd/rolldie/main.go Normal file
View File

@@ -0,0 +1,48 @@
package main
import (
"flag"
"fmt"
"math/rand"
"os"
"regexp"
"strconv"
"git.wntrmute.dev/kyle/goutils/die"
)
var dieRollFormat = regexp.MustCompile(`^(\d+)[dD](\d+)$`)
func rollDie(count, sides int) []int {
sum := 0
var rolls []int
for i := 0; i < count; i++ {
roll := rand.Intn(sides) + 1
sum += roll
rolls = append(rolls, roll)
}
rolls = append(rolls, sum)
return rolls
}
func main() {
flag.Parse()
for _, arg := range flag.Args() {
if !dieRollFormat.MatchString(arg) {
fmt.Fprintf(os.Stderr, "invalid die format %s: should be XdY\n", arg)
os.Exit(1)
}
dieRoll := dieRollFormat.FindAllStringSubmatch(arg, -1)
count, err := strconv.Atoi(dieRoll[0][1])
die.If(err)
sides, err := strconv.Atoi(dieRoll[0][2])
die.If(err)
fmt.Println(rollDie(count, sides))
}
}

34
cmd/tlsinfo/README.txt Normal file
View File

@@ -0,0 +1,34 @@
tlsinfo: show TLS version, cipher, and peer certificates
---------------------------------------------------------
Description
tlsinfo connects to a TLS server and prints the negotiated TLS version and
cipher suite, followed by details for each certificate in the server's
presented chain (as provided by the server).
Usage
tlsinfo <hostname:port>
Output
The program prints the negotiated protocol and cipher, then one section per
certificate in the order received from the server. Example fields:
TLS Version: TLS 1.3
Cipher Suite: TLS_AES_128_GCM_SHA256
Certificate 1
Subject: CN=example.com, O=Example Corp, C=US
Issuer: CN=Example Root CA, O=Example Corp, C=US
DNS Names: [example.com www.example.com]
Not Before: 2025-01-01 00:00:00 +0000 UTC
Not After: 2026-01-01 23:59:59 +0000 UTC
Examples
# Inspect a public HTTPS endpoint
tlsinfo example.com:443
Notes
- Verification is intentionally disabled (InsecureSkipVerify=true). The tool
does not validate the server certificate or hostname; it is for inspection
only.
- The SNI/ServerName is inferred from <hostname> when applicable.
- You must specify a port (e.g., 443 for HTTPS).
- The entire certificate chain is printed exactly as presented by the server.

64
cmd/tlsinfo/main.go Normal file
View File

@@ -0,0 +1,64 @@
package main
import (
"crypto/tls"
"crypto/x509"
"fmt"
"os"
)
func main() {
if len(os.Args) != 2 {
fmt.Printf("Usage: %s hostname:port>\n", os.Args[0])
os.Exit(1)
}
hostPort := os.Args[1]
conn, err := tls.Dial("tcp", hostPort, &tls.Config{
InsecureSkipVerify: true,
})
if err != nil {
fmt.Printf("Failed to connect to the TLS server: %v\n", err)
os.Exit(1)
}
defer conn.Close()
state := conn.ConnectionState()
printConnectionDetails(state)
}
func printConnectionDetails(state tls.ConnectionState) {
version := tlsVersion(state.Version)
cipherSuite := tls.CipherSuiteName(state.CipherSuite)
fmt.Printf("TLS Version: %s\n", version)
fmt.Printf("Cipher Suite: %s\n", cipherSuite)
printPeerCertificates(state.PeerCertificates)
}
func tlsVersion(version uint16) string {
switch version {
case tls.VersionTLS13:
return "TLS 1.3"
case tls.VersionTLS12:
return "TLS 1.2"
case tls.VersionTLS11:
return "TLS 1.1"
case tls.VersionTLS10:
return "TLS 1.0"
default:
return "Unknown"
}
}
func printPeerCertificates(certificates []*x509.Certificate) {
for i, cert := range certificates {
fmt.Printf("Certificate %d\n", i+1)
fmt.Printf("\tSubject: %s\n", cert.Subject)
fmt.Printf("\tIssuer: %s\n", cert.Issuer)
fmt.Printf("\tDNS Names: %v\n", cert.DNSNames)
fmt.Printf("\tNot Before: %s\n:", cert.NotBefore)
fmt.Printf("\tNot After: %s\n", cert.NotAfter)
}
}

View File

@@ -11,7 +11,7 @@ package config
import (
"bufio"
"fmt"
"log"
"maps"
"os"
"sort"
"strings"
@@ -33,14 +33,15 @@ func SetEnvPrefix(pfx string) {
prefix = pfx
}
const keyValueSplitLength = 2
func addLine(line string) {
if strings.HasPrefix(line, "#") || line == "" {
return
}
lineParts := strings.SplitN(line, "=", 2)
if len(lineParts) != 2 {
log.Print("skipping line: ", line)
lineParts := strings.SplitN(line, "=", keyValueSplitLength)
if len(lineParts) != keyValueSplitLength {
return // silently ignore empty keys
}
@@ -49,7 +50,7 @@ func addLine(line string) {
vars[lineParts[0]] = lineParts[1]
}
// LoadFile scans the file at path for key=value pairs and adds them
// LoadFile scans the file at 'path' for key=value pairs and adds them
// to the configuration.
func LoadFile(path string) error {
file, err := os.Open(path)
@@ -64,25 +65,19 @@ func LoadFile(path string) error {
addLine(line)
}
if err = scanner.Err(); err != nil {
return err
return scanner.Err()
}
return nil
}
// LoadFileFor scans the ini file at path, loading the default section
// and overriding any keys found under section. If strict is true, the
// named section must exist (i.e. to catch typos in the section name).
// LoadFileFor scans the ini file at 'path', loading the default section
// and overriding any keys found under 'section'. If strict is true, the
// named section must exist (i.e., to catch typos in the section name).
func LoadFileFor(path, section string, strict bool) error {
cmap, err := iniconf.ParseFile(path)
if err != nil {
return err
}
for key, value := range cmap[iniconf.DefaultSection] {
vars[key] = value
}
maps.Copy(vars, cmap[iniconf.DefaultSection])
smap, ok := cmap[section]
if !ok {
@@ -92,9 +87,7 @@ func LoadFileFor(path, section string, strict bool) error {
return nil
}
for key, value := range smap {
vars[key] = value
}
maps.Copy(vars, smap)
return nil
}
@@ -111,7 +104,7 @@ func Get(key string) string {
// GetDefault retrieves a value from either a configuration file or
// the environment. Note that value from a file will override
// environment variables. If a value isn't found (e.g. Get returns an
// environment variables. If a value isn't found (e.g., Get returns an
// empty string), the default value will be used.
func GetDefault(key, def string) string {
if v := Get(key); v != "" {
@@ -121,8 +114,7 @@ func GetDefault(key, def string) string {
}
// Require retrieves a value from either a configuration file or the
// environment. If the key isn't present, it will call log.Fatal, printing
// the missing key.
// environment. If the key isn't present, it will panic.
func Require(key string) string {
if v, ok := vars[key]; ok {
return v
@@ -135,7 +127,7 @@ func Require(key string) string {
envMessage = " (note: looked for the key " + prefix + key
envMessage += " in the local env)"
}
log.Fatalf("missing required configuration value %s%s", key, envMessage)
panic(fmt.Sprintf("missing required configuration value %s%s", key, envMessage))
}
return v
@@ -143,7 +135,8 @@ func Require(key string) string {
// ListKeys returns a slice of the currently known keys.
func ListKeys() []string {
keyList := []string{}
var keyList []string
for k := range vars {
keyList = append(keyList, k)
}
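A minimal usage sketch consistent with the behavior documented in this diff (file values override the environment, and Require now panics instead of calling log.Fatal); the file name app.env is hypothetical:

```go
package main

import (
	"fmt"

	"git.wntrmute.dev/kyle/goutils/config"
)

func main() {
	// Values loaded from a file take precedence over environment variables.
	if err := config.LoadFile("app.env"); err != nil { // hypothetical file name
		fmt.Println("no config file loaded:", err)
	}

	addr := config.GetDefault("ADDR", ":8080") // falls back when the key is unset
	dsn := config.Require("DSN")               // panics with a descriptive message if missing

	fmt.Println(addr, dsn)
}
```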

View File

@@ -1,27 +1,26 @@
package config
package config_test
import (
"os"
"testing"
"git.wntrmute.dev/kyle/goutils/config"
)
const (
testFilePath = "testdata/test.env"
// Keys
// Key constants.
kOrder = "ORDER"
kSpecies = "SPECIES"
kName = "COMMON_NAME"
// Env
eOrder = "corvus"
eSpecies = "corvus corax"
eName = "northern raven"
// File
fOrder = "stringiformes"
fSpecies = "strix aluco"
// Name isn't set in the file to test fall through.
)
func init() {
@@ -31,8 +30,8 @@ func init() {
}
func TestLoadEnvOnly(t *testing.T) {
order := Get(kOrder)
species := Get(kSpecies)
order := config.Get(kOrder)
species := config.Get(kSpecies)
if order != eOrder {
t.Errorf("want %s, have %s", eOrder, order)
}
@@ -43,14 +42,14 @@ func TestLoadEnvOnly(t *testing.T) {
}
func TestLoadFile(t *testing.T) {
err := LoadFile(testFilePath)
err := config.LoadFile(testFilePath)
if err != nil {
t.Fatal(err)
}
order := Get(kOrder)
species := Get(kSpecies)
name := Get(kName)
order := config.Get(kOrder)
species := config.Get(kSpecies)
name := config.Get(kName)
if order != fOrder {
t.Errorf("want %s, have %s", fOrder, order)

View File

@@ -2,6 +2,7 @@ package iniconf
import (
"bufio"
"errors"
"fmt"
"io"
"os"
@@ -23,86 +24,115 @@ var (
var DefaultSection = "default"
// ParseFile attempts to load the named config file.
func ParseFile(fileName string) (cfg ConfigMap, err error) {
var file *os.File
file, err = os.Open(fileName)
func ParseFile(fileName string) (ConfigMap, error) {
file, err := os.Open(fileName)
if err != nil {
return
return nil, err
}
defer file.Close()
return ParseReader(file)
}
// ParseReader reads a configuration from an io.Reader.
func ParseReader(r io.Reader) (cfg ConfigMap, err error) {
cfg = ConfigMap{}
func ParseReader(r io.Reader) (ConfigMap, error) {
cfg := ConfigMap{}
buf := bufio.NewReader(r)
var (
line string
longLine bool
currentSection string
lineBytes []byte
isPrefix bool
err error
)
for {
err = nil
lineBytes, isPrefix, err = buf.ReadLine()
if io.EOF == err {
line, longLine, err = readConfigLine(buf, line, longLine)
if errors.Is(err, io.EOF) {
err = nil
break
} else if err != nil {
break
} else if isPrefix {
line += string(lineBytes)
longLine = true
continue
} else if longLine {
line += string(lineBytes)
longLine = false
} else {
line = string(lineBytes)
}
if commentLine.MatchString(line) {
if line == "" {
continue
} else if blankLine.MatchString(line) {
continue
} else if configSection.MatchString(line) {
section := configSection.ReplaceAllString(line,
"$1")
if section == "" {
err = fmt.Errorf("invalid structure in file")
}
currentSection, err = processConfigLine(cfg, line, currentSection)
if err != nil {
break
} else if !cfg.SectionInConfig(section) {
}
}
return cfg, err
}
// readConfigLine reads and assembles a complete configuration line, handling long lines.
func readConfigLine(buf *bufio.Reader, currentLine string, longLine bool) (string, bool, error) {
lineBytes, isPrefix, err := buf.ReadLine()
if err != nil {
return "", false, err
}
if isPrefix {
return currentLine + string(lineBytes), true, nil
} else if longLine {
return currentLine + string(lineBytes), false, nil
}
return string(lineBytes), false, nil
}
// processConfigLine processes a single line and updates the configuration map.
func processConfigLine(cfg ConfigMap, line string, currentSection string) (string, error) {
if commentLine.MatchString(line) || blankLine.MatchString(line) {
return currentSection, nil
}
if configSection.MatchString(line) {
return handleSectionLine(cfg, line)
}
if configLine.MatchString(line) {
return handleConfigLine(cfg, line, currentSection)
}
return currentSection, errors.New("invalid config file")
}
// handleSectionLine processes a section header line.
func handleSectionLine(cfg ConfigMap, line string) (string, error) {
section := configSection.ReplaceAllString(line, "$1")
if section == "" {
return "", errors.New("invalid structure in file")
}
if !cfg.SectionInConfig(section) {
cfg[section] = make(map[string]string, 0)
}
currentSection = section
} else if configLine.MatchString(line) {
return section, nil
}
// handleConfigLine processes a key=value configuration line.
func handleConfigLine(cfg ConfigMap, line string, currentSection string) (string, error) {
regex := configLine
if quotedConfigLine.MatchString(line) {
regex = quotedConfigLine
}
if currentSection == "" {
currentSection = DefaultSection
if !cfg.SectionInConfig(currentSection) {
cfg[currentSection] = map[string]string{}
}
}
key := regex.ReplaceAllString(line, "$1")
val := regex.ReplaceAllString(line, "$2")
if key == "" {
continue
}
if key != "" {
cfg[currentSection][key] = val
} else {
err = fmt.Errorf("invalid config file")
break
}
}
return
return currentSection, nil
}
// SectionInConfig determines whether a section is in the configuration.
@@ -112,41 +142,39 @@ func (c ConfigMap) SectionInConfig(section string) bool {
}
// ListSections returns the list of sections in the config map.
func (c ConfigMap) ListSections() (sections []string) {
func (c ConfigMap) ListSections() []string {
sections := make([]string, 0, len(c))
for section := range c {
sections = append(sections, section)
}
return
return sections
}
// WriteFile writes out the configuration to a file.
func (c ConfigMap) WriteFile(filename string) (err error) {
func (c ConfigMap) WriteFile(filename string) error {
file, err := os.Create(filename)
if err != nil {
return
return err
}
defer file.Close()
for _, section := range c.ListSections() {
sName := fmt.Sprintf("[ %s ]\n", section)
_, err = file.Write([]byte(sName))
if err != nil {
return
if _, err = file.WriteString(sName); err != nil {
return err
}
for k, v := range c[section] {
line := fmt.Sprintf("%s = %s\n", k, v)
_, err = file.Write([]byte(line))
if err != nil {
return
if _, err = file.WriteString(line); err != nil {
return err
}
}
_, err = file.Write([]byte{0x0a})
if err != nil {
return
if _, err = file.Write([]byte{0x0a}); err != nil {
return err
}
}
return
return nil
}
// AddSection creates a new section in the config map.
@@ -170,27 +198,26 @@ func (c ConfigMap) AddKeyVal(section, key, val string) {
}
// GetValue retrieves the value from a key map.
func (c ConfigMap) GetValue(section, key string) (val string, present bool) {
func (c ConfigMap) GetValue(section, key string) (string, bool) {
if c == nil {
return
return "", false
}
if section == "" {
section = DefaultSection
}
_, ok := c[section]
if !ok {
return
if _, ok := c[section]; !ok {
return "", false
}
val, present = c[section][key]
return
val, present := c[section][key]
return val, present
}
// GetValueDefault retrieves the value from a key map if present,
// otherwise the default value.
func (c ConfigMap) GetValueDefault(section, key, value string) (val string) {
func (c ConfigMap) GetValueDefault(section, key, value string) string {
kval, ok := c.GetValue(section, key)
if !ok {
return value
@@ -199,7 +226,7 @@ func (c ConfigMap) GetValueDefault(section, key, value string) (val string) {
}
// SectionKeys returns the sections in the config map.
func (c ConfigMap) SectionKeys(section string) (keys []string, present bool) {
func (c ConfigMap) SectionKeys(section string) ([]string, bool) {
if c == nil {
return nil, false
}
@@ -208,13 +235,12 @@ func (c ConfigMap) SectionKeys(section string) (keys []string, present bool) {
section = DefaultSection
}
cm := c
s, ok := cm[section]
s, ok := c[section]
if !ok {
return nil, false
}
keys = make([]string, 0, len(s))
keys := make([]string, 0, len(s))
for key := range s {
keys = append(keys, key)
}
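For reference, a small sketch of using the refactored parser; the file server.conf and its [database] section are hypothetical:

```go
package main

import (
	"fmt"

	"git.wntrmute.dev/kyle/goutils/config/iniconf"
)

func main() {
	cfg, err := iniconf.ParseFile("server.conf") // hypothetical ini file
	if err != nil {
		panic(err)
	}

	// Keys that appear before any [section] header land in the "default" section.
	host, ok := cfg.GetValue("database", "host")
	if !ok {
		host = "localhost"
	}
	port := cfg.GetValueDefault("database", "port", "5432")

	fmt.Println(host, port, cfg.ListSections())
}
```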

View File

@@ -1,18 +1,19 @@
package iniconf
package iniconf_test
import (
"errors"
"fmt"
"os"
"sort"
"testing"
"git.wntrmute.dev/kyle/goutils/config/iniconf"
)
// FailWithError is a utility for dumping errors and failing the test.
func FailWithError(t *testing.T, err error) {
fmt.Println("failed")
t.Log("failed")
if err != nil {
fmt.Println("[!] ", err.Error())
t.Log("[!] ", err.Error())
}
t.FailNow()
}
@@ -49,47 +50,50 @@ func stringSlicesEqual(slice1, slice2 []string) bool {
func TestGoodConfig(t *testing.T) {
testFile := "testdata/test.conf"
fmt.Printf("[+] validating known-good config... ")
cmap, err := ParseFile(testFile)
t.Logf("[+] validating known-good config... ")
cmap, err := iniconf.ParseFile(testFile)
if err != nil {
FailWithError(t, err)
} else if len(cmap) != 2 {
FailWithError(t, err)
}
fmt.Println("ok")
t.Log("ok")
}
func TestGoodConfig2(t *testing.T) {
testFile := "testdata/test2.conf"
fmt.Printf("[+] validating second known-good config... ")
cmap, err := ParseFile(testFile)
if err != nil {
t.Logf("[+] validating second known-good config... ")
cmap, err := iniconf.ParseFile(testFile)
switch {
case err != nil:
FailWithError(t, err)
} else if len(cmap) != 1 {
case len(cmap) != 1:
FailWithError(t, err)
} else if len(cmap["default"]) != 3 {
case len(cmap["default"]) != 3:
FailWithError(t, err)
default:
// nothing to do here
}
fmt.Println("ok")
t.Log("ok")
}
func TestBadConfig(t *testing.T) {
testFile := "testdata/bad.conf"
fmt.Printf("[+] ensure invalid config file fails... ")
_, err := ParseFile(testFile)
t.Logf("[+] ensure invalid config file fails... ")
_, err := iniconf.ParseFile(testFile)
if err == nil {
err = fmt.Errorf("invalid config file should fail")
err = errors.New("invalid config file should fail")
FailWithError(t, err)
}
fmt.Println("ok")
t.Log("ok")
}
func TestWriteConfigFile(t *testing.T) {
fmt.Printf("[+] ensure config file is written properly... ")
t.Logf("[+] ensure config file is written properly... ")
const testFile = "testdata/test.conf"
const testOut = "testdata/test.out"
cmap, err := ParseFile(testFile)
cmap, err := iniconf.ParseFile(testFile)
if err != nil {
FailWithError(t, err)
}
@@ -100,7 +104,7 @@ func TestWriteConfigFile(t *testing.T) {
FailWithError(t, err)
}
cmap2, err := ParseFile(testOut)
cmap2, err := iniconf.ParseFile(testOut)
if err != nil {
FailWithError(t, err)
}
@@ -110,25 +114,25 @@ func TestWriteConfigFile(t *testing.T) {
sort.Strings(sectionList1)
sort.Strings(sectionList2)
if !stringSlicesEqual(sectionList1, sectionList2) {
err = fmt.Errorf("section lists don't match")
err = errors.New("section lists don't match")
FailWithError(t, err)
}
for _, section := range sectionList1 {
for k := range cmap[section] {
if cmap[section][k] != cmap2[section][k] {
err = fmt.Errorf("config key doesn't match")
err = errors.New("config key doesn't match")
FailWithError(t, err)
}
}
}
fmt.Println("ok")
t.Log("ok")
}
func TestQuotedValue(t *testing.T) {
testFile := "testdata/test.conf"
fmt.Printf("[+] validating quoted value... ")
cmap, _ := ParseFile(testFile)
t.Logf("[+] validating quoted value... ")
cmap, _ := iniconf.ParseFile(testFile)
val := cmap["sectionName"]["key4"]
if val != " space at beginning and end " {
FailWithError(t, errors.New("Wrong value in double quotes ["+val+"]"))
@@ -138,5 +142,5 @@ func TestQuotedValue(t *testing.T) {
if val != " is quoted with single quotes " {
FailWithError(t, errors.New("Wrong value in single quotes ["+val+"]"))
}
fmt.Println("ok")
t.Log("ok")
}


@@ -1,5 +1,4 @@
//go:build !linux
// +build !linux
package config


@@ -1,7 +1,11 @@
package config
package config_test
import "testing"
import (
"testing"
"git.wntrmute.dev/kyle/goutils/config"
)
func TestDefaultPath(t *testing.T) {
t.Log(DefaultConfigPath("demoapp", "app.conf"))
t.Log(config.DefaultConfigPath("demoapp", "app.conf"))
}
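
As a quick illustration of the call this test exercises, a sketch that mirrors its arguments; the hunk does not show the function's return shape or the exact path produced, which is platform-dependent:

```
package main

import (
	"fmt"

	"git.wntrmute.dev/kyle/goutils/config"
)

func main() {
	// Arguments mirror the test above; the result is passed straight to
	// Println, so this compiles whether or not an error is also returned.
	fmt.Println(config.DefaultConfigPath("demoapp", "app.conf"))
}
```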


@@ -47,7 +47,7 @@ func ToFile(path string) (*DebugPrinter, error) {
}, nil
}
// To sets up a new DebugPrint to an io.WriteCloser.
// To will set up a new DebugPrint to an io.WriteCloser.
func To(w io.WriteCloser) *DebugPrinter {
return &DebugPrinter{
out: w,
@@ -55,21 +55,21 @@ func To(w io.WriteCloser) *DebugPrinter {
}
// Print calls fmt.Print if Enabled is true.
func (dbg *DebugPrinter) Print(v ...interface{}) {
func (dbg *DebugPrinter) Print(v ...any) {
if dbg.Enabled {
fmt.Fprint(dbg.out, v...)
}
}
// Println calls fmt.Println if Enabled is true.
func (dbg *DebugPrinter) Println(v ...interface{}) {
func (dbg *DebugPrinter) Println(v ...any) {
if dbg.Enabled {
fmt.Fprintln(dbg.out, v...)
}
}
// Printf calls fmt.Printf if Enabled is true.
func (dbg *DebugPrinter) Printf(format string, v ...interface{}) {
func (dbg *DebugPrinter) Printf(format string, v ...any) {
if dbg.Enabled {
fmt.Fprintf(dbg.out, format, v...)
}
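
Taken together, the DebugPrinter methods in this hunk can be exercised as in the sketch below; it assumes Enabled is the exported toggle referenced by the method bodies and that output is off by default:

```
package main

import (
	"os"

	"git.wntrmute.dev/kyle/goutils/dbg"
)

func main() {
	// Nothing is written until Enabled is switched on.
	d := dbg.To(os.Stderr)
	d.Enabled = true

	d.Println("starting up")
	d.Printf("loaded %d items\n", 3)
}
```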


@@ -2,7 +2,6 @@ package dbg
import (
"fmt"
"io/ioutil"
"os"
"testing"
@@ -50,7 +49,7 @@ func TestTo(t *testing.T) {
}
func TestToFile(t *testing.T) {
testFile, err := ioutil.TempFile("", "dbg")
testFile, err := os.CreateTemp(t.TempDir(), "dbg")
assert.NoErrorT(t, err)
err = testFile.Close()
assert.NoErrorT(t, err)
@@ -103,7 +102,7 @@ func TestWriting(t *testing.T) {
}
func TestToFileError(t *testing.T) {
testFile, err := ioutil.TempFile("", "dbg")
testFile, err := os.CreateTemp(t.TempDir(), "dbg")
assert.NoErrorT(t, err)
err = testFile.Chmod(0400)
assert.NoErrorT(t, err)


@@ -1,12 +0,0 @@
Simple fatal utilities for Go programs.
```
result, err := doSomething()
die.If(err)
ok := processResult(result)
if !ok {
die.With("failed to process result %s", result.Name)
}
```


@@ -1,4 +1,5 @@
// Package die contains utilities for fatal error handling.
// Package die contains utilities for fatal error handling. It
// presents simple fatal utilities for Go programs.
package die
import (
@@ -15,14 +16,14 @@ func If(err error) {
}
// With prints the message to stderr, appending a newline, and exits.
func With(fstr string, args ...interface{}) {
func With(fstr string, args ...any) {
out := fmt.Sprintf("[!] %s\n", fstr)
fmt.Fprintf(os.Stderr, out, args...)
os.Exit(1)
}
// When prints the formatted message to stderr and exits if cond is true.
func When(cond bool, fstr string, args ...interface{}) {
func When(cond bool, fstr string, args ...any) {
if cond {
With(fstr, args...)
}
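
A minimal sketch of the die helpers in this hunk, standing in for the README example removed above; loadConfig is a hypothetical helper, and If exiting on a non-nil error is inferred from the package doc:

```
package main

import (
	"errors"

	"git.wntrmute.dev/kyle/goutils/die"
)

// loadConfig is a hypothetical helper used only for illustration.
func loadConfig(path string) (string, error) {
	if path == "" {
		return "", errors.New("no path given")
	}
	return "config: " + path, nil
}

func main() {
	cfg, err := loadConfig("app.conf")
	die.If(err) // prints the error and exits if err != nil

	die.When(cfg == "", "empty configuration for %s", "app.conf")

	// die.With("unsupported state: %s", cfg) // unconditional: print to stderr and exit
}
```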


@@ -1,10 +1,10 @@
//go:build !windows
// +build !windows
// Package fileutil contains common file functions.
package fileutil
import (
"math"
"os"
"golang.org/x/sys/unix"
@@ -46,5 +46,9 @@ const (
// Access returns nil if the process can access path with the given mode,
// and a non-nil error otherwise.
func Access(path string, mode int) error {
return unix.Access(path, uint32(mode))
// Validate the conversion to avoid potential integer overflow (gosec G115).
if mode < 0 || uint64(mode) > uint64(math.MaxUint32) {
return unix.EINVAL
}
return unix.Access(path, uint32(mode)) // #nosec G115 - handled above.
}
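
For orientation, a short sketch of calling Access on a non-Windows build; the path and the unix.R_OK mode are illustrative, and the package's own access-mode constants from the const block are not visible in this hunk:

```
package main

import (
	"log"

	"golang.org/x/sys/unix"

	"git.wntrmute.dev/kyle/goutils/fileutil"
)

func main() {
	// unix.R_OK asks whether the process can read the file.
	if err := fileutil.Access("/etc/hosts", unix.R_OK); err != nil {
		log.Fatalf("no read access: %v", err)
	}
	log.Println("read access confirmed")
}
```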


@@ -1,5 +1,4 @@
//go:build windows
// +build windows
// Package fileutil contains common file functions.
package fileutil

6
go.mod

@@ -1,14 +1,14 @@
module git.wntrmute.dev/kyle/goutils
go 1.20
go 1.24.0
require (
github.com/hashicorp/go-syslog v1.0.0
github.com/kr/text v0.2.0
github.com/pkg/errors v0.9.1
github.com/pkg/sftp v1.12.0
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad
golang.org/x/crypto v0.44.0
golang.org/x/sys v0.38.0
gopkg.in/yaml.v2 v2.4.0
)

5
go.sum

@@ -27,12 +27,17 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b h1:Qwe1rC8PSniVfAFPFJeyUkB+zcysC3RgJBAGk7eqBEU=
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.44.0 h1:A97SsFvM3AIwEEmTBiaxPPTYpDC47w720rdiiUvgoAU=
golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=


@@ -11,7 +11,7 @@ import (
var progname = filepath.Base(os.Args[0])
// ProgName returns what lib thinks the program name is, namely the
// basename of of argv0.
// basename of argv0.
//
// It is similar to the Linux __progname function.
func ProgName() string {


@@ -23,7 +23,7 @@ func Example() {
map[string]string{"when": time.Now().String()})
}
func ExampleNewFromFile() {
func ExampleNewSplitFile() {
flog, err := logging.NewSplitFile("example.log", "example.err", true)
if err != nil {
log.Fatal("filelog", "failed to open logger",