Compare commits


17 Commits

Author SHA1 Message Date
3d9625b40b Fix calls to die.With. 2025-11-15 16:10:14 -08:00
547a0d8f32 disable linting until cleanups are finished 2025-11-15 16:00:58 -08:00
876a0a2c2b fileutil: linter fixes. 2025-11-15 15:58:51 -08:00
a37d28e3d7 die: linter feedback fixes. 2025-11-15 15:55:17 -08:00
ddf26e00af dbg: linter feedback updates. 2025-11-15 15:53:57 -08:00
e4db163efe Cleaning up. 2025-11-15 15:48:18 -08:00
571443c282 config: apply linting feedback. 2025-11-15 15:47:29 -08:00
aba5e519a4 First round of linter cleanups. 2025-11-15 15:11:07 -08:00
5fcba0e814 Trying a different config. 2025-11-15 13:34:18 -08:00
928c643d8d Fix linter config. 2025-11-15 13:16:30 -08:00
fd9f9f6d66 Fix linting. 2025-11-15 13:08:38 -08:00
a5b7727c8f Add linting stage. 2025-11-15 13:05:00 -08:00
3135c18d95 ignore goland directory 2025-11-15 01:53:40 -08:00
1d32a64dc0 add cert-revcheck 2025-11-14 22:56:10 -08:00
d70ca5ee87 adding golangci-lint 2025-11-14 22:54:06 -08:00
eca3a229a4 config: golangci-lint cleanups. 2025-11-14 22:53:02 -08:00
4c1eb03671 Cleaning up golangci-lint warnings. 2025-11-14 22:49:54 -08:00
32 changed files with 1167 additions and 450 deletions

View File

@@ -5,6 +5,30 @@ version: 2.1
# Define a job to be invoked later in a workflow. # Define a job to be invoked later in a workflow.
# See: https://circleci.com/docs/2.0/configuration-reference/#jobs # See: https://circleci.com/docs/2.0/configuration-reference/#jobs
jobs: jobs:
lint:
working_directory: ~/repo
docker:
- image: cimg/go:1.22.2
steps:
- checkout
- restore_cache:
keys:
- go-mod-v4-{{ checksum "go.sum" }}
- run:
name: Install Dependencies
command: go mod download
- save_cache:
key: go-mod-v4-{{ checksum "go.sum" }}
paths:
- "/go/pkg/mod"
- run:
name: Install golangci-lint
command: |
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin
- run:
name: Run golangci-lint
command: golangci-lint run --timeout=5m
testbuild: testbuild:
working_directory: ~/repo working_directory: ~/repo
# Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub. # Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub.
@@ -27,16 +51,17 @@ jobs:
- "/go/pkg/mod" - "/go/pkg/mod"
- run: - run:
name: Run tests name: Run tests
command: go test ./... command: go test -race ./...
- run: - run:
name: Run build name: Run build
command: go build ./... command: go build ./...
- store_test_results: - store_test_results:
path: /tmp/test-reports path: /tmp/test-reports
# Invoke jobs via workflows # Invoke jobs via workflows
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows # See: https://circleci.com/docs/2.0/configuration-reference/#workflows
# Linting is disabled while cleanups are ongoing.
workflows: workflows:
testbuild: testbuild:
jobs: jobs:
- testbuild - testbuild
# - lint

5 .gitignore vendored
View File

@@ -1,4 +1 @@
bazel-bin .idea
bazel-goutils
bazel-out
bazel-testlogs

473 .golangci.yml Normal file
View File

@@ -0,0 +1,473 @@
# This file is licensed under the terms of the MIT license https://opensource.org/license/mit
# Copyright (c) 2021-2025 Marat Reymers
## Golden config for golangci-lint v2.6.2
#
# This is the best config for golangci-lint based on my experience and opinion.
# It is very strict, but not extremely strict.
# Feel free to adapt it to suit your needs.
# If this config helps you, please consider keeping a link to this file (see the next comment).
# Based on https://gist.github.com/maratori/47a4d00457a92aa426dbd48a18776322
version: "2"
issues:
# Maximum count of issues with the same text.
# Set to 0 to disable.
# Default: 3
max-same-issues: 50
formatters:
enable:
- goimports # checks if the code and import statements are formatted according to the 'goimports' command
- golines # checks if code is formatted, and fixes long lines
## you may want to enable
#- gci # checks if code and import statements are formatted, with additional rules
#- gofmt # checks if the code is formatted according to 'gofmt' command
#- gofumpt # enforces a stricter format than 'gofmt', while being backwards compatible
#- swaggo # formats swaggo comments
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
goimports:
# A list of prefixes, which, if set, checks import paths
# with the given prefixes are grouped after 3rd-party packages.
# Default: []
local-prefixes:
- github.com/my/project
golines:
# Target maximum line length.
# Default: 100
max-len: 120
linters:
enable:
- asasalint # checks for pass []any as any in variadic func(...any)
- asciicheck # checks that your code does not contain non-ASCII identifiers
- bidichk # checks for dangerous unicode character sequences
- bodyclose # checks whether HTTP response body is closed successfully
- canonicalheader # checks whether net/http.Header uses canonical header
- copyloopvar # detects places where loop variables are copied (Go 1.22+)
- cyclop # checks function and package cyclomatic complexity
- depguard # checks if package imports are in a list of acceptable packages
- dupl # tool for code clone detection
- durationcheck # checks for two durations multiplied together
- errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases
- errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error
- errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13
- exhaustive # checks exhaustiveness of enum switch statements
- exptostd # detects functions from golang.org/x/exp/ that can be replaced by std functions
- fatcontext # detects nested contexts in loops
- forbidigo # forbids identifiers
- funcorder # checks the order of functions, methods, and constructors
- funlen # tool for detection of long functions
- gocheckcompilerdirectives # validates go compiler directive comments (//go:)
- gochecksumtype # checks exhaustiveness on Go "sum types"
- gocognit # computes and checks the cognitive complexity of functions
- goconst # finds repeated strings that could be replaced by a constant
- gocritic # provides diagnostics that check for bugs, performance and style issues
- gocyclo # computes and checks the cyclomatic complexity of functions
- godoclint # checks Golang's documentation practice
- godot # checks if comments end in a period
- gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod
- goprintffuncname # checks that printf-like functions are named with f at the end
- gosec # inspects source code for security problems
- govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
- iface # checks the incorrect use of interfaces, helping developers avoid interface pollution
- ineffassign # detects when assignments to existing variables are not used
- intrange # finds places where for loops could make use of an integer range
- iotamixing # checks if iotas are being used in const blocks with other non-iota declarations
- loggercheck # checks key value pairs for common logger libraries (kitlog,klog,logr,zap)
- makezero # finds slice declarations with non-zero initial length
- mirror # reports wrong mirror patterns of bytes/strings usage
- mnd # detects magic numbers
- modernize # suggests simplifications to Go code, using modern language and library features
- musttag # enforces field tags in (un)marshaled structs
- nakedret # finds naked returns in functions greater than a specified function length
- nestif # reports deeply nested if statements
- nilerr # finds the code that returns nil even if it checks that the error is not nil
- nilnesserr # reports that it checks for err != nil, but it returns a different nil value error (powered by nilness and nilerr)
- nilnil # checks that there is no simultaneous return of nil error and an invalid value
- noctx # finds sending http request without context.Context
- nolintlint # reports ill-formed or insufficient nolint directives
- nonamedreturns # reports all named returns
- nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL
- perfsprint # checks that fmt.Sprintf can be replaced with a faster alternative
- predeclared # finds code that shadows one of Go's predeclared identifiers
- promlinter # checks Prometheus metrics naming via promlint
- protogetter # reports direct reads from proto message fields when getters should be used
- reassign # checks that package variables are not reassigned
- recvcheck # checks for receiver type consistency
- revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint
- rowserrcheck # checks whether Err of rows is checked successfully
- sloglint # ensure consistent code style when using log/slog
- spancheck # checks for mistakes with OpenTelemetry/Census spans
- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed
- staticcheck # is a go vet on steroids, applying a ton of static analysis checks
- testableexamples # checks if examples are testable (have an expected output)
- testifylint # checks usage of github.com/stretchr/testify
- testpackage # makes you use a separate _test package
- tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes
- unconvert # removes unnecessary type conversions
- unparam # reports unused function parameters
- unqueryvet # detects SELECT * in SQL queries and SQL builders, encouraging explicit column selection
- unused # checks for unused constants, variables, functions and types
- usestdlibvars # detects the possibility to use variables/constants from the Go standard library
- usetesting # reports uses of functions with replacement inside the testing package
- wastedassign # finds wasted assignment statements
- whitespace # detects leading and trailing whitespace
## you may want to enable
#- arangolint # opinionated best practices for arangodb client
#- decorder # checks declaration order and count of types, constants, variables and functions
#- exhaustruct # [highly recommend to enable] checks if all structure fields are initialized
#- ginkgolinter # [if you use ginkgo/gomega] enforces standards of using ginkgo and gomega
#- godox # detects usage of FIXME, TODO and other keywords inside comments
#- goheader # checks is file header matches to pattern
#- inamedparam # [great idea, but too strict, need to ignore a lot of cases by default] reports interfaces with unnamed method parameters
#- interfacebloat # checks the number of methods inside an interface
#- ireturn # accept interfaces, return concrete types
#- noinlineerr # disallows inline error handling `if err := ...; err != nil {`
#- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated
#- tagalign # checks that struct tags are well aligned
#- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope
#- wrapcheck # checks that errors returned from external packages are wrapped
#- zerologlint # detects the wrong usage of zerolog that a user forgets to dispatch zerolog.Event
## disabled
#- containedctx # detects struct contained context.Context field
#- contextcheck # [too many false positives] checks the function whether use a non-inherited context
#- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
#- dupword # [useless without config] checks for duplicate words in the source code
#- err113 # [too strict] checks the errors handling expressions
#- errchkjson # [don't see profit + I'm against of omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted
#- forcetypeassert # [replaced by errcheck] finds forced type assertions
#- gomodguard # [use more powerful depguard] allow and block lists linter for direct Go module dependencies
#- gosmopolitan # reports certain i18n/l10n anti-patterns in your Go codebase
#- grouper # analyzes expression groups
#- importas # enforces consistent import aliases
#- lll # [replaced by golines] reports long lines
#- maintidx # measures the maintainability index of each function
#- misspell # [useless] finds commonly misspelled English words in comments
#- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity
#- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test
#- tagliatelle # checks the struct tags
#- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers
#- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines
#- wsl_v5 # [too strict and mostly code is not more readable] add or remove empty lines
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
settings:
cyclop:
# The maximal code complexity to report.
# Default: 10
max-complexity: 30
# The maximal average package complexity.
# If it's higher than 0.0 (float) the check is enabled.
# Default: 0.0
package-average: 10.0
depguard:
# Rules to apply.
#
# Variables:
# - File Variables
# Use an exclamation mark `!` to negate a variable.
# Example: `!$test` matches any file that is not a go test file.
#
# `$all` - matches all go files
# `$test` - matches all go test files
#
# - Package Variables
#
# `$gostd` - matches all of go's standard library (Pulled from `GOROOT`)
#
# Default (applies if no custom rules are defined): Only allow $gostd in all files.
rules:
"deprecated":
# List of file globs that will match this list of settings to compare against.
# By default, if a path is relative, it is relative to the directory where the golangci-lint command is executed.
# The placeholder '${base-path}' is substituted with a path relative to the mode defined with `run.relative-path-mode`.
# The placeholder '${config-path}' is substituted with a path relative to the configuration file.
# Default: $all
files:
- "$all"
# List of packages that are not allowed.
# Entries can be a variable (starting with $), a string prefix, or an exact match (if ending with $).
# Default: []
deny:
- pkg: github.com/golang/protobuf
desc: Use google.golang.org/protobuf instead, see https://developers.google.com/protocol-buffers/docs/reference/go/faq#modules
- pkg: github.com/satori/go.uuid
desc: Use github.com/google/uuid instead, satori's package is not maintained
- pkg: github.com/gofrs/uuid$
desc: Use github.com/gofrs/uuid/v5 or later, it was not a go module before v5
"non-test files":
files:
- "!$test"
deny:
- pkg: math/rand$
desc: Use math/rand/v2 instead, see https://go.dev/blog/randv2
"non-main files":
files:
- "!**/main.go"
deny:
- pkg: log$
desc: Use log/slog instead, see https://go.dev/blog/slog
embeddedstructfieldcheck:
# Checks that sync.Mutex and sync.RWMutex are not used as embedded fields.
# Default: false
forbid-mutex: true
errcheck:
# Report about not checking of errors in type assertions: `a := b.(MyStruct)`.
# Such cases aren't reported by default.
# Default: false
check-type-assertions: true
exhaustive:
# Program elements to check for exhaustiveness.
# Default: [ switch ]
check:
- switch
- map
exhaustruct:
# List of regular expressions to match type names that should be excluded from processing.
# Anonymous structs can be matched by '<anonymous>' alias.
# Has precedence over `include`.
# Each regular expression must match the full type name, including package path.
# For example, to match type `net/http.Cookie` regular expression should be `.*/http\.Cookie`,
# but not `http\.Cookie`.
# Default: []
exclude:
# std libs
- ^net/http.Client$
- ^net/http.Cookie$
- ^net/http.Request$
- ^net/http.Response$
- ^net/http.Server$
- ^net/http.Transport$
- ^net/url.URL$
- ^os/exec.Cmd$
- ^reflect.StructField$
# public libs
- ^github.com/Shopify/sarama.Config$
- ^github.com/Shopify/sarama.ProducerMessage$
- ^github.com/mitchellh/mapstructure.DecoderConfig$
- ^github.com/prometheus/client_golang/.+Opts$
- ^github.com/spf13/cobra.Command$
- ^github.com/spf13/cobra.CompletionOptions$
- ^github.com/stretchr/testify/mock.Mock$
- ^github.com/testcontainers/testcontainers-go.+Request$
- ^github.com/testcontainers/testcontainers-go.FromDockerfile$
- ^golang.org/x/tools/go/analysis.Analyzer$
- ^google.golang.org/protobuf/.+Options$
- ^gopkg.in/yaml.v3.Node$
# Allows empty structures in return statements.
# Default: false
allow-empty-returns: true
funcorder:
# Checks if the exported methods of a structure are placed before the non-exported ones.
# Default: true
struct-method: false
funlen:
# Checks the number of lines in a function.
# If lower than 0, disable the check.
# Default: 60
lines: 100
# Checks the number of statements in a function.
# If lower than 0, disable the check.
# Default: 40
statements: 50
gochecksumtype:
# Presence of `default` case in switch statements satisfies exhaustiveness, if all members are not listed.
# Default: true
default-signifies-exhaustive: false
gocognit:
# Minimal code complexity to report.
# Default: 30 (but we recommend 10-20)
min-complexity: 20
gocritic:
# Settings passed to gocritic.
# The settings key is the name of a supported gocritic checker.
# The list of supported checkers can be found at https://go-critic.com/overview.
settings:
captLocal:
# Whether to restrict checker to params only.
# Default: true
paramsOnly: false
underef:
# Whether to skip (*x).method() calls where x is a pointer receiver.
# Default: true
skipRecvDeref: false
godoclint:
# List of rules to enable in addition to the default set.
# Default: empty
enable:
# Assert no unused link in godocs.
# https://github.com/godoc-lint/godoc-lint?tab=readme-ov-file#no-unused-link
- no-unused-link
govet:
# Enable all analyzers.
# Default: false
enable-all: true
# Disable analyzers by name.
# Run `GL_DEBUG=govet golangci-lint run --enable=govet` to see default, all available analyzers, and enabled analyzers.
# Default: []
disable:
- fieldalignment # too strict
# Settings per analyzer.
settings:
shadow:
# Whether to be strict about shadowing; can be noisy.
# Default: false
strict: true
inamedparam:
# Skips check for interface methods with only a single parameter.
# Default: false
skip-single-param: true
mnd:
# List of function patterns to exclude from analysis.
# Values always ignored: `time.Date`,
# `strconv.FormatInt`, `strconv.FormatUint`, `strconv.FormatFloat`,
# `strconv.ParseInt`, `strconv.ParseUint`, `strconv.ParseFloat`.
# Default: []
ignored-functions:
- args.Error
- flag.Arg
- flag.Duration.*
- flag.Float.*
- flag.Int.*
- flag.Uint.*
- os.Chmod
- os.Mkdir.*
- os.OpenFile
- os.WriteFile
- prometheus.ExponentialBuckets.*
- prometheus.LinearBuckets
nakedret:
# Make an issue if func has more lines of code than this setting, and it has naked returns.
# Default: 30
max-func-lines: 0
nolintlint:
# Exclude the following linters from requiring an explanation.
# Default: []
allow-no-explanation: [ funlen, gocognit, golines ]
# Enable to require an explanation of nonzero length after each nolint directive.
# Default: false
require-explanation: true
# Enable to require nolint directives to mention the specific linter being suppressed.
# Default: false
require-specific: true
perfsprint:
# Optimizes into strings concatenation.
# Default: true
strconcat: false
reassign:
# Patterns for global variable names that are checked for reassignment.
# See https://github.com/curioswitch/go-reassign#usage
# Default: ["EOF", "Err.*"]
patterns:
- ".*"
rowserrcheck:
# database/sql is always checked.
# Default: []
packages:
- github.com/jmoiron/sqlx
sloglint:
# Enforce not using global loggers.
# Values:
# - "": disabled
# - "all": report all global loggers
# - "default": report only the default slog logger
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#no-global
# Default: ""
no-global: all
# Enforce using methods that accept a context.
# Values:
# - "": disabled
# - "all": report all contextless calls
# - "scope": report only if a context exists in the scope of the outermost function
# https://github.com/go-simpler/sloglint?tab=readme-ov-file#context-only
# Default: ""
context: scope
staticcheck:
# SAxxxx checks in https://staticcheck.dev/docs/configuration/options/#checks
# Example (to disable some checks): [ "all", "-SA1000", "-SA1001"]
# Default: ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022"]
checks:
- all
# Incorrect or missing package comment.
# https://staticcheck.dev/docs/checks/#ST1000
- -ST1000
# Use consistent method receiver names.
# https://staticcheck.dev/docs/checks/#ST1016
- -ST1016
# Omit embedded fields from selector expression.
# https://staticcheck.dev/docs/checks/#QF1008
- -QF1008
usetesting:
# Enable/disable `os.TempDir()` detections.
# Default: false
os-temp-dir: true
exclusions:
# Log a warning if an exclusion rule is unused.
# Default: false
warn-unused: true
# Predefined exclusion rules.
# Default: []
presets:
- std-error-handling
- common-false-positives
rules:
- path: 'ahash/ahash.go'
linters: [ staticcheck, gosec ]
- path: 'backoff/backoff_test.go'
linters: [ testpackage ]
- path: 'dbg/dbg_test.go'
linters: [ testpackage ]
- source: 'TODO'
linters: [ godot ]
- text: 'should have a package comment'
linters: [ revive ]
- text: 'exported \S+ \S+ should have comment( \(or a comment on this block\))? or be unexported'
linters: [ revive ]
- text: 'package comment should be of the form ".+"'
source: '// ?(nolint|TODO)'
linters: [ revive ]
- text: 'comment on exported \S+ \S+ should be of the form ".+"'
source: '// ?(nolint|TODO)'
linters: [ revive, staticcheck ]
- path: '_test\.go'
linters:
- bodyclose
- dupl
- errcheck
- funlen
- goconst
- gosec
- noctx
- wrapcheck
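
With the nolintlint settings above (require-specific and require-explanation both enabled), any suppression in the code has to name the linter it silences and justify itself. A minimal sketch of the expected form, using a hypothetical reportSizes helper purely for illustration:

package main

import "fmt"

// reportSizes prints a few fixed buffer sizes. The magic numbers are
// deliberate, so the mnd linter is suppressed with a directive that names
// the linter and gives a reason, as require-specific and require-explanation
// demand. A bare //nolint with no linter or reason would itself be reported
// by nolintlint under this config.
func reportSizes() {
	fmt.Println(4096)  //nolint:mnd // page size is intentionally hard-coded
	fmt.Println(65536) //nolint:mnd // default buffer size for this example
}

func main() {
	reportSizes()
}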

View File

@@ -1,26 +0,0 @@
arch:
- amd64
- ppc64le
sudo: false
language: go
go:
- tip
- 1.9
jobs:
exclude:
- go: 1.9
arch: amd64
- go: 1.9
arch: ppc64le
script:
- go get golang.org/x/lint/golint
- go get golang.org/x/tools/cmd/cover
- go get github.com/kisom/goutils/...
- go test -cover github.com/kisom/goutils/...
- golint github.com/kisom/goutils/...
notifications:
email:
recipients:
- coder@kyleisom.net
on_success: change
on_failure: change

View File

@@ -4,8 +4,8 @@
package ahash package ahash
import ( import (
"crypto/md5" "crypto/md5" // #nosec G505
"crypto/sha1" "crypto/sha1" // #nosec G501
"crypto/sha256" "crypto/sha256"
"crypto/sha512" "crypto/sha512"
"errors" "errors"
@@ -17,34 +17,15 @@ import (
"io" "io"
"sort" "sort"
"git.wntrmute.dev/kyle/goutils/assert"
"golang.org/x/crypto/blake2b" "golang.org/x/crypto/blake2b"
"golang.org/x/crypto/blake2s" "golang.org/x/crypto/blake2s"
"golang.org/x/crypto/md4" "golang.org/x/crypto/md4" // #nosec G506
"golang.org/x/crypto/ripemd160" "golang.org/x/crypto/ripemd160" // #nosec G507
"golang.org/x/crypto/sha3" "golang.org/x/crypto/sha3"
"git.wntrmute.dev/kyle/goutils/assert"
) )
func sha224Slicer(bs []byte) []byte {
sum := sha256.Sum224(bs)
return sum[:]
}
func sha256Slicer(bs []byte) []byte {
sum := sha256.Sum256(bs)
return sum[:]
}
func sha384Slicer(bs []byte) []byte {
sum := sha512.Sum384(bs)
return sum[:]
}
func sha512Slicer(bs []byte) []byte {
sum := sha512.Sum512(bs)
return sum[:]
}
// Hash represents a generic hash function that may or may not be secure. It // Hash represents a generic hash function that may or may not be secure. It
// satisfies the hash.Hash interface. // satisfies the hash.Hash interface.
type Hash struct { type Hash struct {
@@ -247,17 +228,17 @@ func init() {
// HashList returns a sorted list of all the hash algorithms supported by the // HashList returns a sorted list of all the hash algorithms supported by the
// package. // package.
func HashList() []string { func HashList() []string {
return hashList[:] return hashList
} }
// SecureHashList returns a sorted list of all the secure (cryptographic) hash // SecureHashList returns a sorted list of all the secure (cryptographic) hash
// algorithms supported by the package. // algorithms supported by the package.
func SecureHashList() []string { func SecureHashList() []string {
return secureHashList[:] return secureHashList
} }
// InsecureHashList returns a sorted list of all the insecure hash algorithms // InsecureHashList returns a sorted list of all the insecure hash algorithms
// supported by the package. // supported by the package.
func InsecureHashList() []string { func InsecureHashList() []string {
return insecureHashList[:] return insecureHashList
} }
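
The HashList, SecureHashList, and InsecureHashList changes drop the redundant full-slice expression: for a slice, hashList[:] is the same slice header as hashList, which is the pattern gocritic's unslice check flags. A minimal standalone sketch, assuming hashList is a package-level []string as in this file:

package main

import "fmt"

var hashList = []string{"md5", "sha256", "sha512"} // stand-in for the package's sorted list

func main() {
	// For a slice, hashList[:] is the same slice header as hashList, so the
	// re-slice adds nothing; gocritic's unslice check flags exactly this.
	a := hashList[:]
	b := hashList
	fmt.Println(len(a) == len(b), cap(a) == cap(b)) // true true
}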

View File

@@ -1,16 +1,18 @@
package ahash package ahash_test
import ( import (
"bytes" "bytes"
"encoding/hex"
"fmt" "fmt"
"testing" "testing"
"git.wntrmute.dev/kyle/goutils/ahash"
"git.wntrmute.dev/kyle/goutils/assert" "git.wntrmute.dev/kyle/goutils/assert"
) )
func TestSecureHash(t *testing.T) { func TestSecureHash(t *testing.T) {
algo := "sha256" algo := "sha256"
h, err := New(algo) h, err := ahash.New(algo)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, h.IsSecure(), algo+" should be a secure hash") assert.BoolT(t, h.IsSecure(), algo+" should be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo") assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -19,28 +21,28 @@ func TestSecureHash(t *testing.T) {
var data []byte var data []byte
var expected = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" var expected = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
sum, err := Sum(algo, data) sum, err := ahash.Sum(algo, data)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum)) assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello, world") data = []byte("hello, world")
buf := bytes.NewBuffer(data) buf := bytes.NewBuffer(data)
expected = "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b" expected = "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
sum, err = SumReader(algo, buf) sum, err = ahash.SumReader(algo, buf)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum)) assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello world") data = []byte("hello world")
_, err = h.Write(data) _, err = h.Write(data)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
unExpected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b" unExpected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
sum = h.Sum(nil) sum = h.Sum(nil)
assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected)) assert.BoolT(t, hex.EncodeToString(sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
} }
func TestInsecureHash(t *testing.T) { func TestInsecureHash(t *testing.T) {
algo := "md5" algo := "md5"
h, err := New(algo) h, err := ahash.New(algo)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash") assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo") assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -49,28 +51,28 @@ func TestInsecureHash(t *testing.T) {
var data []byte var data []byte
var expected = "d41d8cd98f00b204e9800998ecf8427e" var expected = "d41d8cd98f00b204e9800998ecf8427e"
sum, err := Sum(algo, data) sum, err := ahash.Sum(algo, data)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum)) assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello, world") data = []byte("hello, world")
buf := bytes.NewBuffer(data) buf := bytes.NewBuffer(data)
expected = "e4d7f1b4ed2e42d15898f4b27b019da4" expected = "e4d7f1b4ed2e42d15898f4b27b019da4"
sum, err = SumReader(algo, buf) sum, err = ahash.SumReader(algo, buf)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum)) assert.BoolT(t, hex.EncodeToString(sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))
data = []byte("hello world") data = []byte("hello world")
_, err = h.Write(data) _, err = h.Write(data)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
unExpected := "e4d7f1b4ed2e42d15898f4b27b019da4" unExpected := "e4d7f1b4ed2e42d15898f4b27b019da4"
sum = h.Sum(nil) sum = h.Sum(nil)
assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected)) assert.BoolT(t, hex.EncodeToString(sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
} }
func TestHash32(t *testing.T) { func TestHash32(t *testing.T) {
algo := "crc32-ieee" algo := "crc32-ieee"
h, err := New(algo) h, err := ahash.New(algo)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash") assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo") assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -102,7 +104,7 @@ func TestHash32(t *testing.T) {
func TestHash64(t *testing.T) { func TestHash64(t *testing.T) {
algo := "crc64" algo := "crc64"
h, err := New(algo) h, err := ahash.New(algo)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash") assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo") assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
@@ -133,9 +135,9 @@ func TestHash64(t *testing.T) {
} }
func TestListLengthSanity(t *testing.T) { func TestListLengthSanity(t *testing.T) {
all := HashList() all := ahash.HashList()
secure := SecureHashList() secure := ahash.SecureHashList()
insecure := InsecureHashList() insecure := ahash.InsecureHashList()
assert.BoolT(t, len(all) == len(secure)+len(insecure)) assert.BoolT(t, len(all) == len(secure)+len(insecure))
} }
@@ -146,11 +148,11 @@ func TestSumLimitedReader(t *testing.T) {
extendedData := bytes.NewBufferString("hello, world! this is an extended message") extendedData := bytes.NewBufferString("hello, world! this is an extended message")
expected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b" expected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
hash, err := SumReader("sha256", data) hash, err := ahash.SumReader("sha256", data)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, fmt.Sprintf("%x", hash) == expected, fmt.Sprintf("have hash %x, want %s", hash, expected)) assert.BoolT(t, hex.EncodeToString(hash) == expected, fmt.Sprintf("have hash %x, want %s", hash, expected))
extendedHash, err := SumLimitedReader("sha256", extendedData, int64(dataLen)) extendedHash, err := ahash.SumLimitedReader("sha256", extendedData, int64(dataLen))
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
assert.BoolT(t, bytes.Equal(hash, extendedHash), fmt.Sprintf("have hash %x, want %x", extendedHash, hash)) assert.BoolT(t, bytes.Equal(hash, extendedHash), fmt.Sprintf("have hash %x, want %x", extendedHash, hash))
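
Throughout these tests, fmt.Sprintf("%x", sum) is replaced by hex.EncodeToString(sum), the form perfsprint recommends for hex-encoding a byte slice; the two produce identical strings. A small sketch of the equivalence:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	sum := sha256.Sum256([]byte("hello, world"))

	// Both yield the same lowercase hex string; the hex package version
	// avoids the reflection-based formatting path inside fmt.
	viaSprintf := fmt.Sprintf("%x", sum[:])
	viaHex := hex.EncodeToString(sum[:])

	fmt.Println(viaSprintf == viaHex) // true
}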

View File

@@ -9,6 +9,7 @@
package assert package assert
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"runtime" "runtime"
@@ -16,11 +17,13 @@ import (
"testing" "testing"
) )
const callerSkip = 2
// NoDebug can be set to true to cause all asserts to be ignored. // NoDebug can be set to true to cause all asserts to be ignored.
var NoDebug bool var NoDebug bool
func die(what string, a ...string) { func die(what string, a ...string) {
_, file, line, ok := runtime.Caller(2) _, file, line, ok := runtime.Caller(callerSkip)
if !ok { if !ok {
panic(what) panic(what)
} }
@@ -31,30 +34,32 @@ func die(what string, a ...string) {
s = ": " + s s = ": " + s
} }
panic(what + s) panic(what + s)
} else {
fmt.Fprintf(os.Stderr, "%s", what)
if len(a) > 0 {
s := strings.Join(a, ", ")
fmt.Fprintln(os.Stderr, ": "+s)
} else {
fmt.Fprintf(os.Stderr, "\n")
}
fmt.Fprintf(os.Stderr, "\t%s line %d\n", file, line)
os.Exit(1)
} }
fmt.Fprintf(os.Stderr, "%s", what)
if len(a) > 0 {
s := strings.Join(a, ", ")
fmt.Fprintln(os.Stderr, ": "+s)
} else {
fmt.Fprintf(os.Stderr, "\n")
}
fmt.Fprintf(os.Stderr, "\t%s line %d\n", file, line)
os.Exit(1)
} }
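
The die rewrite above follows revive's early-return/indent-error-flow guidance: when the first branch ends in a terminating statement such as panic, the else block can be dropped and its body outdented. A standalone sketch of that shape (the fail helper is illustrative, not the package's code):

package main

import (
	"fmt"
	"os"
)

// fail mirrors the shape die now has: the first branch ends in a terminating
// statement, so the code that used to live in an else block simply follows
// the if at the outer indentation level.
func fail(what string, debug bool) {
	if debug {
		panic(what)
	}

	fmt.Fprintln(os.Stderr, what)
	os.Exit(1)
}

func main() {
	fail("something went wrong", false)
}
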
// Bool asserts that cond is false. // Bool asserts that cond is false.
// //
// For example, this would replace // For example, this would replace
// if x < 0 { //
// log.Fatal("x is subzero") // if x < 0 {
// } // log.Fatal("x is subzero")
// }
// //
// The same assertion would be // The same assertion would be
// assert.Bool(x, "x is subzero") //
// assert.Bool(x, "x is subzero")
func Bool(cond bool, s ...string) { func Bool(cond bool, s ...string) {
if NoDebug { if NoDebug {
return return
@@ -68,11 +73,12 @@ func Bool(cond bool, s ...string) {
// Error asserts that err is not nil, e.g. that an error has occurred. // Error asserts that err is not nil, e.g. that an error has occurred.
// //
// For example, // For example,
// if err == nil { //
// log.Fatal("call to <something> should have failed") // if err == nil {
// } // log.Fatal("call to <something> should have failed")
// // becomes // }
// assert.Error(err, "call to <something> should have failed") // // becomes
// assert.Error(err, "call to <something> should have failed")
func Error(err error, s ...string) { func Error(err error, s ...string) {
if NoDebug { if NoDebug {
return return
@@ -100,7 +106,7 @@ func NoError(err error, s ...string) {
// ErrorEq asserts that the actual error is the expected error. // ErrorEq asserts that the actual error is the expected error.
func ErrorEq(expected, actual error) { func ErrorEq(expected, actual error) {
if NoDebug || (expected == actual) { if NoDebug || (errors.Is(expected, actual)) {
return return
} }
@@ -155,7 +161,7 @@ func NoErrorT(t *testing.T, err error) {
// ErrorEqT compares a pair of errors, calling Fatal on it if they // ErrorEqT compares a pair of errors, calling Fatal on it if they
// don't match. // don't match.
func ErrorEqT(t *testing.T, expected, actual error) { func ErrorEqT(t *testing.T, expected, actual error) {
if NoDebug || (expected == actual) { if NoDebug || (errors.Is(expected, actual)) {
return return
} }
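
ErrorEq and ErrorEqT now compare errors with errors.Is instead of ==, the comparison errorlint recommends because it also matches errors that have been wrapped with %w. A brief sketch of the difference, using a hypothetical ErrNotFound sentinel:

package main

import (
	"errors"
	"fmt"
)

var ErrNotFound = errors.New("not found")

func main() {
	wrapped := fmt.Errorf("lookup failed: %w", ErrNotFound)

	// Direct equality only matches the exact same error value.
	fmt.Println(wrapped == ErrNotFound) // false

	// errors.Is walks the wrap chain, so the sentinel is still recognized.
	fmt.Println(errors.Is(wrapped, ErrNotFound)) // true
}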

View File

@@ -10,29 +10,21 @@
// backoff is configured with a maximum duration that will not be // backoff is configured with a maximum duration that will not be
// exceeded. // exceeded.
// //
// The `New` function will attempt to use the system's cryptographic // This package uses math/rand/v2 for jitter, which is automatically
// random number generator to seed a Go math/rand random number // seeded from a cryptographically secure source.
// source. If this fails, the package will panic on startup.
package backoff package backoff
import ( import (
"crypto/rand"
"encoding/binary"
"io"
"math" "math"
mrand "math/rand" "math/rand/v2"
"sync"
"time" "time"
) )
var prngMu sync.Mutex
var prng *mrand.Rand
// DefaultInterval is used when a Backoff is initialised with a // DefaultInterval is used when a Backoff is initialised with a
// zero-value Interval. // zero-value Interval.
var DefaultInterval = 5 * time.Minute var DefaultInterval = 5 * time.Minute
// DefaultMaxDuration is maximum amount of time that the backoff will // DefaultMaxDuration is the maximum amount of time that the backoff will
// delay for. // delay for.
var DefaultMaxDuration = 6 * time.Hour var DefaultMaxDuration = 6 * time.Hour
@@ -50,10 +42,9 @@ type Backoff struct {
// interval controls the time step for backing off. // interval controls the time step for backing off.
interval time.Duration interval time.Duration
// noJitter controls whether to use the "Full Jitter" // noJitter controls whether to use the "Full Jitter" improvement to attempt
// improvement to attempt to smooth out spikes in a high // to smooth out spikes in a high-contention scenario. If noJitter is set to
// contention scenario. If noJitter is set to true, no // true, no jitter will be introduced.
// jitter will be introduced.
noJitter bool noJitter bool
// decay controls the decay of n. If it is non-zero, n is // decay controls the decay of n. If it is non-zero, n is
@@ -65,17 +56,17 @@ type Backoff struct {
lastTry time.Time lastTry time.Time
} }
// New creates a new backoff with the specified max duration and // New creates a new backoff with the specified maxDuration duration and
// interval. Zero values may be used to use the default values. // interval. Zero values may be used to use the default values.
// //
// Panics if either max or interval is negative. // Panics if either dMax or interval is negative.
func New(max time.Duration, interval time.Duration) *Backoff { func New(dMax time.Duration, interval time.Duration) *Backoff {
if max < 0 || interval < 0 { if dMax < 0 || interval < 0 {
panic("backoff: max or interval is negative") panic("backoff: dMax or interval is negative")
} }
b := &Backoff{ b := &Backoff{
maxDuration: max, maxDuration: dMax,
interval: interval, interval: interval,
} }
b.setup() b.setup()
@@ -84,27 +75,12 @@ func New(max time.Duration, interval time.Duration) *Backoff {
// NewWithoutJitter works similarly to New, except that the created // NewWithoutJitter works similarly to New, except that the created
// Backoff will not use jitter. // Backoff will not use jitter.
func NewWithoutJitter(max time.Duration, interval time.Duration) *Backoff { func NewWithoutJitter(dMax time.Duration, interval time.Duration) *Backoff {
b := New(max, interval) b := New(dMax, interval)
b.noJitter = true b.noJitter = true
return b return b
} }
func init() {
var buf [8]byte
var n int64
_, err := io.ReadFull(rand.Reader, buf[:])
if err != nil {
panic(err.Error())
}
n = int64(binary.LittleEndian.Uint64(buf[:]))
src := mrand.NewSource(n)
prng = mrand.New(src)
}
func (b *Backoff) setup() { func (b *Backoff) setup() {
if b.interval == 0 { if b.interval == 0 {
b.interval = DefaultInterval b.interval = DefaultInterval
@@ -122,35 +98,44 @@ func (b *Backoff) Duration() time.Duration {
b.decayN() b.decayN()
t := b.duration(b.n) d := b.duration(b.n)
if b.n < math.MaxUint64 { if b.n < math.MaxUint64 {
b.n++ b.n++
} }
if !b.noJitter { if !b.noJitter {
prngMu.Lock() d = time.Duration(rand.Int64N(int64(d))) // #nosec G404
t = time.Duration(prng.Int63n(int64(t)))
prngMu.Unlock()
} }
return t return d
} }
const maxN uint64 = 63
// requires b to be locked. // requires b to be locked.
func (b *Backoff) duration(n uint64) (t time.Duration) { func (b *Backoff) duration(n uint64) time.Duration {
// Saturate pow // Use left shift on the underlying integer representation to avoid
pow := time.Duration(math.MaxInt64) // multiplying time.Duration by time.Duration (which is semantically
if n < 63 { // incorrect and flagged by linters).
pow = 1 << n if n >= maxN {
// Saturate when n would overflow a 64-bit shift or exceed maxDuration.
return b.maxDuration
} }
t = b.interval * pow // Calculate 2^n * interval using a shift. Detect overflow by checking
if t/pow != b.interval || t > b.maxDuration { // for sign change or monotonicity loss and clamp to maxDuration.
t = b.maxDuration shifted := b.interval << n
if shifted < 0 || shifted < b.interval {
// Overflow occurred during the shift; clamp to maxDuration.
return b.maxDuration
} }
return if shifted > b.maxDuration {
return b.maxDuration
}
return shifted
} }
// Reset resets the attempt counter of a backoff. // Reset resets the attempt counter of a backoff.
@@ -174,7 +159,7 @@ func (b *Backoff) SetDecay(decay time.Duration) {
b.decay = decay b.decay = decay
} }
// requires b to be locked // requires b to be locked.
func (b *Backoff) decayN() { func (b *Backoff) decayN() {
if b.decay == 0 { if b.decay == 0 {
return return
@@ -186,7 +171,9 @@ func (b *Backoff) decayN() {
} }
lastDuration := b.duration(b.n - 1) lastDuration := b.duration(b.n - 1)
decayed := time.Since(b.lastTry) > lastDuration+b.decay // Reset when the elapsed time is at least the previous backoff plus decay.
// Using ">=" avoids boundary flakiness in tests and real usage.
decayed := time.Since(b.lastTry) >= lastDuration+b.decay
b.lastTry = time.Now() b.lastTry = time.Now()
if !decayed { if !decayed {
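
The rewritten Duration draws jitter from math/rand/v2, which is seeded automatically from a secure source, and duration now computes 2^n * interval with a shift that saturates at maxDuration instead of multiplying two time.Duration values. A standalone sketch of that calculation (expBackoff is illustrative, not the package's exact code):

package main

import (
	"fmt"
	"math/rand/v2"
	"time"
)

const maxShift = 63 // shifting a 64-bit duration by 63 or more would overflow

// expBackoff computes 2^n * interval, clamped to maxDuration, with optional
// full jitter drawn from math/rand/v2.
func expBackoff(n uint64, interval, maxDuration time.Duration, jitter bool) time.Duration {
	if n >= maxShift {
		return maxDuration
	}
	d := interval << n
	// A negative or shrunken result means the shift overflowed.
	if d < 0 || d < interval || d > maxDuration {
		d = maxDuration
	}
	if jitter && d > 0 {
		d = time.Duration(rand.Int64N(int64(d))) // #nosec G404 -- jitter, not crypto
	}
	return d
}

func main() {
	for n := uint64(0); n < 6; n++ {
		fmt.Println(expBackoff(n, time.Second, 30*time.Second, false))
	}
	// Prints 1s, 2s, 4s, 8s, 16s, then 30s once the cap is reached.
}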

View File

@@ -9,7 +9,7 @@ import (
// If given New with 0's and no jitter, ensure that certain invariants are met: // If given New with 0's and no jitter, ensure that certain invariants are met:
// //
// - the default max duration and interval should be used // - the default maxDuration duration and interval should be used
// - noJitter should be true // - noJitter should be true
// - the RNG should not be initialised // - the RNG should not be initialised
// - the first duration should be equal to the default interval // - the first duration should be equal to the default interval
@@ -17,7 +17,11 @@ func TestDefaults(t *testing.T) {
b := NewWithoutJitter(0, 0) b := NewWithoutJitter(0, 0)
if b.maxDuration != DefaultMaxDuration { if b.maxDuration != DefaultMaxDuration {
t.Fatalf("expected new backoff to use the default max duration (%s), but have %s", DefaultMaxDuration, b.maxDuration) t.Fatalf(
"expected new backoff to use the default maxDuration duration (%s), but have %s",
DefaultMaxDuration,
b.maxDuration,
)
} }
if b.interval != DefaultInterval { if b.interval != DefaultInterval {
@@ -44,11 +48,11 @@ func TestSetup(t *testing.T) {
} }
} }
// Ensure that tries incremenets as expected. // Ensure that tries increments as expected.
func TestTries(t *testing.T) { func TestTries(t *testing.T) {
b := NewWithoutJitter(5, 1) b := NewWithoutJitter(5, 1)
for i := uint64(0); i < 3; i++ { for i := range uint64(3) {
if b.n != i { if b.n != i {
t.Fatalf("want tries=%d, have tries=%d", i, b.n) t.Fatalf("want tries=%d, have tries=%d", i, b.n)
} }
@@ -73,7 +77,7 @@ func TestTries(t *testing.T) {
func TestReset(t *testing.T) { func TestReset(t *testing.T) {
const iter = 10 const iter = 10
b := New(1000, 1) b := New(1000, 1)
for i := 0; i < iter; i++ { for range iter {
_ = b.Duration() _ = b.Duration()
} }
@@ -88,17 +92,17 @@ func TestReset(t *testing.T) {
} }
const decay = 5 * time.Millisecond const decay = 5 * time.Millisecond
const max = 10 * time.Millisecond const maxDuration = 10 * time.Millisecond
const interval = time.Millisecond const interval = time.Millisecond
func TestDecay(t *testing.T) { func TestDecay(t *testing.T) {
const iter = 10 const iter = 10
b := NewWithoutJitter(max, 1) b := NewWithoutJitter(maxDuration, 1)
b.SetDecay(decay) b.SetDecay(decay)
var backoff time.Duration var backoff time.Duration
for i := 0; i < iter; i++ { for range iter {
backoff = b.Duration() backoff = b.Duration()
} }
@@ -127,7 +131,7 @@ func TestDecaySaturation(t *testing.T) {
b.SetDecay(decay) b.SetDecay(decay)
var duration time.Duration var duration time.Duration
for i := 0; i <= 2; i++ { for range 3 {
duration = b.Duration() duration = b.Duration()
} }
@@ -145,7 +149,7 @@ func TestDecaySaturation(t *testing.T) {
} }
func ExampleBackoff_SetDecay() { func ExampleBackoff_SetDecay() {
b := NewWithoutJitter(max, interval) b := NewWithoutJitter(maxDuration, interval)
b.SetDecay(decay) b.SetDecay(decay)
// try 0 // try 0
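
Several loops in these tests move to Go 1.22's range-over-integer form, which is what the intrange linter suggests in place of the classic three-clause counter. A minimal sketch of both variants:

package main

import "fmt"

func main() {
	// Since Go 1.22 an integer can be ranged over directly; intrange
	// suggests this in place of `for i := 0; i < 3; i++`.
	for i := range 3 {
		fmt.Println("iteration", i)
	}

	// When the index is unused, the variable is dropped entirely, which is
	// the form the updated tests use for plain repetition.
	for range 3 {
		fmt.Println("tick")
	}
}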

View File

@@ -158,59 +158,87 @@ type EncryptedContentInfo struct {
EncryptedContent []byte `asn1:"tag:0,optional"` EncryptedContent []byte `asn1:"tag:0,optional"`
} }
func unmarshalInit(raw []byte) (init initPKCS7, err error) {
_, err = asn1.Unmarshal(raw, &init)
if err != nil {
return initPKCS7{}, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
return init, nil
}
func populateData(msg *PKCS7, content asn1.RawValue) error {
msg.ContentInfo = "Data"
_, err := asn1.Unmarshal(content.Bytes, &msg.Content.Data)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
return nil
}
func populateSignedData(msg *PKCS7, contentBytes []byte) error {
msg.ContentInfo = "SignedData"
var sd signedData
if _, err := asn1.Unmarshal(contentBytes, &sd); err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if len(sd.Certificates.Bytes) != 0 {
certs, err := x509.ParseCertificates(sd.Certificates.Bytes)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
msg.Content.SignedData.Certificates = certs
}
if len(sd.Crls.Bytes) != 0 {
crl, err := x509.ParseRevocationList(sd.Crls.Bytes)
if err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
msg.Content.SignedData.Crl = crl
}
msg.Content.SignedData.Version = sd.Version
msg.Content.SignedData.Raw = contentBytes
return nil
}
func populateEncryptedData(msg *PKCS7, contentBytes []byte) error {
msg.ContentInfo = "EncryptedData"
var ed EncryptedData
if _, err := asn1.Unmarshal(contentBytes, &ed); err != nil {
return certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if ed.Version != 0 {
return certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS #7 encryptedData version 0 is supported"))
}
msg.Content.EncryptedData = ed
return nil
}
// ParsePKCS7 attempts to parse the DER encoded bytes of a // ParsePKCS7 attempts to parse the DER encoded bytes of a
// PKCS7 structure. // PKCS7 structure.
func ParsePKCS7(raw []byte) (msg *PKCS7, err error) { func ParsePKCS7(raw []byte) (msg *PKCS7, err error) {
var pkcs7 initPKCS7 pkcs7, err := unmarshalInit(raw)
_, err = asn1.Unmarshal(raw, &pkcs7)
if err != nil { if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err) return nil, err
} }
msg = new(PKCS7) msg = new(PKCS7)
msg.Raw = pkcs7.Raw msg.Raw = pkcs7.Raw
msg.ContentInfo = pkcs7.ContentType.String() msg.ContentInfo = pkcs7.ContentType.String()
switch {
case msg.ContentInfo == ObjIDData:
msg.ContentInfo = "Data"
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &msg.Content.Data)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
case msg.ContentInfo == ObjIDSignedData:
msg.ContentInfo = "SignedData"
var signedData signedData
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &signedData)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if len(signedData.Certificates.Bytes) != 0 {
msg.Content.SignedData.Certificates, err = x509.ParseCertificates(signedData.Certificates.Bytes)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
}
if len(signedData.Crls.Bytes) != 0 {
msg.Content.SignedData.Crl, err = x509.ParseRevocationList(signedData.Crls.Bytes)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
}
msg.Content.SignedData.Version = signedData.Version
msg.Content.SignedData.Raw = pkcs7.Content.Bytes
case msg.ContentInfo == ObjIDEncryptedData:
msg.ContentInfo = "EncryptedData"
var encryptedData EncryptedData
_, err = asn1.Unmarshal(pkcs7.Content.Bytes, &encryptedData)
if err != nil {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, err)
}
if encryptedData.Version != 0 {
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS #7 encryptedData version 0 is supported"))
}
msg.Content.EncryptedData = encryptedData
switch msg.ContentInfo {
case ObjIDData:
if err := populateData(msg, pkcs7.Content); err != nil {
return nil, err
}
case ObjIDSignedData:
if err := populateSignedData(msg, pkcs7.Content.Bytes); err != nil {
return nil, err
}
case ObjIDEncryptedData:
if err := populateEncryptedData(msg, pkcs7.Content.Bytes); err != nil {
return nil, err
}
default: default:
return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS# 7 content of type data, signed data or encrypted data can be parsed")) return nil, certerr.ParsingError(certerr.ErrorSourceCertificate, errors.New("only PKCS# 7 content of type data, signed data or encrypted data can be parsed"))
} }
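
The ParsePKCS7 refactor switches on msg.ContentInfo directly instead of a tagless switch with equality cases, and moves each arm into a small populate helper so the function stays under the cyclop, gocognit, and funlen limits configured in .golangci.yml. A toy sketch of the same shape (classify and its helpers are illustrative only):

package main

import "fmt"

// classify is a toy stand-in for the ParsePKCS7 dispatch: switch on the value
// itself rather than `switch { case v == ... }`, and delegate each arm to a
// small helper so the parent function stays short and flat.
func classify(contentType string) (string, error) {
	switch contentType {
	case "data":
		return handleData()
	case "signedData":
		return handleSignedData()
	default:
		return "", fmt.Errorf("unsupported content type: %s", contentType)
	}
}

func handleData() (string, error)       { return "plain data", nil }
func handleSignedData() (string, error) { return "signed data", nil }

func main() {
	result, err := classify("signedData")
	fmt.Println(result, err)
}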

View File

@@ -219,16 +219,16 @@ func VerifyCertificate(cert *x509.Certificate) (revoked, ok bool) {
// VerifyCertificateError ensures that the certificate passed in hasn't // VerifyCertificateError ensures that the certificate passed in hasn't
// expired and checks the CRL for the server. // expired and checks the CRL for the server.
func VerifyCertificateError(cert *x509.Certificate) (revoked, ok bool, err error) { func VerifyCertificateError(cert *x509.Certificate) (revoked, ok bool, err error) {
if !time.Now().Before(cert.NotAfter) { if !time.Now().Before(cert.NotAfter) {
msg := fmt.Sprintf("Certificate expired %s\n", cert.NotAfter) msg := fmt.Sprintf("Certificate expired %s\n", cert.NotAfter)
log.Info(msg) log.Info(msg)
return true, true, fmt.Errorf(msg) return true, true, errors.New(msg)
} else if !time.Now().After(cert.NotBefore) { } else if !time.Now().After(cert.NotBefore) {
msg := fmt.Sprintf("Certificate isn't valid until %s\n", cert.NotBefore) msg := fmt.Sprintf("Certificate isn't valid until %s\n", cert.NotBefore)
log.Info(msg) log.Info(msg)
return true, true, fmt.Errorf(msg) return true, true, errors.New(msg)
} }
return revCheck(cert) return revCheck(cert)
} }
func fetchRemote(url string) (*x509.Certificate, error) { func fetchRemote(url string) (*x509.Certificate, error) {
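
Replacing fmt.Errorf(msg) with errors.New(msg) avoids passing a runtime string as a format string, which go vet's printf check reports and which can mangle the message if it happens to contain a % sign. A short sketch of the hazard:

package main

import (
	"errors"
	"fmt"
)

func main() {
	msg := "certificate expired 2025-01-02 (validity 100%)"

	// fmt.Errorf interprets msg as a format string, so the stray "%)" is
	// treated as a verb and the message comes out mangled; vet's printf
	// check also reports the non-constant format string.
	badErr := fmt.Errorf(msg)

	// errors.New takes the message verbatim, which is why the revocation
	// checks were switched to it.
	goodErr := errors.New(msg)

	fmt.Println(badErr)
	fmt.Println(goodErr)
}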

View File

@@ -101,7 +101,12 @@ func main() {
} }
// Process each chain group // Process each chain group
createdFiles := []string{} // Pre-allocate createdFiles based on total number of formats across all groups
totalFormats := 0
for _, group := range cfg.Chains {
totalFormats += len(group.Outputs.Formats)
}
createdFiles := make([]string, 0, totalFormats)
for groupName, group := range cfg.Chains { for groupName, group := range cfg.Chains {
files, err := processChainGroup(groupName, group, expiryDuration) files, err := processChainGroup(groupName, group, expiryDuration)
if err != nil { if err != nil {
@@ -168,68 +173,79 @@ func parseDuration(s string) (time.Duration, error) {
} }
func processChainGroup(groupName string, group ChainGroup, expiryDuration time.Duration) ([]string, error) { func processChainGroup(groupName string, group ChainGroup, expiryDuration time.Duration) ([]string, error) {
var createdFiles []string
// Default encoding to "pem" if not specified // Default encoding to "pem" if not specified
encoding := group.Outputs.Encoding encoding := group.Outputs.Encoding
if encoding == "" { if encoding == "" {
encoding = "pem" encoding = "pem"
} }
// Collect data from all chains in the group // Collect certificates from all chains in the group
singleFileCerts, individualCerts, err := loadAndCollectCerts(group.Certs, group.Outputs, expiryDuration)
if err != nil {
return nil, err
}
// Prepare files for inclusion in archives
archiveFiles, err := prepareArchiveFiles(singleFileCerts, individualCerts, group.Outputs, encoding)
if err != nil {
return nil, err
}
// Create archives for the entire group
createdFiles, err := createArchiveFiles(groupName, group.Outputs.Formats, archiveFiles)
if err != nil {
return nil, err
}
return createdFiles, nil
}
// loadAndCollectCerts loads all certificates from chains and collects them for processing
func loadAndCollectCerts(chains []CertChain, outputs Outputs, expiryDuration time.Duration) ([]*x509.Certificate, []certWithPath, error) {
var singleFileCerts []*x509.Certificate var singleFileCerts []*x509.Certificate
var individualCerts []certWithPath var individualCerts []certWithPath
for _, chain := range group.Certs { for _, chain := range chains {
// Step 1: Load all certificates for this chain
allCerts := make(map[string]*x509.Certificate)
// Load root certificate // Load root certificate
rootCert, err := certlib.LoadCertificate(chain.Root) rootCert, err := certlib.LoadCertificate(chain.Root)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to load root certificate %s: %v", chain.Root, err) return nil, nil, fmt.Errorf("failed to load root certificate %s: %v", chain.Root, err)
} }
allCerts[chain.Root] = rootCert
// Check expiry for root // Check expiry for root
checkExpiry(chain.Root, rootCert, expiryDuration) checkExpiry(chain.Root, rootCert, expiryDuration)
// Add root to single file if needed // Add root to collections if needed
if group.Outputs.IncludeSingle { if outputs.IncludeSingle {
singleFileCerts = append(singleFileCerts, rootCert) singleFileCerts = append(singleFileCerts, rootCert)
} }
if outputs.IncludeIndividual {
// Add root to individual files if needed
if group.Outputs.IncludeIndividual {
individualCerts = append(individualCerts, certWithPath{ individualCerts = append(individualCerts, certWithPath{
cert: rootCert, cert: rootCert,
path: chain.Root, path: chain.Root,
}) })
} }
// Step 2: Load and validate intermediates // Load and validate intermediates
for _, intPath := range chain.Intermediates { for _, intPath := range chain.Intermediates {
intCert, err := certlib.LoadCertificate(intPath) intCert, err := certlib.LoadCertificate(intPath)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to load intermediate certificate %s: %v", intPath, err) return nil, nil, fmt.Errorf("failed to load intermediate certificate %s: %v", intPath, err)
} }
allCerts[intPath] = intCert
// Validate that intermediate is signed by root // Validate that intermediate is signed by root
if err := intCert.CheckSignatureFrom(rootCert); err != nil { if err := intCert.CheckSignatureFrom(rootCert); err != nil {
return nil, fmt.Errorf("intermediate %s is not properly signed by root %s: %v", intPath, chain.Root, err) return nil, nil, fmt.Errorf("intermediate %s is not properly signed by root %s: %v", intPath, chain.Root, err)
} }
// Check expiry for intermediate // Check expiry for intermediate
checkExpiry(intPath, intCert, expiryDuration) checkExpiry(intPath, intCert, expiryDuration)
// Add intermediate to a single file if needed // Add intermediate to collections if needed
if group.Outputs.IncludeSingle { if outputs.IncludeSingle {
singleFileCerts = append(singleFileCerts, intCert) singleFileCerts = append(singleFileCerts, intCert)
} }
if outputs.IncludeIndividual {
// Add intermediate to individual files if needed
if group.Outputs.IncludeIndividual {
individualCerts = append(individualCerts, certWithPath{ individualCerts = append(individualCerts, certWithPath{
cert: intCert, cert: intCert,
path: intPath, path: intPath,
@@ -238,11 +254,15 @@ func processChainGroup(groupName string, group ChainGroup, expiryDuration time.D
} }
} }
// Prepare files for inclusion in archives for the entire group. return singleFileCerts, individualCerts, nil
}
// prepareArchiveFiles prepares all files to be included in archives
func prepareArchiveFiles(singleFileCerts []*x509.Certificate, individualCerts []certWithPath, outputs Outputs, encoding string) ([]fileEntry, error) {
var archiveFiles []fileEntry var archiveFiles []fileEntry
// Handle a single bundle file. // Handle a single bundle file
if group.Outputs.IncludeSingle && len(singleFileCerts) > 0 { if outputs.IncludeSingle && len(singleFileCerts) > 0 {
files, err := encodeCertsToFiles(singleFileCerts, "bundle", encoding, true) files, err := encodeCertsToFiles(singleFileCerts, "bundle", encoding, true)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to encode single bundle: %v", err) return nil, fmt.Errorf("failed to encode single bundle: %v", err)
@@ -251,7 +271,7 @@ func processChainGroup(groupName string, group ChainGroup, expiryDuration time.D
} }
// Handle individual files // Handle individual files
if group.Outputs.IncludeIndividual { if outputs.IncludeIndividual {
for _, cp := range individualCerts { for _, cp := range individualCerts {
baseName := strings.TrimSuffix(filepath.Base(cp.path), filepath.Ext(cp.path)) baseName := strings.TrimSuffix(filepath.Base(cp.path), filepath.Ext(cp.path))
files, err := encodeCertsToFiles([]*x509.Certificate{cp.cert}, baseName, encoding, false) files, err := encodeCertsToFiles([]*x509.Certificate{cp.cert}, baseName, encoding, false)
@@ -263,7 +283,7 @@ func processChainGroup(groupName string, group ChainGroup, expiryDuration time.D
} }
// Generate manifest if requested // Generate manifest if requested
if group.Outputs.Manifest { if outputs.Manifest {
manifestContent := generateManifest(archiveFiles) manifestContent := generateManifest(archiveFiles)
archiveFiles = append(archiveFiles, fileEntry{ archiveFiles = append(archiveFiles, fileEntry{
name: "MANIFEST", name: "MANIFEST",
@@ -271,8 +291,14 @@ func processChainGroup(groupName string, group ChainGroup, expiryDuration time.D
}) })
} }
// Create archives for the entire group return archiveFiles, nil
for _, format := range group.Outputs.Formats { }
// createArchiveFiles creates archive files in the specified formats
func createArchiveFiles(groupName string, formats []string, archiveFiles []fileEntry) ([]string, error) {
createdFiles := make([]string, 0, len(formats))
for _, format := range formats {
ext, ok := formatExtensions[format] ext, ok := formatExtensions[format]
if !ok { if !ok {
return nil, fmt.Errorf("unsupported format: %s", format) return nil, fmt.Errorf("unsupported format: %s", format)
@@ -327,10 +353,7 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
switch encoding { switch encoding {
case "pem": case "pem":
pemContent, err := encodeCertsToPEM(certs, isSingle) pemContent := encodeCertsToPEM(certs)
if err != nil {
return nil, err
}
files = append(files, fileEntry{ files = append(files, fileEntry{
name: baseName + ".pem", name: baseName + ".pem",
content: pemContent, content: pemContent,
@@ -357,10 +380,7 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
} }
case "both": case "both":
// Add PEM version // Add PEM version
pemContent, err := encodeCertsToPEM(certs, isSingle) pemContent := encodeCertsToPEM(certs)
if err != nil {
return nil, err
}
files = append(files, fileEntry{ files = append(files, fileEntry{
name: baseName + ".pem", name: baseName + ".pem",
content: pemContent, content: pemContent,
@@ -391,7 +411,7 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
} }
// encodeCertsToPEM encodes certificates to PEM format // encodeCertsToPEM encodes certificates to PEM format
func encodeCertsToPEM(certs []*x509.Certificate, concatenate bool) ([]byte, error) { func encodeCertsToPEM(certs []*x509.Certificate) []byte {
var pemContent []byte var pemContent []byte
for _, cert := range certs { for _, cert := range certs {
pemBlock := &pem.Block{ pemBlock := &pem.Block{
@@ -400,7 +420,7 @@ func encodeCertsToPEM(certs []*x509.Certificate, concatenate bool) ([]byte, erro
} }
pemContent = append(pemContent, pem.EncodeToMemory(pemBlock)...) pemContent = append(pemContent, pem.EncodeToMemory(pemBlock)...)
} }
return pemContent, nil return pemContent
} }
func generateManifest(files []fileEntry) []byte { func generateManifest(files []fileEntry) []byte {
@@ -420,22 +440,29 @@ func createZipArchive(path string, files []fileEntry) error {
if err != nil { if err != nil {
return err return err
} }
defer f.Close()
w := zip.NewWriter(f) w := zip.NewWriter(f)
defer w.Close()
for _, file := range files { for _, file := range files {
fw, err := w.Create(file.name) fw, err := w.Create(file.name)
if err != nil { if err != nil {
w.Close()
f.Close()
return err return err
} }
if _, err := fw.Write(file.content); err != nil { if _, err := fw.Write(file.content); err != nil {
w.Close()
f.Close()
return err return err
} }
} }
return nil // Check errors on close operations
if err := w.Close(); err != nil {
f.Close()
return err
}
return f.Close()
} }
func createTarGzArchive(path string, files []fileEntry) error { func createTarGzArchive(path string, files []fileEntry) error {
@@ -443,13 +470,9 @@ func createTarGzArchive(path string, files []fileEntry) error {
if err != nil { if err != nil {
return err return err
} }
defer f.Close()
gw := gzip.NewWriter(f) gw := gzip.NewWriter(f)
defer gw.Close()
tw := tar.NewWriter(gw) tw := tar.NewWriter(gw)
defer tw.Close()
for _, file := range files { for _, file := range files {
hdr := &tar.Header{ hdr := &tar.Header{
@@ -458,14 +481,30 @@ func createTarGzArchive(path string, files []fileEntry) error {
Size: int64(len(file.content)), Size: int64(len(file.content)),
} }
if err := tw.WriteHeader(hdr); err != nil { if err := tw.WriteHeader(hdr); err != nil {
tw.Close()
gw.Close()
f.Close()
return err return err
} }
if _, err := tw.Write(file.content); err != nil { if _, err := tw.Write(file.content); err != nil {
tw.Close()
gw.Close()
f.Close()
return err return err
} }
} }
return nil // Check errors on close operations in the correct order
if err := tw.Close(); err != nil {
gw.Close()
f.Close()
return err
}
if err := gw.Close(); err != nil {
f.Close()
return err
}
return f.Close()
} }
func generateHashFile(path string, files []string) error { func generateHashFile(path string, files []string) error {

View File

@@ -1,3 +1,6 @@
This project is an exploration into the utility of Jetbrains' Junie
to write smaller but tedious programs.
Task: build a certificate bundling tool in cmd/cert-bundler. It Task: build a certificate bundling tool in cmd/cert-bundler. It
creates archives of certificate chains. creates archives of certificate chains.
@@ -186,4 +189,9 @@ to provide the same detailed information.
It may be easier to embed the README.txt in the program on build. It may be easier to embed the README.txt in the program on build.
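One way to do that is Go's embed directive; a minimal sketch, assuming README.txt sits next to main.go at build time:
```
package main

import (
	_ "embed"
	"fmt"
)

//go:embed README.txt
var readme string

func main() {
	// Print the embedded README when detailed help is requested.
	fmt.Println(readme)
}
```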
-----
For the archive (tar.gz and zip) writers, make sure errors are
checked at the end, and don't just defer the close operations.
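A minimal sketch of that close-ordering pattern for the tar.gz case (the writeTarGz helper and its signature are illustrative, not the tool's actual API):
```
package main

import (
	"archive/tar"
	"compress/gzip"
	"os"
)

// writeTarGz writes name/content pairs to a .tar.gz file, closing the
// tar writer, gzip writer, and file in that order and returning the
// first close error instead of deferring the closes.
func writeTarGz(path string, files map[string][]byte) error {
	f, err := os.Create(path)
	if err != nil {
		return err
	}
	gw := gzip.NewWriter(f)
	tw := tar.NewWriter(gw)

	for name, content := range files {
		hdr := &tar.Header{Name: name, Mode: 0o644, Size: int64(len(content))}
		if err := tw.WriteHeader(hdr); err != nil {
			tw.Close()
			gw.Close()
			f.Close()
			return err
		}
		if _, err := tw.Write(content); err != nil {
			tw.Close()
			gw.Close()
			f.Close()
			return err
		}
	}

	// Close in reverse order of creation; a failed close can mean a
	// truncated archive, so the error is reported rather than ignored.
	if err := tw.Close(); err != nil {
		gw.Close()
		f.Close()
		return err
	}
	if err := gw.Close(); err != nil {
		f.Close()
		return err
	}
	return f.Close()
}

func main() {
	if err := writeTarGz("bundle.tar.gz", map[string][]byte{"MANIFEST": []byte("example\n")}); err != nil {
		os.Exit(1)
	}
}
```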

View File

@@ -0,0 +1,36 @@
cert-revcheck: check certificate expiry and revocation
-----------------------------------------------------
Description
cert-revcheck accepts a list of certificate files (PEM or DER) or
site addresses (host[:port]) and checks whether the leaf certificate
is expired or revoked. Revocation checks use CRL and OCSP via the
certlib/revoke package.
Usage
cert-revcheck [options] <target> [<target>...]
Options
-hardfail treat revocation check failures as fatal (default: false)
-timeout dur HTTP/OCSP/CRL timeout for network operations (default: 10s)
-v verbose output
Targets
- File paths to certificates in PEM or DER format.
- Site addresses in the form host or host:port. If no port is
provided, 443 is assumed.
Examples
# Check a PEM file
cert-revcheck ./server.pem
# Check a DER (single) certificate
cert-revcheck ./server.der
# Check a live site (leaf certificate)
cert-revcheck example.com:443
Notes
- For sites, only the leaf certificate is checked.
- When -hardfail is set, network issues during OCSP/CRL fetch will
cause the check to fail (treated as revoked).
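A rough sketch of the check described above, using the revoke.VerifyCertificateError call that appears in main.go below; the package name and the checkLeaf helper are illustrative:
```
package revcheck

import (
	"crypto/x509"
	"fmt"
	"time"

	"git.wntrmute.dev/kyle/goutils/certlib/revoke"
)

// checkLeaf reports whether a leaf certificate is expired, revoked, or of
// unknown status. With revoke.HardFail set, callers treat UNKNOWN as a
// failure, matching the -hardfail behavior described in the notes.
func checkLeaf(cert *x509.Certificate) (string, error) {
	now := time.Now()
	if now.After(cert.NotAfter) {
		return "EXPIRED", fmt.Errorf("expired at %s", cert.NotAfter)
	}
	if now.Before(cert.NotBefore) {
		return "EXPIRED", fmt.Errorf("not valid until %s", cert.NotBefore)
	}

	revoked, ok, err := revoke.VerifyCertificateError(cert)
	switch {
	case revoked:
		return "REVOKED", err
	case !ok:
		// Revocation status could not be determined (e.g. CRL/OCSP fetch failed).
		return "UNKNOWN", err
	default:
		return "OK", nil
	}
}
```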

140
cmd/cert-revcheck/main.go Normal file
View File

@@ -0,0 +1,140 @@
package main
import (
"crypto/tls"
"crypto/x509"
"flag"
"errors"
"fmt"
"io/ioutil"
"net"
"os"
"time"
"git.wntrmute.dev/kyle/goutils/certlib"
hosts "git.wntrmute.dev/kyle/goutils/certlib/hosts"
"git.wntrmute.dev/kyle/goutils/certlib/revoke"
"git.wntrmute.dev/kyle/goutils/fileutil"
)
var (
hardfail bool
timeout time.Duration
verbose bool
)
func main() {
flag.BoolVar(&hardfail, "hardfail", false, "treat revocation check failures as fatal")
flag.DurationVar(&timeout, "timeout", 10*time.Second, "network timeout for OCSP/CRL fetches and TLS site connects")
flag.BoolVar(&verbose, "v", false, "verbose output")
flag.Parse()
revoke.HardFail = hardfail
// Set HTTP client timeout for revocation library
revoke.HTTPClient.Timeout = timeout
if flag.NArg() == 0 {
fmt.Fprintf(os.Stderr, "Usage: %s [options] <target> [<target>...]\n", os.Args[0])
os.Exit(2)
}
exitCode := 0
for _, target := range flag.Args() {
status, err := processTarget(target)
switch status {
case "OK":
fmt.Printf("%s: OK\n", target)
case "EXPIRED":
fmt.Printf("%s: EXPIRED: %v\n", target, err)
exitCode = 1
case "REVOKED":
fmt.Printf("%s: REVOKED\n", target)
exitCode = 1
case "UNKNOWN":
fmt.Printf("%s: UNKNOWN: %v\n", target, err)
if hardfail {
// In hardfail, treat unknown as failure
exitCode = 1
}
}
}
os.Exit(exitCode)
}
func processTarget(target string) (string, error) {
if fileutil.FileDoesExist(target) {
return checkFile(target)
}
// Not a file; treat as site
return checkSite(target)
}
func checkFile(path string) (string, error) {
in, err := ioutil.ReadFile(path)
if err != nil {
return "UNKNOWN", err
}
// Try PEM first; if that fails, try single DER cert
certs, err := certlib.ReadCertificates(in)
if err != nil || len(certs) == 0 {
cert, _, derr := certlib.ReadCertificate(in)
if derr != nil || cert == nil {
if err == nil {
err = derr
}
return "UNKNOWN", err
}
return evaluateCert(cert)
}
// Evaluate the first certificate (leaf) by default
return evaluateCert(certs[0])
}
func checkSite(hostport string) (string, error) {
// Use certlib/hosts to parse host/port (supports https URLs and host:port)
target, err := hosts.ParseHost(hostport)
if err != nil {
return "UNKNOWN", err
}
d := &net.Dialer{Timeout: timeout}
conn, err := tls.DialWithDialer(d, "tcp", target.String(), &tls.Config{InsecureSkipVerify: true, ServerName: target.Host})
if err != nil {
return "UNKNOWN", err
}
defer conn.Close()
state := conn.ConnectionState()
if len(state.PeerCertificates) == 0 {
return "UNKNOWN", errors.New("no peer certificates presented")
}
return evaluateCert(state.PeerCertificates[0])
}
func evaluateCert(cert *x509.Certificate) (string, error) {
// Expiry check
now := time.Now()
if !now.Before(cert.NotAfter) {
return "EXPIRED", fmt.Errorf("expired at %s", cert.NotAfter)
}
if !now.After(cert.NotBefore) {
return "EXPIRED", fmt.Errorf("not valid until %s", cert.NotBefore)
}
// Revocation check using certlib/revoke
revoked, ok, err := revoke.VerifyCertificateError(cert)
if revoked {
// If revoked is true, ok will be true per implementation, err may describe why
return "REVOKED", err
}
if !ok {
// Revocation status could not be determined
return "UNKNOWN", err
}
return "OK", nil
}

View File

@@ -39,10 +39,6 @@ func compress(path, target string, level int) error {
return errors.Wrap(err, "compressing file") return errors.Wrap(err, "compressing file")
} }
if err != nil {
return errors.Wrap(err, "stat(2)ing destination file")
}
return nil return nil
} }

View File

@@ -11,7 +11,7 @@ package config
import ( import (
"bufio" "bufio"
"fmt" "fmt"
"log" "maps"
"os" "os"
"sort" "sort"
"strings" "strings"
@@ -33,14 +33,15 @@ func SetEnvPrefix(pfx string) {
prefix = pfx prefix = pfx
} }
const keyValueSplitLength = 2
func addLine(line string) { func addLine(line string) {
if strings.HasPrefix(line, "#") || line == "" { if strings.HasPrefix(line, "#") || line == "" {
return return
} }
lineParts := strings.SplitN(line, "=", 2) lineParts := strings.SplitN(line, "=", keyValueSplitLength)
if len(lineParts) != 2 { if len(lineParts) != keyValueSplitLength {
log.Print("skipping line: ", line)
return // silently ignore empty keys return // silently ignore empty keys
} }
@@ -49,7 +50,7 @@ func addLine(line string) {
vars[lineParts[0]] = lineParts[1] vars[lineParts[0]] = lineParts[1]
} }
// LoadFile scans the file at path for key=value pairs and adds them // LoadFile scans the file at 'path' for key=value pairs and adds them
// to the configuration. // to the configuration.
func LoadFile(path string) error { func LoadFile(path string) error {
file, err := os.Open(path) file, err := os.Open(path)
@@ -64,25 +65,19 @@ func LoadFile(path string) error {
addLine(line) addLine(line)
} }
if err = scanner.Err(); err != nil { return scanner.Err()
return err
}
return nil
} }
// LoadFileFor scans the ini file at path, loading the default section // LoadFileFor scans the ini file at 'path', loading the default section
// and overriding any keys found under section. If strict is true, the // and overriding any keys found under 'section'. If strict is true, the
// named section must exist (i.e. to catch typos in the section name). // named section must exist (i.e., to catch typos in the section name).
func LoadFileFor(path, section string, strict bool) error { func LoadFileFor(path, section string, strict bool) error {
cmap, err := iniconf.ParseFile(path) cmap, err := iniconf.ParseFile(path)
if err != nil { if err != nil {
return err return err
} }
for key, value := range cmap[iniconf.DefaultSection] { maps.Copy(vars, cmap[iniconf.DefaultSection])
vars[key] = value
}
smap, ok := cmap[section] smap, ok := cmap[section]
if !ok { if !ok {
@@ -92,9 +87,7 @@ func LoadFileFor(path, section string, strict bool) error {
return nil return nil
} }
for key, value := range smap { maps.Copy(vars, smap)
vars[key] = value
}
return nil return nil
} }
@@ -111,7 +104,7 @@ func Get(key string) string {
// GetDefault retrieves a value from either a configuration file or // GetDefault retrieves a value from either a configuration file or
// the environment. Note that value from a file will override // the environment. Note that value from a file will override
// environment variables. If a value isn't found (e.g. Get returns an // environment variables. If a value isn't found (e.g., Get returns an
// empty string), the default value will be used. // empty string), the default value will be used.
func GetDefault(key, def string) string { func GetDefault(key, def string) string {
if v := Get(key); v != "" { if v := Get(key); v != "" {
@@ -121,8 +114,7 @@ func GetDefault(key, def string) string {
} }
// Require retrieves a value from either a configuration file or the // Require retrieves a value from either a configuration file or the
// environment. If the key isn't present, it will call log.Fatal, printing // environment. If the key isn't present, it will panic.
// the missing key.
func Require(key string) string { func Require(key string) string {
if v, ok := vars[key]; ok { if v, ok := vars[key]; ok {
return v return v
@@ -135,7 +127,7 @@ func Require(key string) string {
envMessage = " (note: looked for the key " + prefix + key envMessage = " (note: looked for the key " + prefix + key
envMessage += " in the local env)" envMessage += " in the local env)"
} }
log.Fatalf("missing required configuration value %s%s", key, envMessage) panic(fmt.Sprintf("missing required configuration value %s%s", key, envMessage))
} }
return v return v
@@ -143,7 +135,8 @@ func Require(key string) string {
// ListKeys returns a slice of the currently known keys. // ListKeys returns a slice of the currently known keys.
func ListKeys() []string { func ListKeys() []string {
keyList := []string{} var keyList []string
for k := range vars { for k := range vars {
keyList = append(keyList, k) keyList = append(keyList, k)
} }

View File

@@ -1,27 +1,26 @@
package config package config_test
import ( import (
"os" "os"
"testing" "testing"
"git.wntrmute.dev/kyle/goutils/config"
) )
const ( const (
testFilePath = "testdata/test.env" testFilePath = "testdata/test.env"
// Keys // Key constants.
kOrder = "ORDER" kOrder = "ORDER"
kSpecies = "SPECIES" kSpecies = "SPECIES"
kName = "COMMON_NAME" kName = "COMMON_NAME"
// Env
eOrder = "corvus" eOrder = "corvus"
eSpecies = "corvus corax" eSpecies = "corvus corax"
eName = "northern raven" eName = "northern raven"
// File
fOrder = "stringiformes" fOrder = "stringiformes"
fSpecies = "strix aluco" fSpecies = "strix aluco"
// Name isn't set in the file to test fall through.
) )
func init() { func init() {
@@ -31,8 +30,8 @@ func init() {
} }
func TestLoadEnvOnly(t *testing.T) { func TestLoadEnvOnly(t *testing.T) {
order := Get(kOrder) order := config.Get(kOrder)
species := Get(kSpecies) species := config.Get(kSpecies)
if order != eOrder { if order != eOrder {
t.Errorf("want %s, have %s", eOrder, order) t.Errorf("want %s, have %s", eOrder, order)
} }
@@ -43,14 +42,14 @@ func TestLoadEnvOnly(t *testing.T) {
} }
func TestLoadFile(t *testing.T) { func TestLoadFile(t *testing.T) {
err := LoadFile(testFilePath) err := config.LoadFile(testFilePath)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
order := Get(kOrder) order := config.Get(kOrder)
species := Get(kSpecies) species := config.Get(kSpecies)
name := Get(kName) name := config.Get(kName)
if order != fOrder { if order != fOrder {
t.Errorf("want %s, have %s", fOrder, order) t.Errorf("want %s, have %s", fOrder, order)

View File

@@ -2,6 +2,7 @@ package iniconf
import ( import (
"bufio" "bufio"
"errors"
"fmt" "fmt"
"io" "io"
"os" "os"
@@ -23,86 +24,115 @@ var (
var DefaultSection = "default" var DefaultSection = "default"
// ParseFile attempts to load the named config file. // ParseFile attempts to load the named config file.
func ParseFile(fileName string) (cfg ConfigMap, err error) { func ParseFile(fileName string) (ConfigMap, error) {
var file *os.File file, err := os.Open(fileName)
file, err = os.Open(fileName)
if err != nil { if err != nil {
return return nil, err
} }
defer file.Close() defer file.Close()
return ParseReader(file) return ParseReader(file)
} }
// ParseReader reads a configuration from an io.Reader. // ParseReader reads a configuration from an io.Reader.
func ParseReader(r io.Reader) (cfg ConfigMap, err error) { func ParseReader(r io.Reader) (ConfigMap, error) {
cfg = ConfigMap{} cfg := ConfigMap{}
buf := bufio.NewReader(r) buf := bufio.NewReader(r)
var ( var (
line string line string
longLine bool longLine bool
currentSection string currentSection string
lineBytes []byte err error
isPrefix bool
) )
for { for {
err = nil line, longLine, err = readConfigLine(buf, line, longLine)
lineBytes, isPrefix, err = buf.ReadLine() if errors.Is(err, io.EOF) {
if io.EOF == err {
err = nil err = nil
break break
} else if err != nil { } else if err != nil {
break break
} else if isPrefix {
line += string(lineBytes)
longLine = true
continue
} else if longLine {
line += string(lineBytes)
longLine = false
} else {
line = string(lineBytes)
} }
if commentLine.MatchString(line) { if line == "" {
continue continue
} else if blankLine.MatchString(line) { }
continue
} else if configSection.MatchString(line) { currentSection, err = processConfigLine(cfg, line, currentSection)
section := configSection.ReplaceAllString(line, if err != nil {
"$1")
if section == "" {
err = fmt.Errorf("invalid structure in file")
break
} else if !cfg.SectionInConfig(section) {
cfg[section] = make(map[string]string, 0)
}
currentSection = section
} else if configLine.MatchString(line) {
regex := configLine
if quotedConfigLine.MatchString(line) {
regex = quotedConfigLine
}
if currentSection == "" {
currentSection = DefaultSection
if !cfg.SectionInConfig(currentSection) {
cfg[currentSection] = map[string]string{}
}
}
key := regex.ReplaceAllString(line, "$1")
val := regex.ReplaceAllString(line, "$2")
if key == "" {
continue
}
cfg[currentSection][key] = val
} else {
err = fmt.Errorf("invalid config file")
break break
} }
} }
return
return cfg, err
}
// readConfigLine reads and assembles a complete configuration line, handling long lines.
func readConfigLine(buf *bufio.Reader, currentLine string, longLine bool) (string, bool, error) {
lineBytes, isPrefix, err := buf.ReadLine()
if err != nil {
return "", false, err
}
if isPrefix {
return currentLine + string(lineBytes), true, nil
} else if longLine {
return currentLine + string(lineBytes), false, nil
}
return string(lineBytes), false, nil
}
// processConfigLine processes a single line and updates the configuration map.
func processConfigLine(cfg ConfigMap, line string, currentSection string) (string, error) {
if commentLine.MatchString(line) || blankLine.MatchString(line) {
return currentSection, nil
}
if configSection.MatchString(line) {
return handleSectionLine(cfg, line)
}
if configLine.MatchString(line) {
return handleConfigLine(cfg, line, currentSection)
}
return currentSection, errors.New("invalid config file")
}
// handleSectionLine processes a section header line.
func handleSectionLine(cfg ConfigMap, line string) (string, error) {
section := configSection.ReplaceAllString(line, "$1")
if section == "" {
return "", errors.New("invalid structure in file")
}
if !cfg.SectionInConfig(section) {
cfg[section] = make(map[string]string, 0)
}
return section, nil
}
// handleConfigLine processes a key=value configuration line.
func handleConfigLine(cfg ConfigMap, line string, currentSection string) (string, error) {
regex := configLine
if quotedConfigLine.MatchString(line) {
regex = quotedConfigLine
}
if currentSection == "" {
currentSection = DefaultSection
if !cfg.SectionInConfig(currentSection) {
cfg[currentSection] = map[string]string{}
}
}
key := regex.ReplaceAllString(line, "$1")
val := regex.ReplaceAllString(line, "$2")
if key != "" {
cfg[currentSection][key] = val
}
return currentSection, nil
} }
// SectionInConfig determines whether a section is in the configuration. // SectionInConfig determines whether a section is in the configuration.
@@ -112,41 +142,39 @@ func (c ConfigMap) SectionInConfig(section string) bool {
} }
// ListSections returns the list of sections in the config map. // ListSections returns the list of sections in the config map.
func (c ConfigMap) ListSections() (sections []string) { func (c ConfigMap) ListSections() []string {
sections := make([]string, 0, len(c))
for section := range c { for section := range c {
sections = append(sections, section) sections = append(sections, section)
} }
return return sections
} }
// WriteFile writes out the configuration to a file. // WriteFile writes out the configuration to a file.
func (c ConfigMap) WriteFile(filename string) (err error) { func (c ConfigMap) WriteFile(filename string) error {
file, err := os.Create(filename) file, err := os.Create(filename)
if err != nil { if err != nil {
return return err
} }
defer file.Close() defer file.Close()
for _, section := range c.ListSections() { for _, section := range c.ListSections() {
sName := fmt.Sprintf("[ %s ]\n", section) sName := fmt.Sprintf("[ %s ]\n", section)
_, err = file.Write([]byte(sName)) if _, err = file.WriteString(sName); err != nil {
if err != nil { return err
return
} }
for k, v := range c[section] { for k, v := range c[section] {
line := fmt.Sprintf("%s = %s\n", k, v) line := fmt.Sprintf("%s = %s\n", k, v)
_, err = file.Write([]byte(line)) if _, err = file.WriteString(line); err != nil {
if err != nil { return err
return
} }
} }
_, err = file.Write([]byte{0x0a}) if _, err = file.Write([]byte{0x0a}); err != nil {
if err != nil { return err
return
} }
} }
return return nil
} }
// AddSection creates a new section in the config map. // AddSection creates a new section in the config map.
@@ -170,27 +198,26 @@ func (c ConfigMap) AddKeyVal(section, key, val string) {
} }
// GetValue retrieves the value from a key map. // GetValue retrieves the value from a key map.
func (c ConfigMap) GetValue(section, key string) (val string, present bool) { func (c ConfigMap) GetValue(section, key string) (string, bool) {
if c == nil { if c == nil {
return return "", false
} }
if section == "" { if section == "" {
section = DefaultSection section = DefaultSection
} }
_, ok := c[section] if _, ok := c[section]; !ok {
if !ok { return "", false
return
} }
val, present = c[section][key] val, present := c[section][key]
return return val, present
} }
// GetValueDefault retrieves the value from a key map if present, // GetValueDefault retrieves the value from a key map if present,
// otherwise the default value. // otherwise the default value.
func (c ConfigMap) GetValueDefault(section, key, value string) (val string) { func (c ConfigMap) GetValueDefault(section, key, value string) string {
kval, ok := c.GetValue(section, key) kval, ok := c.GetValue(section, key)
if !ok { if !ok {
return value return value
@@ -199,7 +226,7 @@ func (c ConfigMap) GetValueDefault(section, key, value string) (val string) {
} }
// SectionKeys returns the sections in the config map. // SectionKeys returns the sections in the config map.
func (c ConfigMap) SectionKeys(section string) (keys []string, present bool) { func (c ConfigMap) SectionKeys(section string) ([]string, bool) {
if c == nil { if c == nil {
return nil, false return nil, false
} }
@@ -208,13 +235,12 @@ func (c ConfigMap) SectionKeys(section string) (keys []string, present bool) {
section = DefaultSection section = DefaultSection
} }
cm := c s, ok := c[section]
s, ok := cm[section]
if !ok { if !ok {
return nil, false return nil, false
} }
keys = make([]string, 0, len(s)) keys := make([]string, 0, len(s))
for key := range s { for key := range s {
keys = append(keys, key) keys = append(keys, key)
} }

View File

@@ -1,18 +1,19 @@
package iniconf package iniconf_test
import ( import (
"errors" "errors"
"fmt"
"os" "os"
"sort" "sort"
"testing" "testing"
"git.wntrmute.dev/kyle/goutils/config/iniconf"
) )
// FailWithError is a utility for dumping errors and failing the test. // FailWithError is a utility for dumping errors and failing the test.
func FailWithError(t *testing.T, err error) { func FailWithError(t *testing.T, err error) {
fmt.Println("failed") t.Log("failed")
if err != nil { if err != nil {
fmt.Println("[!] ", err.Error()) t.Log("[!] ", err.Error())
} }
t.FailNow() t.FailNow()
} }
@@ -49,47 +50,50 @@ func stringSlicesEqual(slice1, slice2 []string) bool {
func TestGoodConfig(t *testing.T) { func TestGoodConfig(t *testing.T) {
testFile := "testdata/test.conf" testFile := "testdata/test.conf"
fmt.Printf("[+] validating known-good config... ") t.Logf("[+] validating known-good config... ")
cmap, err := ParseFile(testFile) cmap, err := iniconf.ParseFile(testFile)
if err != nil { if err != nil {
FailWithError(t, err) FailWithError(t, err)
} else if len(cmap) != 2 { } else if len(cmap) != 2 {
FailWithError(t, err) FailWithError(t, err)
} }
fmt.Println("ok") t.Log("ok")
} }
func TestGoodConfig2(t *testing.T) { func TestGoodConfig2(t *testing.T) {
testFile := "testdata/test2.conf" testFile := "testdata/test2.conf"
fmt.Printf("[+] validating second known-good config... ") t.Logf("[+] validating second known-good config... ")
cmap, err := ParseFile(testFile) cmap, err := iniconf.ParseFile(testFile)
if err != nil { switch {
case err != nil:
FailWithError(t, err) FailWithError(t, err)
} else if len(cmap) != 1 { case len(cmap) != 1:
FailWithError(t, err) FailWithError(t, err)
} else if len(cmap["default"]) != 3 { case len(cmap["default"]) != 3:
FailWithError(t, err) FailWithError(t, err)
default:
// nothing to do here
} }
fmt.Println("ok") t.Log("ok")
} }
func TestBadConfig(t *testing.T) { func TestBadConfig(t *testing.T) {
testFile := "testdata/bad.conf" testFile := "testdata/bad.conf"
fmt.Printf("[+] ensure invalid config file fails... ") t.Logf("[+] ensure invalid config file fails... ")
_, err := ParseFile(testFile) _, err := iniconf.ParseFile(testFile)
if err == nil { if err == nil {
err = fmt.Errorf("invalid config file should fail") err = errors.New("invalid config file should fail")
FailWithError(t, err) FailWithError(t, err)
} }
fmt.Println("ok") t.Log("ok")
} }
func TestWriteConfigFile(t *testing.T) { func TestWriteConfigFile(t *testing.T) {
fmt.Printf("[+] ensure config file is written properly... ") t.Logf("[+] ensure config file is written properly... ")
const testFile = "testdata/test.conf" const testFile = "testdata/test.conf"
const testOut = "testdata/test.out" const testOut = "testdata/test.out"
cmap, err := ParseFile(testFile) cmap, err := iniconf.ParseFile(testFile)
if err != nil { if err != nil {
FailWithError(t, err) FailWithError(t, err)
} }
@@ -100,7 +104,7 @@ func TestWriteConfigFile(t *testing.T) {
FailWithError(t, err) FailWithError(t, err)
} }
cmap2, err := ParseFile(testOut) cmap2, err := iniconf.ParseFile(testOut)
if err != nil { if err != nil {
FailWithError(t, err) FailWithError(t, err)
} }
@@ -110,25 +114,25 @@ func TestWriteConfigFile(t *testing.T) {
sort.Strings(sectionList1) sort.Strings(sectionList1)
sort.Strings(sectionList2) sort.Strings(sectionList2)
if !stringSlicesEqual(sectionList1, sectionList2) { if !stringSlicesEqual(sectionList1, sectionList2) {
err = fmt.Errorf("section lists don't match") err = errors.New("section lists don't match")
FailWithError(t, err) FailWithError(t, err)
} }
for _, section := range sectionList1 { for _, section := range sectionList1 {
for _, k := range cmap[section] { for _, k := range cmap[section] {
if cmap[section][k] != cmap2[section][k] { if cmap[section][k] != cmap2[section][k] {
err = fmt.Errorf("config key doesn't match") err = errors.New("config key doesn't match")
FailWithError(t, err) FailWithError(t, err)
} }
} }
} }
fmt.Println("ok") t.Log("ok")
} }
func TestQuotedValue(t *testing.T) { func TestQuotedValue(t *testing.T) {
testFile := "testdata/test.conf" testFile := "testdata/test.conf"
fmt.Printf("[+] validating quoted value... ") t.Logf("[+] validating quoted value... ")
cmap, _ := ParseFile(testFile) cmap, _ := iniconf.ParseFile(testFile)
val := cmap["sectionName"]["key4"] val := cmap["sectionName"]["key4"]
if val != " space at beginning and end " { if val != " space at beginning and end " {
FailWithError(t, errors.New("Wrong value in double quotes ["+val+"]")) FailWithError(t, errors.New("Wrong value in double quotes ["+val+"]"))
@@ -138,5 +142,5 @@ func TestQuotedValue(t *testing.T) {
if val != " is quoted with single quotes " { if val != " is quoted with single quotes " {
FailWithError(t, errors.New("Wrong value in single quotes ["+val+"]")) FailWithError(t, errors.New("Wrong value in single quotes ["+val+"]"))
} }
fmt.Println("ok") t.Log("ok")
} }

View File

@@ -1,5 +1,4 @@
//go:build !linux //go:build !linux
// +build !linux
package config package config

View File

@@ -1,7 +1,11 @@
package config package config_test
import "testing" import (
"testing"
"git.wntrmute.dev/kyle/goutils/config"
)
func TestDefaultPath(t *testing.T) { func TestDefaultPath(t *testing.T) {
t.Log(DefaultConfigPath("demoapp", "app.conf")) t.Log(config.DefaultConfigPath("demoapp", "app.conf"))
} }

View File

@@ -47,7 +47,7 @@ func ToFile(path string) (*DebugPrinter, error) {
}, nil }, nil
} }
// To sets up a new DebugPrint to an io.WriteCloser. // To will set up a new DebugPrint to an io.WriteCloser.
func To(w io.WriteCloser) *DebugPrinter { func To(w io.WriteCloser) *DebugPrinter {
return &DebugPrinter{ return &DebugPrinter{
out: w, out: w,
@@ -55,21 +55,21 @@ func To(w io.WriteCloser) *DebugPrinter {
} }
// Print calls fmt.Print if Enabled is true. // Print calls fmt.Print if Enabled is true.
func (dbg *DebugPrinter) Print(v ...interface{}) { func (dbg *DebugPrinter) Print(v ...any) {
if dbg.Enabled { if dbg.Enabled {
fmt.Fprint(dbg.out, v...) fmt.Fprint(dbg.out, v...)
} }
} }
// Println calls fmt.Println if Enabled is true. // Println calls fmt.Println if Enabled is true.
func (dbg *DebugPrinter) Println(v ...interface{}) { func (dbg *DebugPrinter) Println(v ...any) {
if dbg.Enabled { if dbg.Enabled {
fmt.Fprintln(dbg.out, v...) fmt.Fprintln(dbg.out, v...)
} }
} }
// Printf calls fmt.Printf if Enabled is true. // Printf calls fmt.Printf if Enabled is true.
func (dbg *DebugPrinter) Printf(format string, v ...interface{}) { func (dbg *DebugPrinter) Printf(format string, v ...any) {
if dbg.Enabled { if dbg.Enabled {
fmt.Fprintf(dbg.out, format, v...) fmt.Fprintf(dbg.out, format, v...)
} }

View File

@@ -2,7 +2,6 @@ package dbg
import ( import (
"fmt" "fmt"
"io/ioutil"
"os" "os"
"testing" "testing"
@@ -50,7 +49,7 @@ func TestTo(t *testing.T) {
} }
func TestToFile(t *testing.T) { func TestToFile(t *testing.T) {
testFile, err := ioutil.TempFile("", "dbg") testFile, err := os.CreateTemp(t.TempDir(), "dbg")
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
err = testFile.Close() err = testFile.Close()
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
@@ -103,7 +102,7 @@ func TestWriting(t *testing.T) {
} }
func TestToFileError(t *testing.T) { func TestToFileError(t *testing.T) {
testFile, err := ioutil.TempFile("", "dbg") testFile, err := os.CreateTemp(t.TempDir(), "dbg")
assert.NoErrorT(t, err) assert.NoErrorT(t, err)
err = testFile.Chmod(0400) err = testFile.Chmod(0400)
assert.NoErrorT(t, err) assert.NoErrorT(t, err)

View File

@@ -1,12 +0,0 @@
Simple fatal utilities for Go programs.
```
result, err := doSomething()
die.If(err)
ok := processResult(result)
if !ok {
die.With("failed to process result %s", result.Name)
}
```

View File

@@ -1,4 +1,5 @@
// Package die contains utilities for fatal error handling. // Package die contains utilities for fatal error handling. It
// presents simple fatal utilities for Go programs.
package die package die
import ( import (
@@ -15,14 +16,14 @@ func If(err error) {
} }
// With prints the message to stderr, appending a newline, and exits. // With prints the message to stderr, appending a newline, and exits.
func With(fstr string, args ...interface{}) { func With(fstr string, args ...any) {
out := fmt.Sprintf("[!] %s\n", fstr) out := fmt.Sprintf("[!] %s\n", fstr)
fmt.Fprintf(os.Stderr, out, args...) fmt.Fprintf(os.Stderr, out, args...)
os.Exit(1) os.Exit(1)
} }
// When prints the error to stderr and exits if cond is true. // When prints the error to stderr and exits if cond is true.
func When(cond bool, fstr string, args ...interface{}) { func When(cond bool, fstr string, args ...any) {
if cond { if cond {
With(fstr, args...) With(fstr, args...)
} }

View File

@@ -1,10 +1,10 @@
//go:build !windows //go:build !windows
// +build !windows
// Package fileutil contains common file functions. // Package fileutil contains common file functions.
package fileutil package fileutil
import ( import (
"math"
"os" "os"
"golang.org/x/sys/unix" "golang.org/x/sys/unix"
@@ -46,5 +46,9 @@ const (
// Access returns a boolean indicating whether the mode being checked // Access returns a boolean indicating whether the mode being checked
// for is valid. // for is valid.
func Access(path string, mode int) error { func Access(path string, mode int) error {
return unix.Access(path, uint32(mode)) // Validate the conversion to avoid potential integer overflow (gosec G115).
if mode < 0 || uint64(mode) > uint64(math.MaxUint32) {
return unix.EINVAL
}
return unix.Access(path, uint32(mode)) // #nosec G115 - handled above.
} }

View File

@@ -1,5 +1,4 @@
//go:build windows //go:build windows
// +build windows
// Package fileutil contains common file functions. // Package fileutil contains common file functions.
package fileutil package fileutil

6
go.mod
View File

@@ -1,14 +1,14 @@
module git.wntrmute.dev/kyle/goutils module git.wntrmute.dev/kyle/goutils
go 1.20 go 1.24.0
require ( require (
github.com/hashicorp/go-syslog v1.0.0 github.com/hashicorp/go-syslog v1.0.0
github.com/kr/text v0.2.0 github.com/kr/text v0.2.0
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/pkg/sftp v1.12.0 github.com/pkg/sftp v1.12.0
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b golang.org/x/crypto v0.44.0
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad golang.org/x/sys v0.38.0
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 v2.4.0
) )

5
go.sum
View File

@@ -27,12 +27,17 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b h1:Qwe1rC8PSniVfAFPFJeyUkB+zcysC3RgJBAGk7eqBEU= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b h1:Qwe1rC8PSniVfAFPFJeyUkB+zcysC3RgJBAGk7eqBEU=
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.44.0 h1:A97SsFvM3AIwEEmTBiaxPPTYpDC47w720rdiiUvgoAU=
golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=

View File

@@ -11,7 +11,7 @@ import (
var progname = filepath.Base(os.Args[0]) var progname = filepath.Base(os.Args[0])
// ProgName returns what lib thinks the program name is, namely the // ProgName returns what lib thinks the program name is, namely the
// basename of of argv0. // basename of argv0.
// //
// It is similar to the Linux __progname function. // It is similar to the Linux __progname function.
func ProgName() string { func ProgName() string {

View File

@@ -23,7 +23,7 @@ func Example() {
map[string]string{"when": time.Now().String()}) map[string]string{"when": time.Now().String()})
} }
func ExampleNewFromFile() { func ExampleNewSplitFile() {
flog, err := logging.NewSplitFile("example.log", "example.err", true) flog, err := logging.NewSplitFile("example.log", "example.err", true)
if err != nil { if err != nil {
log.Fatal("filelog", "failed to open logger", log.Fatal("filelog", "failed to open logger",