cert-bundler: lint fixes

2025-11-15 23:27:50 -08:00
parent c99ffd4394
commit b879d62384
2 changed files with 168 additions and 121 deletions

@@ -451,7 +451,7 @@ linters:
     - path: 'logging/example_test.go'
       linters: [ testableexamples ]
     - path: 'main.go'
-      linters: [ forbidigo, mnd ]
+      linters: [ forbidigo, mnd, reassign ]
     - source: 'TODO'
      linters: [ godot ]
    - text: 'should have a package comment'

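Note on the reassign exclusion added above: that linter flags assignments to package-level variables of imported packages. A minimal, hypothetical sketch of the kind of pattern it can report in a main.go (none of this is code from this commit, and whether it fires depends on the linter's configured patterns):

package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	// flag.Usage is a package-level variable in the flag package;
	// overriding it is a common main.go idiom that reassign-style
	// checks can report.
	flag.Usage = func() {
		fmt.Fprintln(os.Stderr, "usage: example [flags]")
		flag.PrintDefaults()
	}
	flag.Parse()
}
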
@@ -8,8 +8,10 @@ import (
 	"crypto/x509"
 	_ "embed"
 	"encoding/pem"
+	"errors"
 	"flag"
 	"fmt"
+	"io"
 	"os"
 	"path/filepath"
 	"strings"
@@ -19,7 +21,7 @@ import (
 	"gopkg.in/yaml.v2"
 )
 
-// Config represents the top-level YAML configuration
+// Config represents the top-level YAML configuration.
 type Config struct {
 	Config struct {
 		Hashes string `yaml:"hashes"`
@@ -28,19 +30,19 @@ type Config struct {
 	Chains map[string]ChainGroup `yaml:"chains"`
 }
 
-// ChainGroup represents a named group of certificate chains
+// ChainGroup represents a named group of certificate chains.
 type ChainGroup struct {
 	Certs   []CertChain `yaml:"certs"`
 	Outputs Outputs     `yaml:"outputs"`
 }
 
-// CertChain represents a root certificate and its intermediates
+// CertChain represents a root certificate and its intermediates.
 type CertChain struct {
 	Root          string   `yaml:"root"`
 	Intermediates []string `yaml:"intermediates"`
 }
 
-// Outputs defines output format options
+// Outputs defines output format options.
 type Outputs struct {
 	IncludeSingle     bool `yaml:"include_single"`
 	IncludeIndividual bool `yaml:"include_individual"`
@@ -95,7 +97,8 @@ func main() {
 	}
 
 	// Create output directory if it doesn't exist
-	if err := os.MkdirAll(outputDir, 0755); err != nil {
+	err = os.MkdirAll(outputDir, 0750)
+	if err != nil {
 		fmt.Fprintf(os.Stderr, "Error creating output directory: %v\n", err)
 		os.Exit(1)
 	}
@@ -108,9 +111,9 @@ func main() {
 	}
 	createdFiles := make([]string, 0, totalFormats)
 	for groupName, group := range cfg.Chains {
-		files, err := processChainGroup(groupName, group, expiryDuration)
-		if err != nil {
-			fmt.Fprintf(os.Stderr, "Error processing chain group %s: %v\n", groupName, err)
+		files, perr := processChainGroup(groupName, group, expiryDuration)
+		if perr != nil {
+			fmt.Fprintf(os.Stderr, "Error processing chain group %s: %v\n", groupName, perr)
 			os.Exit(1)
 		}
 		createdFiles = append(createdFiles, files...)
@@ -119,8 +122,8 @@ func main() {
 	// Generate hash file for all created archives
 	if cfg.Config.Hashes != "" {
 		hashFile := filepath.Join(outputDir, cfg.Config.Hashes)
-		if err := generateHashFile(hashFile, createdFiles); err != nil {
-			fmt.Fprintf(os.Stderr, "Error generating hash file: %v\n", err)
+		if gerr := generateHashFile(hashFile, createdFiles); gerr != nil {
+			fmt.Fprintf(os.Stderr, "Error generating hash file: %v\n", gerr)
 			os.Exit(1)
 		}
 	}
@@ -135,8 +138,8 @@ func loadConfig(path string) (*Config, error) {
 	}
 
 	var cfg Config
-	if err := yaml.Unmarshal(data, &cfg); err != nil {
-		return nil, err
+	if uerr := yaml.Unmarshal(data, &cfg); uerr != nil {
+		return nil, uerr
 	}
 
 	return &cfg, nil
@@ -200,72 +203,107 @@ func processChainGroup(groupName string, group ChainGroup, expiryDuration time.D
 	return createdFiles, nil
 }
 
-// loadAndCollectCerts loads all certificates from chains and collects them for processing
-func loadAndCollectCerts(chains []CertChain, outputs Outputs, expiryDuration time.Duration) ([]*x509.Certificate, []certWithPath, error) {
+// loadAndCollectCerts loads all certificates from chains and collects them for processing.
+func loadAndCollectCerts(
+	chains []CertChain,
+	outputs Outputs,
+	expiryDuration time.Duration,
+) ([]*x509.Certificate, []certWithPath, error) {
 	var singleFileCerts []*x509.Certificate
 	var individualCerts []certWithPath
 
 	for _, chain := range chains {
-		// Load root certificate
-		rootCert, err := certlib.LoadCertificate(chain.Root)
-		if err != nil {
-			return nil, nil, fmt.Errorf("failed to load root certificate %s: %v", chain.Root, err)
+		s, i, cerr := collectFromChain(chain, outputs, expiryDuration)
+		if cerr != nil {
+			return nil, nil, cerr
 		}
-
-		// Check expiry for root
-		checkExpiry(chain.Root, rootCert, expiryDuration)
-
-		// Add root to collections if needed
-		if outputs.IncludeSingle {
-			singleFileCerts = append(singleFileCerts, rootCert)
+		if len(s) > 0 {
+			singleFileCerts = append(singleFileCerts, s...)
 		}
-		if outputs.IncludeIndividual {
-			individualCerts = append(individualCerts, certWithPath{
-				cert: rootCert,
-				path: chain.Root,
-			})
-		}
-
-		// Load and validate intermediates
-		for _, intPath := range chain.Intermediates {
-			intCert, err := certlib.LoadCertificate(intPath)
-			if err != nil {
-				return nil, nil, fmt.Errorf("failed to load intermediate certificate %s: %v", intPath, err)
-			}
-
-			// Validate that intermediate is signed by root
-			if err := intCert.CheckSignatureFrom(rootCert); err != nil {
-				return nil, nil, fmt.Errorf("intermediate %s is not properly signed by root %s: %v", intPath, chain.Root, err)
-			}
-
-			// Check expiry for intermediate
-			checkExpiry(intPath, intCert, expiryDuration)
-
-			// Add intermediate to collections if needed
-			if outputs.IncludeSingle {
-				singleFileCerts = append(singleFileCerts, intCert)
-			}
-			if outputs.IncludeIndividual {
-				individualCerts = append(individualCerts, certWithPath{
-					cert: intCert,
-					path: intPath,
-				})
-			}
+		if len(i) > 0 {
+			individualCerts = append(individualCerts, i...)
 		}
 	}
 
 	return singleFileCerts, individualCerts, nil
 }
 
-// prepareArchiveFiles prepares all files to be included in archives
-func prepareArchiveFiles(singleFileCerts []*x509.Certificate, individualCerts []certWithPath, outputs Outputs, encoding string) ([]fileEntry, error) {
+// collectFromChain loads a single chain, performs checks, and returns the certs to include.
+func collectFromChain(
+	chain CertChain,
+	outputs Outputs,
+	expiryDuration time.Duration,
+) (
+	[]*x509.Certificate,
+	[]certWithPath,
+	error,
+) {
+	var single []*x509.Certificate
+	var indiv []certWithPath
+
+	// Load root certificate
+	rootCert, rerr := certlib.LoadCertificate(chain.Root)
+	if rerr != nil {
+		return nil, nil, fmt.Errorf("failed to load root certificate %s: %w", chain.Root, rerr)
+	}
+
+	// Check expiry for root
+	checkExpiry(chain.Root, rootCert, expiryDuration)
+
+	// Add root to collections if needed
+	if outputs.IncludeSingle {
+		single = append(single, rootCert)
+	}
+	if outputs.IncludeIndividual {
+		indiv = append(indiv, certWithPath{cert: rootCert, path: chain.Root})
+	}
+
+	// Load and validate intermediates
+	for _, intPath := range chain.Intermediates {
+		intCert, lerr := certlib.LoadCertificate(intPath)
+		if lerr != nil {
+			return nil, nil, fmt.Errorf("failed to load intermediate certificate %s: %w", intPath, lerr)
+		}
+
+		// Validate that intermediate is signed by root
+		if sigErr := intCert.CheckSignatureFrom(rootCert); sigErr != nil {
+			return nil, nil, fmt.Errorf(
+				"intermediate %s is not properly signed by root %s: %w",
+				intPath,
+				chain.Root,
+				sigErr,
+			)
+		}
+
+		// Check expiry for intermediate
+		checkExpiry(intPath, intCert, expiryDuration)
+
+		// Add intermediate to collections if needed
+		if outputs.IncludeSingle {
+			single = append(single, intCert)
+		}
+		if outputs.IncludeIndividual {
+			indiv = append(indiv, certWithPath{cert: intCert, path: intPath})
+		}
+	}
+
+	return single, indiv, nil
+}
+
+// prepareArchiveFiles prepares all files to be included in archives.
+func prepareArchiveFiles(
+	singleFileCerts []*x509.Certificate,
+	individualCerts []certWithPath,
+	outputs Outputs,
+	encoding string,
+) ([]fileEntry, error) {
 	var archiveFiles []fileEntry
 
 	// Handle a single bundle file
 	if outputs.IncludeSingle && len(singleFileCerts) > 0 {
 		files, err := encodeCertsToFiles(singleFileCerts, "bundle", encoding, true)
 		if err != nil {
-			return nil, fmt.Errorf("failed to encode single bundle: %v", err)
+			return nil, fmt.Errorf("failed to encode single bundle: %w", err)
 		}
 		archiveFiles = append(archiveFiles, files...)
 	}
@@ -276,7 +314,7 @@ func prepareArchiveFiles(singleFileCerts []*x509.Certificate, individualCerts []
 		baseName := strings.TrimSuffix(filepath.Base(cp.path), filepath.Ext(cp.path))
 		files, err := encodeCertsToFiles([]*x509.Certificate{cp.cert}, baseName, encoding, false)
 		if err != nil {
-			return nil, fmt.Errorf("failed to encode individual cert %s: %v", cp.path, err)
+			return nil, fmt.Errorf("failed to encode individual cert %s: %w", cp.path, err)
 		}
 		archiveFiles = append(archiveFiles, files...)
 	}
@@ -294,7 +332,7 @@ func prepareArchiveFiles(singleFileCerts []*x509.Certificate, individualCerts []
 	return archiveFiles, nil
 }
 
-// createArchiveFiles creates archive files in the specified formats
+// createArchiveFiles creates archive files in the specified formats.
 func createArchiveFiles(groupName string, formats []string, archiveFiles []fileEntry) ([]string, error) {
 	createdFiles := make([]string, 0, len(formats))
 
@@ -307,11 +345,11 @@ func createArchiveFiles(groupName string, formats []string, archiveFiles []fileE
 		switch format {
 		case "zip":
 			if err := createZipArchive(archivePath, archiveFiles); err != nil {
-				return nil, fmt.Errorf("failed to create zip archive: %v", err)
+				return nil, fmt.Errorf("failed to create zip archive: %w", err)
 			}
 		case "tgz":
 			if err := createTarGzArchive(archivePath, archiveFiles); err != nil {
-				return nil, fmt.Errorf("failed to create tar.gz archive: %v", err)
+				return nil, fmt.Errorf("failed to create tar.gz archive: %w", err)
 			}
 		default:
 			return nil, fmt.Errorf("unsupported format: %s", format)
@@ -329,7 +367,12 @@ func checkExpiry(path string, cert *x509.Certificate, expiryDuration time.Durati
 	if cert.NotAfter.Before(expiryThreshold) {
 		daysUntilExpiry := int(cert.NotAfter.Sub(now).Hours() / 24)
 		if daysUntilExpiry < 0 {
-			fmt.Fprintf(os.Stderr, "WARNING: Certificate %s has EXPIRED (expired %d days ago)\n", path, -daysUntilExpiry)
+			fmt.Fprintf(
+				os.Stderr,
+				"WARNING: Certificate %s has EXPIRED (expired %d days ago)\n",
+				path,
+				-daysUntilExpiry,
+			)
 		} else {
 			fmt.Fprintf(os.Stderr, "WARNING: Certificate %s will expire in %d days (on %s)\n", path, daysUntilExpiry, cert.NotAfter.Format("2006-01-02"))
 		}
@@ -347,8 +390,13 @@ type certWithPath struct {
 }
 
 // encodeCertsToFiles converts certificates to file entries based on encoding type
-// If isSingle is true, certs are concatenated into a single file; otherwise one cert per file
-func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding string, isSingle bool) ([]fileEntry, error) {
+// If isSingle is true, certs are concatenated into a single file; otherwise one cert per file.
+func encodeCertsToFiles(
+	certs []*x509.Certificate,
+	baseName string,
+	encoding string,
+	isSingle bool,
+) ([]fileEntry, error) {
 	var files []fileEntry
 
 	switch encoding {
@@ -369,14 +417,12 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
 				name:    baseName + ".crt",
 				content: derContent,
 			})
-		} else {
+		} else if len(certs) > 0 {
 			// Individual DER file (should only have one cert)
-			if len(certs) > 0 {
-				files = append(files, fileEntry{
-					name:    baseName + ".crt",
-					content: certs[0].Raw,
-				})
-			}
+			files = append(files, fileEntry{
+				name:    baseName + ".crt",
+				content: certs[0].Raw,
+			})
 		}
 	case "both":
 		// Add PEM version
@@ -395,13 +441,11 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
 				name:    baseName + ".crt",
 				content: derContent,
 			})
-		} else {
-			if len(certs) > 0 {
-				files = append(files, fileEntry{
-					name:    baseName + ".crt",
-					content: certs[0].Raw,
-				})
-			}
+		} else if len(certs) > 0 {
+			files = append(files, fileEntry{
+				name:    baseName + ".crt",
+				content: certs[0].Raw,
+			})
 		}
 	default:
 		return nil, fmt.Errorf("unsupported encoding: %s (must be 'pem', 'der', or 'both')", encoding)
@@ -410,7 +454,7 @@ func encodeCertsToFiles(certs []*x509.Certificate, baseName string, encoding str
 	return files, nil
 }
 
-// encodeCertsToPEM encodes certificates to PEM format
+// encodeCertsToPEM encodes certificates to PEM format.
 func encodeCertsToPEM(certs []*x509.Certificate) []byte {
 	var pemContent []byte
 	for _, cert := range certs {
@@ -435,40 +479,49 @@ func generateManifest(files []fileEntry) []byte {
 	return []byte(manifest.String())
 }
 
+// closeWithErr attempts to close all provided closers, joining any close errors with baseErr.
+func closeWithErr(baseErr error, closers ...io.Closer) error {
+	for _, c := range closers {
+		if c == nil {
+			continue
+		}
+		if cerr := c.Close(); cerr != nil {
+			baseErr = errors.Join(baseErr, cerr)
+		}
+	}
+	return baseErr
+}
+
 func createZipArchive(path string, files []fileEntry) error {
-	f, err := os.Create(path)
-	if err != nil {
-		return err
+	f, zerr := os.Create(path)
+	if zerr != nil {
+		return zerr
 	}
 
 	w := zip.NewWriter(f)
 	for _, file := range files {
-		fw, err := w.Create(file.name)
-		if err != nil {
-			w.Close()
-			f.Close()
-			return err
+		fw, werr := w.Create(file.name)
+		if werr != nil {
+			return closeWithErr(werr, w, f)
 		}
-		if _, err := fw.Write(file.content); err != nil {
-			w.Close()
-			f.Close()
-			return err
+		if _, werr = fw.Write(file.content); werr != nil {
+			return closeWithErr(werr, w, f)
 		}
 	}
 
 	// Check errors on close operations
-	if err := w.Close(); err != nil {
-		f.Close()
-		return err
+	if cerr := w.Close(); cerr != nil {
+		_ = f.Close()
+		return cerr
 	}
 	return f.Close()
 }
 
 func createTarGzArchive(path string, files []fileEntry) error {
-	f, err := os.Create(path)
-	if err != nil {
-		return err
+	f, terr := os.Create(path)
+	if terr != nil {
+		return terr
 	}
 
 	gw := gzip.NewWriter(f)
@@ -480,29 +533,23 @@ func createTarGzArchive(path string, files []fileEntry) error {
 			Mode: 0644,
 			Size: int64(len(file.content)),
 		}
-		if err := tw.WriteHeader(hdr); err != nil {
-			tw.Close()
-			gw.Close()
-			f.Close()
-			return err
+		if herr := tw.WriteHeader(hdr); herr != nil {
+			return closeWithErr(herr, tw, gw, f)
 		}
-		if _, err := tw.Write(file.content); err != nil {
-			tw.Close()
-			gw.Close()
-			f.Close()
-			return err
+		if _, werr := tw.Write(file.content); werr != nil {
+			return closeWithErr(werr, tw, gw, f)
 		}
 	}
 
 	// Check errors on close operations in the correct order
-	if err := tw.Close(); err != nil {
-		gw.Close()
-		f.Close()
-		return err
+	if cerr := tw.Close(); cerr != nil {
+		_ = gw.Close()
+		_ = f.Close()
+		return cerr
 	}
-	if err := gw.Close(); err != nil {
-		f.Close()
-		return err
+	if cerr := gw.Close(); cerr != nil {
+		_ = f.Close()
+		return cerr
 	}
 	return f.Close()
 }
@@ -515,9 +562,9 @@ func generateHashFile(path string, files []string) error {
 	defer f.Close()
 
 	for _, file := range files {
-		data, err := os.ReadFile(file)
-		if err != nil {
-			return err
+		data, rerr := os.ReadFile(file)
+		if rerr != nil {
+			return rerr
 		}
 
 		hash := sha256.Sum256(data)
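
For reference, the closeWithErr helper added in this diff builds on errors.Join (Go 1.20+), so the original failure and any Close failures all survive in the returned error. A minimal, self-contained sketch of that behavior; the temp-file name and the simulated write error below are made up for illustration and are not part of this commit:

package main

import (
	"errors"
	"fmt"
	"io"
	"os"
)

// Same shape as the helper in the diff: close every non-nil closer and
// join any Close errors onto the error that triggered the cleanup.
func closeWithErr(baseErr error, closers ...io.Closer) error {
	for _, c := range closers {
		if c == nil {
			continue
		}
		if cerr := c.Close(); cerr != nil {
			baseErr = errors.Join(baseErr, cerr)
		}
	}
	return baseErr
}

func main() {
	f, err := os.CreateTemp("", "bundle-*.zip")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	defer os.Remove(f.Name())

	// Pretend an archive write failed: the nil closer is skipped, the
	// file is still closed, and any Close error is joined onto writeErr.
	writeErr := errors.New("simulated write failure")
	if cleanupErr := closeWithErr(writeErr, nil, f); cleanupErr != nil {
		fmt.Println(cleanupErr)
	}
}

Since errors.Join returns nil when every argument is nil, closeWithErr(nil, f) degrades to a plain Close on the success path.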