Compare commits
38 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 06678499d4 | |
| | fad17065fe | |
| | 63e0cbeacb | |
| | 231b98dd68 | |
| | 160a42ec26 | |
| | b6b33e00c8 | |
| | 9e1aed257b | |
| | 411907c0ad | |
| | 06c7f8f42f | |
| | 8b638065d1 | |
| | 9ac378eaa5 | |
| | eaaaabe439 | |
| | 4122f01644 | |
| | 263a5d3973 | |
| | afef3eea62 | |
| | d6c5360a06 | |
| | 0ab21e12f3 | |
| | 832475db56 | |
| | cb16cfa183 | |
| | d083a39a7d | |
| | fc77225740 | |
| | 41df73d7a8 | |
| | 0dc478746a | |
| | f44bbc9eca | |
| | 1df0350fc7 | |
| | d42c1fa1c5 | |
| | 4fa6e4ab0e | |
| | a3ead16faf | |
| | c8f839de73 | |
| | 0c56a477bc | |
| | 763dbec310 | |
| | 0e6b60a2c4 | |
| | be34ad263d | |
| | 48b03c908d | |
| | 70d7ff505b | |
| | 68e5822176 | |
| | 54dd461733 | |
| | eba03a2f4a | |
.travis.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
sudo: false
language: go
go:
  - tip
  - 1.9
script:
  - go get github.com/golang/lint/golint
  - go get golang.org/x/tools/cmd/cover
  - go get github.com/kisom/goutils/...
  - go test -cover github.com/kisom/goutils/...
  - golint github.com/kisom/goutils/...
notifications:
  email:
    recipients:
      - coder@kyleisom.net
    on_success: change
    on_failure: change
CHANGELOG (new file, 27 lines)
@@ -0,0 +1,27 @@
Release 1.2.1 - 2018-09-15

+ Add missing format argument to Errorf call in kgz.

Release 1.2.0 - 2018-09-15

+ Adds the kgz command line utility.

Release 1.1.0 - 2017-11-16

+ A number of new command line utilities were added

  + atping
  + cruntar
  + renfnv
  +
  + ski
  + subjhash
  + yamll

+ new package: ahash
  + package for loading hashes from an algorithm string

+ new certificate loading functions in the lib package

+ new package: tee
  + emulates tee(1)
README.md (23 diff lines)
@@ -3,11 +3,16 @@ GOUTILS
This is a collection of small utility code I've written in Go; the `cmd/`
directory has a number of command-line utilities. Rather than keep all
of these in superfluous repositories of their own, I'm putting them here.
Note that for packaging purposes, the goutils-pkg repo should be used: it
pins the library versions to working copies and vendors all dependencies. See
https://github.com/kisom/goutils-pkg for more details.

Contents:

ahash/          Provides hashes from string algorithm specifiers.
assert/         Error handling, assertion-style.
cmd/
    atping/     Automated TCP ping, meant for putting in cronjobs.
    certchain/  Display the certificate chain from a
                TLS connection.
    certdump/   Dump certificate information.
@@ -18,9 +23,13 @@ Contents:
                the time to expiry and checking for revocations.
    clustersh/  Run commands or transfer files across multiple
                servers via SSH.
    cruntar/    Untar an archive with hard links, copying instead of
                linking.
    csrpubdump/ Dump the public key from an X.509 certificate request.
    fragment/   Print a fragment of a file.
    jlp/        JSON linter/prettifier.
    kgz/        Custom gzip compressor / decompressor that handles 99%
                of my use cases.
    pem2bin/    Dump the binary body of a PEM-encoded block.
    pembody/    Print the body of a PEM certificate.
    pemit/      Dump data to a PEM file.
@@ -28,21 +37,27 @@ Contents:
                current working directory) imports for a Go file.
    readchain/  Print the common name for the certificates
                in a bundle.
    renfnv/     Rename a file to base32-encoded 64-bit FNV-1a hash.
    rhash/      Compute the digest of remote files.
    showimp     Display the external imports in a package.
    ski         Display the SKI for PEM-encoded TLS material.
    stealchain/ Dump the verified chain from a TLS
                connection.
    subjhash/   Print or match subject info from a certificate.
    tlskeypair/ Check whether a TLS certificate and key file match.
    utc/        Convert times to UTC.
    yamll/      A small YAML linter.
die/            Death of a program.
fileutil/       Common file functions.
fileutil/       Common file functions.
lib/            Commonly-useful functions for writing Go programs.
logging/        A logging library.
mwc/            MultiwriteCloser implementation.
sbuf/           A byte buffer that can be wiped.
testio/         Various I/O utilities useful during testing.
testutil/       Various utility functions useful during testing.
tee/            Emulate tee(1)'s functionality in io.Writers.
testio/         Various I/O utilities useful during testing.
testutil/       Various utility functions useful during testing.

Each program should have a small README in the directory with more
information.
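Reviewer note: the README doesn't show an install command, but the Travis configuration in this change fetches the whole tree with the `/...` wildcard; the same command works locally (assuming a configured GOPATH):

    go get github.com/kisom/goutils/...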
ahash/ahash.go (new file, 258 lines)
@@ -0,0 +1,258 @@
// Package ahash provides support for hashing data with a selectable
// hash function.
package ahash

import (
    "crypto/md5"
    "crypto/sha1"
    "crypto/sha256"
    "crypto/sha512"
    "errors"
    "hash"
    "hash/adler32"
    "hash/crc32"
    "hash/crc64"
    "hash/fnv"
    "io"
    "sort"

    "github.com/kisom/goutils/assert"
    "golang.org/x/crypto/blake2b"
    "golang.org/x/crypto/blake2s"
    "golang.org/x/crypto/md4"
    "golang.org/x/crypto/ripemd160"
    "golang.org/x/crypto/sha3"
)

func sha224Slicer(bs []byte) []byte {
    sum := sha256.Sum224(bs)
    return sum[:]
}

func sha256Slicer(bs []byte) []byte {
    sum := sha256.Sum256(bs)
    return sum[:]
}

func sha384Slicer(bs []byte) []byte {
    sum := sha512.Sum384(bs)
    return sum[:]
}

func sha512Slicer(bs []byte) []byte {
    sum := sha512.Sum512(bs)
    return sum[:]
}

var sliceFunctions = map[string]func([]byte) []byte{
    "sha224": sha224Slicer,
    "sha256": sha256Slicer,
    "sha384": sha384Slicer,
    "sha512": sha512Slicer,
}

// Hash represents a generic hash function that may or may not be secure. It
// satisfies the hash.Hash interface.
type Hash struct {
    hash.Hash
    secure bool
    algo   string
}

// HashAlgo returns the name of the underlying hash algorithm.
func (h *Hash) HashAlgo() string {
    return h.algo
}

// IsSecure returns true if the Hash is a cryptographic hash.
func (h *Hash) IsSecure() bool {
    return h.secure
}

// Sum32 returns true if the underlying hash is a 32-bit hash; if it is, the
// uint32 parameter will contain the hash.
func (h *Hash) Sum32() (uint32, bool) {
    h32, ok := h.Hash.(hash.Hash32)
    if !ok {
        return 0, false
    }

    return h32.Sum32(), true
}

// IsHash32 returns true if the underlying hash is a 32-bit hash function.
func (h *Hash) IsHash32() bool {
    _, ok := h.Hash.(hash.Hash32)
    return ok
}

// Sum64 returns true if the underlying hash is a 64-bit hash; if it is, the
// uint64 parameter will contain the hash.
func (h *Hash) Sum64() (uint64, bool) {
    h64, ok := h.Hash.(hash.Hash64)
    if !ok {
        return 0, false
    }

    return h64.Sum64(), true
}

// IsHash64 returns true if the underlying hash is a 64-bit hash function.
func (h *Hash) IsHash64() bool {
    _, ok := h.Hash.(hash.Hash64)
    return ok
}

func blakeFunc(bf func(key []byte) (hash.Hash, error)) func() hash.Hash {
    return func() hash.Hash {
        h, err := bf(nil)
        assert.NoError(err, "while constructing a BLAKE2 hash function")
        return h
    }
}

var secureHashes = map[string]func() hash.Hash{
    "ripemd160":   ripemd160.New,
    "sha224":      sha256.New224,
    "sha256":      sha256.New,
    "sha384":      sha512.New384,
    "sha512":      sha512.New,
    "sha3-224":    sha3.New224,
    "sha3-256":    sha3.New256,
    "sha3-384":    sha3.New384,
    "sha3-512":    sha3.New512,
    "blake2s-256": blakeFunc(blake2s.New256),
    "blake2b-256": blakeFunc(blake2b.New256),
    "blake2b-384": blakeFunc(blake2b.New384),
    "blake2b-512": blakeFunc(blake2b.New512),
}

func newHash32(f func() hash.Hash32) func() hash.Hash {
    return func() hash.Hash {
        return f()
    }
}

func newHash64(f func() hash.Hash64) func() hash.Hash {
    return func() hash.Hash {
        return f()
    }
}

func newCRC64(tab uint64) func() hash.Hash {
    return newHash64(
        func() hash.Hash64 {
            return crc64.New(crc64.MakeTable(tab))
        })
}

var insecureHashes = map[string]func() hash.Hash{
    "md4":        md4.New,
    "md5":        md5.New,
    "sha1":       sha1.New,
    "adler32":    newHash32(adler32.New),
    "crc32-ieee": newHash32(crc32.NewIEEE),
    "crc64":      newCRC64(crc64.ISO),
    "crc64-ecma": newCRC64(crc64.ECMA),
    "fnv1-32a":   newHash32(fnv.New32a),
    "fnv1-32":    newHash32(fnv.New32),
    "fnv1-64a":   newHash64(fnv.New64a),
    "fnv1-64":    newHash64(fnv.New64),
}

// New returns a new Hash for the specified algorithm.
func New(algo string) (*Hash, error) {
    h := &Hash{algo: algo}

    hf, ok := secureHashes[algo]
    if ok {
        h.Hash = hf()
        h.secure = true
        return h, nil
    }

    hf, ok = insecureHashes[algo]
    if ok {
        h.Hash = hf()
        h.secure = false
        return h, nil
    }

    return nil, errors.New("ahash: unsupported hash algorithm " + algo)
}

// Sum returns the digest (not the hex digest) of the data using the given
// algorithm.
func Sum(algo string, data []byte) ([]byte, error) {
    h, err := New(algo)
    if err != nil {
        return nil, err
    }

    _, err = h.Write(data)
    if err != nil {
        return nil, err
    }

    return h.Sum(nil), nil
}

// SumReader reads all the data from the given io.Reader and returns the
// digest (not the hex digest) from the specified algorithm.
func SumReader(algo string, r io.Reader) ([]byte, error) {
    h, err := New(algo)
    if err != nil {
        return nil, err
    }

    _, err = io.Copy(h, r)
    if err != nil {
        return nil, err
    }

    return h.Sum(nil), nil
}

var insecureHashList, secureHashList, hashList []string

func init() {
    shl := len(secureHashes)   // secure hash list length
    ihl := len(insecureHashes) // insecure hash list length
    ahl := shl + ihl           // all hash list length

    insecureHashList = make([]string, 0, ihl)
    secureHashList = make([]string, 0, shl)
    hashList = make([]string, 0, ahl)

    for algo := range insecureHashes {
        insecureHashList = append(insecureHashList, algo)
    }
    sort.Strings(insecureHashList)

    for algo := range secureHashes {
        secureHashList = append(secureHashList, algo)
    }
    sort.Strings(secureHashList)

    hashList = append(hashList, insecureHashList...)
    hashList = append(hashList, secureHashList...)
    sort.Strings(hashList)
}

// HashList returns a sorted list of all the hash algorithms supported by the
// package.
func HashList() []string {
    return hashList[:]
}

// SecureHashList returns a sorted list of all the secure (cryptographic) hash
// algorithms supported by the package.
func SecureHashList() []string {
    return secureHashList[:]
}

// InsecureHashList returns a sorted list of all the insecure hash algorithms
// supported by the package.
func InsecureHashList() []string {
    return insecureHashList[:]
}
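Reviewer note: a minimal usage sketch for the new ahash package, based only on the API added above (New, Sum, SumReader, HashList); the file name and the chosen algorithm string are illustrative, not part of this change.

    // Hashing a file with ahash (illustrative only; "example.txt" is a placeholder).
    package main

    import (
        "fmt"
        "log"
        "os"

        "github.com/kisom/goutils/ahash"
    )

    func main() {
        file, err := os.Open("example.txt")
        if err != nil {
            log.Fatal(err)
        }
        defer file.Close()

        // "sha256" is one of the algorithms registered in secureHashes.
        sum, err := ahash.SumReader("sha256", file)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("sha256=%x\n", sum)

        // HashList reports every registered algorithm name, sorted.
        fmt.Println(ahash.HashList())
    }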
ahash/ahash_test.go (new file, 141 lines)
@@ -0,0 +1,141 @@
package ahash

import (
    "bytes"
    "fmt"
    "testing"

    "github.com/kisom/goutils/assert"
)

func TestSecureHash(t *testing.T) {
    algo := "sha256"
    h, err := New(algo)
    assert.NoErrorT(t, err)
    assert.BoolT(t, h.IsSecure(), algo+" should be a secure hash")
    assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
    assert.BoolT(t, !h.IsHash32(), algo+" isn't actually a 32-bit hash")
    assert.BoolT(t, !h.IsHash64(), algo+" isn't actually a 64-bit hash")

    var data []byte
    var expected = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    sum, err := Sum(algo, data)
    assert.NoErrorT(t, err)
    assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))

    data = []byte("hello, world")
    buf := bytes.NewBuffer(data)
    expected = "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
    sum, err = SumReader(algo, buf)
    assert.NoErrorT(t, err)
    assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))

    data = []byte("hello world")
    _, err = h.Write(data)
    assert.NoErrorT(t, err)
    unExpected := "09ca7e4eaa6e8ae9c7d261167129184883644d07dfba7cbfbc4c8a2e08360d5b"
    sum = h.Sum(nil)
    assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
}

func TestInsecureHash(t *testing.T) {
    algo := "md5"
    h, err := New(algo)
    assert.NoErrorT(t, err)
    assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
    assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
    assert.BoolT(t, !h.IsHash32(), algo+" isn't actually a 32-bit hash")
    assert.BoolT(t, !h.IsHash64(), algo+" isn't actually a 64-bit hash")

    var data []byte
    var expected = "d41d8cd98f00b204e9800998ecf8427e"
    sum, err := Sum(algo, data)
    assert.NoErrorT(t, err)
    assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))

    data = []byte("hello, world")
    buf := bytes.NewBuffer(data)
    expected = "e4d7f1b4ed2e42d15898f4b27b019da4"
    sum, err = SumReader(algo, buf)
    assert.NoErrorT(t, err)
    assert.BoolT(t, fmt.Sprintf("%x", sum) == expected, fmt.Sprintf("expected hash %s but have %x", expected, sum))

    data = []byte("hello world")
    _, err = h.Write(data)
    assert.NoErrorT(t, err)
    unExpected := "e4d7f1b4ed2e42d15898f4b27b019da4"
    sum = h.Sum(nil)
    assert.BoolT(t, fmt.Sprintf("%x", sum) != unExpected, fmt.Sprintf("hash shouldn't have returned %x", unExpected))
}

func TestHash32(t *testing.T) {
    algo := "crc32-ieee"
    h, err := New(algo)
    assert.NoErrorT(t, err)
    assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
    assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
    assert.BoolT(t, h.IsHash32(), algo+" is actually a 32-bit hash")
    assert.BoolT(t, !h.IsHash64(), algo+" isn't actually a 64-bit hash")

    var data []byte
    var expected uint32

    h.Write(data)
    sum, ok := h.Sum32()
    assert.BoolT(t, ok, algo+" should be able to return a Sum32")
    assert.BoolT(t, expected == sum, fmt.Sprintf("%s returned the %d but expected %d", algo, sum, expected))

    data = []byte("hello, world")
    expected = 0xffab723a
    h.Write(data)
    sum, ok = h.Sum32()
    assert.BoolT(t, ok, algo+" should be able to return a Sum32")
    assert.BoolT(t, expected == sum, fmt.Sprintf("%s returned the %d but expected %d", algo, sum, expected))

    h.Reset()
    data = []byte("hello world")
    h.Write(data)
    sum, ok = h.Sum32()
    assert.BoolT(t, ok, algo+" should be able to return a Sum32")
    assert.BoolT(t, expected != sum, fmt.Sprintf("%s returned %d but shouldn't have", algo, sum))
}

func TestHash64(t *testing.T) {
    algo := "crc64"
    h, err := New(algo)
    assert.NoErrorT(t, err)
    assert.BoolT(t, !h.IsSecure(), algo+" shouldn't be a secure hash")
    assert.BoolT(t, h.HashAlgo() == algo, "hash returned the wrong HashAlgo")
    assert.BoolT(t, h.IsHash64(), algo+" is actually a 64-bit hash")
    assert.BoolT(t, !h.IsHash32(), algo+" isn't actually a 32-bit hash")

    var data []byte
    var expected uint64

    h.Write(data)
    sum, ok := h.Sum64()
    assert.BoolT(t, ok, algo+" should be able to return a Sum64")
    assert.BoolT(t, expected == sum, fmt.Sprintf("%s returned the %d but expected %d", algo, sum, expected))

    data = []byte("hello, world")
    expected = 0x16c45c0eb1d9c2ec
    h.Write(data)
    sum, ok = h.Sum64()
    assert.BoolT(t, ok, algo+" should be able to return a Sum64")
    assert.BoolT(t, expected == sum, fmt.Sprintf("%s returned the %d but expected %d", algo, sum, expected))

    h.Reset()
    data = []byte("hello world")
    h.Write(data)
    sum, ok = h.Sum64()
    assert.BoolT(t, ok, algo+" should be able to return a Sum64")
    assert.BoolT(t, expected != sum, fmt.Sprintf("%s returned %d but shouldn't have", algo, sum))
}

func TestListLengthSanity(t *testing.T) {
    all := HashList()
    secure := SecureHashList()
    insecure := InsecureHashList()

    assert.BoolT(t, len(all) == len(secure)+len(insecure))
}
@@ -16,7 +16,7 @@ import (
    "testing"
)

// NoDebug, if set to true, will cause all asserts to be ignored.
// NoDebug can be set to true to cause all asserts to be ignored.
var NoDebug bool

func die(what string, a ...string) {
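Reviewer note: the comment change above documents the NoDebug switch; a minimal sketch of how it is meant to be used, based only on identifiers visible in this diff and in ahash.go (assert.NoDebug, assert.NoError). The surrounding statements are hypothetical usage, not part of this change.

    // Disabling assertions in a release build (hypothetical usage).
    assert.NoDebug = true
    h, err := blake2b.New256(nil)
    assert.NoError(err, "constructing BLAKE2b-256") // becomes a no-op when NoDebug is true
    _ = h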
@@ -6,6 +6,7 @@ import (
    "crypto/ecdsa"
    "crypto/elliptic"
    "crypto/rsa"
    "crypto/sha256"
    "crypto/tls"
    "crypto/x509"
    "crypto/x509/pkix"
@@ -13,6 +14,7 @@ import (
    "fmt"
    "io/ioutil"
    "os"
    "sort"
    "strings"

    "github.com/cloudflare/cfssl/helpers"
@@ -82,6 +84,7 @@ func keyUsages(ku x509.KeyUsage) string {
            uses = append(uses, s)
        }
    }
    sort.Strings(uses)

    return strings.Join(uses, ", ")
}
@@ -91,6 +94,7 @@ func extUsage(ext []x509.ExtKeyUsage) string {
    for i := range ext {
        ns = append(ns, extKeyUsages[ext[i]])
    }
    sort.Strings(ns)

    return strings.Join(ns, ", ")
}
@@ -116,7 +120,10 @@ func showBasicConstraints(cert *x509.Certificate) {

const oneTrueDateFormat = "2006-01-02T15:04:05-0700"

var dateFormat string
var (
    dateFormat string
    showHash   bool // if true, print a SHA256 hash of the certificate's Raw field
)

func wrapPrint(text string, indent int) {
    tabs := ""
@@ -129,6 +136,9 @@ func wrapPrint(text string, indent int) {

func displayCert(cert *x509.Certificate) {
    fmt.Println("CERTIFICATE")
    if showHash {
        fmt.Println(wrap(fmt.Sprintf("SHA256: %x", sha256.Sum256(cert.Raw)), 0))
    }
    fmt.Println(wrap("Subject: "+displayName(cert.Subject), 0))
    fmt.Println(wrap("Issuer: "+displayName(cert.Issuer), 0))
    fmt.Printf("\tSignature algorithm: %s / %s\n", sigAlgoPK(cert.SignatureAlgorithm),
@@ -273,6 +283,7 @@ func displayAllCertsWeb(uri string, leafOnly bool) {

func main() {
    var leafOnly bool
    flag.BoolVar(&showHash, "d", false, "show hashes of raw DER contents")
    flag.StringVar(&dateFormat, "s", oneTrueDateFormat, "date `format` in Go time format")
    flag.BoolVar(&leafOnly, "l", false, "only show the leaf certificate")
    flag.Parse()
cmd/cruntar/README (new file, 20 lines)
@@ -0,0 +1,20 @@
ChromeOS untar

This is a tool that is intended to support untarring on SquashFS file
systems. In particular, every time it encounters a hard link, it
will just create a copy of the file.

Usage: cruntar [-jmvpz] archive [dest]

Flags:
    -a  Shortcut for -m -p: preserve owners and file mode.
    -j  The archive is compressed with bzip2.
    -m  Preserve file modes.
    -p  Preserve ownership.
    -v  Print the name of each file as it is being processed.
    -z  The archive is compressed with gzip.

I wrote this after running into problems with untarring the
gcc-arm-eabi-none toolchain. The shared storage in Termux under
ChromeOS doesn't support hard links, so I opted to just make a copy
rather than dealing with links and whatnot.
cmd/cruntar/main.go (new file, 265 lines)
@@ -0,0 +1,265 @@
package main

import (
    "archive/tar"
    "compress/bzip2"
    "compress/gzip"
    "errors"
    "flag"
    "fmt"
    "io"
    "os"
    "path/filepath"

    "github.com/kisom/goutils/die"
)

var (
    preserveOwners bool
    preserveMode   bool
    verbose        bool
)

func setupFile(hdr *tar.Header, file *os.File) error {
    if preserveMode {
        if verbose {
            fmt.Printf("\tchmod %0#o\n", hdr.Mode)
        }
        err := file.Chmod(os.FileMode(hdr.Mode))
        if err != nil {
            return err
        }
    }

    if preserveOwners {
        fmt.Printf("\tchown %d:%d\n", hdr.Uid, hdr.Gid)
        err := file.Chown(hdr.Uid, hdr.Gid)
        if err != nil {
            return err
        }
    }

    return nil
}

func linkTarget(target, top string) string {
    if filepath.IsAbs(target) {
        return target
    }

    return filepath.Clean(filepath.Join(target, top))
}

func processFile(tfr *tar.Reader, hdr *tar.Header, top string) error {
    if verbose {
        fmt.Println(hdr.Name)
    }
    filePath := filepath.Clean(filepath.Join(top, hdr.Name))
    switch hdr.Typeflag {
    case tar.TypeReg, tar.TypeRegA:
        file, err := os.Create(filePath)
        if err != nil {
            return err
        }

        _, err = io.Copy(file, tfr)
        if err != nil {
            return err
        }

        err = setupFile(hdr, file)
        if err != nil {
            return err
        }
    case tar.TypeLink:
        file, err := os.Create(filePath)
        if err != nil {
            return err
        }

        source, err := os.Open(hdr.Linkname)
        if err != nil {
            return err
        }

        _, err = io.Copy(file, source)
        if err != nil {
            return err
        }

        err = setupFile(hdr, file)
        if err != nil {
            return err
        }
    case tar.TypeSymlink:
        err := os.Symlink(linkTarget(hdr.Linkname, top), filePath)
        if err != nil {
            return err
        }
    case tar.TypeDir:
        err := os.MkdirAll(filePath, os.FileMode(hdr.Mode))
        if err != nil {
            return err
        }
    }

    return nil
}

var compression = map[string]bool{
    "gzip":  false,
    "bzip2": false,
}

type bzipCloser struct {
    r io.Reader
}

func (brc *bzipCloser) Read(p []byte) (int, error) {
    return brc.r.Read(p)
}

func (brc *bzipCloser) Close() error {
    return nil
}

func newBzipCloser(r io.ReadCloser) (io.ReadCloser, error) {
    br := bzip2.NewReader(r)
    return &bzipCloser{r: br}, nil
}

var compressFuncs = map[string]func(io.ReadCloser) (io.ReadCloser, error){
    "gzip":  func(r io.ReadCloser) (io.ReadCloser, error) { return gzip.NewReader(r) },
    "bzip2": newBzipCloser,
}

func verifyCompression() bool {
    var compressed bool
    for _, v := range compression {
        if compressed && v {
            return false
        }
        compressed = compressed || v
    }
    return true
}

func getReader(r io.ReadCloser) (io.ReadCloser, error) {
    for c, v := range compression {
        if v {
            return compressFuncs[c](r)
        }
    }

    return r, nil
}

func openArchive(path string) (io.ReadCloser, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }

    r, err := getReader(file)
    if err != nil {
        return nil, err
    }

    return r, nil
}

var compressFlags struct {
    z bool
    j bool
}

func parseCompressFlags() error {
    if compressFlags.z {
        compression["gzip"] = true
    }

    if compressFlags.j {
        compression["bzip2"] = true
    }

    if !verifyCompression() {
        return errors.New("multiple compression formats specified")
    }

    return nil
}

func usage(w io.Writer) {
    fmt.Fprintf(w, `ChromeOS untar

This is a tool that is intended to support untarring on SquashFS file
systems. In particular, every time it encounters a hard link, it
will just create a copy of the file.

Usage: cruntar [-jmvpz] archive [dest]

Flags:
    -a  Shortcut for -m -p: preserve owners and file mode.
    -j  The archive is compressed with bzip2.
    -m  Preserve file modes.
    -p  Preserve ownership.
    -v  Print the name of each file as it is being processed.
    -z  The archive is compressed with gzip.
`)
}

func init() {
    flag.Usage = func() { usage(os.Stderr) }
}

func main() {
    var archive, help bool
    flag.BoolVar(&archive, "a", false, "Shortcut for -m -p: preserve owners and file mode.")
    flag.BoolVar(&help, "h", false, "print a help message")
    flag.BoolVar(&compressFlags.j, "j", false, "bzip2 compression")
    flag.BoolVar(&preserveMode, "m", false, "preserve file modes")
    flag.BoolVar(&preserveOwners, "p", false, "preserve ownership")
    flag.BoolVar(&verbose, "v", false, "verbose mode")
    flag.BoolVar(&compressFlags.z, "z", false, "gzip compression")
    flag.Parse()

    if help {
        usage(os.Stdout)
        os.Exit(0)
    }

    if archive {
        preserveMode = true
        preserveOwners = true
    }

    err := parseCompressFlags()
    die.If(err)

    if flag.NArg() == 0 {
        return
    }

    top := "./"
    if flag.NArg() > 1 {
        top = flag.Arg(1)
    }

    r, err := openArchive(flag.Arg(0))
    die.If(err)

    tfr := tar.NewReader(r)
    for {
        hdr, err := tfr.Next()
        if err == io.EOF {
            break
        }
        die.If(err)

        err = processFile(tfr, hdr, top)
        die.If(err)

    }

    r.Close()
}
@@ -4,6 +4,7 @@ import (
    "bufio"
    "flag"
    "fmt"
    "io"
    "os"
    "path/filepath"
    "strconv"
@@ -11,9 +12,13 @@ import (
    "github.com/kisom/goutils/die"
)

func usage() {
func init() {
    flag.Usage = func() { usage(os.Stdout); os.Exit(1) }
}

func usage(w io.Writer) {
    progname := filepath.Base(os.Args[0])
    fmt.Printf(`Usage: %s [-nl] file start [end]
    fmt.Fprintf(w, `Usage: %s [-nl] file start [end]

Print a fragment of a file starting a line 'start' and ending
at line 'end', or EOF if no end is specified.
@@ -27,7 +32,7 @@ func main() {
    flag.Parse()

    if flag.NArg() < 2 || flag.NArg() > 3 {
        usage()
        usage(os.Stderr)
        os.Exit(1)
    }
cmd/kgz/README (new file, 23 lines)
@@ -0,0 +1,23 @@
kgz

kgz is like gzip, but supports compressing and decompressing to a different
directory than the source file is in.

Usage: kgz [-l] source [target]

If target is a directory, the basename of the source file will be used
as the target filename. Compression and decompression is selected
based on whether the source filename ends in ".gz".

Flags:
    -l level    Compression level (0-9). Only meaningful when
                compressing a file.
cmd/kgz/main.go (new file, 182 lines)
@@ -0,0 +1,182 @@
package main

import (
    "compress/flate"
    "compress/gzip"
    "flag"
    "fmt"
    "io"
    "os"
    "path/filepath"
    "strings"

    "github.com/pkg/errors"
)

const gzipExt = ".gz"

func compress(path, target string, level int) error {
    sourceFile, err := os.Open(path)
    if err != nil {
        return errors.Wrap(err, "opening file for read")
    }
    defer sourceFile.Close()

    destFile, err := os.Create(target)
    if err != nil {
        return errors.Wrap(err, "opening file for write")
    }
    defer destFile.Close()

    gzipCompressor, err := gzip.NewWriterLevel(destFile, level)
    if err != nil {
        return errors.Wrap(err, "invalid compression level")
    }
    defer gzipCompressor.Close()

    _, err = io.Copy(gzipCompressor, sourceFile)
    if err != nil {
        return errors.Wrap(err, "compressing file")
    }

    if err != nil {
        return errors.Wrap(err, "stat(2)ing destination file")
    }

    return nil
}

func uncompress(path, target string) error {
    sourceFile, err := os.Open(path)
    if err != nil {
        return errors.Wrap(err, "opening file for read")
    }
    defer sourceFile.Close()

    gzipUncompressor, err := gzip.NewReader(sourceFile)
    if err != nil {
        return errors.Wrap(err, "reading gzip headers")
    }
    defer gzipUncompressor.Close()

    destFile, err := os.Create(target)
    if err != nil {
        return errors.Wrap(err, "opening file for write")
    }
    defer destFile.Close()

    _, err = io.Copy(destFile, gzipUncompressor)
    if err != nil {
        return errors.Wrap(err, "uncompressing file")
    }

    return nil
}

func usage(w io.Writer) {
    fmt.Fprintf(w, `Usage: %s [-l] source [target]

kgz is like gzip, but supports compressing and decompressing to a different
directory than the source file is in.

Flags:
    -l level    Compression level (0-9). Only meaningful when
                compressing a file.
`, os.Args[0])
}

func init() {
    flag.Usage = func() { usage(os.Stderr) }
}

func isDir(path string) bool {
    file, err := os.Open(path)
    if err == nil {
        defer file.Close()
        stat, err := file.Stat()
        if err != nil {
            return false
        }

        if stat.IsDir() {
            return true
        }
    }

    return false
}

func pathForUncompressing(source, dest string) (string, error) {
    if !isDir(dest) {
        return dest, nil
    }

    source = filepath.Base(source)
    if !strings.HasSuffix(source, gzipExt) {
        return "", errors.Errorf("%s is not a gzip-compressed file", source)
    }
    outFile := source[:len(source)-len(gzipExt)]
    outFile = filepath.Join(dest, outFile)
    return outFile, nil
}

func pathForCompressing(source, dest string) (string, error) {
    if !isDir(dest) {
        return dest, nil
    }

    source = filepath.Base(source)
    if strings.HasSuffix(source, gzipExt) {
        return "", errors.Errorf("%s is a gzip-compressed file", source)
    }

    dest = filepath.Join(dest, source+gzipExt)
    return dest, nil
}

func main() {
    var level int
    var path string
    var target = "."

    flag.IntVar(&level, "l", flate.DefaultCompression, "compression level")
    flag.Parse()

    if flag.NArg() < 1 || flag.NArg() > 2 {
        usage(os.Stderr)
        os.Exit(1)
    }

    path = flag.Arg(0)
    if flag.NArg() == 2 {
        target = flag.Arg(1)
    }

    if strings.HasSuffix(path, gzipExt) {
        target, err := pathForUncompressing(path, target)
        if err != nil {
            fmt.Fprintf(os.Stderr, "%s\n", err)
            os.Exit(1)
        }

        err = uncompress(path, target)
        if err != nil {
            os.Remove(target)
            fmt.Fprintf(os.Stderr, "%s\n", err)
            os.Exit(1)
        }
    } else {
        target, err := pathForCompressing(path, target)
        if err != nil {
            fmt.Fprintf(os.Stderr, "%s\n", err)
            os.Exit(1)
        }

        err = compress(path, target, level)
        if err != nil {
            os.Remove(target)
            fmt.Fprintf(os.Stderr, "%s\n", err)
            os.Exit(1)
        }
    }
}
@@ -46,7 +46,6 @@ func newName(path string) (string, error) {
func move(dst, src string, force bool) (err error) {
    if fileutil.FileDoesExist(dst) && !force {
        return fmt.Errorf("%s exists (pass the -f flag to overwrite)", dst)
        return nil
    }
    dstFile, err := os.Create(dst)
    if err != nil {
@@ -92,7 +91,7 @@ Options:
}

func init() {
    flag.Usage = func () { usage(os.Stdout) }
    flag.Usage = func() { usage(os.Stdout) }
}

func main() {
cmd/rhash/README (new file, 22 lines)
@@ -0,0 +1,22 @@
rhash: remote hashing tool

Usage: rhash [-a algo] [-h] [-l set] urls...
Compute the hash over each URL.

Flags:
    -a algo  Specify the hash algorithm to use; the default is sha256.
    -h       Print this help message.
    -l set   List the hash functions under set. Set can be one of all,
             secure to list only cryptographic hash functions, or
             insecure to list only non-cryptographic hash functions.

Examples:
    Compute the SHA256 digest of the LICENSE in this repository:

    $ rhash https://raw.githubusercontent.com/kisom/goutils/7391da8567952f69990194ead2842d21df217c89/LICENSE
    LICENSE: sha256=620bfadeb698df6c6db73908689a29371a9d4cff32b08c48a5c4307946093980

    Compute the SHA-1 digest of the LICENSE in this repository:

    $ rhash -a sha1 https://raw.githubusercontent.com/kisom/goutils/7391da8567952f69990194ead2842d21df217c89/LICENSE
    LICENSE: sha1=83c6e2e410715058ed6e7c1572176122c024e367
cmd/rhash/main.go (new file, 97 lines)
@@ -0,0 +1,97 @@
package main

import (
    "flag"
    "fmt"
    "io"
    "net/http"
    "net/url"
    "os"
    "path/filepath"

    "github.com/kisom/goutils/ahash"
    "github.com/kisom/goutils/die"
    "github.com/kisom/goutils/lib"
)

func usage(w io.Writer) {
    fmt.Fprintf(w, `Usage: %s [-a algo] [-h] [-l set] urls...
Compute the hash over each URL.

Flags:
    -a algo  Specify the hash algorithm to use; the default is sha256.
    -h       Print this help message.
    -l set   List the hash functions under set. Set can be one of all,
             secure to list only cryptographic hash functions, or
             insecure to list only non-cryptographic hash functions.

`, lib.ProgName())
}

func init() {
    flag.Usage = func() { usage(os.Stderr) }
}

func main() {
    var algo, list string
    var help bool
    flag.StringVar(&algo, "a", "sha256", "hash algorithm to use")
    flag.BoolVar(&help, "h", false, "print a help message")
    flag.StringVar(&list, "l", "", "list known hash algorithms (one of all, secure, insecure)")
    flag.Parse()

    if help {
        usage(os.Stdout)
    }

    if list != "" {
        var hashes []string
        switch list {
        case "all":
            hashes = ahash.HashList()
        case "secure":
            hashes = ahash.SecureHashList()
        case "insecure":
            hashes = ahash.InsecureHashList()
        default:
            die.With("list option must be one of all, secure, or insecure.")
        }

        for _, algo := range hashes {
            fmt.Printf("- %s\n", algo)
        }
        os.Exit(1)
    }

    for _, remote := range flag.Args() {
        u, err := url.Parse(remote)
        if err != nil {
            lib.Warn(err, "parsing %s", remote)
            continue
        }

        name := filepath.Base(u.Path)
        if name == "" {
            lib.Warnx("source URL doesn't appear to name a file")
            continue
        }

        resp, err := http.Get(remote)
        if err != nil {
            lib.Warn(err, "fetching %s", remote)
            continue
        }

        if err != nil {
            lib.Warn(err, "fetching %s", remote)
            continue
        }

        sum, err := ahash.SumReader(algo, resp.Body)
        resp.Body.Close()
        if err != nil {
            lib.Err(lib.ExitFailure, err, "while hashing data")
        }
        fmt.Printf("%s: %s=%x\n", name, algo, sum)
    }
}
@@ -67,6 +67,10 @@ func init() {
}

func walkFile(path string, info os.FileInfo, err error) error {
    if ignores[path] {
        return filepath.SkipDir
    }

    if !sourceRegexp.MatchString(path) {
        return nil
    }
@@ -97,10 +101,24 @@ func walkFile(path string, info os.FileInfo, err error) error {
    return nil
}

var ignores = map[string]bool{}

func main() {
    var ignoreLine string
    var noVendor bool
    flag.StringVar(&ignoreLine, "i", "", "comma-separated list of directories to ignore")
    flag.BoolVar(&noVendor, "nv", false, "ignore the vendor directory")
    flag.BoolVar(&debug, "v", false, "log debugging information")
    flag.Parse()

    if noVendor {
        ignores["vendor"] = true
    }

    for _, word := range strings.Split(ignoreLine, ",") {
        ignores[strings.TrimSpace(word)] = true
    }

    err := filepath.Walk(".", walkFile)
    die.If(err)
cmd/ski/README (new file, 30 lines)
@@ -0,0 +1,30 @@
ski: print subject public key info

Usage:
    ski [-hm] files...

Flags:
    -h  Print a help message and exit.
    -m  All SKIs should match.

Examples:

    Printing the SKI of a private key and certificate:

    $ ski *
    server.key 3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E (RSA private key)
    [ski] trailing data in PEM file
    server.pem 3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E (RSA certificate)

    Making sure the SKIs match:

    $ ski -m *
    tyrfingr.key 3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E (RSA private key)
    [ski] trailing data in PEM file
    tyrfingr.pem 3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E (RSA certificate)

    Making sure the SKIs match with a bad certificate:
    $ ski -m server.key bad.pem
    server.key 3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E (RSA private key)
    [ski] bad.pem: SKI mismatch (3A:AB:D1:B2:E5:7A:F2:5A:D5:8E:8B:7B:25:D9:41:90:F8:6B:A3:5E != 90:AF:6A:3A:94:5A:0B:D8:90:EA:12:56:73:DF:43:B4:3A:28:DA:E7)
    bad.pem 90:AF:6A:3A:94:5A:0B:D8:90:EA:12:56:73:DF:43:B4:3A:28:DA:E7 (RSA certificate)
cmd/ski/main.go (new file, 191 lines)
@@ -0,0 +1,191 @@
package main

import (
    "bytes"
    "crypto"
    "crypto/ecdsa"
    "crypto/rsa"
    "crypto/sha1"
    "crypto/x509"
    "crypto/x509/pkix"
    "encoding/asn1"
    "encoding/pem"
    "flag"
    "fmt"
    "io"
    "io/ioutil"
    "os"
    "strings"

    "github.com/kisom/goutils/die"
    "github.com/kisom/goutils/lib"
)

func usage(w io.Writer) {
    fmt.Fprintf(w, `ski: print subject key info for PEM-encoded files

Usage:
    ski [-hm] files...

Flags:
    -h  Print this help message.
    -m  All SKIs should match; as soon as an SKI mismatch is found,
        it is reported.

`)
}

func init() {
    flag.Usage = func() { usage(os.Stderr) }
}

func parse(path string) (public []byte, kt, ft string) {
    data, err := ioutil.ReadFile(path)
    die.If(err)

    data = bytes.TrimSpace(data)
    p, rest := pem.Decode(data)
    if len(rest) > 0 {
        lib.Warnx("trailing data in PEM file")
    }

    if p == nil {
        die.With("no PEM data found")
    }

    data = p.Bytes

    switch p.Type {
    case "PRIVATE KEY", "RSA PRIVATE KEY", "EC PRIVATE KEY":
        public, kt = parseKey(data)
        ft = "private key"
    case "CERTIFICATE":
        public, kt = parseCertificate(data)
        ft = "certificate"
    case "CERTIFICATE REQUEST":
        public, kt = parseCSR(data)
        ft = "certificate request"
    default:
        die.With("unknown PEM type %s", p.Type)
    }

    return
}

func parseKey(data []byte) (public []byte, kt string) {
    privInterface, err := x509.ParsePKCS8PrivateKey(data)
    if err != nil {
        privInterface, err = x509.ParsePKCS1PrivateKey(data)
        if err != nil {
            privInterface, err = x509.ParseECPrivateKey(data)
            if err != nil {
                die.With("couldn't parse private key.")
            }
        }
    }

    var priv crypto.Signer
    switch privInterface.(type) {
    case *rsa.PrivateKey:
        priv = privInterface.(*rsa.PrivateKey)
        kt = "RSA"
    case *ecdsa.PrivateKey:
        priv = privInterface.(*ecdsa.PrivateKey)
        kt = "ECDSA"
    default:
        die.With("unknown private key type %T", privInterface)
    }

    public, err = x509.MarshalPKIXPublicKey(priv.Public())
    die.If(err)

    return
}

func parseCertificate(data []byte) (public []byte, kt string) {
    cert, err := x509.ParseCertificate(data)
    die.If(err)

    pub := cert.PublicKey
    switch pub.(type) {
    case *rsa.PublicKey:
        kt = "RSA"
    case *ecdsa.PublicKey:
        kt = "ECDSA"
    default:
        die.With("unknown public key type %T", pub)
    }

    public, err = x509.MarshalPKIXPublicKey(pub)
    die.If(err)
    return
}

func parseCSR(data []byte) (public []byte, kt string) {
    csr, err := x509.ParseCertificateRequest(data)
    die.If(err)

    pub := csr.PublicKey
    switch pub.(type) {
    case *rsa.PublicKey:
        kt = "RSA"
    case *ecdsa.PublicKey:
        kt = "ECDSA"
    default:
        die.With("unknown public key type %T", pub)
    }

    public, err = x509.MarshalPKIXPublicKey(pub)
    die.If(err)
    return
}

func dumpHex(in []byte) string {
    var s string
    for i := range in {
        s += fmt.Sprintf("%02X:", in[i])
    }

    return strings.Trim(s, ":")
}

type subjectPublicKeyInfo struct {
    Algorithm        pkix.AlgorithmIdentifier
    SubjectPublicKey asn1.BitString
}

func main() {
    var help, shouldMatch bool
    flag.BoolVar(&help, "h", false, "print a help message and exit")
    flag.BoolVar(&shouldMatch, "m", false, "all SKIs should match")
    flag.Parse()

    if help {
        usage(os.Stdout)
        os.Exit(0)
    }

    var ski string
    for _, path := range flag.Args() {
        public, kt, ft := parse(path)

        var subPKI subjectPublicKeyInfo
        _, err := asn1.Unmarshal(public, &subPKI)
        if err != nil {
            lib.Warn(err, "failed to get subject PKI")
            continue
        }

        pubHash := sha1.Sum(subPKI.SubjectPublicKey.Bytes)
        pubHashString := dumpHex(pubHash[:])
        if ski == "" {
            ski = pubHashString
        }

        if shouldMatch && ski != pubHashString {
            lib.Warnx("%s: SKI mismatch (%s != %s)",
                path, ski, pubHashString)
        }
        fmt.Printf("%s %s (%s %s)\n", path, pubHashString, kt, ft)
    }
}
cmd/stealchain-server/README (new file, 17 lines)
@@ -0,0 +1,17 @@
stealchain-server

This is a utility to extract the verified X.509 chain from a TLS
connection initiated by another client. It listens on a port, and
for each connection, it will dump the certificates that the peer
actually sent (and not the verified chain that is built from this).

It was written to assist in debugging issues with certificate chains.

There are a few knobs:

    -listen specifies the address to listen on.

    -ca allows the trusted CA roots to be specified via a PEM bundle of
    root certificates.

    -verify requires that the client present a valid certificate chain.
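Reviewer note: a hedged sketch of a client that exercises stealchain-server, using only the standard library; the address, file names, and the InsecureSkipVerify setting are assumptions for local testing, not part of this change.

    // Hypothetical test client: connect so the server can record the chain
    // this client presents. "client.pem"/"client.key" and the address are
    // placeholders.
    package main

    import (
        "crypto/tls"
        "log"
    )

    func main() {
        cert, err := tls.LoadX509KeyPair("client.pem", "client.key")
        if err != nil {
            log.Fatal(err)
        }

        conn, err := tls.Dial("tcp", "localhost:443", &tls.Config{
            Certificates:       []tls.Certificate{cert},
            InsecureSkipVerify: true, // local testing only
        })
        if err != nil {
            log.Fatal(err)
        }
        conn.Close()
    }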
cmd/stealchain-server/main.go (new file, 106 lines)
@@ -0,0 +1,106 @@
package main

import (
    "crypto/rand"
    "crypto/tls"
    "crypto/x509"
    "encoding/hex"
    "encoding/pem"
    "flag"
    "fmt"
    "io/ioutil"
    "net"
    "os"

    "github.com/kisom/goutils/die"
)

func main() {
    cfg := &tls.Config{}

    var sysRoot, listenAddr, certFile, keyFile string
    var verify bool
    flag.StringVar(&sysRoot, "ca", "", "provide an alternate CA bundle")
    flag.StringVar(&listenAddr, "listen", ":443", "address to listen on")
    flag.StringVar(&certFile, "cert", "", "server certificate to present to clients")
    flag.StringVar(&keyFile, "key", "", "key for server certificate")
    flag.BoolVar(&verify, "verify", false, "verify client certificates")
    flag.Parse()

    if verify {
        cfg.ClientAuth = tls.RequireAndVerifyClientCert
    } else {
        cfg.ClientAuth = tls.RequestClientCert
    }
    if certFile == "" {
        fmt.Println("[!] missing required flag -cert")
        os.Exit(1)
    }
    if keyFile == "" {
        fmt.Println("[!] missing required flag -key")
        os.Exit(1)
    }
    cert, err := tls.LoadX509KeyPair(certFile, keyFile)
    if err != nil {
        fmt.Printf("[!] could not load server key pair: %v", err)
        os.Exit(1)
    }
    cfg.Certificates = append(cfg.Certificates, cert)
    if sysRoot != "" {
        pemList, err := ioutil.ReadFile(sysRoot)
        die.If(err)

        roots := x509.NewCertPool()
        if !roots.AppendCertsFromPEM(pemList) {
            fmt.Printf("[!] no valid roots found")
            roots = nil
        }

        cfg.RootCAs = roots
    }

    l, err := net.Listen("tcp", listenAddr)
    if err != nil {
        fmt.Println(err.Error())
        os.Exit(1)
    }

    for {
        conn, err := l.Accept()
        if err != nil {
            fmt.Println(err.Error())
        }

        raddr := conn.RemoteAddr()
        tconn := tls.Server(conn, cfg)
        err = tconn.Handshake()
        if err != nil {
            fmt.Printf("[+] %v: failed to complete handshake: %v\n", raddr, err)
            continue
        }
        cs := tconn.ConnectionState()
        if len(cs.PeerCertificates) == 0 {
            fmt.Printf("[+] %v: no chain presented\n", raddr)
            continue
        }

        var chain []byte
        for _, cert := range cs.PeerCertificates {
            p := &pem.Block{
                Type:  "CERTIFICATE",
                Bytes: cert.Raw,
            }
            chain = append(chain, pem.EncodeToMemory(p)...)
        }

        var nonce [16]byte
        _, err = rand.Read(nonce[:])
        if err != nil {
            panic(err)
        }
        fname := fmt.Sprintf("%v-%v.pem", raddr, hex.EncodeToString(nonce[:]))
        err = ioutil.WriteFile(fname, chain, 0644)
        die.If(err)
        fmt.Printf("%v: [+] wrote %v.\n", raddr, fname)
    }
}
cmd/subjhash/README (new file, 20 lines)
@@ -0,0 +1,20 @@
subjhash

This tool prints the SHA-256 hash of an X.509 certificate's subject
info or issuer fields. It can also verify that the hashes of the
subject are the same between two certificates.

Usage: subjhash [-im] certs...

Flags:
    -i  Print hash of issuer field.
    -m  Matching mode. This expects arguments to be in the form of
        pairs of certificates (e.g. previous, new) whose subjects
        will be compared. For example,

            subjhash -m ca1.pem ca1-renewed.pem \
                ca2.pem ca2-renewed.pem

        will exit with a non-zero status if the subject in the
        ca1-renewed.pem certificate doesn't match the subject in the
        ca.pem certificate; similarly for ca2.
cmd/subjhash/main.go (new file, 112 lines)
@@ -0,0 +1,112 @@
package main

import (
    "bytes"
    "crypto/sha256"
    "crypto/x509"
    "flag"
    "fmt"
    "io"
    "os"

    "github.com/kisom/goutils/die"
    "github.com/kisom/goutils/lib"
)

func init() {
    flag.Usage = func() { usage(os.Stdout); os.Exit(1) }
}

func usage(w io.Writer) {
    fmt.Fprintf(w, `Print hash of subject or issuer fields in certificates.

Usage: subjhash [-im] certs...

Flags:
    -i  Print hash of issuer field.
    -m  Matching mode. This expects arguments to be in the form of
        pairs of certificates (e.g. previous, new) whose subjects
        will be compared. For example,

            subjhash -m ca1.pem ca1-renewed.pem \
                ca2.pem ca2-renewed.pem

        will exit with a non-zero status if the subject in the
        ca1-renewed.pem certificate doesn't match the subject in the
        ca.pem certificate; similarly for ca2.
`)
}

// NB: the Issuer field is *also* a subject field. Also, the returned
// hash is *not* hex encoded.
func getSubjectInfoHash(cert *x509.Certificate, issuer bool) []byte {
    if cert == nil {
        return nil
    }

    var subject []byte
    if issuer {
        subject = cert.RawIssuer
    } else {
        subject = cert.RawSubject
    }

    digest := sha256.Sum256(subject)
    return digest[:]
}

func printDigests(paths []string, issuer bool) {
    for _, path := range paths {
        cert, err := lib.LoadCertificate(path)
        if err != nil {
            lib.Warn(err, "failed to load certificate from %s", path)
            continue
        }

        digest := getSubjectInfoHash(cert, issuer)
        fmt.Printf("%x %s\n", digest, path)
    }
}

func matchDigests(paths []string, issuer bool) {
    if (len(paths) % 2) != 0 {
        lib.Errx(lib.ExitFailure, "not all certificates are paired")
    }

    var invalid int
    for {
        if len(paths) == 0 {
            break
        }
        fst := paths[0]
        snd := paths[1]
        paths = paths[2:]

        fstCert, err := lib.LoadCertificate(fst)
        die.If(err)
        sndCert, err := lib.LoadCertificate(snd)
        die.If(err)
        if !bytes.Equal(getSubjectInfoHash(fstCert, issuer), getSubjectInfoHash(sndCert, issuer)) {
            lib.Warnx("certificates don't match: %s and %s", fst, snd)
            invalid++
        }
    }

    if invalid > 0 {
        os.Exit(1)
    }
}

func main() {
    var issuer, match bool
    flag.BoolVar(&issuer, "i", false, "print the issuer")
    flag.BoolVar(&match, "m", false, "match mode")
    flag.Parse()

    paths := flag.Args()
    if match {
        matchDigests(paths, issuer)
    } else {
        printDigests(paths, issuer)
    }
}
@@ -11,12 +11,12 @@ import (
)

var (
    format = "2006-01-02 15:04"                    // Format that will be used for times.
    outFormat = format + " MST"                    // Output format.
    tz = "Local"                                   // String descriptor for timezone.
    fromLoc *time.Location = time.Local            // Go time.Location for the named timezone.
    fromUnix bool                                  // Input times are Unix timestamps.
    toLoc *time.Location = time.UTC                // Go time.Location for output timezone.
    format    = "2006-01-02 15:04"                 // Format that will be used for times.
    outFormat = format + " MST"                    // Output format.
    tz        = "Local"                            // String descriptor for timezone.
    fromLoc   = time.Local                         // Go time.Location for the named timezone.
    fromUnix  bool                                 // Input times are Unix timestamps.
    toLoc     = time.UTC                           // Go time.Location for output timezone.
)

func usage(w io.Writer) {
@@ -72,7 +72,7 @@ Flags:

func usageExamples() {
    usage(os.Stdout)
    fmt.Println(`
    fmt.Printf(`
Examples (note that the examples are done in the America/Los_Angeles /
PST8PDT time zone):

@@ -84,7 +84,7 @@ PST8PDT time zone):
    2016-06-14 21:30 PDT = 2016-06-15 04:30 UTC
+ Converting a local EST timestamp to UTC (on a machine set to
  PST8PDT):
    $ utc -z EST '2016-06-14 21:30'
    $ utc -z EST '2016-06-14 21:30'
    2016-06-14 21:30 EST = 2016-06-15 02:30 UTC
+ Converting timestamps in the form '14-06-2016 3:04PM':
    $ utc -f '02-01-2006 3:04PM' '14-06-2016 9:30PM'
@@ -101,7 +101,7 @@ PST8PDT time zone):
    $ utc -u -z EST '2016-06-14 21:30'
    2016-06-14 21:30 UTC = 2016-06-14 16:30 EST
+ Using a different output format:
    $ utc -o '2006-01-02T15:03:04-0700' '2016-06-14 21:30'
    $ utc -o '2006-01-02T15:03:04-0700' '2016-06-14 21:30'
    2016-06-14T21:09:30-0700 = 2016-06-15T04:04:30+0000
+ Converting a Unix timestamp to a UTC time:
    $ utc -t 1466052938
@@ -134,6 +134,7 @@ PST8PDT time zone):
    (Converting from GMT (offset +0000) to UTC (offset +0000).)
    ==================================================================
    2016-06-14 23:46 = 2016-06-14 23:46

`)
}
lib/lib.go (80 diff lines)
@@ -2,7 +2,11 @@
|
||||
package lib
|
||||
|
||||
import (
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"time"
|
||||
@@ -79,6 +83,7 @@ var (
|
||||
yearDuration = (365 * dayDuration) + (6 * time.Hour)
|
||||
)
|
||||
|
||||
// Duration returns a prettier string for time.Durations.
|
||||
func Duration(d time.Duration) string {
|
||||
var s string
|
||||
if d >= yearDuration {
|
||||
@@ -102,3 +107,78 @@ func Duration(d time.Duration) string {
|
||||
s += fmt.Sprintf("%dh%s", hours, d)
|
||||
return s
|
||||
}
|
||||
|
||||
// ReadCertificate reads a DER or PEM-encoded certificate from the
|
||||
// byte slice.
|
||||
func ReadCertificate(in []byte) (cert *x509.Certificate, rest []byte, err error) {
|
||||
if len(in) == 0 {
|
||||
err = errors.New("lib: empty certificate")
|
||||
return
|
||||
}
|
||||
|
||||
if in[0] == '-' {
|
||||
p, remaining := pem.Decode(in)
|
||||
if p == nil {
|
||||
err = errors.New("lib: invalid PEM file")
|
||||
return
|
||||
}
|
||||
|
||||
rest = remaining
|
||||
if p.Type != "CERTIFICATE" {
|
||||
err = fmt.Errorf("lib: expected a CERTIFICATE PEM file, but have %s", p.Type)
|
||||
return
|
||||
}
|
||||
|
||||
in = p.Bytes
|
||||
}
|
||||
|
||||
cert, err = x509.ParseCertificate(in)
|
||||
return
|
||||
}
|
||||
|
||||
// ReadCertificates tries to read all the certificates in a
|
||||
// PEM-encoded collection.
|
||||
func ReadCertificates(in []byte) (certs []*x509.Certificate, err error) {
|
||||
var cert *x509.Certificate
|
||||
for {
|
||||
cert, in, err = ReadCertificate(in)
|
||||
if err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
if cert == nil {
|
||||
break
|
||||
}
|
||||
|
||||
certs = append(certs, cert)
|
||||
if len(in) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return certs, err
|
||||
}
|
||||
|
||||
// LoadCertificate tries to read a single certificate from disk. If
|
||||
// the file contains multiple certificates (e.g. a chain), only the
|
||||
// first certificate is returned.
|
||||
func LoadCertificate(path string) (*x509.Certificate, error) {
|
||||
in, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cert, _, err := ReadCertificate(in)
|
||||
return cert, err
|
||||
}
|
||||
|
||||
// LoadCertificates tries to read all the certificates in a file,
|
||||
// returning them in the order that it found them in the file.
|
||||
func LoadCertificates(path string) ([]*x509.Certificate, error) {
|
||||
in, err := ioutil.ReadFile(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return ReadCertificates(in)
|
||||
}
|
||||
|
||||
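As a rough illustration of how the new certificate helpers might be called from another program (a sketch only: the import path follows the repository layout, and the bundle path is hypothetical):

// Minimal usage sketch for the helpers added above.
package main

import (
	"fmt"
	"log"

	"github.com/kisom/goutils/lib"
)

func main() {
	// LoadCertificates reads every certificate in the file, PEM or DER.
	certs, err := lib.LoadCertificates("testdata/bundle.pem")
	if err != nil {
		log.Fatal(err)
	}

	for _, cert := range certs {
		fmt.Println(cert.Subject.CommonName)
	}
}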
139
lib/lib_test.go
Normal file
139
lib/lib_test.go
Normal file
@@ -0,0 +1,139 @@
package lib

import (
	"fmt"
	"testing"

	"github.com/kisom/goutils/assert"
)

// some CA certs I found on my computerbox.
var testCerts = `-----BEGIN CERTIFICATE-----
MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
-----END CERTIFICATE-----
`

func TestReadCertificate(t *testing.T) {
	cert, remaining, err := ReadCertificate([]byte(testCerts))
	assert.NoErrorT(t, err)

	assert.BoolT(t, len(remaining) > 0, "lib: expected extra data from ReadCertificate")
	assert.BoolT(t, cert != nil, "lib: expected an actual certificate to have been returned")
}

func TestReadCertificates(t *testing.T) {
	certs, err := ReadCertificates([]byte(testCerts))
	assert.NoErrorT(t, err)

	assert.BoolT(t, len(certs) == 3, fmt.Sprintf("lib: expected three certificates, have %d", len(certs)))
	for _, cert := range certs {
		assert.BoolT(t, cert != nil, "lib: expected an actual certificate to have been returned")
	}
}
@@ -8,11 +8,20 @@ type File struct {
	*LogWriter
}

func (fl *File) Close() {
	fl.fo.Close()
	if fl.fe != nil {
		fl.fe.Close()
// Close calls close on the underlying log files.
func (fl *File) Close() error {
	if fl.fo != nil {
		if err := fl.fo.Close(); err != nil {
			return err
		}
		fl.fo = nil
	}

	if fl.fe != nil {
		return fl.fe.Close()
	}

	return nil
}

// NewFile creates a new Logger that writes all logs to the file

@@ -29,6 +29,7 @@ const (
	LevelFatal
)

// DefaultLevel is the default logging level when none is provided.
const DefaultLevel = LevelInfo

// Cheap integer to fixed-width decimal ASCII. Give a negative width
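The rewritten File.Close above clears fl.fo after a successful close, so a second call does not touch an already-closed handle. A self-contained sketch of that close-once pattern, trimmed to the two file handles shown in the hunk (the embedded LogWriter and the rest of the package are omitted):

// Sketch of the close-once pattern used by the new File.Close.
package main

import (
	"fmt"
	"os"
)

type File struct {
	fo *os.File // output log file
	fe *os.File // error log file (may be nil)
}

func (fl *File) Close() error {
	if fl.fo != nil {
		if err := fl.fo.Close(); err != nil {
			return err
		}
		fl.fo = nil // forget the handle so a second Close is a no-op
	}

	if fl.fe != nil {
		return fl.fe.Close()
	}

	return nil
}

func main() {
	f, err := os.CreateTemp("", "log")
	if err != nil {
		panic(err)
	}
	fl := &File{fo: f}
	fmt.Println(fl.Close()) // <nil>
	fmt.Println(fl.Close()) // still <nil>: fo was cleared on the first call
}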
@@ -21,7 +21,7 @@ type Logger interface {
	Status() error

	// Close gives the Logger the opportunity to perform any cleanup.
	Close()
	Close() error

	// Log messages consist of four components:
	//
@@ -228,7 +228,7 @@ func (lw *LogWriter) Fatal(actor, event string, attrs map[string]string) {
	os.Exit(1)
}

// Fatal emits a message indicating that the system is in an unusable
// FatalCode emits a message indicating that the system is in an unusable
// state, and cannot continue to run. The program will exit with the
// exit code specified in the exitcode argument.
//
@@ -244,7 +244,7 @@ func (lw *LogWriter) FatalCode(exitcode int, actor, event string, attrs map[stri
	os.Exit(exitcode)
}

// Fatal emits a message indicating that the system is in an unusable
// FatalNoDie emits a message indicating that the system is in an unusable
// state, and cannot continue to run. The program will not exit; it is
// assumed that the caller has some final clean up to perform.
//
@@ -276,4 +276,4 @@ func (lw *LogWriter) SetLevel(l Level) {
}

// Close is a no-op that satisfies the Logger interface.
func (lw *LogWriter) Close() {}
func (lw *LogWriter) Close() error { return nil }
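Because Close on the Logger interface now returns an error, callers can surface close failures instead of silently dropping them. A minimal sketch against a trimmed version of the interface (only the two methods visible in this diff; the concrete no-op type below is illustrative, not part of the package):

// Sketch of adapting callers to Close() error.
package main

import "fmt"

type Logger interface {
	Status() error
	Close() error
}

// nopLogger mirrors LogWriter's behaviour of returning nil from Close.
type nopLogger struct{}

func (nopLogger) Status() error { return nil }
func (nopLogger) Close() error  { return nil }

func main() {
	var l Logger = nopLogger{}
	defer func() {
		if err := l.Close(); err != nil {
			fmt.Println("close failed:", err)
		}
	}()
}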
@@ -4,8 +4,8 @@ import (
	"bytes"
	"testing"

	"github.com/kisom/goutils/testio"
	"github.com/kisom/goutils/assert"
	"github.com/kisom/goutils/testio"
)

func TestMWC(t *testing.T) {
@@ -39,7 +39,7 @@ func TestMWCShort(t *testing.T) {

	mwc = MultiWriteCloser(buf1, buf2, buf4)
	_, err = mwc.Write([]byte("hello, world"))
	assert.ErrorT(t, err, "expected a short write error", "but no error occurred")
	assert.ErrorT(t, err, "expected a short write error", "but no error occurred")
}

func TestMWCClose(t *testing.T) {
@@ -168,7 +168,7 @@ func TestRWByte(t *testing.T) {
	}

	if c != 42 {
		t.Fatal("Expected 42, have %d", c)
		t.Fatalf("Expected 42, have %d", c)
	}

	_, err = buf.ReadByte()
101
tee/tee.go
Normal file
101
tee/tee.go
Normal file
@@ -0,0 +1,101 @@
package tee

import (
	"fmt"
	"os"
)

// Tee emulates the Unix tee(1) command.
type Tee struct {
	f       *os.File
	Verbose bool
}

func (t *Tee) Write(p []byte) (int, error) {
	n, err := os.Stdout.Write(p)
	if err != nil {
		return n, err
	}

	if t.f != nil {
		return t.f.Write(p)
	}
	return n, nil
}

// Close calls Close on the underlying file.
func (t *Tee) Close() error {
	return t.f.Close()
}

// NewOut writes to standard output only. The file is created, not
// appended to.
func NewOut(logFile string) (*Tee, error) {
	if logFile == "" {
		return &Tee{}, nil
	}

	f, err := os.Create(logFile)
	if err != nil {
		return nil, err
	}
	return &Tee{f: f}, nil
}

// Printf formats according to a format specifier and writes to the
// tee instance.
func (t *Tee) Printf(format string, args ...interface{}) (int, error) {
	s := fmt.Sprintf(format, args...)
	n, err := os.Stdout.WriteString(s)
	if err != nil {
		return n, err
	}

	if t.f == nil {
		return n, err
	}

	return t.f.WriteString(s)
}

// VPrintf is a variant of Printf that only prints if the Tee's
// Verbose flag is set.
func (t *Tee) VPrintf(format string, args ...interface{}) (int, error) {
	if t.Verbose {
		return t.Printf(format, args...)
	}
	return 0, nil
}

var globalTee = &Tee{}

// Open will attempt to open the logFile for the global tee instance.
func Open(logFile string) error {
	f, err := os.Create(logFile)
	if err != nil {
		return err
	}
	globalTee.f = f
	return nil
}

// Printf formats according to a format specifier and writes to the
// global tee.
func Printf(format string, args ...interface{}) (int, error) {
	return globalTee.Printf(format, args...)
}

// VPrintf calls VPrintf on the global tee instance.
func VPrintf(format string, args ...interface{}) (int, error) {
	return globalTee.VPrintf(format, args...)
}

// Close calls close on the global tee instance.
func Close() error {
	return globalTee.Close()
}

// SetVerbose controls the verbosity of the global tee.
func SetVerbose(verbose bool) {
	globalTee.Verbose = verbose
}
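A short usage sketch for the package-level tee helpers defined above; the import path assumes the package lives under goutils as tee/, and the log file name is made up:

// Mirror output to stdout and a log file via the global tee.
package main

import (
	"log"

	"github.com/kisom/goutils/tee"
)

func main() {
	if err := tee.Open("run.log"); err != nil {
		log.Fatal(err)
	}
	defer tee.Close()

	tee.SetVerbose(true)
	tee.Printf("starting up\n")
	tee.VPrintf("verbose detail: %d items\n", 3)
}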