support raw urls

2023-09-12 22:37:04 -07:00
parent 7285264fed
commit 552e1e3ac1
7 changed files with 37 additions and 63 deletions

@@ -4,7 +4,7 @@ import (
 	"context"
 	"fmt"
-	"git.sr.ht/~kisom/goutils/config"
+	"git.wntrmute.dev/kyle/goutils/config"
 	"github.com/jackc/pgx/v4/pgxpool"
 )

@@ -39,7 +39,7 @@ func (u *URL) Store(ctx context.Context, db *pgxpool.Pool) error {
 	return err
 }
 
-// Normalize cleans the URL to only the parts we care about
+// Normalize cleans the URL to only the parts we care about.
 func Normalize(u *url.URL) *url.URL {
 	norm := &url.URL{
 		Scheme: u.Scheme,
@@ -61,7 +61,7 @@ func NormalizeString(s string) (string, error) {
 }
 
 // Clean should scrub out junk from the URL.
-func Clean(u *url.URL) *url.URL {
+func Clean(u *url.URL, keepQuery bool) *url.URL {
 	norm := &url.URL{
 		Scheme: u.Scheme,
 		Host: u.Host,
@@ -70,16 +70,20 @@ func Clean(u *url.URL) *url.URL {
 		Fragment: u.Fragment,
 		RawFragment: u.RawFragment,
 	}
 
+	if keepQuery {
+		norm.RawQuery = u.RawQuery
+	}
+
 	return norm
 }
 
-func CleanString(s string) (string, error) {
+func CleanString(s string, isRawURL bool) (string, error) {
 	u, err := url.Parse(s)
 	if err != nil {
 		return "", err
 	}
 
-	u = Clean(u)
+	u = Clean(u, isRawURL)
 	return u.String(), nil
 }
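
For context on how the new parameters are meant to be used, here is a minimal usage sketch. The package clause below is hypothetical (the diff does not show the file's package or import path), and the snippet assumes it sits in the same package as Clean and CleanString; it relies only on behaviour visible in this diff: RawQuery is copied only when keepQuery is true, and CleanString forwards isRawURL to Clean.

package urls // hypothetical package name; not shown in the diff

import (
	"fmt"
	"net/url"
)

// demo shows how the keepQuery/isRawURL flags change cleaning behaviour.
func demo() error {
	u, err := url.Parse("https://example.com/feed?page=2")
	if err != nil {
		return err
	}

	// keepQuery=false: the cleaned URL drops the query string.
	fmt.Println(Clean(u, false).String())

	// keepQuery=true: RawQuery is copied over, so raw URLs keep ?page=2.
	fmt.Println(Clean(u, true).String())

	// CleanString parses and cleans in one step; its isRawURL argument is
	// passed straight through to Clean as keepQuery.
	cleaned, err := CleanString("https://example.com/feed?page=2", true)
	if err != nil {
		return err
	}
	fmt.Println(cleaned)

	return nil
}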