kls/links/url.go

package links

import (
	"context"
	"errors"
	"log"
	"net/url"
	"time"

	"github.com/Masterminds/squirrel"
	"github.com/google/uuid"
	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/pgxpool"
)
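
// psql builds SQL statements with Postgres-style $N placeholders, as
// expected by pgx.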
var psql = squirrel.StatementBuilder.PlaceholderFormat(squirrel.Dollar)
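
// URL is a stored link: the original URL as given, its normalized form
// used for de-duplication, and the short code assigned to it.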
type URL struct {
	ID        string
	URL       string
	NURL      string // normalized URL
	Short     string
	CreatedAt time.Time
}
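
// Timestamp renders CreatedAt in a compact, human-readable form.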
func (u *URL) Timestamp() string {
	return u.CreatedAt.Format("2006-01-02 15:04")
}
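
// Store inserts the record into the urls table.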
func (u *URL) Store(ctx context.Context, db *pgxpool.Pool) error {
	stmt := psql.Insert("urls").
		Columns("id", "url", "nurl", "short", "created_at").
		Values(u.ID, u.URL, u.NURL, u.Short, u.CreatedAt)
	query, args, err := stmt.ToSql()
	if err != nil {
		return err
	}
	_, err = db.Exec(ctx, query, args...)
	return err
}

// Normalize reduces a URL to its scheme, host, and path, dropping the
// query, fragment, and user info so equivalent links share one normalized form.
func Normalize(u *url.URL) *url.URL {
	norm := &url.URL{
		Scheme:  u.Scheme,
		Host:    u.Host,
		Path:    u.Path,
		RawPath: u.RawPath,
	}
	return norm
}
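
// NormalizeString parses s and returns its normalized form.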
func NormalizeString(s string) (string, error) {
	u, err := url.Parse(s)
	if err != nil {
		return "", err
	}
	u = Normalize(u)
	return u.String(), nil
}

// Clean rebuilds the URL keeping only scheme, host, path, and fragment;
// user info is dropped, and the query string survives only when keepQuery
// is set.
func Clean(u *url.URL, keepQuery bool) *url.URL {
	norm := &url.URL{
		Scheme:      u.Scheme,
		Host:        u.Host,
		Path:        u.Path,
		RawPath:     u.RawPath,
		Fragment:    u.Fragment,
		RawFragment: u.RawFragment,
	}
	if keepQuery {
		norm.RawQuery = u.RawQuery
	}
	return norm
}
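
// CleanString parses s and returns the cleaned form; keepQuery is
// forwarded to Clean.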
func CleanString(s string, keepQuery bool) (string, error) {
	u, err := url.Parse(s)
	if err != nil {
		return "", err
	}
	u = Clean(u, keepQuery)
	return u.String(), nil
}
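
// New builds a URL record with a fresh UUID, the normalized form, and a
// newly generated short code.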
func New(u *url.URL) *URL {
	link := &URL{
		ID:        uuid.NewString(),
		URL:       u.String(),
		NURL:      Normalize(u).String(),
		Short:     GenCode(),
		CreatedAt: time.Now(),
	}
	return link
}
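
// FromString parses s and wraps it in a new URL record.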
func FromString(s string) (*URL, error) {
	u, err := url.Parse(s)
	if err != nil {
		return nil, err
	}
	return New(u), nil
}
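
// Lookup returns the existing short code for s, or pgx.ErrNoRows if the
// normalized URL has not been stored yet.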
func Lookup(ctx context.Context, db *pgxpool.Pool, s string) (string, error) {
	u, err := url.Parse(s)
	if err != nil {
		return "", err
	}
	u = Normalize(u)
	stmt := psql.Select("short").From("urls").
		Where(squirrel.Eq{"nurl": u.String()})
	query, args, err := stmt.ToSql()
	if err != nil {
		return "", err
	}
	row := db.QueryRow(ctx, query, args...)
	var short string
	if err := row.Scan(&short); err != nil {
		return "", err
	}
	return short, nil
}
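
// StoreURL returns the short code for s, creating and persisting a new
// record only when no normalized match exists.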
func StoreURL(ctx context.Context, db *pgxpool.Pool, s string) (string, error) {
	short, err := Lookup(ctx, db, s)
	if err == nil {
		return short, nil
	}
	if !errors.Is(err, pgx.ErrNoRows) {
		return "", err
	}
	u, err := FromString(s)
	if err != nil {
		return "", err
	}
	if err := u.Store(ctx, db); err != nil {
		return "", err
	}
	return u.Short, nil
}
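
// RetrieveURL resolves a short code back to its original URL.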
func RetrieveURL(ctx context.Context, db *pgxpool.Pool, short string) (string, error) {
	log.Printf("look up url for short code %s", short)
	stmt := psql.Select("url").From("urls").
		Where(squirrel.Eq{"short": short})
	query, args, err := stmt.ToSql()
	if err != nil {
		return "", err
	}
	row := db.QueryRow(ctx, query, args...)
	var target string
	if err := row.Scan(&target); err != nil {
		return "", err
	}
	return target, nil
}
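
// FetchAll returns every stored URL, oldest first.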
func FetchAll(ctx context.Context, db *pgxpool.Pool) ([]*URL, error) {
	stmt := psql.Select("id", "url", "nurl", "short", "created_at").
		From("urls").
		OrderBy("created_at")
	query, args, err := stmt.ToSql()
	if err != nil {
		return nil, err
	}
	rows, err := db.Query(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	var urls []*URL
	for rows.Next() {
		u := &URL{}
		err = rows.Scan(
			&u.ID,
			&u.URL,
			&u.NURL,
			&u.Short,
			&u.CreatedAt,
		)
		if err != nil {
			return nil, err
		}
		urls = append(urls, u)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return urls, nil
}
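
// A minimal usage sketch (assumptions: a reachable Postgres database and a
// DATABASE_URL environment variable; neither is defined in this file):
//
//	pool, err := pgxpool.Connect(ctx, os.Getenv("DATABASE_URL"))
//	if err != nil {
//		// handle connection error
//	}
//	short, err := StoreURL(ctx, pool, "https://example.com/page?q=1")
//	// later, when serving a redirect:
//	target, err := RetrieveURL(ctx, pool, short)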