Switch from PostgreSQL to SQLite (modernc.org/sqlite, pure Go) for simpler deployment on the MCP platform. Fix URL normalization to preserve query parameters so sites like YouTube deduplicate correctly. Add Dockerfile, Makefile, and MCP service definition. Add pg2sqlite migration tool. Support $PORT env var for MCP port assignment. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
213 lines
3.8 KiB
Go
213 lines
3.8 KiB
Go
package links
|
|
|
|
import (
	"context"
	"database/sql"
	"errors"
	"log"
	"net/url"
	"time"

	"github.com/Masterminds/squirrel"
	"github.com/google/uuid"
)
|
|
|
|
// sq is the package-wide squirrel statement builder, configured for
// question-mark (?) placeholders as used by the SQLite driver.
var sq = squirrel.StatementBuilder.PlaceholderFormat(squirrel.Question)
|
|
|
|
// URL is a stored link: the original URL as submitted, its normalized
// form used for deduplication, and the generated short code.
type URL struct {
	ID string // primary key; a UUID string (see New)
	URL string // original URL exactly as submitted
	NURL string // normalized URL; deduplication key (see Lookup)
	Short string // generated short code (see GenCode)
	CreatedAt time.Time // persisted as RFC 3339 text in the database
}
|
|
|
|
func (u *URL) Timestamp() string {
|
|
return u.CreatedAt.Format("2006-01-02 15:04")
|
|
}
|
|
|
|
func (u *URL) Store(ctx context.Context, db *sql.DB) error {
|
|
stmt := sq.Insert("urls").
|
|
Columns("id", "url", "nurl", "short", "created_at").
|
|
Values(u.ID, u.URL, u.NURL, u.Short, u.CreatedAt.Format(time.RFC3339))
|
|
query, args, err := stmt.ToSql()
|
|
if err != nil {
|
|
return err
|
|
}
|
|
|
|
_, err = db.ExecContext(ctx, query, args...)
|
|
return err
|
|
}
|
|
|
|
// Normalize cleans the URL to only the parts we care about.
|
|
func Normalize(u *url.URL) *url.URL {
|
|
norm := &url.URL{
|
|
Scheme: u.Scheme,
|
|
Host: u.Host,
|
|
Path: u.Path,
|
|
RawPath: u.RawPath,
|
|
RawQuery: u.RawQuery,
|
|
}
|
|
return norm
|
|
}
|
|
|
|
func NormalizeString(s string) (string, error) {
|
|
u, err := url.Parse(s)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
u = Normalize(u)
|
|
return u.String(), nil
|
|
}
|
|
|
|
// Clean should scrub out junk from the URL.
|
|
func Clean(u *url.URL, stripQuery bool) *url.URL {
|
|
norm := &url.URL{
|
|
Scheme: u.Scheme,
|
|
Host: u.Host,
|
|
Path: u.Path,
|
|
RawPath: u.RawPath,
|
|
RawQuery: u.RawQuery,
|
|
Fragment: u.Fragment,
|
|
RawFragment: u.RawFragment,
|
|
}
|
|
|
|
if stripQuery {
|
|
norm.RawQuery = ""
|
|
}
|
|
return norm
|
|
}
|
|
|
|
func CleanString(s string, isRawURL bool) (string, error) {
|
|
u, err := url.Parse(s)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
u = Clean(u, isRawURL)
|
|
return u.String(), nil
|
|
}
|
|
|
|
func New(u *url.URL) *URL {
|
|
link := &URL{
|
|
ID: uuid.NewString(),
|
|
URL: u.String(),
|
|
NURL: Normalize(u).String(),
|
|
Short: GenCode(),
|
|
CreatedAt: time.Now(),
|
|
}
|
|
|
|
return link
|
|
}
|
|
|
|
func FromString(s string) (*URL, error) {
|
|
u, err := url.Parse(s)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
return New(u), nil
|
|
}
|
|
|
|
func Lookup(ctx context.Context, db *sql.DB, s string) (string, error) {
|
|
u, err := url.Parse(s)
|
|
if err != nil {
|
|
return "", nil
|
|
}
|
|
|
|
u = Normalize(u)
|
|
|
|
stmt := sq.Select("short").From("urls").
|
|
Where(squirrel.Eq{"nurl": u.String()})
|
|
query, args, err := stmt.ToSql()
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
row := db.QueryRowContext(ctx, query, args...)
|
|
|
|
var short string
|
|
err = row.Scan(&short)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
return short, nil
|
|
}
|
|
|
|
func StoreURL(ctx context.Context, db *sql.DB, s string) (string, error) {
|
|
short, err := Lookup(ctx, db, s)
|
|
if err == nil {
|
|
return short, nil
|
|
}
|
|
|
|
if err != sql.ErrNoRows {
|
|
return "", err
|
|
}
|
|
|
|
u, err := FromString(s)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
err = u.Store(ctx, db)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
return u.Short, nil
|
|
}
|
|
|
|
func RetrieveURL(ctx context.Context, db *sql.DB, short string) (string, error) {
|
|
log.Printf("look up url for short code %s", short)
|
|
stmt := sq.Select("url").From("urls").
|
|
Where(squirrel.Eq{"short": short})
|
|
query, args, err := stmt.ToSql()
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
row := db.QueryRowContext(ctx, query, args...)
|
|
var u string
|
|
|
|
err = row.Scan(&u)
|
|
if err != nil {
|
|
return "", err
|
|
}
|
|
|
|
return u, nil
|
|
}
|
|
|
|
func FetchAll(ctx context.Context, db *sql.DB) ([]*URL, error) {
|
|
stmt := sq.Select("id", "url", "nurl", "short", "created_at").
|
|
From("urls").OrderBy("created_at")
|
|
query, args, err := stmt.ToSql()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
rows, err := db.QueryContext(ctx, query, args...)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
defer rows.Close()
|
|
|
|
var urls []*URL
|
|
for rows.Next() {
|
|
u := &URL{}
|
|
var ts string
|
|
err = rows.Scan(&u.ID, &u.URL, &u.NURL, &u.Short, &ts)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
u.CreatedAt, err = time.Parse(time.RFC3339, ts)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
urls = append(urls, u)
|
|
}
|
|
|
|
return urls, nil
|
|
}
|