A tool for crawling and finding links to URLs which no longer exist
deadlinks/url.go

package deadlinks

import (
	"errors"
	"fmt"
	"net/url"
)

// URL is a standard uniform resource locator, normalized specifically for
// this package.
type URL string

// ParseURL parses and returns a URL based on the given string, or an error.
func ParseURL(urlStr string) (URL, error) {
	u, err := url.Parse(urlStr)
	if err != nil {
		return "", err
	}
	return URL(u.String()), nil
}

// parseURLs parses each of the given strings, collecting the URLs which parse
// successfully and joining the errors of those which do not.
func parseURLs(urlStrs []string) ([]URL, error) {
	var (
		res  = make([]URL, 0, len(urlStrs))
		errs []error
	)
	for _, urlStr := range urlStrs {
		u, err := ParseURL(urlStr)
		if err == nil {
			res = append(res, u)
		} else {
			errs = append(errs, err)
		}
	}
	return res, errors.Join(errs...)
}

// toStd converts the URL back into a *url.URL. The URL was validated when it
// was constructed, so a parse failure here indicates a bug and panics.
func (u URL) toStd() *url.URL {
	uu, err := url.Parse(string(u))
	if err != nil {
		panic(fmt.Sprintf("parsing URL %q: %v", u, err))
	}
	return uu
}

// ResolveReference is equivalent to the method of the same name in `net/url`.
func (u URL) ResolveReference(u2 URL) URL {
	return URL(u.toStd().ResolveReference(u2.toStd()).String())
}