checker-blacklist/checker/openphish.go
Pierre-Olivier Mercier 6b1d2e2540 Extract disabledResult and evidenceEval helpers to reduce boilerplate
Add two shared helpers to source.go and apply them across all sources:
- disabledResult(id, name) replaces the repeated inline SourceResult literal
- evidenceEval(r, severity) replaces the identical Evaluate body in 6 sources
2026-05-15 21:36:24 +08:00

136 lines
4 KiB
Go

package checker
import (
"bufio"
"context"
"fmt"
"io"
"net/http"
"strings"
"time"
sdk "git.happydns.org/checker-sdk-go/checker"
)
const openPhishFeedURL = "https://openphish.com/feed.txt"
func init() {
Register(&openPhishSource{
cache: newFeedCache(1*time.Hour, openPhishFetch(openPhishFeedURL)),
})
}
// openPhishSource downloads the public OpenPhish feed once per cache
// TTL and matches the registered domain (and all subdomains) against
// every URL in the feed. The cache is per-source-instance so it lives
// for as long as the process.
type openPhishSource struct {
	// cache holds the downloaded feed and its per-host index between
	// refreshes; see newFeedCache in init for the TTL and fetch function.
	cache *feedCache
}
// ID returns the stable machine identifier of this source.
func (*openPhishSource) ID() string {
	return "openphish"
}
// Name returns the human-readable label of this source.
func (*openPhishSource) Name() string {
	return "OpenPhish feed"
}
// Options exposes a single user-facing toggle that enables or
// disables the OpenPhish feed check (enabled by default).
func (*openPhishSource) Options() SourceOptions {
	return SourceOptions{
		User: []sdk.CheckerOptionField{
			{
				Id:    "enable_openphish",
				Type:  "bool",
				Label: "Use the OpenPhish public feed",
				// NOTE: the description must match the cache TTL configured
				// in init (one hour), not the previously advertised 12h.
				Description: "Download the OpenPhish public feed (refreshed every hour) and check the domain against it.",
				Default:     true,
			},
		},
	}
}
// Query looks up the registered domain in the cached OpenPhish feed.
// It returns a disabled result when the user opted out or when no
// registered domain is known; otherwise a single SourceResult carrying
// feed metadata, any matching URLs as evidence, and a non-fatal error
// string if the cache refresh failed.
func (s *openPhishSource) Query(ctx context.Context, domain, registered string, opts sdk.CheckerOptions) []SourceResult {
	enabled := sdk.GetBoolOption(opts, "enable_openphish", true)
	if !enabled || registered == "" {
		return disabledResult(s.ID(), s.Name())
	}

	matches, feedSize, fetchedAt, lookupErr := s.cache.lookup(ctx, registered)

	out := SourceResult{
		SourceID:   s.ID(),
		SourceName: s.Name(),
		Enabled:    true,
		Reference:  "https://openphish.com/",
		Details:    mustJSON(map[string]any{"feed_size": feedSize, "fetched_at": fetchedAt}),
	}
	if lookupErr != nil {
		// Surface the error but keep whatever the cache could provide.
		out.Error = lookupErr.Error()
	}
	if len(matches) > 0 {
		out.Reasons = []string{"Phishing"}
		for _, match := range matches {
			out.Evidence = append(out.Evidence, Evidence{Label: "URL", Value: match})
		}
	}
	return []SourceResult{out}
}
// Evaluate delegates to the shared evidence-based evaluator: any
// evidence in the result is rated at critical severity.
func (*openPhishSource) Evaluate(r SourceResult) (bool, string) {
	severity := SeverityCrit
	return evidenceEval(r, severity)
}
// Diagnose turns a positive OpenPhish match into a critical diagnosis,
// listing up to five of the offending URLs as examples and pointing
// the user at OpenPhish's review form.
func (*openPhishSource) Diagnose(res SourceResult) Diagnosis {
	matched := make([]string, 0, len(res.Evidence))
	for _, ev := range res.Evidence {
		matched = append(matched, ev.Value)
	}

	// Preview at most five URLs in the detail text.
	preview := matched
	if len(preview) > 5 {
		preview = preview[:5]
	}

	detail := fmt.Sprintf(
		"%d URL(s) hosted on this domain are tracked as phishing by OpenPhish. Treat the host as compromised: rotate credentials, audit recently-added files (look for /wp-includes/, /uploads/, lookalike admin paths), then request review at OpenPhish. Examples: %s",
		len(matched), joinNonEmpty(preview, ", "),
	)

	return Diagnosis{
		Severity: SeverityCrit,
		Title:    "Listed in the OpenPhish phishing feed",
		Detail:   detail,
		Fix:      "https://openphish.com/feedback.html",
		FixIsURL: true,
	}
}
// openPhishFetch returns a fetchFn that downloads and parses the
// OpenPhish plain-text feed at feedURL.
//
// The returned function yields the full list of feed URLs plus an
// index keyed by host (built with hostOfURL), and enforces a 30s
// timeout per fetch on top of the caller's context. The feed is
// capped at 64 MiB and individual lines at 1 MiB to bound memory use
// against a broken or hostile upstream.
func openPhishFetch(feedURL string) func(context.Context) ([]string, map[string][]string, error) {
	return func(ctx context.Context) ([]string, map[string][]string, error) {
		reqCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
		defer cancel()

		req, err := http.NewRequestWithContext(reqCtx, http.MethodGet, feedURL, nil)
		if err != nil {
			return nil, nil, err
		}
		req.Header.Set("User-Agent", "happydomain-checker-blacklist/1.0")

		resp, err := sharedHTTPClient.Do(req)
		if err != nil {
			return nil, nil, err
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			// Drain a bounded amount of the body before closing so the
			// transport can reuse the underlying TCP/TLS connection.
			// Best-effort: the drain error is irrelevant next to the
			// status error we are about to return.
			_, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, 4<<10))
			return nil, nil, fmt.Errorf("openphish HTTP %d", resp.StatusCode)
		}

		urls := make([]string, 0, 8192)
		byHost := make(map[string][]string, 8192)
		scanner := bufio.NewScanner(io.LimitReader(resp.Body, 64<<20))
		scanner.Buffer(make([]byte, 0, 64*1024), 1<<20)
		for scanner.Scan() {
			line := strings.TrimSpace(scanner.Text())
			// Skip blank lines and comments.
			if line == "" || strings.HasPrefix(line, "#") {
				continue
			}
			urls = append(urls, line)
			if h := hostOfURL(line); h != "" {
				byHost[h] = append(byHost[h], line)
			}
		}
		if err := scanner.Err(); err != nil {
			return nil, nil, err
		}
		return urls, byHost, nil
	}
}