Compare commits
No commits in common. "acd72ae05239675e9a2ce936e9d861d42dbc5c2e" and "603e93355b7eaa3229e14cfdae07984f188bb21f" have entirely different histories.
acd72ae052
...
603e93355b
27 changed files with 2633 additions and 412 deletions
|
|
@ -21,8 +21,14 @@ relies on TLS for transport.
|
|||
| `http.x_frame_options` | `X-Frame-Options` or CSP `frame-ancestors` provides clickjacking protection. |
|
||||
| `http.x_content_type_options` | `X-Content-Type-Options: nosniff` is set. |
|
||||
| `http.x_xss_protection` | Reports the legacy `X-XSS-Protection` header (recommendation: disable). |
|
||||
| `http.referrer_policy` | `Referrer-Policy` is set to a privacy-preserving value (W3C Referrer Policy). |
|
||||
| `http.permissions_policy` | `Permissions-Policy` is set (W3C Permissions Policy, replaces Feature-Policy). |
|
||||
| `http.coop` | `Cross-Origin-Opener-Policy` isolates the document from cross-origin windows. |
|
||||
| `http.coep` | `Cross-Origin-Embedder-Policy` requires CORP/CORS opt-in for embedded resources. |
|
||||
| `http.corp` | `Cross-Origin-Resource-Policy` restricts cross-origin embedding of responses. |
|
||||
| `http.cookie_flags` | Every Set-Cookie has `Secure`, `HttpOnly`, and a `SameSite` attribute. |
|
||||
| `http.sri` | Cross-origin `<script>`/`<link>` tags carry `integrity=` (Subresource Integrity). |
|
||||
| `http.security_txt` | `/.well-known/security.txt` is published (RFC 9116). |
|
||||
|
||||
## Options
|
||||
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import (
|
|||
"bytes"
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
|
|
@ -16,7 +17,6 @@ import (
|
|||
"net/url"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
|
|
@ -28,21 +28,112 @@ import (
|
|||
// when off, per-probe logging is silenced to keep production logs clean.
|
||||
var verboseLogging = os.Getenv("CHECKER_HTTP_VERBOSE") != ""
|
||||
|
||||
// Collect probes HTTP and HTTPS for every (IP, scheme) pair on the
|
||||
// abstract.Server. The HTTP body of the primary HTTPS probe is parsed for
|
||||
// SRI evaluation; secondary probes only retain headers/cookies/redirects.
|
||||
// Collect resolves the Target from CheckerOptions, runs the root
|
||||
// collector synchronously (its output is the canonical HTTPData), then
|
||||
// runs every registered Collector in parallel and merges their JSON
|
||||
// payloads into HTTPData.Extensions under their Key().
|
||||
func (p *httpProvider) Collect(ctx context.Context, opts sdk.CheckerOptions) (any, error) {
|
||||
server, err := resolveServer(opts)
|
||||
target, err := buildTarget(ctx, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rootOut, err := rootCollector{}.Collect(ctx, target)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
data, ok := rootOut.(*HTTPData)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("rootCollector returned %T, expected *HTTPData", rootOut)
|
||||
}
|
||||
|
||||
registry.mu.Lock()
|
||||
collectors := append([]Collector(nil), registry.collectors...)
|
||||
registry.mu.Unlock()
|
||||
if len(collectors) == 0 {
|
||||
return data, nil
|
||||
}
|
||||
|
||||
type result struct {
|
||||
key string
|
||||
raw json.RawMessage
|
||||
err error
|
||||
}
|
||||
// Each collector may issue several probes (one per scheme × IP), so we
|
||||
// budget it as runProbe does (timeout × (maxRedirects+1)) multiplied by
|
||||
// a small factor for the fan-out. The deadline is shared so a single
|
||||
// hung collector cannot keep the caller waiting longer than the
|
||||
// slowest legitimate collector.
|
||||
collectorBudget := target.Timeout * time.Duration(target.MaxRedirects+1) * 4
|
||||
cctx, cancel := context.WithTimeout(ctx, collectorBudget)
|
||||
defer cancel()
|
||||
|
||||
results := make(chan result, len(collectors))
|
||||
for _, c := range collectors {
|
||||
go func(c Collector) {
|
||||
out, err := c.Collect(cctx, target)
|
||||
if err != nil {
|
||||
results <- result{key: c.Key(), err: err}
|
||||
return
|
||||
}
|
||||
raw, mErr := json.Marshal(out)
|
||||
results <- result{key: c.Key(), raw: raw, err: mErr}
|
||||
}(c)
|
||||
}
|
||||
|
||||
exts := make(map[string]json.RawMessage, len(collectors))
|
||||
pending := len(collectors)
|
||||
for pending > 0 {
|
||||
select {
|
||||
case r := <-results:
|
||||
pending--
|
||||
if r.err != nil {
|
||||
if verboseLogging {
|
||||
log.Printf("checker-http: collector %q failed: %v", r.key, r.err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
exts[r.key] = r.raw
|
||||
case <-cctx.Done():
|
||||
if verboseLogging {
|
||||
log.Printf("checker-http: %d collector(s) did not return before deadline (%v); abandoning", pending, cctx.Err())
|
||||
}
|
||||
pending = 0
|
||||
}
|
||||
}
|
||||
if len(exts) > 0 {
|
||||
data.Extensions = exts
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
// LoadExtension decodes a sub-observation written by a Collector into the
|
||||
// caller-supplied typed value. Returns false (without error) when the
|
||||
// extension is absent — most rules treat that as "no_data" rather than
|
||||
// an error.
|
||||
func LoadExtension[T any](data *HTTPData, key string) (*T, bool, error) {
|
||||
raw, ok := data.Extensions[key]
|
||||
if !ok || len(raw) == 0 {
|
||||
return nil, false, nil
|
||||
}
|
||||
var v T
|
||||
if err := json.Unmarshal(raw, &v); err != nil {
|
||||
return nil, true, fmt.Errorf("decode extension %q: %w", key, err)
|
||||
}
|
||||
return &v, true, nil
|
||||
}
|
||||
|
||||
// buildTarget centralises option parsing and IP discovery so every
|
||||
// Collector receives a fully resolved Target.
|
||||
func buildTarget(ctx context.Context, opts sdk.CheckerOptions) (Target, error) {
|
||||
server, err := resolveServer(opts)
|
||||
if err != nil {
|
||||
return Target{}, err
|
||||
}
|
||||
|
||||
timeoutMs := sdk.GetIntOption(opts, OptionProbeTimeoutMs, DefaultProbeTimeoutMs)
|
||||
if timeoutMs <= 0 {
|
||||
timeoutMs = DefaultProbeTimeoutMs
|
||||
}
|
||||
timeout := time.Duration(timeoutMs) * time.Millisecond
|
||||
|
||||
maxRedirects := sdk.GetIntOption(opts, OptionMaxRedirects, DefaultMaxRedirects)
|
||||
if maxRedirects < 0 {
|
||||
maxRedirects = DefaultMaxRedirects
|
||||
|
|
@ -53,68 +144,26 @@ func (p *httpProvider) Collect(ctx context.Context, opts sdk.CheckerOptions) (an
|
|||
}
|
||||
|
||||
host, ips := addressesFromServer(server)
|
||||
// abstract.Server only pins one A and one AAAA. Resolve the host
|
||||
// to pick up any additional records the authoritative DNS exposes,
|
||||
// so multi-IP deployments aren't silently under-probed. Failures
|
||||
// are non-fatal; the pinned IPs remain.
|
||||
// abstract.Server only pins one A and one AAAA. Resolve the host to
|
||||
// pick up any additional records the authoritative DNS exposes, so
|
||||
// multi-IP deployments aren't silently under-probed. Failures are
|
||||
// non-fatal; the pinned IPs remain.
|
||||
seen := make(map[string]struct{}, len(ips)+4)
|
||||
for _, ip := range ips {
|
||||
seen[ip] = struct{}{}
|
||||
}
|
||||
ips = append(ips, discoverIPs(ctx, host, seen)...)
|
||||
if len(ips) == 0 {
|
||||
return nil, fmt.Errorf("abstract.Server has no A/AAAA records")
|
||||
return Target{}, fmt.Errorf("abstract.Server has no A/AAAA records")
|
||||
}
|
||||
|
||||
data := &HTTPData{
|
||||
Domain: host,
|
||||
CollectedAt: time.Now(),
|
||||
}
|
||||
|
||||
type job struct {
|
||||
scheme string
|
||||
port uint16
|
||||
ip string
|
||||
// parseHTML controls whether the HTML body is parsed and its
|
||||
// references kept on the probe. We only do this for the first
|
||||
// HTTPS probe to keep the payload bounded.
|
||||
parseHTML bool
|
||||
}
|
||||
|
||||
var jobs []job
|
||||
htmlPicked := false
|
||||
for _, ip := range ips {
|
||||
jobs = append(jobs, job{scheme: "http", port: DefaultHTTPPort, ip: ip})
|
||||
j := job{scheme: "https", port: DefaultHTTPSPort, ip: ip}
|
||||
if !htmlPicked {
|
||||
j.parseHTML = true
|
||||
htmlPicked = true
|
||||
}
|
||||
jobs = append(jobs, j)
|
||||
}
|
||||
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
sem := make(chan struct{}, MaxConcurrentProbes)
|
||||
for _, j := range jobs {
|
||||
wg.Add(1)
|
||||
sem <- struct{}{}
|
||||
go func(j job) {
|
||||
defer wg.Done()
|
||||
defer func() { <-sem }()
|
||||
probe := runProbe(ctx, host, j.ip, j.scheme, j.port, timeout, maxRedirects, userAgent, j.parseHTML)
|
||||
if verboseLogging {
|
||||
log.Printf("checker-http: %s ip=%s status=%d redirects=%d err=%q",
|
||||
j.scheme, j.ip, probe.StatusCode, len(probe.RedirectChain), probe.Error)
|
||||
}
|
||||
mu.Lock()
|
||||
data.Probes = append(data.Probes, probe)
|
||||
mu.Unlock()
|
||||
}(j)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
return data, nil
|
||||
return Target{
|
||||
Host: host,
|
||||
IPs: ips,
|
||||
Timeout: time.Duration(timeoutMs) * time.Millisecond,
|
||||
MaxRedirects: maxRedirects,
|
||||
UserAgent: userAgent,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func runProbe(ctx context.Context, host, ip, scheme string, port uint16, timeout time.Duration, maxRedirects int, ua string, parseHTML bool) HTTPProbe {
|
||||
|
|
@ -172,10 +221,16 @@ func runProbe(ctx context.Context, host, ip, scheme string, port uint16, timeout
|
|||
// and a separate http.Client.Timeout would race with it.
|
||||
CheckRedirect: func(req *http.Request, via []*http.Request) error {
|
||||
prev := via[len(via)-1]
|
||||
// req.Response is the 3xx response that triggered this hop;
|
||||
// it carries the redirecting status code (301/302/307/308…).
|
||||
status := 0
|
||||
if req.Response != nil {
|
||||
status = req.Response.StatusCode
|
||||
}
|
||||
redirectChain = append(redirectChain, RedirectStep{
|
||||
From: prev.URL.String(),
|
||||
To: req.URL.String(),
|
||||
Status: 0, // populated post-hoc below if available
|
||||
Status: status,
|
||||
})
|
||||
// The transport's DialContext is pinned to the original
|
||||
// (ip, port) and TLS ServerName is pinned to the original
|
||||
|
|
@ -241,8 +296,12 @@ func runProbe(ctx context.Context, host, ip, scheme string, port uint16, timeout
|
|||
probe.Headers[lk] = strings.Join(v, ", ")
|
||||
}
|
||||
|
||||
for _, c := range resp.Cookies() {
|
||||
probe.Cookies = append(probe.Cookies, CookieInfo{
|
||||
// resp.Cookies() and resp.Header.Values("Set-Cookie") yield entries
|
||||
// in the same order, so we can pair them positionally to recover the
|
||||
// raw byte length of each Set-Cookie line for the size rule.
|
||||
rawSetCookies := resp.Header.Values("Set-Cookie")
|
||||
for i, c := range resp.Cookies() {
|
||||
ci := CookieInfo{
|
||||
Name: c.Name,
|
||||
Domain: c.Domain,
|
||||
Path: c.Path,
|
||||
|
|
@ -250,7 +309,11 @@ func runProbe(ctx context.Context, host, ip, scheme string, port uint16, timeout
|
|||
HttpOnly: c.HttpOnly,
|
||||
SameSite: sameSiteString(c.SameSite),
|
||||
HasExpiry: !c.Expires.IsZero() || c.MaxAge > 0,
|
||||
})
|
||||
}
|
||||
if i < len(rawSetCookies) {
|
||||
ci.Size = len(rawSetCookies[i])
|
||||
}
|
||||
probe.Cookies = append(probe.Cookies, ci)
|
||||
}
|
||||
probe.RedirectChain = redirectChain
|
||||
|
||||
|
|
|
|||
36
checker/collector.go
Normal file
36
checker/collector.go
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Target captures everything a Collector needs to probe one logical host.
// It is built once by the orchestrator from CheckerOptions and passed to
// every Collector, so individual collectors don't have to re-parse options
// or re-resolve IPs.
type Target struct {
	// Host is the logical hostname being probed; collectors that pin
	// connections to a specific IP use it as the TLS ServerName.
	Host string
	// IPs lists every address to probe for this host.
	IPs []string
	// Timeout bounds each network step of a probe (dial, TLS handshake,
	// response headers).
	Timeout time.Duration
	// MaxRedirects caps how many redirect hops a probe follows.
	MaxRedirects int
	// UserAgent is sent verbatim in the User-Agent request header.
	UserAgent string
}

// Collector contributes a typed observation about a Target. Each collector
// owns one slice of the work (root probe, well-known endpoints, CORS
// preflight, etc.) and writes its result under Key() in the final
// payload's Extensions map.
//
// The current orchestrator wires only the root collector and writes its
// result directly under ObservationKeyHTTP for backward compatibility.
// Additional collectors are introduced in step 4; they will populate
// HTTPData.Extensions[Key()] without disturbing existing rules.
type Collector interface {
	// Key returns the Extensions map key this collector publishes under.
	Key() string
	// Collect produces the collector's observation for t. The returned
	// value is JSON-marshalled by the orchestrator before storage.
	Collect(ctx context.Context, t Target) (any, error)
}
|
||||
73
checker/collector_root.go
Normal file
73
checker/collector_root.go
Normal file
|
|
@ -0,0 +1,73 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// rootCollector probes the target host on HTTP/80 and HTTPS/443 for every
|
||||
// known IP, captures headers/cookies/redirects on each, and parses the
|
||||
// HTML body of the first successful HTTPS probe (so SRI-style rules have
|
||||
// something to evaluate). This is the original behaviour of Collect()
|
||||
// before the Collector interface was introduced.
|
||||
type rootCollector struct{}
|
||||
|
||||
func (rootCollector) Key() string { return ObservationKeyHTTP }
|
||||
|
||||
func (rootCollector) Collect(ctx context.Context, t Target) (any, error) {
|
||||
data := &HTTPData{
|
||||
Domain: t.Host,
|
||||
CollectedAt: time.Now(),
|
||||
}
|
||||
|
||||
type job struct {
|
||||
scheme string
|
||||
port uint16
|
||||
ip string
|
||||
// parseHTML controls whether the HTML body is parsed and its
|
||||
// references kept on the probe. Only the first HTTPS probe gets
|
||||
// it, to keep payload size bounded.
|
||||
parseHTML bool
|
||||
}
|
||||
|
||||
var jobs []job
|
||||
htmlPicked := false
|
||||
for _, ip := range t.IPs {
|
||||
jobs = append(jobs, job{scheme: "http", port: DefaultHTTPPort, ip: ip})
|
||||
j := job{scheme: "https", port: DefaultHTTPSPort, ip: ip}
|
||||
if !htmlPicked {
|
||||
j.parseHTML = true
|
||||
htmlPicked = true
|
||||
}
|
||||
jobs = append(jobs, j)
|
||||
}
|
||||
|
||||
var mu sync.Mutex
|
||||
var wg sync.WaitGroup
|
||||
sem := make(chan struct{}, MaxConcurrentProbes)
|
||||
for _, j := range jobs {
|
||||
wg.Add(1)
|
||||
sem <- struct{}{}
|
||||
go func(j job) {
|
||||
defer wg.Done()
|
||||
defer func() { <-sem }()
|
||||
probe := runProbe(ctx, t.Host, j.ip, j.scheme, j.port, t.Timeout, t.MaxRedirects, t.UserAgent, j.parseHTML)
|
||||
if verboseLogging {
|
||||
log.Printf("checker-http: %s ip=%s status=%d redirects=%d err=%q",
|
||||
j.scheme, j.ip, probe.StatusCode, len(probe.RedirectChain), probe.Error)
|
||||
}
|
||||
mu.Lock()
|
||||
data.Probes = append(data.Probes, probe)
|
||||
mu.Unlock()
|
||||
}(j)
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
return data, nil
|
||||
}
|
||||
99
checker/collector_wellknown.go
Normal file
99
checker/collector_wellknown.go
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"io"
|
||||
"net"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
// ObservationKeyWellKnown is the Extensions[] key under which
// wellknownCollector publishes its observation.
const ObservationKeyWellKnown = "wellknown"

// WellKnownData captures whether each well-known URI returned a usable
// document. It is intentionally narrow: per-URI presence and HTTP status
// are enough for the current rule set; deeper parsing (e.g. PGP-signed
// security.txt fields) is left to dedicated collectors when the need
// arises.
type WellKnownData struct {
	// URIs maps the request path (e.g. "/robots.txt") to its outcome.
	URIs map[string]WellKnownProbe `json:"uris"`
}

// WellKnownProbe is a single (URI → outcome) entry.
type WellKnownProbe struct {
	// URL is the full URL that was fetched.
	URL string `json:"url"`
	// StatusCode is the HTTP status, or 0 when the request never
	// completed (see Error).
	StatusCode int `json:"status_code,omitempty"`
	// Bytes is the number of body bytes read (capped at 64 KiB).
	Bytes int `json:"bytes,omitempty"`
	// Error holds the request-build or transport error, if any.
	Error string `json:"error,omitempty"`
}
|
||||
|
||||
// wellknownCollector probes a small, fixed set of standardised URIs
|
||||
// served at the apex of the host. Today it covers:
|
||||
//
|
||||
// - /.well-known/security.txt (RFC 9116) — security disclosure contact
|
||||
// - /robots.txt (RFC 9309) — crawler directives
|
||||
//
|
||||
// It uses the first IP only because these documents are expected to be
|
||||
// host-uniform: there is nothing to learn from probing every backend.
|
||||
type wellknownCollector struct{}
|
||||
|
||||
func (wellknownCollector) Key() string { return ObservationKeyWellKnown }
|
||||
|
||||
func (wellknownCollector) Collect(ctx context.Context, t Target) (any, error) {
|
||||
if len(t.IPs) == 0 {
|
||||
return nil, fmt.Errorf("no IPs to probe")
|
||||
}
|
||||
addr := net.JoinHostPort(t.IPs[0], "443")
|
||||
dialer := &net.Dialer{Timeout: t.Timeout}
|
||||
transport := &http.Transport{
|
||||
DialContext: func(ctx context.Context, network, _ string) (net.Conn, error) {
|
||||
return dialer.DialContext(ctx, network, addr)
|
||||
},
|
||||
TLSClientConfig: &tls.Config{ServerName: t.Host},
|
||||
TLSHandshakeTimeout: t.Timeout,
|
||||
ResponseHeaderTimeout: t.Timeout,
|
||||
DisableKeepAlives: true,
|
||||
}
|
||||
defer transport.CloseIdleConnections()
|
||||
client := &http.Client{Transport: transport}
|
||||
|
||||
uris := []string{"/.well-known/security.txt", "/robots.txt"}
|
||||
out := WellKnownData{URIs: make(map[string]WellKnownProbe, len(uris))}
|
||||
for _, path := range uris {
|
||||
out.URIs[path] = fetchOne(ctx, client, t.Host, path, t.UserAgent)
|
||||
}
|
||||
return &out, nil
|
||||
}
|
||||
|
||||
func fetchOne(ctx context.Context, client *http.Client, host, path, ua string) WellKnownProbe {
|
||||
u := (&url.URL{Scheme: "https", Host: host, Path: path}).String()
|
||||
probe := WellKnownProbe{URL: u}
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u, nil)
|
||||
if err != nil {
|
||||
probe.Error = err.Error()
|
||||
return probe
|
||||
}
|
||||
req.Header.Set("User-Agent", ua)
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
probe.Error = err.Error()
|
||||
return probe
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
probe.StatusCode = resp.StatusCode
|
||||
// Cap the read so a misconfigured server can't pull megabytes for a
|
||||
// "did this exist?" probe.
|
||||
body, _ := io.ReadAll(io.LimitReader(resp.Body, 64<<10))
|
||||
probe.Bytes = len(body)
|
||||
return probe
|
||||
}
|
||||
|
||||
// Register the well-known collector at package load so the orchestrator
// picks it up alongside the root collector.
func init() { RegisterCollector(wellknownCollector{}) }
|
||||
173
checker/header_rule.go
Normal file
173
checker/header_rule.go
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// HeaderResult is one observation produced by a HeaderRuleSpec callback.
// Suffix is appended to spec.Code (with a dot separator) to form the
// final CheckState code, e.g. "http.hsts" + "short_max_age" →
// "http.hsts.short_max_age". An empty Suffix uses spec.Code verbatim.
type HeaderResult struct {
	// Status is the severity carried into the resulting CheckState.
	Status sdk.Status
	// Suffix extends spec.Code ("ok", "missing", "short_max_age", …).
	Suffix string
	// Message is the human-readable finding.
	Message string
	// Meta holds optional structured details (e.g. a "fix" hint).
	Meta map[string]any
}
|
||||
|
||||
// HeaderRuleSpec declares a per-HTTPS-probe rule built around a single
// response header. It supersedes the per-rule Evaluate boilerplate that
// every "load HTTPData → iterate successful HTTPS probes → inspect one
// header → emit one CheckState" rule used to repeat.
//
// Three callbacks cover the spectrum, from simplest to most expressive:
//
//   - Validate: the header is present and a single boolean verdict is
//     enough. Returns (Status, message); the rule emits ".ok" on
//     StatusOK or ".invalid" otherwise. Used by the modern privacy
//     headers (Referrer-Policy, COOP/COEP/CORP, Permissions-Policy).
//
//   - Inspect: the header is present and may produce any number of
//     findings with arbitrary suffixes. Used by HSTS (".short_max_age"),
//     CSP (".unsafe_inline" / ".wildcard_script_src" / …) and the
//     legacy X-XSS-Protection rule which reports custom suffixes
//     (".disabled", ".enabled").
//
//   - OnMissing: the header is absent and the default ".missing"
//     emitter is wrong — either an alternative satisfies the
//     requirement (CSP frame-ancestors standing in for X-Frame-Options),
//     or absence has non-default severity (X-XSS-Protection emits
//     Info ".absent", not Warn ".missing"), or the severity depends
//     on a CheckerOption (HSTS/CSP gate "missing" on a configurable
//     "required" flag).
//
// Validate and Inspect are mutually exclusive. OnMissing can be combined
// with either. Specs that omit all three behave as a pure presence check
// (".ok" when set, default ".missing" when not).
type HeaderRuleSpec struct {
	// Code is the base CheckState code, e.g. "http.hsts".
	Code string
	// Description is the human-readable rule summary.
	Description string
	// Header is the response header the rule examines (matched
	// case-insensitively).
	Header string

	// Required toggles the severity of the default ".missing" emitter
	// (Warn when true, Info when false). Ignored when OnMissing is set.
	Required bool

	// FixHint, when set, populates Meta.fix on the default ".missing"
	// emitter. Ignored when OnMissing is set (callbacks must build
	// their own Meta).
	FixHint string

	Validate  func(value string) (sdk.Status, string)
	Inspect   func(value string, p HTTPProbe, opts sdk.CheckerOptions) []HeaderResult
	OnMissing func(p HTTPProbe, opts sdk.CheckerOptions) []HeaderResult
}
|
||||
|
||||
// HeaderRule constructs a self-contained sdk.CheckRule from a spec.
|
||||
// Intended to be wired in init() via RegisterRule.
|
||||
func HeaderRule(spec HeaderRuleSpec) sdk.CheckRule {
|
||||
if spec.Validate != nil && spec.Inspect != nil {
|
||||
panic("checker: HeaderRuleSpec " + spec.Code + " sets both Validate and Inspect")
|
||||
}
|
||||
return &headerRule{spec: spec}
|
||||
}
|
||||
|
||||
// headerRule adapts a HeaderRuleSpec to the sdk.CheckRule interface.
type headerRule struct{ spec HeaderRuleSpec }

// Name returns the rule's base code, e.g. "http.hsts".
func (r *headerRule) Name() string { return r.spec.Code }

// Description returns the human-readable summary from the spec.
func (r *headerRule) Description() string { return r.spec.Description }
|
||||
|
||||
func (r *headerRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, opts sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState(r.spec.Code+".no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
headerKey := strings.ToLower(r.spec.Header)
|
||||
|
||||
out := make([]sdk.CheckState, 0, len(probes))
|
||||
for _, p := range probes {
|
||||
for _, res := range r.evaluateProbe(p, opts, headerKey) {
|
||||
out = append(out, r.toCheckState(p, res))
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
func (r *headerRule) evaluateProbe(p HTTPProbe, opts sdk.CheckerOptions, headerKey string) []HeaderResult {
|
||||
v := strings.TrimSpace(p.Headers[headerKey])
|
||||
if v == "" {
|
||||
if r.spec.OnMissing != nil {
|
||||
return ensureNonEmpty(r.spec.OnMissing(p, opts), r.defaultPresent())
|
||||
}
|
||||
return []HeaderResult{r.defaultMissing()}
|
||||
}
|
||||
switch {
|
||||
case r.spec.Inspect != nil:
|
||||
return ensureNonEmpty(r.spec.Inspect(v, p, opts), r.defaultPresent())
|
||||
case r.spec.Validate != nil:
|
||||
status, msg := r.spec.Validate(v)
|
||||
suffix := "invalid"
|
||||
if status == sdk.StatusOK {
|
||||
suffix = "ok"
|
||||
}
|
||||
return []HeaderResult{{Status: status, Suffix: suffix, Message: msg}}
|
||||
default:
|
||||
return []HeaderResult{r.defaultPresent()}
|
||||
}
|
||||
}
|
||||
|
||||
func (r *headerRule) defaultMissing() HeaderResult {
|
||||
status := sdk.StatusInfo
|
||||
if r.spec.Required {
|
||||
status = sdk.StatusWarn
|
||||
}
|
||||
res := HeaderResult{
|
||||
Status: status,
|
||||
Suffix: "missing",
|
||||
Message: r.spec.Header + " is not set.",
|
||||
}
|
||||
if r.spec.FixHint != "" {
|
||||
res.Meta = map[string]any{"fix": r.spec.FixHint}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func (r *headerRule) defaultPresent() HeaderResult {
|
||||
return HeaderResult{
|
||||
Status: sdk.StatusOK,
|
||||
Suffix: "ok",
|
||||
Message: r.spec.Header + " is set.",
|
||||
}
|
||||
}
|
||||
|
||||
func (r *headerRule) toCheckState(p HTTPProbe, res HeaderResult) sdk.CheckState {
|
||||
code := r.spec.Code
|
||||
if res.Suffix != "" {
|
||||
code = code + "." + res.Suffix
|
||||
}
|
||||
return sdk.CheckState{
|
||||
Status: res.Status,
|
||||
Code: code,
|
||||
Subject: p.Address,
|
||||
Message: res.Message,
|
||||
Meta: res.Meta,
|
||||
}
|
||||
}
|
||||
|
||||
func ensureNonEmpty(results []HeaderResult, fallback HeaderResult) []HeaderResult {
|
||||
if len(results) == 0 {
|
||||
return []HeaderResult{fallback}
|
||||
}
|
||||
return results
|
||||
}
|
||||
240
checker/headers.go
Normal file
240
checker/headers.go
Normal file
|
|
@ -0,0 +1,240 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// HSTSDirectives is the parsed form of a Strict-Transport-Security header
// (RFC 6797 §6.1). MaxAgeSet distinguishes an explicit max-age=0 from a
// header that omitted the (mandatory) directive entirely. Errors lists
// per-directive parse problems so callers can surface them instead of
// silently treating malformed values as max-age=0.
type HSTSDirectives struct {
	// MaxAge is the parsed max-age in seconds (0 when absent or invalid).
	MaxAge int64
	// MaxAgeSet is true when a max-age directive appeared, even malformed.
	MaxAgeSet bool
	// IncludeSub is true when includeSubDomains is present.
	IncludeSub bool
	// Preload is true when preload is present.
	Preload bool
	// Errors lists per-directive parse problems.
	Errors []string
}

// ParseHSTS pulls max-age, includeSubDomains and preload out of an HSTS
// value. Returns nil for an empty value so callers can distinguish "header
// absent" from "header present with max-age=0". Per RFC 6797 §6.1.1
// max-age is REQUIRED, MUST appear exactly once, and its value is a
// non-negative integer (optionally quoted); violations are reported via
// the Errors slice.
func ParseHSTS(v string) *HSTSDirectives {
	v = strings.TrimSpace(v)
	if v == "" {
		return nil
	}
	h := &HSTSDirectives{}
	for _, part := range strings.Split(v, ";") {
		part = strings.TrimSpace(part)
		if part == "" {
			continue
		}
		lower := strings.ToLower(part)
		switch {
		case strings.HasPrefix(lower, "max-age="):
			raw := strings.TrimSpace(part[len("max-age="):])
			val, quoted := unquoteHSTS(raw)
			if h.MaxAgeSet {
				h.Errors = append(h.Errors, "max-age specified more than once")
				continue
			}
			h.MaxAgeSet = true
			n, err := strconv.ParseInt(val, 10, 64)
			switch {
			// Fix: check the empty quoted form (`max-age=""`) before the
			// generic parse error. strconv.ParseInt("") also fails, so
			// testing err first made this case unreachable and reported
			// the less specific "not a valid integer" message instead.
			case quoted && val == "":
				h.Errors = append(h.Errors, "max-age value is empty")
			case err != nil:
				h.Errors = append(h.Errors, fmt.Sprintf("max-age value %q is not a valid integer", raw))
			case n < 0:
				h.Errors = append(h.Errors, fmt.Sprintf("max-age value %d is negative", n))
			default:
				h.MaxAge = n
			}
		case lower == "max-age":
			h.Errors = append(h.Errors, "max-age directive has no value")
			h.MaxAgeSet = true
		case lower == "includesubdomains":
			h.IncludeSub = true
		case lower == "preload":
			h.Preload = true
		}
		// Unknown directives are ignored per RFC 6797 §6.1.
	}
	if !h.MaxAgeSet {
		h.Errors = append(h.Errors, "max-age directive is missing")
	}
	return h
}

// unquoteHSTS strips a surrounding pair of double quotes from a directive
// value (RFC 6797 allows the quoted-string form). Returns the inner value
// and whether quotes were present, so callers can distinguish `max-age=""`
// from `max-age=`.
func unquoteHSTS(s string) (string, bool) {
	if len(s) >= 2 && s[0] == '"' && s[len(s)-1] == '"' {
		return s[1 : len(s)-1], true
	}
	return s, false
}
|
||||
|
||||
// CSPDirectives is the parsed form of a Content-Security-Policy header
// (W3C CSP3). Directive names are lowercased; source tokens keep their
// original casing because keywords like 'unsafe-inline' must round-trip
// verbatim when reported back to the user.
type CSPDirectives struct {
	Raw        string
	Directives map[string][]string
}

// ParseCSP splits a CSP header into its directive → sources map.
// Returns nil for an empty value.
func ParseCSP(v string) *CSPDirectives {
	trimmed := strings.TrimSpace(v)
	if trimmed == "" {
		return nil
	}
	out := &CSPDirectives{Raw: trimmed, Directives: make(map[string][]string)}
	for _, clause := range strings.Split(trimmed, ";") {
		tokens := strings.Fields(clause)
		if len(tokens) == 0 {
			continue
		}
		out.Directives[strings.ToLower(tokens[0])] = tokens[1:]
	}
	return out
}

// HasDirective reports whether the named directive is declared at all.
func (c *CSPDirectives) HasDirective(name string) bool {
	if c == nil {
		return false
	}
	_, declared := c.Directives[strings.ToLower(name)]
	return declared
}

// HasSource reports whether the named directive lists the given source
// token (case-insensitive comparison; pass keywords with their quotes,
// e.g. "'unsafe-inline'").
func (c *CSPDirectives) HasSource(directive, source string) bool {
	if c == nil {
		return false
	}
	for _, tok := range c.Directives[strings.ToLower(directive)] {
		if strings.EqualFold(tok, source) {
			return true
		}
	}
	return false
}

// cspFetchFallback maps CSP fetch directives to default-src per CSP3
// §6.1: when a directive is absent, the user agent falls back to
// default-src. Non-fetch directives (frame-ancestors, form-action,
// base-uri, …) have no fallback and are deliberately omitted.
var cspFetchFallback = map[string]string{
	"child-src":       "default-src",
	"connect-src":     "default-src",
	"font-src":        "default-src",
	"frame-src":       "default-src",
	"img-src":         "default-src",
	"manifest-src":    "default-src",
	"media-src":       "default-src",
	"object-src":      "default-src",
	"prefetch-src":    "default-src",
	"script-src":      "default-src",
	"script-src-attr": "default-src",
	"script-src-elem": "default-src",
	"style-src":       "default-src",
	"style-src-attr":  "default-src",
	"style-src-elem":  "default-src",
	"worker-src":      "default-src",
}

// EffectiveSources returns the source list that browsers will enforce
// for directive: the directive's own list when declared, otherwise its
// default-src fallback for fetch directives. The second return is true
// iff the policy explicitly declares the directive (or its fallback).
func (c *CSPDirectives) EffectiveSources(directive string) ([]string, bool) {
	if c == nil {
		return nil, false
	}
	key := strings.ToLower(directive)
	if srcs, declared := c.Directives[key]; declared {
		return srcs, true
	}
	fallback, isFetch := cspFetchFallback[key]
	if !isFetch {
		return nil, false
	}
	srcs, declared := c.Directives[fallback]
	if !declared {
		return nil, false
	}
	return srcs, true
}

// effectiveHasSource reports whether the effective source list for
// directive contains source (case-insensitive).
func (c *CSPDirectives) effectiveHasSource(directive, source string) bool {
	sources, _ := c.EffectiveSources(directive)
	for _, tok := range sources {
		if strings.EqualFold(tok, source) {
			return true
		}
	}
	return false
}

// HasUnsafeInline reports whether the effective script-src or style-src
// allows 'unsafe-inline'.
func (c *CSPDirectives) HasUnsafeInline() bool {
	if c.effectiveHasSource("script-src", "'unsafe-inline'") {
		return true
	}
	return c.effectiveHasSource("style-src", "'unsafe-inline'")
}

// HasUnsafeEval reports whether the effective script-src allows
// 'unsafe-eval' (style-src does not enforce script execution, so we
// look at scripts only).
func (c *CSPDirectives) HasUnsafeEval() bool {
	return c.effectiveHasSource("script-src", "'unsafe-eval'")
}
|
||||
|
||||
// WildcardSource returns a permissive token (the literal `*`, or one of
|
||||
// the schemes `http:`, `https:`, `data:`, `blob:`) found in the
|
||||
// effective sources of directive, or "" if none. These tokens
|
||||
// effectively neutralise the directive.
|
||||
func (c *CSPDirectives) WildcardSource(directive string) string {
|
||||
srcs, _ := c.EffectiveSources(directive)
|
||||
for _, s := range srcs {
|
||||
switch strings.ToLower(s) {
|
||||
case "*", "http:", "https:", "data:", "blob:":
|
||||
return s
|
||||
}
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// ParsedHeaders bundles the structured headers we parse repeatedly. Fields
|
||||
// are nil when the underlying header is absent on the probe; rules can
|
||||
// nil-check or rely on the typed accessors which already handle nil.
|
||||
type ParsedHeaders struct {
|
||||
HSTS *HSTSDirectives
|
||||
CSP *CSPDirectives
|
||||
}
|
||||
|
||||
// ParseHeaders builds a ParsedHeaders from a probe's raw header map.
|
||||
// Header lookups use the lowercase keys produced by the collector.
|
||||
func ParseHeaders(p HTTPProbe) ParsedHeaders {
|
||||
return ParsedHeaders{
|
||||
HSTS: ParseHSTS(p.Headers["strict-transport-security"]),
|
||||
CSP: ParseCSP(p.Headers["content-security-policy"]),
|
||||
}
|
||||
}
|
||||
48
checker/iter.go
Normal file
48
checker/iter.go
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
|
||||
// EvalAggregateByScheme runs fn on the subset of probes matching scheme.
|
||||
// If no probe was attempted, returns a single Unknown state with
|
||||
// code+".no_probes". Otherwise it returns the per-probe states emitted by
|
||||
// fn, falling back to a single OK state (code+".ok" with okMsg) when fn
|
||||
// emitted nothing — the conventional "everything is fine" shape used by
|
||||
// reachability and redirect rules.
|
||||
func EvalAggregateByScheme(data *HTTPData, scheme, code, okMsg string, fn func(p HTTPProbe, emit func(sdk.CheckState))) []sdk.CheckState {
|
||||
probes := probesByScheme(data.Probes, scheme)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState(code+".no_probes", "No probes were attempted.")}
|
||||
}
|
||||
var states []sdk.CheckState
|
||||
emit := func(s sdk.CheckState) { states = append(states, s) }
|
||||
for _, p := range probes {
|
||||
fn(p, emit)
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState(code+".ok", okMsg)}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
// EvalPerHTTPS calls fn for each successful HTTPS probe and returns the
|
||||
// concatenated states. If no HTTPS probe succeeded, returns a single
|
||||
// Unknown state with code+".no_https".
|
||||
//
|
||||
// Use this for rules that emit one CheckState per probe — the most common
|
||||
// shape. Rules that need access to all probes at once (aggregation,
|
||||
// cross-probe comparisons) should call successfulHTTPSProbes directly.
|
||||
func EvalPerHTTPS(data *HTTPData, code string, fn func(p HTTPProbe) sdk.CheckState) []sdk.CheckState {
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState(code+".no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
out := make([]sdk.CheckState, 0, len(probes))
|
||||
for _, p := range probes {
|
||||
out = append(out, fn(p))
|
||||
}
|
||||
return out
|
||||
}
|
||||
50
checker/registry.go
Normal file
50
checker/registry.go
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"sync"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// registry holds the rules and collectors that ship with the checker.
|
||||
// Each rule/collector registers itself in an init() so that adding a new
|
||||
// one is a single-file change — no central list to maintain.
|
||||
var registry = struct {
|
||||
mu sync.Mutex
|
||||
rules []sdk.CheckRule
|
||||
collectors []Collector
|
||||
}{}
|
||||
|
||||
// RegisterRule appends a rule to the global registry. Intended to be
|
||||
// called from init() in each rule file.
|
||||
func RegisterRule(r sdk.CheckRule) {
|
||||
registry.mu.Lock()
|
||||
defer registry.mu.Unlock()
|
||||
registry.rules = append(registry.rules, r)
|
||||
}
|
||||
|
||||
// RegisterCollector appends a collector to the global registry. Reserved
|
||||
// for step 4; the orchestrator currently wires only rootCollector
|
||||
// directly.
|
||||
func RegisterCollector(c Collector) {
|
||||
registry.mu.Lock()
|
||||
defer registry.mu.Unlock()
|
||||
registry.collectors = append(registry.collectors, c)
|
||||
}
|
||||
|
||||
// Rules returns every registered rule, sorted by Name() so the output is
|
||||
// stable across init-order changes (which Go does not guarantee between
|
||||
// files).
|
||||
func Rules() []sdk.CheckRule {
|
||||
registry.mu.Lock()
|
||||
out := make([]sdk.CheckRule, len(registry.rules))
|
||||
copy(out, registry.rules)
|
||||
registry.mu.Unlock()
|
||||
sort.Slice(out, func(i, j int) bool { return out[i].Name() < out[j].Name() })
|
||||
return out
|
||||
}
|
||||
|
|
@ -11,24 +11,6 @@ import (
|
|||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// Rules returns the full list of independent rules this checker provides.
|
||||
// Each concern surfaces independently in the UI rather than being squashed
|
||||
// into a single aggregated verdict.
|
||||
func Rules() []sdk.CheckRule {
|
||||
return []sdk.CheckRule{
|
||||
&reachabilityRule{scheme: "http", code: "http.tcp_reachable"},
|
||||
&reachabilityRule{scheme: "https", code: "https.tcp_reachable"},
|
||||
&httpsRedirectRule{},
|
||||
&hstsRule{},
|
||||
&cspRule{},
|
||||
&xFrameOptionsRule{},
|
||||
&xContentTypeOptionsRule{},
|
||||
&xXSSProtectionRule{},
|
||||
&cookieFlagsRule{},
|
||||
&sriRule{},
|
||||
}
|
||||
}
|
||||
|
||||
// loadHTTPData fetches the HTTPData observation. On failure, returns a
|
||||
// single error CheckState the caller should emit and bail out.
|
||||
func loadHTTPData(ctx context.Context, obs sdk.ObservationGetter) (*HTTPData, *sdk.CheckState) {
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ import (
|
|||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() { RegisterRule(&cookieFlagsRule{}) }
|
||||
|
||||
// cookieFlagsRule audits Set-Cookie attributes on HTTPS responses: every
|
||||
// cookie should be Secure and HttpOnly, and SameSite should be set.
|
||||
type cookieFlagsRule struct{}
|
||||
|
|
@ -33,6 +35,7 @@ func (r *cookieFlagsRule) Evaluate(ctx context.Context, obs sdk.ObservationGette
|
|||
|
||||
var states []sdk.CheckState
|
||||
totalCookies := 0
|
||||
samesiteMissing := 0
|
||||
for _, p := range probes {
|
||||
for _, c := range p.Cookies {
|
||||
totalCookies++
|
||||
|
|
@ -45,6 +48,7 @@ func (r *cookieFlagsRule) Evaluate(ctx context.Context, obs sdk.ObservationGette
|
|||
}
|
||||
if c.SameSite == "" {
|
||||
issues = append(issues, "missing SameSite")
|
||||
samesiteMissing++
|
||||
} else if strings.EqualFold(c.SameSite, "None") && !c.Secure {
|
||||
issues = append(issues, "SameSite=None requires Secure")
|
||||
}
|
||||
|
|
@ -61,6 +65,16 @@ func (r *cookieFlagsRule) Evaluate(ctx context.Context, obs sdk.ObservationGette
|
|||
if totalCookies == 0 {
|
||||
return []sdk.CheckState{passState("http.cookie_flags.none", "No cookies were set on the inspected responses.")}
|
||||
}
|
||||
if samesiteMissing > 0 {
|
||||
// Aggregate alongside per-cookie diagnostics so callers see the
|
||||
// global ratio at a glance — mirrors what Mozilla Observatory
|
||||
// reports as a single cookies test outcome.
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.cookie_flags.samesite_missing",
|
||||
Message: fmt.Sprintf("%d of %d cookies do not set SameSite.", samesiteMissing, totalCookies),
|
||||
})
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState("http.cookie_flags.ok", fmt.Sprintf("All %d cookies have proper Secure/HttpOnly/SameSite flags.", totalCookies))}
|
||||
}
|
||||
|
|
|
|||
148
checker/rules_cookies_rfc6265bis.go
Normal file
148
checker/rules_cookies_rfc6265bis.go
Normal file
|
|
@ -0,0 +1,148 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() {
|
||||
RegisterRule(&cookiePrefixesRule{})
|
||||
RegisterRule(&cookieSizeRule{})
|
||||
}
|
||||
|
||||
// cookiePrefixesRule enforces the cookie name prefix semantics from
|
||||
// RFC 6265bis §4.1.3:
|
||||
//
|
||||
// - Names starting with "__Secure-" MUST have the Secure attribute.
|
||||
// - Names starting with "__Host-" MUST have Secure, MUST NOT have a
|
||||
// Domain attribute, and MUST have Path="/".
|
||||
//
|
||||
// Browsers reject Set-Cookie that violates these constraints, so a
|
||||
// failure here means the cookie is being silently dropped by every
|
||||
// modern user agent.
|
||||
type cookiePrefixesRule struct{}
|
||||
|
||||
func (r *cookiePrefixesRule) Name() string { return "http.cookie_prefixes" }
|
||||
func (r *cookiePrefixesRule) Description() string {
|
||||
return "Verifies cookies using the __Secure- / __Host- name prefixes meet the RFC 6265bis constraints (Secure, Domain, Path)."
|
||||
}
|
||||
|
||||
func (r *cookiePrefixesRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.cookie_prefixes.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
|
||||
var states []sdk.CheckState
|
||||
prefixed := 0
|
||||
for _, p := range probes {
|
||||
for _, c := range p.Cookies {
|
||||
switch {
|
||||
case strings.HasPrefix(c.Name, "__Host-"):
|
||||
prefixed++
|
||||
issues := hostPrefixIssues(c)
|
||||
if len(issues) > 0 {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.cookie_prefixes.invalid_host",
|
||||
Subject: fmt.Sprintf("%s :: %s", p.Address, c.Name),
|
||||
Message: fmt.Sprintf("Cookie %q violates the __Host- prefix contract (RFC 6265bis §4.1.3): %s", c.Name, strings.Join(issues, ", ")),
|
||||
})
|
||||
}
|
||||
case strings.HasPrefix(c.Name, "__Secure-"):
|
||||
prefixed++
|
||||
if !c.Secure {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.cookie_prefixes.invalid_secure",
|
||||
Subject: fmt.Sprintf("%s :: %s", p.Address, c.Name),
|
||||
Message: fmt.Sprintf("Cookie %q uses the __Secure- prefix but is not marked Secure (RFC 6265bis §4.1.3); the cookie will be rejected by browsers.", c.Name),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if prefixed == 0 {
|
||||
return []sdk.CheckState{{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.cookie_prefixes.none",
|
||||
Message: "No cookies use the __Host- or __Secure- name prefixes; consider them for high-value cookies (session, CSRF token, …).",
|
||||
}}
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState("http.cookie_prefixes.ok", fmt.Sprintf("All %d prefixed cookies satisfy the RFC 6265bis constraints.", prefixed))}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
// hostPrefixIssues returns the list of __Host- contract violations on
|
||||
// the given cookie. Empty slice means the cookie is conformant.
|
||||
func hostPrefixIssues(c CookieInfo) []string {
|
||||
var issues []string
|
||||
if !c.Secure {
|
||||
issues = append(issues, "missing Secure")
|
||||
}
|
||||
if c.Domain != "" {
|
||||
issues = append(issues, "Domain attribute is forbidden")
|
||||
}
|
||||
if c.Path != "/" {
|
||||
issues = append(issues, fmt.Sprintf("Path must be \"/\", got %q", c.Path))
|
||||
}
|
||||
return issues
|
||||
}
|
||||
|
||||
// cookieSizeRule flags cookies whose raw Set-Cookie line exceeds the
|
||||
// per-cookie budget (4096 bytes) browsers are required to support per
|
||||
// RFC 6265 §6.1. Anything over is at risk of being silently truncated
|
||||
// or dropped by user agents.
|
||||
type cookieSizeRule struct{}
|
||||
|
||||
func (r *cookieSizeRule) Name() string { return "http.cookie_size" }
|
||||
func (r *cookieSizeRule) Description() string {
|
||||
return "Flags cookies whose Set-Cookie line exceeds the 4096-byte minimum browsers must support (RFC 6265 §6.1)."
|
||||
}
|
||||
|
||||
func (r *cookieSizeRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.cookie_size.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
|
||||
var states []sdk.CheckState
|
||||
total := 0
|
||||
for _, p := range probes {
|
||||
for _, c := range p.Cookies {
|
||||
total++
|
||||
if c.Size > MaxCookieSize {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.cookie_size.too_large",
|
||||
Subject: fmt.Sprintf("%s :: %s", p.Address, c.Name),
|
||||
Message: fmt.Sprintf("Cookie %q is %d bytes; RFC 6265 §6.1 only mandates support for cookies up to %d bytes, larger cookies may be silently dropped.", c.Name, c.Size, MaxCookieSize),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
if total == 0 {
|
||||
return []sdk.CheckState{passState("http.cookie_size.none", "No cookies were set on the inspected responses.")}
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState("http.cookie_size.ok", fmt.Sprintf("All %d cookies fit within the %d-byte per-cookie budget.", total, MaxCookieSize))}
|
||||
}
|
||||
return states
|
||||
}
|
||||
160
checker/rules_cookies_rfc6265bis_test.go
Normal file
160
checker/rules_cookies_rfc6265bis_test.go
Normal file
|
|
@ -0,0 +1,160 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func TestCookiePrefixesRule_NoHTTPS(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpProbe("a:80")}}
|
||||
states := runRule(t, &cookiePrefixesRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusUnknown)
|
||||
if !hasCode(states, "http.cookie_prefixes.no_https") {
|
||||
t.Errorf("missing no_https code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookiePrefixesRule_NoPrefixed(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{{Name: "sid", Secure: true, HttpOnly: true, SameSite: "Lax"}}
|
||||
states := runRule(t, &cookiePrefixesRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusInfo)
|
||||
if !hasCode(states, "http.cookie_prefixes.none") {
|
||||
t.Errorf("missing 'none' code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookiePrefixesRule_HostOK(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{
|
||||
{Name: "__Host-sid", Secure: true, HttpOnly: true, SameSite: "Strict", Path: "/"},
|
||||
{Name: "__Secure-tok", Secure: true, HttpOnly: true, SameSite: "Lax", Path: "/app"},
|
||||
}
|
||||
states := runRule(t, &cookiePrefixesRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.cookie_prefixes.ok") {
|
||||
t.Errorf("missing ok code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookiePrefixesRule_SecureMissingSecure(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{{Name: "__Secure-x", Secure: false, HttpOnly: true, SameSite: "Lax"}}
|
||||
states := runRule(t, &cookiePrefixesRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.cookie_prefixes.invalid_secure") {
|
||||
t.Errorf("missing invalid_secure code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookiePrefixesRule_HostViolations(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
cookie CookieInfo
|
||||
want []string
|
||||
}{
|
||||
{
|
||||
name: "no Secure",
|
||||
cookie: CookieInfo{Name: "__Host-a", Secure: false, Path: "/"},
|
||||
want: []string{"missing Secure"},
|
||||
},
|
||||
{
|
||||
name: "Domain set",
|
||||
cookie: CookieInfo{Name: "__Host-a", Secure: true, Domain: "example.test", Path: "/"},
|
||||
want: []string{"Domain attribute is forbidden"},
|
||||
},
|
||||
{
|
||||
name: "wrong Path",
|
||||
cookie: CookieInfo{Name: "__Host-a", Secure: true, Path: "/app"},
|
||||
want: []string{`Path must be "/"`},
|
||||
},
|
||||
{
|
||||
name: "all three",
|
||||
cookie: CookieInfo{Name: "__Host-a", Secure: false, Domain: "x", Path: "/x"},
|
||||
want: []string{"missing Secure", "Domain attribute is forbidden", `Path must be "/"`},
|
||||
},
|
||||
}
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{c.cookie}
|
||||
states := runRule(t, &cookiePrefixesRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.cookie_prefixes.invalid_host") {
|
||||
t.Fatalf("missing invalid_host code: %+v", states)
|
||||
}
|
||||
for _, w := range c.want {
|
||||
if !strings.Contains(states[0].Message, w) {
|
||||
t.Errorf("message missing %q: %s", w, states[0].Message)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookiePrefixesRule_LoadFailure(t *testing.T) {
|
||||
states := (&cookiePrefixesRule{}).Evaluate(t.Context(), &fakeObs{failGet: true}, nil)
|
||||
if len(states) != 1 || states[0].Status != sdk.StatusError {
|
||||
t.Fatalf("expected single error state, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookieSizeRule_NoHTTPS(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpProbe("a:80")}}
|
||||
states := runRule(t, &cookieSizeRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusUnknown)
|
||||
}
|
||||
|
||||
func TestCookieSizeRule_None(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpsProbe("a:443")}}
|
||||
states := runRule(t, &cookieSizeRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.cookie_size.none") {
|
||||
t.Errorf("missing 'none' code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookieSizeRule_OK(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{
|
||||
{Name: "small", Size: 200},
|
||||
{Name: "borderline", Size: MaxCookieSize}, // exactly the limit is acceptable
|
||||
}
|
||||
states := runRule(t, &cookieSizeRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.cookie_size.ok") {
|
||||
t.Errorf("missing ok code: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookieSizeRule_TooLarge(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{
|
||||
{Name: "small", Size: 100},
|
||||
{Name: "huge", Size: MaxCookieSize + 1},
|
||||
}
|
||||
states := runRule(t, &cookieSizeRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
if len(states) != 1 {
|
||||
t.Fatalf("got %d states, want 1 (only the oversized cookie)", len(states))
|
||||
}
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.cookie_size.too_large") {
|
||||
t.Errorf("missing too_large code: %+v", states)
|
||||
}
|
||||
if !strings.Contains(states[0].Message, "huge") {
|
||||
t.Errorf("message should mention cookie name: %q", states[0].Message)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookieSizeRule_LoadFailure(t *testing.T) {
|
||||
states := (&cookieSizeRule{}).Evaluate(t.Context(), &fakeObs{failGet: true}, nil)
|
||||
if len(states) != 1 || states[0].Status != sdk.StatusError {
|
||||
t.Fatalf("expected single error state, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
|
@ -48,12 +48,26 @@ func TestCookieFlagsRule_Issues(t *testing.T) {
|
|||
{Name: "none-without-secure", Secure: false, HttpOnly: true, SameSite: "None"},
|
||||
}
|
||||
states := runRule(t, &cookieFlagsRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
if len(states) != len(p.Cookies) {
|
||||
t.Fatalf("got %d states, want %d", len(states), len(p.Cookies))
|
||||
// Per-cookie diagnostics + a single SameSite aggregate (1 cookie out
|
||||
// of 4 is missing SameSite).
|
||||
if len(states) != len(p.Cookies)+1 {
|
||||
t.Fatalf("got %d states, want %d", len(states), len(p.Cookies)+1)
|
||||
}
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
|
||||
// Check each diagnostic mentions the cookie name and a relevant phrase.
|
||||
if !hasCode(states, "http.cookie_flags.samesite_missing") {
|
||||
t.Errorf("missing samesite_missing aggregate: %+v", states)
|
||||
}
|
||||
for _, st := range states {
|
||||
if st.Code == "http.cookie_flags.samesite_missing" {
|
||||
if !strings.Contains(st.Message, "1 of 4") {
|
||||
t.Errorf("aggregate message %q should mention 1 of 4", st.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check each per-cookie diagnostic mentions the cookie name and a
|
||||
// relevant phrase.
|
||||
wantSubstr := map[string]string{
|
||||
"no-secure": "missing Secure",
|
||||
"no-httponly": "missing HttpOnly",
|
||||
|
|
@ -61,6 +75,9 @@ func TestCookieFlagsRule_Issues(t *testing.T) {
|
|||
"none-without-secure": "SameSite=None requires Secure",
|
||||
}
|
||||
for _, st := range states {
|
||||
if st.Code != "http.cookie_flags.weak" {
|
||||
continue
|
||||
}
|
||||
matched := false
|
||||
for name, phrase := range wantSubstr {
|
||||
if strings.Contains(st.Message, name) && strings.Contains(st.Message, phrase) {
|
||||
|
|
@ -74,6 +91,26 @@ func TestCookieFlagsRule_Issues(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestCookieFlagsRule_SameSiteAggregateOnly(t *testing.T) {
|
||||
// Two cookies, both otherwise compliant but missing SameSite. We
|
||||
// expect 2 per-cookie warnings + 1 aggregate.
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{
|
||||
{Name: "a", Secure: true, HttpOnly: true, SameSite: ""},
|
||||
{Name: "b", Secure: true, HttpOnly: true, SameSite: ""},
|
||||
}
|
||||
states := runRule(t, &cookieFlagsRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.cookie_flags.samesite_missing") {
|
||||
t.Fatalf("missing aggregate state: %+v", states)
|
||||
}
|
||||
for _, st := range states {
|
||||
if st.Code == "http.cookie_flags.samesite_missing" && !strings.Contains(st.Message, "2 of 2") {
|
||||
t.Errorf("aggregate should report 2 of 2, got %q", st.Message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCookieFlagsRule_SameSiteNoneCaseInsensitive(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Cookies = []CookieInfo{{Name: "x", Secure: false, HttpOnly: true, SameSite: "none"}}
|
||||
|
|
|
|||
300
checker/rules_modern_headers.go
Normal file
300
checker/rules_modern_headers.go
Normal file
|
|
@ -0,0 +1,300 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// This file wires modern privacy/isolation headers entirely through the
|
||||
// HeaderRule DSL. Each rule is a single declarative spec — no per-rule
|
||||
// type, no Evaluate plumbing, no test scaffolding beyond the value
|
||||
// validator.
|
||||
//
|
||||
// Coverage:
|
||||
// - Referrer-Policy (W3C Referrer Policy)
|
||||
// - Permissions-Policy (W3C Permissions Policy, replaces Feature-Policy)
|
||||
// - Cross-Origin-Opener-Policy (HTML spec, COOP)
|
||||
// - Cross-Origin-Embedder-Policy (HTML spec, COEP)
|
||||
// - Cross-Origin-Resource-Policy (Fetch spec, CORP)
|
||||
//
|
||||
// These are all "presence + value sanity" checks. Anything richer (e.g.
|
||||
// directive-by-directive Permissions-Policy parsing) belongs in its own
|
||||
// hand-rolled rule.
|
||||
|
||||
func init() {
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.referrer_policy",
|
||||
Description: "Verifies that responses set a Referrer-Policy header with a privacy-preserving value.",
|
||||
Header: "Referrer-Policy",
|
||||
Required: false,
|
||||
FixHint: "Send `Referrer-Policy: strict-origin-when-cross-origin` (the modern browser default) or stricter.",
|
||||
Validate: validateReferrerPolicy,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.permissions_policy",
|
||||
Description: "Verifies that the Permissions-Policy header restricts powerful APIs (camera, microphone, geolocation, …).",
|
||||
Header: "Permissions-Policy",
|
||||
Required: false,
|
||||
FixHint: "Define a Permissions-Policy that disables APIs the site does not use, e.g. `Permissions-Policy: camera=(), microphone=(), geolocation=()`.",
|
||||
Validate: validatePermissionsPolicy,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.coop",
|
||||
Description: "Verifies the Cross-Origin-Opener-Policy (COOP) header for cross-origin process isolation.",
|
||||
Header: "Cross-Origin-Opener-Policy",
|
||||
Required: false,
|
||||
FixHint: "Send `Cross-Origin-Opener-Policy: same-origin` to isolate this document from cross-origin windows.",
|
||||
Validate: validateCOOP,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.coep",
|
||||
Description: "Verifies the Cross-Origin-Embedder-Policy (COEP) header. Required (with COOP) to enable cross-origin isolation and APIs such as SharedArrayBuffer.",
|
||||
Header: "Cross-Origin-Embedder-Policy",
|
||||
Required: false,
|
||||
FixHint: "Send `Cross-Origin-Embedder-Policy: require-corp` (or `credentialless`) once embedded resources opt in via CORP/CORS.",
|
||||
Validate: validateCOEP,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.corp",
|
||||
Description: "Verifies the Cross-Origin-Resource-Policy (CORP) header, which lets a server forbid cross-origin/cross-site embedding of its responses.",
|
||||
Header: "Cross-Origin-Resource-Policy",
|
||||
Required: false,
|
||||
FixHint: "Send `Cross-Origin-Resource-Policy: same-origin` (or `same-site`) on responses that should not be embedded cross-origin.",
|
||||
Validate: validateCORP,
|
||||
}))
|
||||
}
|
||||
|
||||
// validateReferrerPolicy accepts any token (or comma-separated list of
|
||||
// tokens) defined by the W3C Referrer Policy spec, but downgrades the
|
||||
// status when the only effective value is the historically lax
|
||||
// `unsafe-url` or `no-referrer-when-downgrade`. Per the spec, browsers
|
||||
// pick the last *recognised* token of a comma list, so we evaluate that
|
||||
// one.
|
||||
func validateReferrerPolicy(v string) (sdk.Status, string) {
|
||||
tokens := splitCSV(v)
|
||||
if len(tokens) == 0 {
|
||||
return sdk.StatusWarn, "Referrer-Policy is empty."
|
||||
}
|
||||
// Per spec, the user-agent picks the last token it recognises.
|
||||
var effective string
|
||||
for _, t := range tokens {
|
||||
if isReferrerPolicyToken(t) {
|
||||
effective = t
|
||||
}
|
||||
}
|
||||
if effective == "" {
|
||||
return sdk.StatusWarn, "Referrer-Policy has no recognised token: " + v
|
||||
}
|
||||
switch effective {
|
||||
case "unsafe-url":
|
||||
return sdk.StatusWarn, "Referrer-Policy: unsafe-url leaks the full URL (including query) cross-origin; prefer strict-origin-when-cross-origin."
|
||||
case "no-referrer-when-downgrade":
|
||||
return sdk.StatusInfo, "Referrer-Policy: no-referrer-when-downgrade is the legacy default; prefer strict-origin-when-cross-origin."
|
||||
}
|
||||
return sdk.StatusOK, "Referrer-Policy is set to " + effective + "."
|
||||
}
|
||||
|
||||
func isReferrerPolicyToken(t string) bool {
|
||||
switch t {
|
||||
case "no-referrer",
|
||||
"no-referrer-when-downgrade",
|
||||
"origin",
|
||||
"origin-when-cross-origin",
|
||||
"same-origin",
|
||||
"strict-origin",
|
||||
"strict-origin-when-cross-origin",
|
||||
"unsafe-url",
|
||||
"":
|
||||
return t != ""
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func validateCOOP(v string) (sdk.Status, string) {
|
||||
switch strings.ToLower(directiveToken(v)) {
|
||||
case "same-origin", "same-origin-allow-popups", "noopener-allow-popups":
|
||||
return sdk.StatusOK, "Cross-Origin-Opener-Policy is set to " + v + "."
|
||||
case "unsafe-none":
|
||||
return sdk.StatusWarn, "Cross-Origin-Opener-Policy: unsafe-none disables the protection (this is the browser default; the header is redundant)."
|
||||
}
|
||||
return sdk.StatusWarn, "Cross-Origin-Opener-Policy has an unrecognised value: " + v
|
||||
}
|
||||
|
||||
func validateCOEP(v string) (sdk.Status, string) {
|
||||
switch strings.ToLower(directiveToken(v)) {
|
||||
case "require-corp", "credentialless":
|
||||
return sdk.StatusOK, "Cross-Origin-Embedder-Policy is set to " + v + "."
|
||||
case "unsafe-none":
|
||||
return sdk.StatusWarn, "Cross-Origin-Embedder-Policy: unsafe-none disables the protection (this is the browser default; the header is redundant)."
|
||||
}
|
||||
return sdk.StatusWarn, "Cross-Origin-Embedder-Policy has an unrecognised value: " + v
|
||||
}
|
||||
|
||||
func validateCORP(v string) (sdk.Status, string) {
|
||||
switch strings.ToLower(directiveToken(v)) {
|
||||
case "same-origin", "same-site", "cross-origin":
|
||||
return sdk.StatusOK, "Cross-Origin-Resource-Policy is set to " + v + "."
|
||||
}
|
||||
return sdk.StatusWarn, "Cross-Origin-Resource-Policy has an unrecognised value: " + v
|
||||
}
|
||||
|
||||
// dangerousPermissionsPolicyFeatures lists features whose default
// (browser-level) allowlist is permissive enough to warrant an explicit
// restriction. Sources: W3C Permissions Policy registry + the
// "powerful features" list (camera, microphone, geolocation, payment,
// usb, midi, sensors, screen-wake-lock, fullscreen, autoplay, …).
// Tracking-related features (interest-cohort, browsing-topics) are
// included for privacy.
//
// Only membership matters, so this is a set: map values are the
// zero-width struct{}.
var dangerousPermissionsPolicyFeatures = map[string]struct{}{
	"accelerometer":                {},
	"ambient-light-sensor":         {},
	"autoplay":                     {},
	"battery":                      {},
	"browsing-topics":              {},
	"camera":                       {},
	"display-capture":              {},
	"document-domain":              {},
	"encrypted-media":              {},
	"fullscreen":                   {},
	"geolocation":                  {},
	"gyroscope":                    {},
	"hid":                          {},
	"identity-credentials-get":     {},
	"idle-detection":               {},
	"interest-cohort":              {},
	"magnetometer":                 {},
	"microphone":                   {},
	"midi":                         {},
	"otp-credentials":              {},
	"payment":                      {},
	"picture-in-picture":           {},
	"publickey-credentials-create": {},
	"publickey-credentials-get":    {},
	"screen-wake-lock":             {},
	"serial":                       {},
	"storage-access":               {},
	"usb":                          {},
	"window-management":            {},
	"xr-spatial-tracking":          {},
}
|
||||
|
||||
// validatePermissionsPolicy parses a Permissions-Policy header
|
||||
// (RFC 8941 structured fields, dictionary form) and warns when any
|
||||
// dangerous feature is granted to all origins (`*`) or when the value
|
||||
// is syntactically broken. A header that only restricts features (e.g.
|
||||
// `camera=()`) is accepted even if it does not enumerate every
|
||||
// dangerous one — listing every feature would be noisy and
|
||||
// most browsers default-deny powerful features in cross-origin frames
|
||||
// already.
|
||||
func validatePermissionsPolicy(v string) (sdk.Status, string) {
|
||||
entries, err := parsePermissionsPolicy(v)
|
||||
if err != nil {
|
||||
return sdk.StatusWarn, "Permissions-Policy is malformed: " + err.Error()
|
||||
}
|
||||
if len(entries) == 0 {
|
||||
return sdk.StatusWarn, "Permissions-Policy is empty."
|
||||
}
|
||||
var permissive []string
|
||||
for feature, allowlist := range entries {
|
||||
if _, dangerous := dangerousPermissionsPolicyFeatures[feature]; !dangerous {
|
||||
continue
|
||||
}
|
||||
if isPermissionsAllowlistWildcard(allowlist) {
|
||||
permissive = append(permissive, feature)
|
||||
}
|
||||
}
|
||||
if len(permissive) > 0 {
|
||||
sort.Strings(permissive)
|
||||
return sdk.StatusWarn,
|
||||
"Permissions-Policy grants " + strings.Join(permissive, ", ") +
|
||||
" to all origins (`*`); restrict these to (), self or specific origins."
|
||||
}
|
||||
return sdk.StatusOK, "Permissions-Policy restricts powerful features."
|
||||
}
|
||||
|
||||
// parsePermissionsPolicy splits the header into a feature → allowlist
// map. It tolerates the two forms in the wild: the spec'd
// structured-field form (`camera=()`, `geolocation=(self "https://x")`)
// and the legacy comma form (`camera=()`). Allowlist tokens are kept
// verbatim minus surrounding parentheses so the caller can detect `*`.
// A blank header yields (nil, nil); a fragment without `=` or with an
// empty feature name is an error.
func parsePermissionsPolicy(v string) (map[string]string, error) {
	if strings.TrimSpace(v) == "" {
		return nil, nil
	}
	result := make(map[string]string)
	for _, raw := range strings.Split(v, ",") {
		fragment := strings.TrimSpace(raw)
		if fragment == "" {
			continue
		}
		name, allow, found := strings.Cut(fragment, "=")
		if !found {
			return nil, fmt.Errorf("entry %q is missing `=`", fragment)
		}
		name = strings.ToLower(strings.TrimSpace(name))
		if name == "" {
			return nil, fmt.Errorf("entry %q has an empty feature name", fragment)
		}
		result[name] = strings.TrimSpace(allow)
	}
	return result, nil
}
|
||||
|
||||
// isPermissionsAllowlistWildcard reports whether an allowlist grants
// the feature to every origin. The two equivalent forms are the bare
// `*` and the parenthesised list `(*)` (possibly among other tokens).
func isPermissionsAllowlistWildcard(allowlist string) bool {
	trimmed := strings.TrimSpace(allowlist)
	switch {
	case trimmed == "*":
		return true
	case strings.HasPrefix(trimmed, "(") && strings.HasSuffix(trimmed, ")"):
		// strings.Fields already skips surrounding whitespace, so the
		// inner slice needs no extra trimming.
		for _, token := range strings.Fields(trimmed[1 : len(trimmed)-1]) {
			if token == "*" {
				return true
			}
		}
	}
	return false
}
|
||||
|
||||
// splitCSV splits on commas, trims whitespace, lowercases, and drops
// empty fragments. Used for header values that are comma-separated lists
// of tokens (Referrer-Policy, Accept-Encoding, …). Always returns a
// non-nil slice.
func splitCSV(v string) []string {
	fields := strings.Split(v, ",")
	tokens := make([]string, 0, len(fields))
	for _, field := range fields {
		token := strings.ToLower(strings.TrimSpace(field))
		if token == "" {
			continue
		}
		tokens = append(tokens, token)
	}
	return tokens
}
|
||||
|
||||
// directiveToken extracts the first token of a header value, cutting at
// the first space, tab, semicolon, or comma so trailing parameters are
// dropped (e.g. `same-origin "..."` -> `same-origin`). Suitable for
// single-token directive headers like COOP/COEP/CORP.
func directiveToken(v string) string {
	trimmed := strings.TrimSpace(v)
	for i, r := range trimmed {
		switch r {
		case ' ', '\t', ';', ',':
			return trimmed[:i]
		}
	}
	return trimmed
}
|
||||
176
checker/rules_modern_headers_test.go
Normal file
176
checker/rules_modern_headers_test.go
Normal file
|
|
@ -0,0 +1,176 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// runHeaderRule looks up a registered rule by name and evaluates it
// against an HTTPS probe whose only set header is the one under test.
// The collector publishes headers as a lowercase-keyed map (see
// collect.go), so we mirror that here regardless of the casing the
// caller passed in. A blank (or whitespace-only) value means "header
// absent" and leaves the probe's header map untouched.
func runHeaderRule(t *testing.T, ruleName, header, value string) []sdk.CheckState {
	t.Helper()
	p := httpsProbe("a:443")
	if strings.TrimSpace(value) != "" {
		p.Headers[strings.ToLower(header)] = value
	}
	return runRule(t, ruleByName(t, ruleName), &HTTPData{Probes: []HTTPProbe{p}}, nil)
}
|
||||
|
||||
// TestReferrerPolicyRule covers the Referrer-Policy grading table:
// privacy-preserving tokens pass, unsafe-url/legacy-default values are
// flagged, and fallback-list semantics pick the last recognised token.
func TestReferrerPolicyRule(t *testing.T) {
	cases := []struct {
		name  string
		value string
		want  sdk.Status
		code  string
	}{
		{"missing", "", sdk.StatusInfo, "http.referrer_policy.missing"},
		{"strict-origin-when-cross-origin", "strict-origin-when-cross-origin", sdk.StatusOK, "http.referrer_policy.ok"},
		{"no-referrer", "no-referrer", sdk.StatusOK, "http.referrer_policy.ok"},
		{"unsafe-url", "unsafe-url", sdk.StatusWarn, "http.referrer_policy.invalid"},
		{"no-referrer-when-downgrade", "no-referrer-when-downgrade", sdk.StatusInfo, "http.referrer_policy.invalid"},
		{"unrecognised token", "totally-made-up", sdk.StatusWarn, "http.referrer_policy.invalid"},
		// Per spec the UA picks the last *recognised* token, so the
		// `bogus` is ignored and `same-origin` wins.
		{"list with fallback", "bogus, same-origin", sdk.StatusOK, "http.referrer_policy.ok"},
		// Unknown token after a known one: UA falls back to the last
		// recognised one (`strict-origin`).
		{"list with unknown trailing", "strict-origin, bogus", sdk.StatusOK, "http.referrer_policy.ok"},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			states := runHeaderRule(t, "http.referrer_policy", "Referrer-Policy", c.value)
			mustStatus(t, states, c.want)
			if !hasCode(states, c.code) {
				t.Errorf("value=%q: missing code %q in %+v", c.value, c.code, states)
			}
		})
	}
}
|
||||
|
||||
func TestPermissionsPolicyRule(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
value string
|
||||
want sdk.Status
|
||||
code string
|
||||
}{
|
||||
{"missing", "", sdk.StatusInfo, "http.permissions_policy.missing"},
|
||||
{"restrictive", "camera=(), microphone=()", sdk.StatusOK, "http.permissions_policy.ok"},
|
||||
{"self only", "geolocation=(self)", sdk.StatusOK, "http.permissions_policy.ok"},
|
||||
{"empty value treated as missing", " ", sdk.StatusInfo, "http.permissions_policy.missing"},
|
||||
{"camera wildcard", "camera=*", sdk.StatusWarn, "http.permissions_policy.invalid"},
|
||||
{"microphone parenthesised wildcard", "microphone=(*)", sdk.StatusWarn, "http.permissions_policy.invalid"},
|
||||
{"non-dangerous wildcard ignored", "fullscreen=(self), accelerometer=*", sdk.StatusWarn, "http.permissions_policy.invalid"},
|
||||
{"unknown feature wildcard ignored", "totally-made-up=*", sdk.StatusOK, "http.permissions_policy.ok"},
|
||||
{"malformed entry", "camera", sdk.StatusWarn, "http.permissions_policy.invalid"},
|
||||
}
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
states := runHeaderRule(t, "http.permissions_policy", "Permissions-Policy", c.value)
|
||||
mustStatus(t, states, c.want)
|
||||
if !hasCode(states, c.code) {
|
||||
t.Errorf("value=%q: missing code %q in %+v", c.value, c.code, states)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestCOOPRule covers the Cross-Origin-Opener-Policy grading table:
// isolating values pass, unsafe-none and unknown values warn.
func TestCOOPRule(t *testing.T) {
	cases := []struct {
		name  string
		value string
		want  sdk.Status
		code  string
	}{
		{"missing", "", sdk.StatusInfo, "http.coop.missing"},
		{"same-origin", "same-origin", sdk.StatusOK, "http.coop.ok"},
		{"same-origin-allow-popups", "same-origin-allow-popups", sdk.StatusOK, "http.coop.ok"},
		{"unsafe-none", "unsafe-none", sdk.StatusWarn, "http.coop.invalid"},
		{"unrecognised", "bogus", sdk.StatusWarn, "http.coop.invalid"},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			states := runHeaderRule(t, "http.coop", "Cross-Origin-Opener-Policy", c.value)
			mustStatus(t, states, c.want)
			if !hasCode(states, c.code) {
				t.Errorf("value=%q: missing code %q in %+v", c.value, c.code, states)
			}
		})
	}
}

// TestCOEPRule covers the Cross-Origin-Embedder-Policy grading table:
// require-corp/credentialless pass, unsafe-none and unknown values warn.
func TestCOEPRule(t *testing.T) {
	cases := []struct {
		name  string
		value string
		want  sdk.Status
		code  string
	}{
		{"missing", "", sdk.StatusInfo, "http.coep.missing"},
		{"require-corp", "require-corp", sdk.StatusOK, "http.coep.ok"},
		{"credentialless", "credentialless", sdk.StatusOK, "http.coep.ok"},
		{"unsafe-none", "unsafe-none", sdk.StatusWarn, "http.coep.invalid"},
		{"unrecognised", "bogus", sdk.StatusWarn, "http.coep.invalid"},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			states := runHeaderRule(t, "http.coep", "Cross-Origin-Embedder-Policy", c.value)
			mustStatus(t, states, c.want)
			if !hasCode(states, c.code) {
				t.Errorf("value=%q: missing code %q in %+v", c.value, c.code, states)
			}
		})
	}
}

// TestCORPRule covers the Cross-Origin-Resource-Policy grading table:
// the three spec-defined scopes pass, anything else warns.
func TestCORPRule(t *testing.T) {
	cases := []struct {
		name  string
		value string
		want  sdk.Status
		code  string
	}{
		{"missing", "", sdk.StatusInfo, "http.corp.missing"},
		{"same-origin", "same-origin", sdk.StatusOK, "http.corp.ok"},
		{"same-site", "same-site", sdk.StatusOK, "http.corp.ok"},
		{"cross-origin", "cross-origin", sdk.StatusOK, "http.corp.ok"},
		{"unrecognised", "bogus", sdk.StatusWarn, "http.corp.invalid"},
	}
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			states := runHeaderRule(t, "http.corp", "Cross-Origin-Resource-Policy", c.value)
			mustStatus(t, states, c.want)
			if !hasCode(states, c.code) {
				t.Errorf("value=%q: missing code %q in %+v", c.value, c.code, states)
			}
		})
	}
}

func TestModernHeaders_NoHTTPS(t *testing.T) {
	// Each modern header rule must emit Unknown when there are no
	// successful HTTPS probes — the no_https path comes from EvalPerHTTPS.
	rules := []string{
		"http.referrer_policy",
		"http.permissions_policy",
		"http.coop",
		"http.coep",
		"http.corp",
	}
	data := &HTTPData{Probes: []HTTPProbe{httpProbe("a:80")}}
	for _, name := range rules {
		t.Run(name, func(t *testing.T) {
			states := runRule(t, ruleByName(t, name), data, nil)
			mustStatus(t, states, sdk.StatusUnknown)
		})
	}
}
|
||||
|
|
@ -11,6 +11,11 @@ import (
|
|||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() {
|
||||
RegisterRule(&reachabilityRule{scheme: "http", code: "http.tcp_reachable"})
|
||||
RegisterRule(&reachabilityRule{scheme: "https", code: "https.tcp_reachable"})
|
||||
}
|
||||
|
||||
// reachabilityRule reports per-IP reachability for one scheme.
|
||||
type reachabilityRule struct {
|
||||
scheme string // "http" or "https"
|
||||
|
|
@ -27,39 +32,31 @@ func (r *reachabilityRule) Evaluate(ctx context.Context, obs sdk.ObservationGett
|
|||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := probesByScheme(data.Probes, r.scheme)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState(r.code+".no_probes", "No probes were attempted.")}
|
||||
}
|
||||
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
okMsg := fmt.Sprintf("All %s probes responded successfully.", r.scheme)
|
||||
return EvalAggregateByScheme(data, r.scheme, r.code, okMsg, func(p HTTPProbe, emit func(sdk.CheckState)) {
|
||||
switch {
|
||||
case !p.TCPConnected:
|
||||
states = append(states, sdk.CheckState{
|
||||
emit(sdk.CheckState{
|
||||
Status: sdk.StatusCrit,
|
||||
Code: r.code + ".unreachable",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("Cannot reach %s://%s on %s: %s", r.scheme, p.Host, p.Address, p.Error),
|
||||
})
|
||||
case p.StatusCode == 0:
|
||||
states = append(states, sdk.CheckState{
|
||||
emit(sdk.CheckState{
|
||||
Status: sdk.StatusCrit,
|
||||
Code: r.code + ".no_response",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("TCP open but no HTTP response from %s: %s", p.Address, p.Error),
|
||||
})
|
||||
case p.StatusCode >= 500:
|
||||
states = append(states, sdk.CheckState{
|
||||
emit(sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: r.code + ".server_error",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("%s returned %d", p.Address, p.StatusCode),
|
||||
})
|
||||
}
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState(r.code+".ok", fmt.Sprintf("All %s probes responded successfully.", r.scheme))}
|
||||
}
|
||||
return states
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,6 +13,8 @@ import (
|
|||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() { RegisterRule(&httpsRedirectRule{}) }
|
||||
|
||||
// httpsRedirectRule verifies that plain HTTP either redirects to HTTPS or
|
||||
// fails (which is acceptable when HTTP is intentionally not served).
|
||||
type httpsRedirectRule struct{}
|
||||
|
|
@ -28,20 +30,15 @@ func (r *httpsRedirectRule) Evaluate(ctx context.Context, obs sdk.ObservationGet
|
|||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
require := sdk.GetBoolOption(opts, OptionRequireHTTPS, true)
|
||||
probes := probesByScheme(data.Probes, "http")
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.https_redirect.no_probes", "No HTTP probes were attempted.")}
|
||||
}
|
||||
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
const okMsg = "HTTP redirects to HTTPS on every reachable IP."
|
||||
return EvalAggregateByScheme(data, "http", "http.https_redirect", okMsg, func(p HTTPProbe, emit func(sdk.CheckState)) {
|
||||
if !p.TCPConnected || p.StatusCode == 0 {
|
||||
// Reachability rule handles this; an HTTP server that is
|
||||
// simply not running is fine for redirect-purposes.
|
||||
continue
|
||||
return
|
||||
}
|
||||
final := p.FinalURL
|
||||
// final should be set; fallback to last redirect target.
|
||||
if final == "" && len(p.RedirectChain) > 0 {
|
||||
final = p.RedirectChain[len(p.RedirectChain)-1].To
|
||||
}
|
||||
|
|
@ -49,12 +46,11 @@ func (r *httpsRedirectRule) Evaluate(ctx context.Context, obs sdk.ObservationGet
|
|||
if u, err := url.Parse(final); err == nil {
|
||||
isHTTPS = strings.EqualFold(u.Scheme, "https")
|
||||
}
|
||||
|
||||
switch {
|
||||
case isHTTPS:
|
||||
// Good. No state per-probe; we'll emit one summary OK below.
|
||||
// Good. Aggregated below as a single OK.
|
||||
case require:
|
||||
states = append(states, sdk.CheckState{
|
||||
emit(sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.no_https_redirect",
|
||||
Subject: p.Address,
|
||||
|
|
@ -62,16 +58,12 @@ func (r *httpsRedirectRule) Evaluate(ctx context.Context, obs sdk.ObservationGet
|
|||
Meta: map[string]any{"fix": "Configure your web server to redirect every plain-HTTP request to https://."},
|
||||
})
|
||||
default:
|
||||
states = append(states, sdk.CheckState{
|
||||
emit(sdk.CheckState{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.plain_http_served",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("HTTP responded directly without redirect (status %d)", p.StatusCode),
|
||||
})
|
||||
}
|
||||
}
|
||||
if len(states) == 0 {
|
||||
return []sdk.CheckState{passState("http.https_redirect.ok", "HTTP redirects to HTTPS on every reachable IP.")}
|
||||
}
|
||||
return states
|
||||
})
|
||||
}
|
||||
|
|
|
|||
242
checker/rules_redirect_chain.go
Normal file
242
checker/rules_redirect_chain.go
Normal file
|
|
@ -0,0 +1,242 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() {
|
||||
RegisterRule(&redirectChainRule{})
|
||||
RegisterRule(&redirectPermanenceRule{})
|
||||
}
|
||||
|
||||
// MaxRecommendedRedirectHops is the soft upper bound for a healthy redirect
// chain. RFC 9110 §15.4 does not mandate a hard cap, but every additional
// hop adds latency, defeats HSTS for the intermediate hop, and degrades
// the user experience; popular guidance (Google, Mozilla, web.dev) treats
// 3+ hops as a smell worth surfacing. Chains strictly longer than this
// are flagged by redirectChainRule.
const MaxRecommendedRedirectHops = 3
|
||||
|
||||
// redirectChainRule inspects the redirect chain captured during probing
// and flags the three classic anti-patterns called out by RFC 9110 §15.4
// and operational guidance:
//
//   - a loop (the same URL appears twice in the chain);
//   - excessive length (more hops than MaxRecommendedRedirectHops);
//   - a scheme downgrade (HTTPS → HTTP at any hop), which strips transport
//     security and silently invalidates HSTS expectations.
//
// Each probe contributes its own state so multi-IP deployments can show
// per-backend divergence.
type redirectChainRule struct{}

// Name returns the rule identifier used for registration and lookup.
func (r *redirectChainRule) Name() string { return "http.redirect_chain" }

// Description returns the human-readable summary shown in rule listings.
func (r *redirectChainRule) Description() string {
	return "Inspects the redirect chain (RFC 9110 §15.4) for loops, excessive length, and scheme downgrades."
}
|
||||
|
||||
// Evaluate walks every probe's redirect chain and emits one state per
// probe that had a chain. Anomalies are checked in precedence order —
// loop, then downgrade, then length — so each probe reports only its
// most explanatory finding. Probes without redirects contribute nothing;
// if no probe had a chain at all, a single "none" OK state is returned.
func (r *redirectChainRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
	data, errSt := loadHTTPData(ctx, obs)
	if errSt != nil {
		return []sdk.CheckState{*errSt}
	}
	if len(data.Probes) == 0 {
		return []sdk.CheckState{unknownState("http.redirect_chain.no_probes", "No probes were attempted.")}
	}

	var states []sdk.CheckState
	anyChain := false
	for _, p := range data.Probes {
		if len(p.RedirectChain) == 0 {
			continue
		}
		anyChain = true

		// A loop explains any downstream weirdness, so it is reported
		// first and suppresses the other checks for this probe.
		if loopAt, found := redirectLoop(p.RedirectChain); found {
			states = append(states, sdk.CheckState{
				Status:  sdk.StatusWarn,
				Code:    "http.redirect_chain.loop",
				Subject: p.Address,
				Message: fmt.Sprintf("Redirect loop detected: %s reappears in the chain.", loopAt),
				Meta:    map[string]any{"chain": chainSummary(p.RedirectChain)},
			})
			continue
		}

		if downgradeAt, found := redirectDowngrade(p.RedirectChain); found {
			states = append(states, sdk.CheckState{
				Status:  sdk.StatusWarn,
				Code:    "http.redirect_chain.downgrade",
				Subject: p.Address,
				Message: fmt.Sprintf("Redirect chain downgrades from HTTPS to HTTP at %q.", downgradeAt),
				Meta: map[string]any{
					"fix":   "Ensure no hop in the redirect chain switches from https:// back to http://.",
					"chain": chainSummary(p.RedirectChain),
				},
			})
			continue
		}

		if len(p.RedirectChain) > MaxRecommendedRedirectHops {
			states = append(states, sdk.CheckState{
				Status:  sdk.StatusWarn,
				Code:    "http.redirect_chain.too_long",
				Subject: p.Address,
				Message: fmt.Sprintf("Redirect chain has %d hops (recommended ≤ %d).", len(p.RedirectChain), MaxRecommendedRedirectHops),
				Meta: map[string]any{
					"fix":   "Collapse intermediate redirects so a single hop reaches the canonical URL.",
					"chain": chainSummary(p.RedirectChain),
				},
			})
			continue
		}

		states = append(states, sdk.CheckState{
			Status:  sdk.StatusOK,
			Code:    "http.redirect_chain.ok",
			Subject: p.Address,
			Message: fmt.Sprintf("Redirect chain is %d hop(s), no loop, no downgrade.", len(p.RedirectChain)),
		})
	}

	if !anyChain {
		return []sdk.CheckState{passState("http.redirect_chain.none", "No redirects observed on any probe.")}
	}
	return states
}
|
||||
|
||||
// redirectLoop returns the first URL that appears as both source and
|
||||
// destination (or as destination twice) in the chain, signalling a cycle.
|
||||
func redirectLoop(chain []RedirectStep) (string, bool) {
|
||||
seen := make(map[string]struct{}, len(chain)+1)
|
||||
for _, step := range chain {
|
||||
key := canonicalURL(step.From)
|
||||
if _, ok := seen[key]; ok {
|
||||
return step.From, true
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
}
|
||||
if len(chain) > 0 {
|
||||
last := canonicalURL(chain[len(chain)-1].To)
|
||||
if _, ok := seen[last]; ok {
|
||||
return chain[len(chain)-1].To, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// redirectDowngrade returns the first hop whose source is HTTPS and
|
||||
// destination is HTTP. RFC 9110 does not forbid this, but it strips
|
||||
// transport security and is universally treated as a misconfiguration.
|
||||
func redirectDowngrade(chain []RedirectStep) (string, bool) {
|
||||
for _, step := range chain {
|
||||
from, errF := url.Parse(step.From)
|
||||
to, errT := url.Parse(step.To)
|
||||
if errF != nil || errT != nil {
|
||||
continue
|
||||
}
|
||||
if strings.EqualFold(from.Scheme, "https") && strings.EqualFold(to.Scheme, "http") {
|
||||
return step.From + " → " + step.To, true
|
||||
}
|
||||
}
|
||||
return "", false
|
||||
}
|
||||
|
||||
// canonicalURL normalises a URL for loop comparison: scheme and host are
// lowercased, an empty path becomes "/", and the fragment is dropped.
// Unparseable input degrades to a trimmed, lowercased string.
func canonicalURL(s string) string {
	parsed, err := url.Parse(s)
	if err != nil {
		return strings.ToLower(strings.TrimSpace(s))
	}
	parsed.Fragment = ""
	parsed.Scheme = strings.ToLower(parsed.Scheme)
	parsed.Host = strings.ToLower(parsed.Host)
	if len(parsed.Path) == 0 {
		parsed.Path = "/"
	}
	return parsed.String()
}
|
||||
|
||||
func chainSummary(chain []RedirectStep) []string {
|
||||
out := make([]string, 0, len(chain))
|
||||
for _, s := range chain {
|
||||
if s.Status != 0 {
|
||||
out = append(out, fmt.Sprintf("%d %s → %s", s.Status, s.From, s.To))
|
||||
} else {
|
||||
out = append(out, fmt.Sprintf("%s → %s", s.From, s.To))
|
||||
}
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
// redirectPermanenceRule scrutinises the very first hop of any HTTP probe
// that ends up on HTTPS: per RFC 9110 §15.4, 301 (Moved Permanently) and
// 308 (Permanent Redirect) are cacheable and signal that user-agents may
// rewrite future requests, which is exactly what an HTTP→HTTPS upgrade
// wants. 302/303/307 are temporary and force the client to re-resolve
// every time, defeating browser optimisations and HSTS preload eligibility
// guidance from hstspreload.org.
type redirectPermanenceRule struct{}

// Name returns the rule identifier used for registration and lookup.
func (r *redirectPermanenceRule) Name() string { return "http.redirect_permanence" }

// Description returns the human-readable summary shown in rule listings.
func (r *redirectPermanenceRule) Description() string {
	return "HTTP→HTTPS upgrade should use 301 or 308 (permanent) rather than 302/307 (temporary)."
}
|
||||
|
||||
// Evaluate inspects, for every plain-HTTP probe, the first redirect hop
// and — only when that hop is an HTTP→HTTPS upgrade — grades its status
// code: 301/308 aggregate to OK, 302/303/307 warn, 0 and anything else
// are informational. Probes without redirects, with unparseable URLs, or
// whose first hop is not an upgrade are left to other rules.
func (r *redirectPermanenceRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
	data, errSt := loadHTTPData(ctx, obs)
	if errSt != nil {
		return []sdk.CheckState{*errSt}
	}
	const okMsg = "HTTP→HTTPS upgrade uses a permanent redirect (301/308) on every probe."
	return EvalAggregateByScheme(data, "http", "http.redirect_permanence", okMsg, func(p HTTPProbe, emit func(sdk.CheckState)) {
		if len(p.RedirectChain) == 0 {
			return
		}
		first := p.RedirectChain[0]
		from, errF := url.Parse(first.From)
		to, errT := url.Parse(first.To)
		if errF != nil || errT != nil {
			return
		}
		// We only care about the HTTP→HTTPS upgrade hop; other shapes
		// (HTTPS→HTTPS canonicalisation, locale redirects, …) belong to
		// the chain rule.
		if !strings.EqualFold(from.Scheme, "http") || !strings.EqualFold(to.Scheme, "https") {
			return
		}
		switch first.Status {
		case 301, 308:
			// Good; aggregated to the single OK state below.
		case 0:
			emit(sdk.CheckState{
				Status:  sdk.StatusInfo,
				Code:    "http.redirect_permanence.unknown",
				Subject: p.Address,
				Message: "Could not determine the status code of the HTTP→HTTPS redirect.",
			})
		case 302, 303, 307:
			emit(sdk.CheckState{
				Status:  sdk.StatusWarn,
				Code:    "http.redirect_permanence.temporary",
				Subject: p.Address,
				Message: fmt.Sprintf("HTTP→HTTPS upgrade returns %d (temporary). Prefer 301 or 308 so clients cache the upgrade.", first.Status),
				Meta:    map[string]any{"fix": "Configure your web server to answer plain HTTP with `301 Moved Permanently` (or `308 Permanent Redirect`) pointing to the https:// URL."},
			})
		default:
			emit(sdk.CheckState{
				Status:  sdk.StatusInfo,
				Code:    "http.redirect_permanence.unexpected",
				Subject: p.Address,
				Message: fmt.Sprintf("HTTP→HTTPS upgrade uses an unusual status code: %d.", first.Status),
			})
		}
	})
}
|
||||
167
checker/rules_redirect_chain_test.go
Normal file
167
checker/rules_redirect_chain_test.go
Normal file
|
|
@ -0,0 +1,167 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// TestRedirectChainRule_NoProbes: an empty probe set yields Unknown.
func TestRedirectChainRule_NoProbes(t *testing.T) {
	states := runRule(t, &redirectChainRule{}, &HTTPData{}, nil)
	mustStatus(t, states, sdk.StatusUnknown)
	if !hasCode(states, "http.redirect_chain.no_probes") {
		t.Errorf("expected no_probes: %+v", states)
	}
}

// TestRedirectChainRule_NoRedirects: probes without chains aggregate to
// a single "none" OK state.
func TestRedirectChainRule_NoRedirects(t *testing.T) {
	p := httpsProbe("a:443")
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusOK)
	if !hasCode(states, "http.redirect_chain.none") {
		t.Errorf("expected redirect_chain.none: %+v", states)
	}
}

// TestRedirectChainRule_OK: a single clean HTTP→HTTPS hop passes.
func TestRedirectChainRule_OK(t *testing.T) {
	p := httpProbe("a:80")
	p.RedirectChain = []RedirectStep{
		{From: "http://example.test/", To: "https://example.test/", Status: 301},
	}
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusOK)
	if !hasCode(states, "http.redirect_chain.ok") {
		t.Errorf("expected redirect_chain.ok: %+v", states)
	}
}

// TestRedirectChainRule_Loop: a→b→a is detected as a cycle.
func TestRedirectChainRule_Loop(t *testing.T) {
	p := httpProbe("a:80")
	p.RedirectChain = []RedirectStep{
		{From: "http://example.test/a", To: "http://example.test/b", Status: 302},
		{From: "http://example.test/b", To: "http://example.test/a", Status: 302},
	}
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusWarn)
	if !hasCode(states, "http.redirect_chain.loop") {
		t.Errorf("expected redirect_chain.loop: %+v", states)
	}
}

// TestRedirectChainRule_Downgrade: an HTTPS→HTTP hop is flagged.
func TestRedirectChainRule_Downgrade(t *testing.T) {
	p := httpsProbe("a:443")
	p.RedirectChain = []RedirectStep{
		{From: "https://example.test/", To: "http://example.test/legacy", Status: 302},
	}
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusWarn)
	if !hasCode(states, "http.redirect_chain.downgrade") {
		t.Errorf("expected redirect_chain.downgrade: %+v", states)
	}
}

// TestRedirectChainRule_TooLong: four hops exceed
// MaxRecommendedRedirectHops (3) and warn.
func TestRedirectChainRule_TooLong(t *testing.T) {
	p := httpProbe("a:80")
	p.RedirectChain = []RedirectStep{
		{From: "http://example.test/1", To: "http://example.test/2", Status: 301},
		{From: "http://example.test/2", To: "http://example.test/3", Status: 301},
		{From: "http://example.test/3", To: "http://example.test/4", Status: 301},
		{From: "http://example.test/4", To: "https://example.test/5", Status: 301},
	}
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusWarn)
	if !hasCode(states, "http.redirect_chain.too_long") {
		t.Errorf("expected redirect_chain.too_long: %+v", states)
	}
}

func TestRedirectChainRule_LoopTakesPrecedenceOverDowngrade(t *testing.T) {
	// When both anomalies are present, the loop is reported first since
	// it explains downstream weirdness.
	p := httpsProbe("a:443")
	p.RedirectChain = []RedirectStep{
		{From: "https://example.test/x", To: "http://example.test/x", Status: 302},
		{From: "http://example.test/x", To: "https://example.test/x", Status: 302},
	}
	states := runRule(t, &redirectChainRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
	mustStatus(t, states, sdk.StatusWarn)
	if !hasCode(states, "http.redirect_chain.loop") {
		t.Errorf("expected loop to take precedence: %+v", states)
	}
}
|
||||
|
||||
func TestRedirectPermanenceRule_NoProbes(t *testing.T) {
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{httpsProbe("a:443")}}, nil)
|
||||
mustStatus(t, states, sdk.StatusUnknown)
|
||||
}
|
||||
|
||||
func TestRedirectPermanenceRule_NoRedirect(t *testing.T) {
|
||||
p := httpProbe("a:80")
|
||||
p.StatusCode = 200
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
}
|
||||
|
||||
func TestRedirectPermanenceRule_Permanent(t *testing.T) {
|
||||
for _, code := range []int{301, 308} {
|
||||
p := httpProbe("a:80")
|
||||
p.RedirectChain = []RedirectStep{
|
||||
{From: "http://example.test/", To: "https://example.test/", Status: code},
|
||||
}
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.redirect_permanence.ok") {
|
||||
t.Errorf("status %d: expected ok: %+v", code, states)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedirectPermanenceRule_Temporary(t *testing.T) {
|
||||
for _, code := range []int{302, 303, 307} {
|
||||
p := httpProbe("a:80")
|
||||
p.RedirectChain = []RedirectStep{
|
||||
{From: "http://example.test/", To: "https://example.test/", Status: code},
|
||||
}
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.redirect_permanence.temporary") {
|
||||
t.Errorf("status %d: expected temporary: %+v", code, states)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedirectPermanenceRule_UnknownStatus(t *testing.T) {
|
||||
p := httpProbe("a:80")
|
||||
p.RedirectChain = []RedirectStep{
|
||||
{From: "http://example.test/", To: "https://example.test/", Status: 0},
|
||||
}
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, sdk.StatusInfo)
|
||||
if !hasCode(states, "http.redirect_permanence.unknown") {
|
||||
t.Errorf("expected redirect_permanence.unknown: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRedirectPermanenceRule_IgnoresNonUpgradeChain(t *testing.T) {
|
||||
// An HTTP probe whose first hop stays in HTTP (path canonicalisation,
|
||||
// trailing-slash, www stripping before the TLS bump…) is not in scope
|
||||
// for this rule, so a 302 there must not raise a warning. A second
|
||||
// probe is included so the per-probe iteration has another candidate.
|
||||
first := httpProbe("a:80")
|
||||
first.RedirectChain = []RedirectStep{
|
||||
{From: "http://example.test/", To: "http://www.example.test/", Status: 302},
|
||||
}
|
||||
second := httpProbe("b:80")
|
||||
second.RedirectChain = []RedirectStep{
|
||||
{From: "http://example.test/", To: "https://example.test/", Status: 301},
|
||||
}
|
||||
states := runRule(t, &redirectPermanenceRule{}, &HTTPData{Probes: []HTTPProbe{first, second}}, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.redirect_permanence.ok") {
|
||||
t.Errorf("HTTP-only first hop should not trigger a warning: %+v", states)
|
||||
}
|
||||
}
|
||||
|
|
@ -5,299 +5,238 @@
|
|||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
// hstsRule checks the Strict-Transport-Security header on HTTPS responses.
|
||||
type hstsRule struct{}
|
||||
// All five "core" security-header rules are wired through the HeaderRule
|
||||
// DSL. The richer ones (HSTS, CSP, X-Frame-Options, X-XSS-Protection)
|
||||
// use Inspect / OnMissing to express thresholds, multi-finding outputs,
|
||||
// alternative-source fallbacks and reversed "absent is fine" semantics
|
||||
// without re-implementing the load/iterate/build-state scaffolding.
|
||||
|
||||
func (r *hstsRule) Name() string { return "http.hsts" }
|
||||
func (r *hstsRule) Description() string {
|
||||
return "Verifies the presence and quality of the Strict-Transport-Security header on HTTPS responses."
|
||||
func init() {
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.hsts",
|
||||
Description: "Verifies the presence and quality of the Strict-Transport-Security header on HTTPS responses.",
|
||||
Header: "Strict-Transport-Security",
|
||||
Inspect: inspectHSTS,
|
||||
OnMissing: missingHSTS,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.csp",
|
||||
Description: "Verifies the presence and quality of the Content-Security-Policy header on HTTPS responses.",
|
||||
Header: "Content-Security-Policy",
|
||||
Inspect: inspectCSP,
|
||||
OnMissing: missingCSP,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.x_frame_options",
|
||||
Description: "Verifies that responses set X-Frame-Options or a CSP frame-ancestors directive.",
|
||||
Header: "X-Frame-Options",
|
||||
Inspect: inspectXFrameOptions,
|
||||
OnMissing: missingXFrameOptions,
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.x_content_type_options",
|
||||
Description: "Verifies that responses set X-Content-Type-Options: nosniff.",
|
||||
Header: "X-Content-Type-Options",
|
||||
Required: true,
|
||||
FixHint: "Add `X-Content-Type-Options: nosniff` to all responses.",
|
||||
Validate: func(v string) (sdk.Status, string) {
|
||||
if strings.EqualFold(v, "nosniff") {
|
||||
return sdk.StatusOK, "X-Content-Type-Options: nosniff is set."
|
||||
}
|
||||
return sdk.StatusWarn, "X-Content-Type-Options has an unexpected value: " + strings.ToLower(v)
|
||||
},
|
||||
}))
|
||||
|
||||
RegisterRule(HeaderRule(HeaderRuleSpec{
|
||||
Code: "http.x_xss_protection",
|
||||
Description: "Reports the value of the legacy X-XSS-Protection header (disabled is preferred on modern browsers; CSP is the proper replacement).",
|
||||
Header: "X-XSS-Protection",
|
||||
Inspect: inspectXXSSProtection,
|
||||
OnMissing: func(_ HTTPProbe, _ sdk.CheckerOptions) []HeaderResult {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusInfo,
|
||||
Suffix: "absent",
|
||||
Message: "X-XSS-Protection is not set; CSP is the recommended replacement.",
|
||||
}}
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
func (r *hstsRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, opts sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
// HSTS ----------------------------------------------------------------
|
||||
|
||||
func missingHSTS(_ HTTPProbe, opts sdk.CheckerOptions) []HeaderResult {
|
||||
status := sdk.StatusWarn
|
||||
if !sdk.GetBoolOption(opts, OptionRequireHSTS, true) {
|
||||
status = sdk.StatusInfo
|
||||
}
|
||||
return []HeaderResult{{
|
||||
Status: status,
|
||||
Suffix: "missing",
|
||||
Message: "Strict-Transport-Security header is missing.",
|
||||
Meta: map[string]any{"fix": "Send `Strict-Transport-Security: max-age=15552000; includeSubDomains` from HTTPS responses."},
|
||||
}}
|
||||
}
|
||||
|
||||
func inspectHSTS(value string, _ HTTPProbe, opts sdk.CheckerOptions) []HeaderResult {
|
||||
h := ParseHSTS(value)
|
||||
if h == nil {
|
||||
// Defensive: ParseHSTS only returns nil on empty input, which the
|
||||
// DSL has already routed to OnMissing.
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn, Suffix: "invalid",
|
||||
Message: "Strict-Transport-Security header is malformed.",
|
||||
}}
|
||||
}
|
||||
if len(h.Errors) > 0 {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn,
|
||||
Suffix: "invalid",
|
||||
Message: fmt.Sprintf("Strict-Transport-Security header is malformed: %s.", strings.Join(h.Errors, "; ")),
|
||||
Meta: map[string]any{"fix": "Send `Strict-Transport-Security: max-age=15552000; includeSubDomains` with a non-negative integer max-age."},
|
||||
}}
|
||||
}
|
||||
require := sdk.GetBoolOption(opts, OptionRequireHSTS, true)
|
||||
minDays := sdk.GetIntOption(opts, OptionMinHSTSMaxAgeDays, DefaultMinHSTSMaxAge)
|
||||
minSeconds := int64(minDays) * 86400
|
||||
if h.MaxAge < minSeconds {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn,
|
||||
Suffix: "short_max_age",
|
||||
Message: fmt.Sprintf("HSTS max-age=%d is below the recommended %d seconds (%d days).", h.MaxAge, minSeconds, minDays),
|
||||
}}
|
||||
}
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusOK,
|
||||
Suffix: "ok",
|
||||
Message: fmt.Sprintf("HSTS present (max-age=%d, includeSubDomains=%v, preload=%v).", h.MaxAge, h.IncludeSub, h.Preload),
|
||||
}}
|
||||
}
|
||||
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.hsts.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
// CSP -----------------------------------------------------------------
|
||||
|
||||
func missingCSP(_ HTTPProbe, opts sdk.CheckerOptions) []HeaderResult {
|
||||
status := sdk.StatusInfo
|
||||
if sdk.GetBoolOption(opts, OptionRequireCSP, false) {
|
||||
status = sdk.StatusWarn
|
||||
}
|
||||
return []HeaderResult{{
|
||||
Status: status,
|
||||
Suffix: "missing",
|
||||
Message: "Content-Security-Policy header is missing.",
|
||||
Meta: map[string]any{"fix": "Define a CSP appropriate for your application (e.g. default-src 'self')."},
|
||||
}}
|
||||
}
|
||||
|
||||
// inspectCSP surfaces multiple weakness suffixes per probe — see the
|
||||
// historical docstring on evaluateCSP for the rationale (unsafe-inline /
|
||||
// unsafe-eval split, missing default-src, permissive script-src).
|
||||
func inspectCSP(value string, _ HTTPProbe, _ sdk.CheckerOptions) []HeaderResult {
|
||||
csp := ParseCSP(value)
|
||||
if csp == nil {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn, Suffix: "invalid",
|
||||
Message: "Content-Security-Policy header is empty.",
|
||||
}}
|
||||
}
|
||||
var out []HeaderResult
|
||||
add := func(suffix, msg string) {
|
||||
out = append(out, HeaderResult{Status: sdk.StatusWarn, Suffix: suffix, Message: msg})
|
||||
}
|
||||
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
v := strings.TrimSpace(p.Headers["strict-transport-security"])
|
||||
if v == "" {
|
||||
status := sdk.StatusWarn
|
||||
if !require {
|
||||
status = sdk.StatusInfo
|
||||
}
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: status,
|
||||
Code: "http.hsts.missing",
|
||||
Subject: p.Address,
|
||||
Message: "Strict-Transport-Security header is missing.",
|
||||
Meta: map[string]any{"fix": "Send `Strict-Transport-Security: max-age=15552000; includeSubDomains` from HTTPS responses."},
|
||||
})
|
||||
continue
|
||||
hasDefault := csp.HasDirective("default-src")
|
||||
hasScript := csp.HasDirective("script-src")
|
||||
if !hasDefault && !hasScript {
|
||||
add("missing_default",
|
||||
"Content-Security-Policy declares neither default-src nor script-src; script execution is not constrained.")
|
||||
}
|
||||
if csp.HasUnsafeInline() {
|
||||
add("unsafe_inline",
|
||||
"Content-Security-Policy allows 'unsafe-inline' for scripts or styles, which negates most XSS protection.")
|
||||
}
|
||||
if csp.HasUnsafeEval() {
|
||||
add("unsafe_eval",
|
||||
"Content-Security-Policy allows 'unsafe-eval' in script-src, enabling eval()/new Function().")
|
||||
}
|
||||
switch {
|
||||
case hasScript:
|
||||
if w := csp.WildcardSource("script-src"); w != "" {
|
||||
add("wildcard_script_src",
|
||||
"Content-Security-Policy script-src includes the permissive source "+w+", allowing scripts from arbitrary origins.")
|
||||
}
|
||||
maxAge, includeSub, preload := parseHSTS(v)
|
||||
if maxAge < minSeconds {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.hsts.short_max_age",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("HSTS max-age=%d is below the recommended %d seconds (%d days).", maxAge, minSeconds, minDays),
|
||||
})
|
||||
continue
|
||||
case hasDefault:
|
||||
if w := csp.WildcardSource("default-src"); w != "" {
|
||||
add("wildcard_default_src",
|
||||
"Content-Security-Policy default-src includes the permissive source "+w+" and no script-src overrides it.")
|
||||
}
|
||||
states = append(states, sdk.CheckState{
|
||||
}
|
||||
|
||||
if len(out) == 0 {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.hsts.ok",
|
||||
Subject: p.Address,
|
||||
Message: fmt.Sprintf("HSTS present (max-age=%d, includeSubDomains=%v, preload=%v).", maxAge, includeSub, preload),
|
||||
})
|
||||
Suffix: "ok",
|
||||
Message: "Content-Security-Policy is set with no detected weaknesses.",
|
||||
}}
|
||||
}
|
||||
return states
|
||||
return out
|
||||
}
|
||||
|
||||
// parseHSTS pulls max-age, includeSubDomains and preload out of a
|
||||
// Strict-Transport-Security header value. Returns max-age=0 on parse failure.
|
||||
func parseHSTS(v string) (maxAge int64, includeSub bool, preload bool) {
|
||||
for _, part := range strings.Split(v, ";") {
|
||||
part = strings.TrimSpace(part)
|
||||
switch {
|
||||
case strings.HasPrefix(strings.ToLower(part), "max-age="):
|
||||
val := strings.Trim(part[len("max-age="):], "\"")
|
||||
if n, err := strconv.ParseInt(val, 10, 64); err == nil {
|
||||
maxAge = n
|
||||
}
|
||||
case strings.EqualFold(part, "includeSubDomains"):
|
||||
includeSub = true
|
||||
case strings.EqualFold(part, "preload"):
|
||||
preload = true
|
||||
}
|
||||
// X-Frame-Options -----------------------------------------------------
|
||||
|
||||
func inspectXFrameOptions(value string, _ HTTPProbe, _ sdk.CheckerOptions) []HeaderResult {
|
||||
xfo := strings.ToUpper(value)
|
||||
if xfo == "DENY" || xfo == "SAMEORIGIN" {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusOK, Suffix: "ok",
|
||||
Message: "Clickjacking protection is in place.",
|
||||
}}
|
||||
}
|
||||
return
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn, Suffix: "invalid",
|
||||
Message: "X-Frame-Options has an unrecognised value: " + xfo,
|
||||
}}
|
||||
}
|
||||
|
||||
// cspRule checks for the presence of a Content-Security-Policy header.
|
||||
type cspRule struct{}
|
||||
|
||||
func (r *cspRule) Name() string { return "http.csp" }
|
||||
func (r *cspRule) Description() string {
|
||||
return "Verifies the presence of a Content-Security-Policy header on HTTPS responses."
|
||||
func missingXFrameOptions(p HTTPProbe, _ sdk.CheckerOptions) []HeaderResult {
|
||||
if ParseCSP(p.Headers["content-security-policy"]).HasDirective("frame-ancestors") {
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusOK, Suffix: "ok",
|
||||
Message: "Clickjacking protection is in place.",
|
||||
}}
|
||||
}
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusWarn,
|
||||
Suffix: "missing",
|
||||
Message: "Neither X-Frame-Options nor CSP frame-ancestors is set.",
|
||||
Meta: map[string]any{"fix": "Send `X-Frame-Options: DENY` (or SAMEORIGIN) or use CSP frame-ancestors."},
|
||||
}}
|
||||
}
|
||||
|
||||
func (r *cspRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, opts sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
require := sdk.GetBoolOption(opts, OptionRequireCSP, false)
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.csp.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
// X-XSS-Protection ----------------------------------------------------
|
||||
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
csp := strings.TrimSpace(p.Headers["content-security-policy"])
|
||||
if csp == "" {
|
||||
status := sdk.StatusInfo
|
||||
if require {
|
||||
status = sdk.StatusWarn
|
||||
}
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: status,
|
||||
Code: "http.csp.missing",
|
||||
Subject: p.Address,
|
||||
Message: "Content-Security-Policy header is missing.",
|
||||
Meta: map[string]any{"fix": "Define a CSP appropriate for your application (e.g. default-src 'self')."},
|
||||
})
|
||||
continue
|
||||
}
|
||||
// Quick sanity hints; full CSP analysis is out of scope.
|
||||
if strings.Contains(csp, "'unsafe-inline'") || strings.Contains(csp, "'unsafe-eval'") {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.csp.unsafe",
|
||||
Subject: p.Address,
|
||||
Message: "Content-Security-Policy uses 'unsafe-inline' or 'unsafe-eval'.",
|
||||
})
|
||||
continue
|
||||
}
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.csp.ok",
|
||||
Subject: p.Address,
|
||||
Message: "Content-Security-Policy is set.",
|
||||
})
|
||||
func inspectXXSSProtection(value string, _ HTTPProbe, _ sdk.CheckerOptions) []HeaderResult {
|
||||
switch {
|
||||
case strings.HasPrefix(value, "0"):
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusOK, Suffix: "disabled",
|
||||
Message: "X-XSS-Protection is explicitly disabled (recommended).",
|
||||
}}
|
||||
case strings.Contains(strings.ToLower(value), "mode=block"):
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusInfo, Suffix: "enabled",
|
||||
Message: "X-XSS-Protection is set to the historically recommended `1; mode=block`. Modern browsers ignore this header; CSP is the proper replacement.",
|
||||
}}
|
||||
default:
|
||||
return []HeaderResult{{
|
||||
Status: sdk.StatusInfo, Suffix: "enabled",
|
||||
Message: "X-XSS-Protection is enabled. Modern browsers ignore this header; CSP is the proper replacement.",
|
||||
}}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
// xFrameOptionsRule checks X-Frame-Options (or frame-ancestors in CSP as
|
||||
// an acceptable substitute).
|
||||
type xFrameOptionsRule struct{}
|
||||
|
||||
func (r *xFrameOptionsRule) Name() string { return "http.x_frame_options" }
|
||||
func (r *xFrameOptionsRule) Description() string {
|
||||
return "Verifies that responses set X-Frame-Options or a CSP frame-ancestors directive."
|
||||
}
|
||||
|
||||
func (r *xFrameOptionsRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.x_frame_options.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
xfo := strings.ToUpper(strings.TrimSpace(p.Headers["x-frame-options"]))
|
||||
csp := strings.ToLower(p.Headers["content-security-policy"])
|
||||
hasFrameAncestors := strings.Contains(csp, "frame-ancestors")
|
||||
|
||||
switch {
|
||||
case xfo == "DENY" || xfo == "SAMEORIGIN" || hasFrameAncestors:
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.x_frame_options.ok",
|
||||
Subject: p.Address,
|
||||
Message: "Clickjacking protection is in place.",
|
||||
})
|
||||
case xfo != "":
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.x_frame_options.invalid",
|
||||
Subject: p.Address,
|
||||
Message: "X-Frame-Options has an unrecognised value: " + xfo,
|
||||
})
|
||||
default:
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.x_frame_options.missing",
|
||||
Subject: p.Address,
|
||||
Message: "Neither X-Frame-Options nor CSP frame-ancestors is set.",
|
||||
Meta: map[string]any{"fix": "Send `X-Frame-Options: DENY` (or SAMEORIGIN) or use CSP frame-ancestors."},
|
||||
})
|
||||
}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
// xContentTypeOptionsRule checks for X-Content-Type-Options: nosniff.
|
||||
type xContentTypeOptionsRule struct{}
|
||||
|
||||
func (r *xContentTypeOptionsRule) Name() string { return "http.x_content_type_options" }
|
||||
func (r *xContentTypeOptionsRule) Description() string {
|
||||
return "Verifies that responses set X-Content-Type-Options: nosniff."
|
||||
}
|
||||
|
||||
func (r *xContentTypeOptionsRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.x_content_type_options.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
v := strings.ToLower(strings.TrimSpace(p.Headers["x-content-type-options"]))
|
||||
if v == "nosniff" {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.x_content_type_options.ok",
|
||||
Subject: p.Address,
|
||||
Message: "X-Content-Type-Options: nosniff is set.",
|
||||
})
|
||||
} else if v != "" {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.x_content_type_options.invalid",
|
||||
Subject: p.Address,
|
||||
Message: "X-Content-Type-Options has an unexpected value: " + v,
|
||||
})
|
||||
} else {
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.x_content_type_options.missing",
|
||||
Subject: p.Address,
|
||||
Message: "X-Content-Type-Options: nosniff is not set.",
|
||||
Meta: map[string]any{"fix": "Add `X-Content-Type-Options: nosniff` to all responses."},
|
||||
})
|
||||
}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
||||
// xXSSProtectionRule checks the legacy X-XSS-Protection header. Modern
|
||||
// browsers ignore it, but if present we want it to be sane.
|
||||
type xXSSProtectionRule struct{}
|
||||
|
||||
func (r *xXSSProtectionRule) Name() string { return "http.x_xss_protection" }
|
||||
func (r *xXSSProtectionRule) Description() string {
|
||||
return "Reports the value of the legacy X-XSS-Protection header (disabled is preferred on modern browsers; CSP is the proper replacement)."
|
||||
}
|
||||
|
||||
func (r *xXSSProtectionRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
probes := successfulHTTPSProbes(data.Probes)
|
||||
if len(probes) == 0 {
|
||||
return []sdk.CheckState{unknownState("http.x_xss_protection.no_https", "No successful HTTPS probe to evaluate.")}
|
||||
}
|
||||
var states []sdk.CheckState
|
||||
for _, p := range probes {
|
||||
v := strings.TrimSpace(p.Headers["x-xss-protection"])
|
||||
switch {
|
||||
case v == "":
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.x_xss_protection.absent",
|
||||
Subject: p.Address,
|
||||
Message: "X-XSS-Protection is not set; CSP is the recommended replacement.",
|
||||
})
|
||||
case strings.HasPrefix(v, "0"):
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.x_xss_protection.disabled",
|
||||
Subject: p.Address,
|
||||
Message: "X-XSS-Protection is explicitly disabled (recommended).",
|
||||
})
|
||||
case strings.Contains(strings.ToLower(v), "mode=block"):
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.x_xss_protection.enabled",
|
||||
Subject: p.Address,
|
||||
Message: "X-XSS-Protection is set to the historically recommended `1; mode=block`. Modern browsers ignore this header; CSP is the proper replacement.",
|
||||
})
|
||||
default:
|
||||
states = append(states, sdk.CheckState{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.x_xss_protection.enabled",
|
||||
Subject: p.Address,
|
||||
Message: "X-XSS-Protection is enabled. Modern browsers ignore this header; CSP is the proper replacement.",
|
||||
})
|
||||
}
|
||||
}
|
||||
return states
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,23 +17,41 @@ func TestParseHSTS(t *testing.T) {
|
|||
maxAge int64
|
||||
includeSub bool
|
||||
preload bool
|
||||
wantErr bool
|
||||
}{
|
||||
{"empty", "", 0, false, false},
|
||||
{"max-age only", "max-age=31536000", 31536000, false, false},
|
||||
{"includeSubDomains", "max-age=15552000; includeSubDomains", 15552000, true, false},
|
||||
{"all flags", "max-age=63072000; includeSubDomains; preload", 63072000, true, true},
|
||||
{"quoted max-age", `max-age="3600"`, 3600, false, false},
|
||||
{"case-insensitive directive", "MAX-AGE=42; INCLUDESUBDOMAINS; PRELOAD", 42, true, true},
|
||||
{"messy spaces", " max-age=10 ; includeSubDomains ", 10, true, false},
|
||||
{"unparseable max-age", "max-age=not-a-number", 0, false, false},
|
||||
{"no max-age, only flags", "includeSubDomains; preload", 0, true, true},
|
||||
{"empty", "", 0, false, false, false},
|
||||
{"max-age only", "max-age=31536000", 31536000, false, false, false},
|
||||
{"includeSubDomains", "max-age=15552000; includeSubDomains", 15552000, true, false, false},
|
||||
{"all flags", "max-age=63072000; includeSubDomains; preload", 63072000, true, true, false},
|
||||
{"quoted max-age", `max-age="3600"`, 3600, false, false, false},
|
||||
{"case-insensitive directive", "MAX-AGE=42; INCLUDESUBDOMAINS; PRELOAD", 42, true, true, false},
|
||||
{"messy spaces", " max-age=10 ; includeSubDomains ", 10, true, false, false},
|
||||
{"unparseable max-age", "max-age=not-a-number", 0, false, false, true},
|
||||
{"no max-age, only flags", "includeSubDomains; preload", 0, true, true, true},
|
||||
{"negative max-age", "max-age=-1", 0, false, false, true},
|
||||
{"empty quoted max-age", `max-age=""`, 0, false, false, true},
|
||||
{"max-age without value", "max-age; includeSubDomains", 0, true, false, true},
|
||||
{"duplicate max-age", "max-age=10; max-age=20", 10, false, false, true},
|
||||
}
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
ma, inc, pre := parseHSTS(c.in)
|
||||
if ma != c.maxAge || inc != c.includeSub || pre != c.preload {
|
||||
t.Errorf("parseHSTS(%q) = (%d, %v, %v), want (%d, %v, %v)",
|
||||
c.in, ma, inc, pre, c.maxAge, c.includeSub, c.preload)
|
||||
h := ParseHSTS(c.in)
|
||||
if c.in == "" {
|
||||
if h != nil {
|
||||
t.Errorf("ParseHSTS(%q) = %+v, want nil", c.in, h)
|
||||
}
|
||||
return
|
||||
}
|
||||
if h == nil {
|
||||
t.Fatalf("ParseHSTS(%q) returned nil", c.in)
|
||||
}
|
||||
if h.MaxAge != c.maxAge || h.IncludeSub != c.includeSub || h.Preload != c.preload {
|
||||
t.Errorf("ParseHSTS(%q) = (%d, %v, %v), want (%d, %v, %v)",
|
||||
c.in, h.MaxAge, h.IncludeSub, h.Preload, c.maxAge, c.includeSub, c.preload)
|
||||
}
|
||||
if got := len(h.Errors) > 0; got != c.wantErr {
|
||||
t.Errorf("ParseHSTS(%q) errors = %v (%v), want wantErr=%v",
|
||||
c.in, h.Errors, got, c.wantErr)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
@ -41,7 +59,7 @@ func TestParseHSTS(t *testing.T) {
|
|||
|
||||
func TestHSTSRule_NoHTTPSProbes(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpProbe("a:80")}}
|
||||
states := runRule(t, &hstsRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.hsts"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusUnknown)
|
||||
if !hasCode(states, "http.hsts.no_https") {
|
||||
t.Errorf("missing no_https code: %+v", states)
|
||||
|
|
@ -50,7 +68,7 @@ func TestHSTSRule_NoHTTPSProbes(t *testing.T) {
|
|||
|
||||
func TestHSTSRule_MissingRequired(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpsProbe("a:443")}}
|
||||
states := runRule(t, &hstsRule{}, data, sdk.CheckerOptions{OptionRequireHSTS: true})
|
||||
states := runRule(t, ruleByName(t, "http.hsts"), data, sdk.CheckerOptions{OptionRequireHSTS: true})
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.hsts.missing") {
|
||||
t.Errorf("missing 'http.hsts.missing': %+v", states)
|
||||
|
|
@ -59,7 +77,7 @@ func TestHSTSRule_MissingRequired(t *testing.T) {
|
|||
|
||||
func TestHSTSRule_MissingNotRequired(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpsProbe("a:443")}}
|
||||
states := runRule(t, &hstsRule{}, data, sdk.CheckerOptions{OptionRequireHSTS: false})
|
||||
states := runRule(t, ruleByName(t, "http.hsts"), data, sdk.CheckerOptions{OptionRequireHSTS: false})
|
||||
mustStatus(t, states, sdk.StatusInfo)
|
||||
}
|
||||
|
||||
|
|
@ -67,7 +85,7 @@ func TestHSTSRule_ShortMaxAge(t *testing.T) {
|
|||
p := httpsProbe("a:443")
|
||||
p.Headers["strict-transport-security"] = "max-age=60"
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, &hstsRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.hsts"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.hsts.short_max_age") {
|
||||
t.Errorf("missing short_max_age code: %+v", states)
|
||||
|
|
@ -78,7 +96,7 @@ func TestHSTSRule_OK(t *testing.T) {
|
|||
p := httpsProbe("a:443")
|
||||
p.Headers["strict-transport-security"] = "max-age=63072000; includeSubDomains; preload"
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, &hstsRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.hsts"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.hsts.ok") {
|
||||
t.Errorf("missing ok code: %+v", states)
|
||||
|
|
@ -86,7 +104,7 @@ func TestHSTSRule_OK(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestHSTSRule_LoadFailure(t *testing.T) {
|
||||
states := (&hstsRule{}).Evaluate(t.Context(), &fakeObs{failGet: true}, nil)
|
||||
states := ruleByName(t, "http.hsts").Evaluate(t.Context(), &fakeObs{failGet: true}, nil)
|
||||
if len(states) != 1 || states[0].Status != sdk.StatusError {
|
||||
t.Fatalf("expected single error state, got %+v", states)
|
||||
}
|
||||
|
|
@ -95,22 +113,78 @@ func TestHSTSRule_LoadFailure(t *testing.T) {
|
|||
func TestCSPRule_Missing(t *testing.T) {
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpsProbe("a:443")}}
|
||||
// Default: not required → Info.
|
||||
states := runRule(t, &cspRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusInfo)
|
||||
// Required → Warn.
|
||||
states = runRule(t, &cspRule{}, data, sdk.CheckerOptions{OptionRequireCSP: true})
|
||||
states = runRule(t, ruleByName(t, "http.csp"), data, sdk.CheckerOptions{OptionRequireCSP: true})
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
}
|
||||
|
||||
func TestCSPRule_Unsafe(t *testing.T) {
|
||||
for _, csp := range []string{"default-src 'self'; script-src 'unsafe-inline'", "default-src 'unsafe-eval'"} {
|
||||
cases := []struct {
|
||||
csp string
|
||||
code string
|
||||
}{
|
||||
{"default-src 'self'; script-src 'self' 'unsafe-inline'", "http.csp.unsafe_inline"},
|
||||
{"default-src 'self'; script-src 'self' 'unsafe-eval'", "http.csp.unsafe_eval"},
|
||||
// unsafe-eval on default-src falls back to script-src.
|
||||
{"default-src 'self' 'unsafe-eval'", "http.csp.unsafe_eval"},
|
||||
}
|
||||
for _, c := range cases {
|
||||
p := httpsProbe("a:443")
|
||||
p.Headers["content-security-policy"] = csp
|
||||
p.Headers["content-security-policy"] = c.csp
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, &cspRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.csp.unsafe") {
|
||||
t.Errorf("csp=%q: missing unsafe code: %+v", csp, states)
|
||||
if !hasCode(states, c.code) {
|
||||
t.Errorf("csp=%q: missing code %q in %+v", c.csp, c.code, states)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCSPRule_MissingDefault(t *testing.T) {
|
||||
p := httpsProbe("a:443")
|
||||
p.Headers["content-security-policy"] = "frame-ancestors 'none'"
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.csp.missing_default") {
|
||||
t.Errorf("missing_default not emitted: %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCSPRule_WildcardScriptSrc(t *testing.T) {
|
||||
cases := []struct {
|
||||
csp string
|
||||
code string
|
||||
}{
|
||||
{"default-src 'self'; script-src *", "http.csp.wildcard_script_src"},
|
||||
{"default-src 'self'; script-src https:", "http.csp.wildcard_script_src"},
|
||||
// No script-src declared → wildcard on default-src is reported.
|
||||
{"default-src *", "http.csp.wildcard_default_src"},
|
||||
}
|
||||
for _, c := range cases {
|
||||
p := httpsProbe("a:443")
|
||||
p.Headers["content-security-policy"] = c.csp
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, c.code) {
|
||||
t.Errorf("csp=%q: missing code %q in %+v", c.csp, c.code, states)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCSPRule_TightScriptSrcMasksDefaultWildcard(t *testing.T) {
|
||||
// default-src is permissive but script-src locks scripts down — we
|
||||
// should not emit the default-src wildcard warning.
|
||||
p := httpsProbe("a:443")
|
||||
p.Headers["content-security-policy"] = "default-src *; script-src 'self'"
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
for _, s := range states {
|
||||
if s.Code == "http.csp.wildcard_default_src" {
|
||||
t.Errorf("unexpected wildcard_default_src when script-src tightens scripts: %+v", states)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -119,7 +193,7 @@ func TestCSPRule_OK(t *testing.T) {
|
|||
p := httpsProbe("a:443")
|
||||
p.Headers["content-security-policy"] = "default-src 'self'"
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, &cspRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.csp"), data, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
}
|
||||
|
||||
|
|
@ -147,7 +221,7 @@ func TestXFrameOptionsRule(t *testing.T) {
|
|||
p.Headers["content-security-policy"] = c.csp
|
||||
}
|
||||
data := &HTTPData{Probes: []HTTPProbe{p}}
|
||||
states := runRule(t, &xFrameOptionsRule{}, data, nil)
|
||||
states := runRule(t, ruleByName(t, "http.x_frame_options"), data, nil)
|
||||
mustStatus(t, states, c.want)
|
||||
if !hasCode(states, c.wantSub) {
|
||||
t.Errorf("missing code %q in %+v", c.wantSub, states)
|
||||
|
|
@ -172,7 +246,7 @@ func TestXContentTypeOptionsRule(t *testing.T) {
|
|||
if c.val != "" {
|
||||
p.Headers["x-content-type-options"] = c.val
|
||||
}
|
||||
states := runRule(t, &xContentTypeOptionsRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
states := runRule(t, ruleByName(t, "http.x_content_type_options"), &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, c.want)
|
||||
if !hasCode(states, c.code) {
|
||||
t.Errorf("val=%q: missing code %q in %+v", c.val, c.code, states)
|
||||
|
|
@ -195,7 +269,7 @@ func TestXXSSProtectionRule(t *testing.T) {
|
|||
if c.val != "" {
|
||||
p.Headers["x-xss-protection"] = c.val
|
||||
}
|
||||
states := runRule(t, &xXSSProtectionRule{}, &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
states := runRule(t, ruleByName(t, "http.x_xss_protection"), &HTTPData{Probes: []HTTPProbe{p}}, nil)
|
||||
mustStatus(t, states, c.want)
|
||||
if !hasCode(states, c.code) {
|
||||
t.Errorf("val=%q: want code %q, got %+v", c.val, c.code, states)
|
||||
|
|
@ -205,7 +279,13 @@ func TestXXSSProtectionRule(t *testing.T) {
|
|||
|
||||
func TestSecurityHeaders_NoHTTPS(t *testing.T) {
|
||||
// Each header rule must emit Unknown when there are no successful HTTPS probes.
|
||||
rules := []sdk.CheckRule{&hstsRule{}, &cspRule{}, &xFrameOptionsRule{}, &xContentTypeOptionsRule{}, &xXSSProtectionRule{}}
|
||||
rules := []sdk.CheckRule{
|
||||
ruleByName(t, "http.hsts"),
|
||||
ruleByName(t, "http.csp"),
|
||||
ruleByName(t, "http.x_frame_options"),
|
||||
ruleByName(t, "http.x_content_type_options"),
|
||||
ruleByName(t, "http.x_xss_protection"),
|
||||
}
|
||||
data := &HTTPData{Probes: []HTTPProbe{httpProbe("a:80")}}
|
||||
for _, r := range rules {
|
||||
states := runRule(t, r, data, nil)
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ import (
|
|||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() { RegisterRule(&sriRule{}) }
|
||||
|
||||
// sriRule reports cross-origin <script>/<link> tags that lack an
|
||||
// integrity= attribute. Same-origin assets don't need SRI (the user
|
||||
// already trusts the origin to deliver them).
|
||||
|
|
|
|||
64
checker/rules_wellknown.go
Normal file
64
checker/rules_wellknown.go
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func init() { RegisterRule(&securityTxtRule{}) }
|
||||
|
||||
// securityTxtRule reports whether /.well-known/security.txt is published
|
||||
// (RFC 9116). Absence is an Info, not a Warn: many sites legitimately
|
||||
// have no security disclosure pipeline, but it is now the expected place
|
||||
// for researchers to look first.
|
||||
type securityTxtRule struct{}
|
||||
|
||||
func (r *securityTxtRule) Name() string { return "http.security_txt" }
|
||||
func (r *securityTxtRule) Description() string {
|
||||
return "Reports whether /.well-known/security.txt (RFC 9116) is published."
|
||||
}
|
||||
|
||||
func (r *securityTxtRule) Evaluate(ctx context.Context, obs sdk.ObservationGetter, _ sdk.CheckerOptions) []sdk.CheckState {
|
||||
data, errSt := loadHTTPData(ctx, obs)
|
||||
if errSt != nil {
|
||||
return []sdk.CheckState{*errSt}
|
||||
}
|
||||
wk, ok, err := LoadExtension[WellKnownData](data, ObservationKeyWellKnown)
|
||||
if err != nil {
|
||||
return []sdk.CheckState{{Status: sdk.StatusError, Code: "http.security_txt.decode_error", Message: err.Error()}}
|
||||
}
|
||||
if !ok {
|
||||
return []sdk.CheckState{unknownState("http.security_txt.no_data", "Well-known collector did not run.")}
|
||||
}
|
||||
probe := wk.URIs["/.well-known/security.txt"]
|
||||
switch {
|
||||
case probe.StatusCode == 200 && probe.Bytes > 0:
|
||||
return []sdk.CheckState{{
|
||||
Status: sdk.StatusOK,
|
||||
Code: "http.security_txt.ok",
|
||||
Subject: data.Domain,
|
||||
Message: fmt.Sprintf("/.well-known/security.txt is published (%d bytes).", probe.Bytes),
|
||||
}}
|
||||
case probe.StatusCode == 200:
|
||||
return []sdk.CheckState{{
|
||||
Status: sdk.StatusWarn,
|
||||
Code: "http.security_txt.empty",
|
||||
Subject: data.Domain,
|
||||
Message: "/.well-known/security.txt responded 200 but is empty.",
|
||||
}}
|
||||
default:
|
||||
return []sdk.CheckState{{
|
||||
Status: sdk.StatusInfo,
|
||||
Code: "http.security_txt.missing",
|
||||
Subject: data.Domain,
|
||||
Message: fmt.Sprintf("/.well-known/security.txt is not published (status %d).", probe.StatusCode),
|
||||
Meta: map[string]any{"fix": "Publish /.well-known/security.txt per RFC 9116 (Contact:, Expires:, …)."},
|
||||
}}
|
||||
}
|
||||
}
|
||||
96
checker/rules_wellknown_test.go
Normal file
96
checker/rules_wellknown_test.go
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
// This file is part of the happyDomain (R) project.
|
||||
// Copyright (c) 2020-2026 happyDomain
|
||||
// Authors: Pierre-Olivier Mercier, et al.
|
||||
|
||||
package checker
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
sdk "git.happydns.org/checker-sdk-go/checker"
|
||||
)
|
||||
|
||||
func wellKnownData(t *testing.T, probes map[string]WellKnownProbe) map[string]json.RawMessage {
|
||||
t.Helper()
|
||||
raw, err := json.Marshal(WellKnownData{URIs: probes})
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
return map[string]json.RawMessage{ObservationKeyWellKnown: raw}
|
||||
}
|
||||
|
||||
func TestSecurityTxtRule_OK(t *testing.T) {
|
||||
data := &HTTPData{
|
||||
Domain: "example.test",
|
||||
Probes: []HTTPProbe{httpsProbe("a:443")},
|
||||
Extensions: wellKnownData(t, map[string]WellKnownProbe{
|
||||
"/.well-known/security.txt": {StatusCode: 200, Bytes: 128},
|
||||
"/robots.txt": {StatusCode: 200, Bytes: 42},
|
||||
}),
|
||||
}
|
||||
states := runRule(t, &securityTxtRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusOK)
|
||||
if !hasCode(states, "http.security_txt.ok") {
|
||||
t.Errorf("expected ok, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSecurityTxtRule_Empty(t *testing.T) {
|
||||
data := &HTTPData{
|
||||
Domain: "example.test",
|
||||
Probes: []HTTPProbe{httpsProbe("a:443")},
|
||||
Extensions: wellKnownData(t, map[string]WellKnownProbe{
|
||||
"/.well-known/security.txt": {StatusCode: 200, Bytes: 0},
|
||||
}),
|
||||
}
|
||||
states := runRule(t, &securityTxtRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusWarn)
|
||||
if !hasCode(states, "http.security_txt.empty") {
|
||||
t.Errorf("expected empty, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSecurityTxtRule_Missing(t *testing.T) {
|
||||
data := &HTTPData{
|
||||
Domain: "example.test",
|
||||
Probes: []HTTPProbe{httpsProbe("a:443")},
|
||||
Extensions: wellKnownData(t, map[string]WellKnownProbe{
|
||||
"/.well-known/security.txt": {StatusCode: 404},
|
||||
}),
|
||||
}
|
||||
states := runRule(t, &securityTxtRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusInfo)
|
||||
if !hasCode(states, "http.security_txt.missing") {
|
||||
t.Errorf("expected missing, got %+v", states)
|
||||
}
|
||||
if states[0].Meta["fix"] == nil {
|
||||
t.Errorf("expected fix hint in meta, got %+v", states[0].Meta)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSecurityTxtRule_NoCollectorData(t *testing.T) {
|
||||
data := &HTTPData{
|
||||
Domain: "example.test",
|
||||
Probes: []HTTPProbe{httpsProbe("a:443")},
|
||||
}
|
||||
states := runRule(t, &securityTxtRule{}, data, nil)
|
||||
mustStatus(t, states, sdk.StatusUnknown)
|
||||
if !hasCode(states, "http.security_txt.no_data") {
|
||||
t.Errorf("expected no_data, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSecurityTxtRule_DecodeError(t *testing.T) {
|
||||
data := &HTTPData{
|
||||
Domain: "example.test",
|
||||
Probes: []HTTPProbe{httpsProbe("a:443")},
|
||||
Extensions: map[string]json.RawMessage{
|
||||
ObservationKeyWellKnown: json.RawMessage(`"not an object"`),
|
||||
},
|
||||
}
|
||||
states := runRule(t, &securityTxtRule{}, data, nil)
|
||||
if states[0].Status != sdk.StatusError || states[0].Code != "http.security_txt.decode_error" {
|
||||
t.Errorf("expected decode_error, got %+v", states)
|
||||
}
|
||||
}
|
||||
|
|
@ -89,6 +89,20 @@ func mustStatus(t *testing.T, states []sdk.CheckState, want sdk.Status) {
|
|||
}
|
||||
}
|
||||
|
||||
// ruleByName looks a rule up in the global registry by Name(). It exists
|
||||
// so tests can drive rules wired declaratively (HeaderRule and friends)
|
||||
// without depending on a concrete type.
|
||||
func ruleByName(t *testing.T, name string) sdk.CheckRule {
|
||||
t.Helper()
|
||||
for _, r := range Rules() {
|
||||
if r.Name() == name {
|
||||
return r
|
||||
}
|
||||
}
|
||||
t.Fatalf("rule %q not found in registry", name)
|
||||
return nil
|
||||
}
|
||||
|
||||
func hasCode(states []sdk.CheckState, code string) bool {
|
||||
for _, s := range states {
|
||||
if s.Code == code {
|
||||
|
|
|
|||
|
|
@ -15,7 +15,10 @@
|
|||
// delegated to checker-tls.
|
||||
package checker
|
||||
|
||||
import "time"
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
const ObservationKeyHTTP = "http"
|
||||
|
||||
|
|
@ -42,10 +45,20 @@ const (
|
|||
)
|
||||
|
||||
// HTTPData is the full collected payload written under ObservationKeyHTTP.
//
// Probes/Domain/CollectedAt come from the root collector and are kept at
// the top level for backward compatibility with the rules that have
// always read them directly.
//
// Extensions holds the JSON-encoded outputs of every additional Collector
// registered via RegisterCollector, keyed by Collector.Key(). Rules
// access them via LoadExtension[T] to get a typed view; payloads are kept
// raw (json.RawMessage) so decoding is deferred until a rule asks for it.
type HTTPData struct {
	// Domain is the hostname the probes were run against.
	Domain string `json:"domain,omitempty"`
	// Probes lists the outcome of every (scheme, ip, port) attempt.
	Probes []HTTPProbe `json:"probes"`
	// CollectedAt records when the collection pass ran.
	CollectedAt time.Time `json:"collected_at"`

	// Extensions maps Collector.Key() to that collector's raw JSON output.
	Extensions map[string]json.RawMessage `json:"extensions,omitempty"`
}
|
||||
// HTTPProbe is the outcome of a single (scheme, ip, port) probe.
|
||||
|
|
@ -91,8 +104,18 @@ type CookieInfo struct {
|
|||
HttpOnly bool `json:"http_only"`
|
||||
SameSite string `json:"same_site,omitempty"` // "Strict", "Lax", "None", or ""
|
||||
HasExpiry bool `json:"has_expiry,omitempty"`
|
||||
// Size is the byte length of the raw Set-Cookie header value
|
||||
// (everything after "Set-Cookie: "), used to evaluate the
|
||||
// per-cookie 4096-byte budget RFC 6265 §6.1 says browsers SHOULD
|
||||
// support.
|
||||
Size int `json:"size,omitempty"`
|
||||
}
|
||||
|
||||
// MaxCookieSize is the per-cookie size browsers are required to
// support per RFC 6265 §6.1. Cookies above this are likely to be
// silently dropped by some user agents, so rules compare
// CookieInfo.Size against this budget.
const MaxCookieSize = 4096
|
||||
// HTMLResource is a <script src=...> or <link href=...> reference extracted
|
||||
// from the HTML body, used to evaluate Subresource Integrity coverage.
|
||||
type HTMLResource struct {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue