Files
nginx-logtail/cmd/frontend/handler.go
Pim van Pelt d1a21a7a62 PRE-RELEASE 0.9.2: frontend rename source→collector, tab layout fit
Rename the collector-picker concept to 'collector' throughout the
frontend so it no longer collides with the ipng_source_tag group-by
(which is labelled 'source'). Affects PageData.Collector, the raw
JSON output key, template labels, and tests. Proto Source field is
untouched (wire-level name used by CLI and aggregator too).

Shrink tab padding/gap/font-size and add window:/filter: labels so
the four tab rows (window, filter, collector, tor) line up and 7+
collectors fit on one line at the 1100px body width.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-17 15:18:40 +02:00

688 lines
17 KiB
Go
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
package main
import (
"context"
"encoding/json"
"fmt"
"html/template"
"log"
"net/http"
"net/url"
"strconv"
"time"
st "git.ipng.ch/ipng/nginx-logtail/internal/store"
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
"google.golang.org/grpc"
)
// Handler is the HTTP handler for the frontend.
type Handler struct {
	defaultTarget string             // gRPC endpoint dialed when no ?target= param is given
	defaultN      int                // TopN size used when no ?n= param is given
	refreshSecs   int                // auto-refresh interval (seconds) handed to the template via PageData
	tmpl          *template.Template // parsed HTML templates; the "base" template is executed per page
}
// Tab is a window or group-by selector link.
type Tab struct {
	Label  string // visible tab text
	URL    string // page URL selecting this tab's value
	Active bool   // true when this tab matches the current request
}
// Crumb is one active filter shown in the breadcrumb strip.
type Crumb struct {
	Text      string // display text, e.g. "website=example.com"
	RemoveURL string // page URL with this one filter removed
}
// TableRow is one row in the TopN result table.
type TableRow struct {
	Rank     int
	Label    string
	Count    int64
	Pct      float64 // 0–100, relative to the rank-1 entry's count
	DrillURL string  // URL that filters on this row and advances the group-by
}
// filterState holds the filter fields parsed from URL params (the f_* keys).
// All values are kept as raw strings; expression parsing and conversion to
// the protobuf Filter happens in buildFilter.
type filterState struct {
	Website      string // website filter value
	Prefix       string // client prefix filter value
	URI          string // request-URI filter value
	Status       string // expression: "200", "!=200", ">=400", etc.
	WebsiteRe    string // RE2 regex against website
	URIRe        string // RE2 regex against request URI
	WebsiteReNeg string // RE2 regex exclusion against website
	URIReNeg     string // RE2 regex exclusion against request URI
	IsTor        string // "", "1" (TOR only), "0" (non-TOR only)
	ASN          string // expression: "12345", "!=65000", ">=1000", etc.
	SourceTag    string // exact ipng_source_tag match
}
// QueryParams holds all parsed URL parameters for one page request.
type QueryParams struct {
	Target   string      // gRPC endpoint to query (?target=, defaults to Handler.defaultTarget)
	Window   pb.Window   // protobuf window enum
	WindowS  string      // window in URL/display form, e.g. "5m"
	GroupBy  pb.GroupBy  // protobuf group-by enum
	GroupByS string      // group-by in URL/display form, e.g. "website"
	N        int         // TopN size (?n=)
	Filter   filterState // active filter fields (f_* params)
}
// PageData is passed to the HTML template.
type PageData struct {
	Params         QueryParams
	Collector      string        // collector name, populated from TopNResponse.Source
	Entries        []TableRow    // TopN result rows
	TotalCount     int64         // sum of all entry counts
	Sparkline      template.HTML // pre-rendered trend sparkline markup
	Breadcrumbs    []Crumb       // active filters, each individually removable
	Windows        []Tab
	GroupBys       []Tab
	Targets        []Tab // collector picker; empty when only one target available
	TorTabs        []Tab // all / tor / no-tor toggle
	RefreshSecs    int   // auto-refresh interval in seconds
	Error          string
	FilterExpr     string // current filter serialised to mini-language for the input box
	FilterErr      string // parse error from a submitted q= expression
	ClearFilterURL string // URL that removes all filter params
}
// windowSpecs lists the selectable time windows in tab display order;
// s is the URL value, label the tab text (here identical).
var windowSpecs = []struct{ s, label string }{
	{"1m", "1m"}, {"5m", "5m"}, {"15m", "15m"}, {"60m", "60m"}, {"6h", "6h"}, {"24h", "24h"},
}

// groupBySpecs lists the selectable group-by dimensions in tab display order.
// Note: the source_tag dimension is labelled "source" in the UI.
var groupBySpecs = []struct{ s, label string }{
	{"website", "website"}, {"asn", "asn"}, {"prefix", "prefix"}, {"status", "status"}, {"uri", "uri"}, {"source_tag", "source"},
}
// parseWindowString maps the w= URL parameter onto the protobuf Window
// enum together with its canonical string form. Unknown or empty values
// fall back to the 5-minute window.
func parseWindowString(s string) (pb.Window, string) {
	known := map[string]pb.Window{
		"1m":  pb.Window_W1M,
		"5m":  pb.Window_W5M,
		"15m": pb.Window_W15M,
		"60m": pb.Window_W60M,
		"6h":  pb.Window_W6H,
		"24h": pb.Window_W24H,
	}
	if w, ok := known[s]; ok {
		return w, s
	}
	return pb.Window_W5M, "5m"
}
// parseGroupByString maps the by= URL parameter onto the protobuf GroupBy
// enum together with its canonical string form. Unknown or empty values
// fall back to grouping by website.
func parseGroupByString(s string) (pb.GroupBy, string) {
	known := map[string]pb.GroupBy{
		"prefix":     pb.GroupBy_CLIENT_PREFIX,
		"uri":        pb.GroupBy_REQUEST_URI,
		"status":     pb.GroupBy_HTTP_RESPONSE,
		"asn":        pb.GroupBy_ASN_NUMBER,
		"source_tag": pb.GroupBy_SOURCE_TAG,
	}
	if g, ok := known[s]; ok {
		return g, s
	}
	return pb.GroupBy_WEBSITE, "website"
}
// parseParams extracts all page parameters from the request URL, applying
// the handler defaults for target and n when absent or invalid.
func (h *Handler) parseParams(r *http.Request) QueryParams {
	vals := r.URL.Query()

	tgt := vals.Get("target")
	if tgt == "" {
		tgt = h.defaultTarget
	}

	// n must be a positive integer; anything else keeps the default.
	limit := h.defaultN
	if raw := vals.Get("n"); raw != "" {
		if parsed, err := strconv.Atoi(raw); err == nil && parsed > 0 {
			limit = parsed
		}
	}

	win, winLabel := parseWindowString(vals.Get("w"))
	grp, grpLabel := parseGroupByString(vals.Get("by"))

	return QueryParams{
		Target:   tgt,
		Window:   win,
		WindowS:  winLabel,
		GroupBy:  grp,
		GroupByS: grpLabel,
		N:        limit,
		Filter: filterState{
			Website:      vals.Get("f_website"),
			Prefix:       vals.Get("f_prefix"),
			URI:          vals.Get("f_uri"),
			Status:       vals.Get("f_status"),
			WebsiteRe:    vals.Get("f_website_re"),
			URIRe:        vals.Get("f_uri_re"),
			WebsiteReNeg: vals.Get("f_website_re_neg"),
			URIReNeg:     vals.Get("f_uri_re_neg"),
			IsTor:        vals.Get("f_is_tor"),
			ASN:          vals.Get("f_asn"),
			SourceTag:    vals.Get("f_source_tag"),
		},
	}
}
// buildFilter converts the parsed filter state into a protobuf Filter,
// or nil when no filter field is set at all.
func buildFilter(fs filterState) *pb.Filter {
	// filterState is all strings, so struct equality against the zero
	// value is exactly "every field empty".
	if fs == (filterState{}) {
		return nil
	}
	pf := &pb.Filter{}
	if fs.Website != "" {
		pf.Website = &fs.Website
	}
	if fs.Prefix != "" {
		pf.ClientPrefix = &fs.Prefix
	}
	if fs.URI != "" {
		pf.HttpRequestUri = &fs.URI
	}
	if fs.Status != "" {
		// Status supports expressions like "200", "!=200", ">=400";
		// unparseable expressions are silently dropped.
		if n, op, ok := st.ParseStatusExpr(fs.Status); ok {
			pf.HttpResponse = &n
			pf.StatusOp = op
		}
	}
	if fs.WebsiteRe != "" {
		pf.WebsiteRegex = &fs.WebsiteRe
	}
	if fs.URIRe != "" {
		pf.UriRegex = &fs.URIRe
	}
	if fs.WebsiteReNeg != "" {
		pf.WebsiteRegexExclude = &fs.WebsiteReNeg
	}
	if fs.URIReNeg != "" {
		pf.UriRegexExclude = &fs.URIReNeg
	}
	switch fs.IsTor {
	case "1":
		pf.Tor = pb.TorFilter_TOR_YES
	case "0":
		pf.Tor = pb.TorFilter_TOR_NO
	}
	if fs.ASN != "" {
		// ASN reuses the same numeric-expression parser as status codes.
		if n, op, ok := st.ParseStatusExpr(fs.ASN); ok {
			pf.AsnNumber = &n
			pf.AsnOp = op
		}
	}
	if fs.SourceTag != "" {
		pf.IpngSourceTag = &fs.SourceTag
	}
	return pf
}
// toValues serialises QueryParams back to URL query values. The four core
// keys (target, w, by, n) are always present; filter keys appear only when
// non-empty.
func (p QueryParams) toValues() url.Values {
	vals := url.Values{}
	vals.Set("target", p.Target)
	vals.Set("w", p.WindowS)
	vals.Set("by", p.GroupByS)
	vals.Set("n", strconv.Itoa(p.N))
	filterParams := []struct{ key, value string }{
		{"f_website", p.Filter.Website},
		{"f_prefix", p.Filter.Prefix},
		{"f_uri", p.Filter.URI},
		{"f_status", p.Filter.Status},
		{"f_website_re", p.Filter.WebsiteRe},
		{"f_uri_re", p.Filter.URIRe},
		{"f_website_re_neg", p.Filter.WebsiteReNeg},
		{"f_uri_re_neg", p.Filter.URIReNeg},
		{"f_is_tor", p.Filter.IsTor},
		{"f_asn", p.Filter.ASN},
		{"f_source_tag", p.Filter.SourceTag},
	}
	for _, fp := range filterParams {
		if fp.value != "" {
			vals.Set(fp.key, fp.value)
		}
	}
	return vals
}
// buildURL returns a page URL derived from the current params with overrides
// applied. An override value of "" removes that key from the URL entirely.
func (p QueryParams) buildURL(overrides map[string]string) string {
	vals := p.toValues()
	for key, value := range overrides {
		if value != "" {
			vals.Set(key, value)
			continue
		}
		vals.Del(key)
	}
	return "/?" + vals.Encode()
}
// clearFilterURL returns a URL with all filter params removed.
func (p QueryParams) clearFilterURL() string {
	filterKeys := []string{
		"f_website", "f_prefix", "f_uri", "f_status",
		"f_website_re", "f_uri_re", "f_website_re_neg", "f_uri_re_neg",
		"f_is_tor", "f_asn", "f_source_tag",
	}
	// An empty override value tells buildURL to delete the key.
	removals := make(map[string]string, len(filterKeys))
	for _, k := range filterKeys {
		removals[k] = ""
	}
	return p.buildURL(removals)
}
// nextGroupBy advances the drill-down dimension hierarchy:
// website → prefix → uri → status → asn → source_tag → website (cycles).
// Unknown input also maps to "website".
func nextGroupBy(s string) string {
	order := map[string]string{
		"website": "prefix",
		"prefix":  "uri",
		"uri":     "status",
		"status":  "asn",
		"asn":     "source_tag",
	}
	if nxt, ok := order[s]; ok {
		return nxt
	}
	// source_tag (and anything unrecognised) wraps back to website.
	return "website"
}
// groupByFilterKey maps a group-by name to its URL filter parameter.
// Unknown names fall back to the website filter key.
func groupByFilterKey(s string) string {
	keys := map[string]string{
		"website":    "f_website",
		"prefix":     "f_prefix",
		"uri":        "f_uri",
		"status":     "f_status",
		"asn":        "f_asn",
		"source_tag": "f_source_tag",
	}
	if k, ok := keys[s]; ok {
		return k
	}
	return "f_website"
}
// drillURL returns a URL that pins the clicked label as a filter on the
// current dimension and advances the group-by to the next dimension.
func (p QueryParams) drillURL(label string) string {
	overrides := map[string]string{
		"by": nextGroupBy(p.GroupByS),
	}
	overrides[groupByFilterKey(p.GroupByS)] = label
	return p.buildURL(overrides)
}
// buildCrumbs returns one breadcrumb per active filter field, each with a
// RemoveURL that drops exactly that filter from the current page URL.
func buildCrumbs(p QueryParams) []Crumb {
	var crumbs []Crumb
	// add appends one crumb whose RemoveURL clears the given URL parameter
	// (an empty override value deletes the key in buildURL).
	add := func(text, param string) {
		crumbs = append(crumbs, Crumb{
			Text:      text,
			RemoveURL: p.buildURL(map[string]string{param: ""}),
		})
	}
	if p.Filter.Website != "" {
		add("website="+p.Filter.Website, "f_website")
	}
	if p.Filter.Prefix != "" {
		add("prefix="+p.Filter.Prefix, "f_prefix")
	}
	if p.Filter.URI != "" {
		add("uri="+p.Filter.URI, "f_uri")
	}
	if p.Filter.Status != "" {
		add("status="+p.Filter.Status, "f_status")
	}
	if p.Filter.WebsiteRe != "" {
		add("website~="+p.Filter.WebsiteRe, "f_website_re")
	}
	if p.Filter.URIRe != "" {
		add("uri~="+p.Filter.URIRe, "f_uri_re")
	}
	if p.Filter.WebsiteReNeg != "" {
		add("website!~="+p.Filter.WebsiteReNeg, "f_website_re_neg")
	}
	if p.Filter.URIReNeg != "" {
		add("uri!~="+p.Filter.URIReNeg, "f_uri_re_neg")
	}
	switch p.Filter.IsTor {
	case "1":
		add("is_tor=1 (TOR only)", "f_is_tor")
	case "0":
		add("is_tor=0 (no TOR)", "f_is_tor")
	}
	if p.Filter.ASN != "" {
		// asnTermStr renders the ASN expression (defined elsewhere in this package).
		add(asnTermStr(p.Filter.ASN), "f_asn")
	}
	if p.Filter.SourceTag != "" {
		add("source_tag="+p.Filter.SourceTag, "f_source_tag")
	}
	return crumbs
}
// buildWindowTabs builds the time-window selector tabs from windowSpecs.
func buildWindowTabs(p QueryParams) []Tab {
	out := make([]Tab, 0, len(windowSpecs))
	for _, spec := range windowSpecs {
		out = append(out, Tab{
			Label:  spec.label,
			URL:    p.buildURL(map[string]string{"w": spec.s}),
			Active: p.WindowS == spec.s,
		})
	}
	return out
}
// buildGroupByTabs builds the group-by selector tabs from groupBySpecs,
// prefixing each label with "by ".
func buildGroupByTabs(p QueryParams) []Tab {
	out := make([]Tab, 0, len(groupBySpecs))
	for _, spec := range groupBySpecs {
		out = append(out, Tab{
			Label:  "by " + spec.label,
			URL:    p.buildURL(map[string]string{"by": spec.s}),
			Active: p.GroupByS == spec.s,
		})
	}
	return out
}
// buildTorTabs builds the all / tor / no-tor toggle tabs, keyed on the
// f_is_tor URL parameter ("" clears the TOR filter).
func buildTorTabs(p QueryParams) []Tab {
	options := []struct{ value, label string }{
		{"", "all"},
		{"1", "tor"},
		{"0", "no tor"},
	}
	out := make([]Tab, 0, len(options))
	for _, opt := range options {
		out = append(out, Tab{
			Label:  opt.label,
			URL:    p.buildURL(map[string]string{"f_is_tor": opt.value}),
			Active: p.Filter.IsTor == opt.value,
		})
	}
	return out
}
// buildTargetTabs builds the collector picker tabs from a ListTargets
// response. Returns nil (the picker is hidden) when the response is missing
// or lists no per-collector endpoints.
func (h *Handler) buildTargetTabs(p QueryParams, lt *pb.ListTargetsResponse) []Tab {
	if lt == nil || len(lt.Targets) == 0 {
		// Only one choice (or ListTargets failed): nothing to pick between.
		return nil
	}
	tabs := make([]Tab, 0, len(lt.Targets)+1)
	// "all" always points at the configured aggregator default.
	tabs = append(tabs, Tab{
		Label:  "all",
		URL:    p.buildURL(map[string]string{"target": h.defaultTarget}),
		Active: p.Target == h.defaultTarget,
	})
	for _, tgt := range lt.Targets {
		addr := tgt.Addr
		if addr == "" {
			// A collector reporting itself has no addr; use the current target.
			addr = p.Target
		}
		tabs = append(tabs, Tab{
			Label:  tgt.Name,
			URL:    p.buildURL(map[string]string{"target": addr}),
			Active: p.Target == addr,
		})
	}
	return tabs
}
// buildTableRows converts TopN entries into template rows, returning the
// rows plus the sum of all counts. Pct is each count as a percentage of
// the first (largest) entry's count; zero when the leader's count is zero.
func buildTableRows(entries []*pb.TopNEntry, p QueryParams) ([]TableRow, int64) {
	if len(entries) == 0 {
		return nil, 0
	}
	leader := float64(entries[0].Count)
	rows := make([]TableRow, 0, len(entries))
	var total int64
	for idx, e := range entries {
		total += e.Count
		var pct float64
		if leader > 0 {
			pct = float64(e.Count) / leader * 100
		}
		rows = append(rows, TableRow{
			Rank:     idx + 1,
			Label:    e.Label,
			Count:    e.Count,
			Pct:      pct,
			DrillURL: p.drillURL(e.Label),
		})
	}
	return rows, total
}
// ServeHTTP renders the TopN dashboard page. It parses the query params,
// handles a submitted filter expression (q=), fans out three concurrent
// gRPC calls (TopN, Trend, ListTargets), and renders either the HTML page
// or raw JSON (raw=1).
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	params := h.parseParams(r)
	// Handle filter expression box submission (q= param).
	var filterErr string
	filterExprInput := FilterExprString(params.Filter)
	if qVals, ok := r.URL.Query()["q"]; ok {
		q := ""
		if len(qVals) > 0 {
			q = qVals[0]
		}
		fs, err := ParseFilterExpr(q)
		if err != nil {
			filterErr = err.Error()
			filterExprInput = q // show what the user typed so they can fix it
			// fall through: render page using existing filter params
		} else {
			// Valid expression: redirect to the canonical f_* URL form so the
			// transient q= param never sticks in the address bar (toValues does
			// not serialise q).
			params.Filter = fs
			http.Redirect(w, r, params.buildURL(nil), http.StatusSeeOther)
			return
		}
	}
	filter := buildFilter(params.Filter)
	conn, client, err := dial(params.Target)
	if err != nil {
		h.render(w, http.StatusBadGateway, h.errorPage(params,
			fmt.Sprintf("cannot connect to %s: %v", params.Target, err)))
		return
	}
	defer conn.Close()
	// Bound all three backend calls by the request context plus a 5s cap.
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()
	type topNResult struct {
		resp *pb.TopNResponse
		err  error
	}
	type trendResult struct {
		resp *pb.TrendResponse
		err  error
	}
	// Buffered (capacity 1) so each goroutine can send without blocking.
	topNCh := make(chan topNResult, 1)
	trendCh := make(chan trendResult, 1)
	ltCh := make(chan *pb.ListTargetsResponse, 1)
	go func() {
		resp, err := client.TopN(ctx, &pb.TopNRequest{
			Filter:  filter,
			GroupBy: params.GroupBy,
			N:       int32(params.N),
			Window:  params.Window,
		})
		topNCh <- topNResult{resp, err}
	}()
	go func() {
		resp, err := client.Trend(ctx, &pb.TrendRequest{
			Filter: filter,
			Window: params.Window,
		})
		trendCh <- trendResult{resp, err}
	}()
	go func() {
		// Always query the default target for ListTargets so we get the full
		// list of available collectors even when viewing a specific one.
		ltClient := client
		var ltConn *grpc.ClientConn
		if params.Target != h.defaultTarget {
			c, cl, err := dial(h.defaultTarget)
			if err == nil {
				ltConn = c
				ltClient = cl
			}
			// On dial failure we fall back to asking the current target.
		}
		resp, err := ltClient.ListTargets(ctx, &pb.ListTargetsRequest{})
		if ltConn != nil {
			ltConn.Close()
		}
		if err != nil {
			// nil signals "no picker data"; buildTargetTabs hides the picker.
			ltCh <- nil
		} else {
			ltCh <- resp
		}
	}()
	// Join all three goroutines before inspecting any result, so none of
	// them outlives the handler.
	tn := <-topNCh
	tr := <-trendCh
	lt := <-ltCh
	if tn.err != nil {
		h.render(w, http.StatusBadGateway, h.errorPage(params,
			fmt.Sprintf("error querying %s: %v", params.Target, tn.err)))
		return
	}
	// raw=1: return JSON for scripting
	if r.URL.Query().Get("raw") == "1" {
		writeRawJSON(w, params, tn.resp)
		return
	}
	rows, total := buildTableRows(tn.resp.Entries, params)
	// Trend failures are non-fatal: the page renders without a sparkline.
	var sparkline template.HTML
	if tr.err == nil && tr.resp != nil {
		sparkline = renderSparkline(tr.resp.Points)
	}
	data := PageData{
		Params:         params,
		Collector:      tn.resp.Source,
		Entries:        rows,
		TotalCount:     total,
		Sparkline:      sparkline,
		Breadcrumbs:    buildCrumbs(params),
		Windows:        buildWindowTabs(params),
		GroupBys:       buildGroupByTabs(params),
		TorTabs:        buildTorTabs(params),
		Targets:        h.buildTargetTabs(params, lt),
		RefreshSecs:    h.refreshSecs,
		FilterExpr:     filterExprInput,
		FilterErr:      filterErr,
		ClearFilterURL: params.clearFilterURL(),
	}
	h.render(w, http.StatusOK, data)
}
// render executes the "base" template with the given HTTP status code.
func (h *Handler) render(w http.ResponseWriter, status int, data PageData) {
	w.Header().Set("Content-Type", "text/html; charset=utf-8")
	w.WriteHeader(status)
	// Headers and status are already written, so a template failure can
	// only be logged, not turned into an error response.
	execErr := h.tmpl.ExecuteTemplate(w, "base", data)
	if execErr != nil {
		log.Printf("frontend: template error: %v", execErr)
	}
}
// errorPage builds a PageData carrying only the error message plus the
// navigation chrome (tabs, crumbs, filter box), so the template can still
// render a usable page around the error.
func (h *Handler) errorPage(params QueryParams, msg string) PageData {
	pd := PageData{
		Params:      params,
		Error:       msg,
		RefreshSecs: h.refreshSecs,
	}
	pd.Windows = buildWindowTabs(params)
	pd.GroupBys = buildGroupByTabs(params)
	pd.TorTabs = buildTorTabs(params)
	pd.Breadcrumbs = buildCrumbs(params)
	pd.FilterExpr = FilterExprString(params.Filter)
	pd.ClearFilterURL = params.clearFilterURL()
	return pd
}
// writeRawJSON writes the TopN response as JSON for scripting clients
// (the raw=1 query parameter). The output shape is intentionally small:
// {"collector": ..., "window": ..., "group_by": ..., "entries": [...]}.
func writeRawJSON(w http.ResponseWriter, params QueryParams, resp *pb.TopNResponse) {
	type entry struct {
		Label string `json:"label"`
		Count int64  `json:"count"`
	}
	type out struct {
		Collector string  `json:"collector"`
		Window    string  `json:"window"`
		GroupBy   string  `json:"group_by"`
		Entries   []entry `json:"entries"`
	}
	o := out{
		Collector: resp.Source, // proto field keeps its wire-level name "source"
		Window:    params.WindowS,
		GroupBy:   params.GroupByS,
		Entries:   make([]entry, len(resp.Entries)),
	}
	for i, e := range resp.Entries {
		o.Entries[i] = entry{Label: e.Label, Count: e.Count}
	}
	w.Header().Set("Content-Type", "application/json")
	// Encode can fail (e.g. client disconnected mid-write); don't drop the
	// error silently.
	if err := json.NewEncoder(w).Encode(o); err != nil {
		log.Printf("frontend: raw json encode: %v", err)
	}
}