558 lines
13 KiB
Go
558 lines
13 KiB
Go
package main
|
||
|
||
import (
|
||
"context"
|
||
"encoding/json"
|
||
"fmt"
|
||
"html/template"
|
||
"log"
|
||
"net/http"
|
||
"net/url"
|
||
"strconv"
|
||
"time"
|
||
|
||
st "git.ipng.ch/ipng/nginx-logtail/internal/store"
|
||
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
|
||
)
|
||
|
||
// Handler is the HTTP handler for the frontend.
type Handler struct {
	defaultTarget string // gRPC endpoint used when the request has no ?target= param
	defaultN      int    // TopN size used when the request has no ?n= param
	refreshSecs   int    // auto-refresh interval passed through to the page template
	tmpl          *template.Template // parsed templates; "base" is executed per page
}
|
||
|
||
// Tab is a window or group-by selector link.
type Tab struct {
	Label  string // display text
	URL    string // page URL selecting this tab
	Active bool   // true when this tab matches the current request
}
|
||
|
||
// Crumb is one active filter shown in the breadcrumb strip.
type Crumb struct {
	Text      string // e.g. "website=example.com" or "uri~=^/api"
	RemoveURL string // page URL with this one filter removed
}
|
||
|
||
// TableRow is one row in the TopN result table.
type TableRow struct {
	Rank     int
	Label    string
	Count    int64
	Pct      float64 // 0–100, relative to rank-1 entry
	DrillURL string  // URL that filters on this label and advances the group-by dimension
}
|
||
|
||
// filterState holds the filter fields parsed from URL params.
// Empty string means "filter not set" for every field.
type filterState struct {
	Website   string
	Prefix    string // client prefix filter (f_prefix)
	URI       string
	Status    string // expression: "200", "!=200", ">=400", etc.
	WebsiteRe string // RE2 regex against website
	URIRe     string // RE2 regex against request URI
}
|
||
|
||
// QueryParams holds all parsed URL parameters for one page request.
// The *S fields carry the canonical string form of their enum twin so
// URLs can be rebuilt without reverse-mapping the protobuf values.
type QueryParams struct {
	Target   string // gRPC endpoint to query
	Window   pb.Window
	WindowS  string // e.g. "5m"
	GroupBy  pb.GroupBy
	GroupByS string // e.g. "website"
	N        int    // TopN size
	Filter   filterState
}
|
||
|
||
// PageData is passed to the HTML template.
type PageData struct {
	Params         QueryParams
	Source         string // data source name reported by the backend
	Entries        []TableRow
	TotalCount     int64 // sum of all entry counts
	Sparkline      template.HTML
	Breadcrumbs    []Crumb
	Windows        []Tab
	GroupBys       []Tab
	Targets        []Tab  // source/target picker; empty when only one target available
	RefreshSecs    int    // auto-refresh interval for the page
	Error          string // non-empty renders the error page variant
	FilterExpr     string // current filter serialised to mini-language for the input box
	FilterErr      string // parse error from a submitted q= expression
	ClearFilterURL string // URL that removes all filter params
}
|
||
|
||
// windowSpecs lists the selectable time windows in display order.
// The s field doubles as the ?w= URL value and maps 1:1 onto the
// cases handled by parseWindowString.
var windowSpecs = []struct{ s, label string }{
	{"1m", "1m"}, {"5m", "5m"}, {"15m", "15m"}, {"60m", "60m"}, {"6h", "6h"}, {"24h", "24h"},
}

// groupBySpecs lists the selectable group-by dimensions in display order.
// The s field doubles as the ?by= URL value handled by parseGroupByString.
var groupBySpecs = []struct{ s, label string }{
	{"website", "website"}, {"prefix", "prefix"}, {"uri", "uri"}, {"status", "status"},
}
|
||
|
||
func parseWindowString(s string) (pb.Window, string) {
|
||
switch s {
|
||
case "1m":
|
||
return pb.Window_W1M, "1m"
|
||
case "5m":
|
||
return pb.Window_W5M, "5m"
|
||
case "15m":
|
||
return pb.Window_W15M, "15m"
|
||
case "60m":
|
||
return pb.Window_W60M, "60m"
|
||
case "6h":
|
||
return pb.Window_W6H, "6h"
|
||
case "24h":
|
||
return pb.Window_W24H, "24h"
|
||
default:
|
||
return pb.Window_W5M, "5m"
|
||
}
|
||
}
|
||
|
||
func parseGroupByString(s string) (pb.GroupBy, string) {
|
||
switch s {
|
||
case "prefix":
|
||
return pb.GroupBy_CLIENT_PREFIX, "prefix"
|
||
case "uri":
|
||
return pb.GroupBy_REQUEST_URI, "uri"
|
||
case "status":
|
||
return pb.GroupBy_HTTP_RESPONSE, "status"
|
||
default:
|
||
return pb.GroupBy_WEBSITE, "website"
|
||
}
|
||
}
|
||
|
||
func (h *Handler) parseParams(r *http.Request) QueryParams {
|
||
q := r.URL.Query()
|
||
|
||
target := q.Get("target")
|
||
if target == "" {
|
||
target = h.defaultTarget
|
||
}
|
||
|
||
win, winS := parseWindowString(q.Get("w"))
|
||
grp, grpS := parseGroupByString(q.Get("by"))
|
||
|
||
n := h.defaultN
|
||
if ns := q.Get("n"); ns != "" {
|
||
if v, err := strconv.Atoi(ns); err == nil && v > 0 {
|
||
n = v
|
||
}
|
||
}
|
||
|
||
return QueryParams{
|
||
Target: target,
|
||
Window: win,
|
||
WindowS: winS,
|
||
GroupBy: grp,
|
||
GroupByS: grpS,
|
||
N: n,
|
||
Filter: filterState{
|
||
Website: q.Get("f_website"),
|
||
Prefix: q.Get("f_prefix"),
|
||
URI: q.Get("f_uri"),
|
||
Status: q.Get("f_status"),
|
||
WebsiteRe: q.Get("f_website_re"),
|
||
URIRe: q.Get("f_uri_re"),
|
||
},
|
||
}
|
||
}
|
||
|
||
func buildFilter(f filterState) *pb.Filter {
|
||
if f.Website == "" && f.Prefix == "" && f.URI == "" && f.Status == "" && f.WebsiteRe == "" && f.URIRe == "" {
|
||
return nil
|
||
}
|
||
out := &pb.Filter{}
|
||
if f.Website != "" {
|
||
out.Website = &f.Website
|
||
}
|
||
if f.Prefix != "" {
|
||
out.ClientPrefix = &f.Prefix
|
||
}
|
||
if f.URI != "" {
|
||
out.HttpRequestUri = &f.URI
|
||
}
|
||
if f.Status != "" {
|
||
if n, op, ok := st.ParseStatusExpr(f.Status); ok {
|
||
out.HttpResponse = &n
|
||
out.StatusOp = op
|
||
}
|
||
}
|
||
if f.WebsiteRe != "" {
|
||
out.WebsiteRegex = &f.WebsiteRe
|
||
}
|
||
if f.URIRe != "" {
|
||
out.UriRegex = &f.URIRe
|
||
}
|
||
return out
|
||
}
|
||
|
||
// toValues serialises QueryParams back to URL query values.
|
||
func (p QueryParams) toValues() url.Values {
|
||
v := url.Values{}
|
||
v.Set("target", p.Target)
|
||
v.Set("w", p.WindowS)
|
||
v.Set("by", p.GroupByS)
|
||
v.Set("n", strconv.Itoa(p.N))
|
||
if p.Filter.Website != "" {
|
||
v.Set("f_website", p.Filter.Website)
|
||
}
|
||
if p.Filter.Prefix != "" {
|
||
v.Set("f_prefix", p.Filter.Prefix)
|
||
}
|
||
if p.Filter.URI != "" {
|
||
v.Set("f_uri", p.Filter.URI)
|
||
}
|
||
if p.Filter.Status != "" {
|
||
v.Set("f_status", p.Filter.Status)
|
||
}
|
||
if p.Filter.WebsiteRe != "" {
|
||
v.Set("f_website_re", p.Filter.WebsiteRe)
|
||
}
|
||
if p.Filter.URIRe != "" {
|
||
v.Set("f_uri_re", p.Filter.URIRe)
|
||
}
|
||
return v
|
||
}
|
||
|
||
// buildURL returns a page URL derived from the current params with overrides applied.
|
||
// An override value of "" removes that key from the URL.
|
||
func (p QueryParams) buildURL(overrides map[string]string) string {
|
||
v := p.toValues()
|
||
for k, val := range overrides {
|
||
if val == "" {
|
||
v.Del(k)
|
||
} else {
|
||
v.Set(k, val)
|
||
}
|
||
}
|
||
return "/?" + v.Encode()
|
||
}
|
||
|
||
// clearFilterURL returns a URL with all filter params removed.
|
||
func (p QueryParams) clearFilterURL() string {
|
||
return p.buildURL(map[string]string{
|
||
"f_website": "", "f_prefix": "", "f_uri": "", "f_status": "",
|
||
"f_website_re": "", "f_uri_re": "",
|
||
})
|
||
}
|
||
|
||
// nextGroupBy advances the drill-down dimension hierarchy (cycles at the end).
func nextGroupBy(s string) string {
	successor := map[string]string{
		"website": "prefix",
		"prefix":  "uri",
		"uri":     "status",
	}
	if next, ok := successor[s]; ok {
		return next
	}
	// "status" (and any unknown value) wraps back to "website".
	return "website"
}
|
||
|
||
// groupByFilterKey maps a group-by name to its URL filter parameter.
// Unknown names fall back to the website filter key.
func groupByFilterKey(s string) string {
	switch s {
	case "website", "prefix", "uri", "status":
		return "f_" + s
	}
	return "f_website"
}
|
||
|
||
func (p QueryParams) drillURL(label string) string {
|
||
return p.buildURL(map[string]string{
|
||
groupByFilterKey(p.GroupByS): label,
|
||
"by": nextGroupBy(p.GroupByS),
|
||
})
|
||
}
|
||
|
||
func buildCrumbs(p QueryParams) []Crumb {
|
||
var crumbs []Crumb
|
||
if p.Filter.Website != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "website=" + p.Filter.Website,
|
||
RemoveURL: p.buildURL(map[string]string{"f_website": ""}),
|
||
})
|
||
}
|
||
if p.Filter.Prefix != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "prefix=" + p.Filter.Prefix,
|
||
RemoveURL: p.buildURL(map[string]string{"f_prefix": ""}),
|
||
})
|
||
}
|
||
if p.Filter.URI != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "uri=" + p.Filter.URI,
|
||
RemoveURL: p.buildURL(map[string]string{"f_uri": ""}),
|
||
})
|
||
}
|
||
if p.Filter.Status != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "status=" + p.Filter.Status,
|
||
RemoveURL: p.buildURL(map[string]string{"f_status": ""}),
|
||
})
|
||
}
|
||
if p.Filter.WebsiteRe != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "website~=" + p.Filter.WebsiteRe,
|
||
RemoveURL: p.buildURL(map[string]string{"f_website_re": ""}),
|
||
})
|
||
}
|
||
if p.Filter.URIRe != "" {
|
||
crumbs = append(crumbs, Crumb{
|
||
Text: "uri~=" + p.Filter.URIRe,
|
||
RemoveURL: p.buildURL(map[string]string{"f_uri_re": ""}),
|
||
})
|
||
}
|
||
return crumbs
|
||
}
|
||
|
||
func buildWindowTabs(p QueryParams) []Tab {
|
||
tabs := make([]Tab, len(windowSpecs))
|
||
for i, w := range windowSpecs {
|
||
tabs[i] = Tab{
|
||
Label: w.label,
|
||
URL: p.buildURL(map[string]string{"w": w.s}),
|
||
Active: p.WindowS == w.s,
|
||
}
|
||
}
|
||
return tabs
|
||
}
|
||
|
||
func buildGroupByTabs(p QueryParams) []Tab {
|
||
tabs := make([]Tab, len(groupBySpecs))
|
||
for i, g := range groupBySpecs {
|
||
tabs[i] = Tab{
|
||
Label: "by " + g.label,
|
||
URL: p.buildURL(map[string]string{"by": g.s}),
|
||
Active: p.GroupByS == g.s,
|
||
}
|
||
}
|
||
return tabs
|
||
}
|
||
|
||
// buildTargetTabs builds the source/target picker tabs from a ListTargets response.
|
||
// Returns nil (hide picker) when only one endpoint is reachable.
|
||
func (h *Handler) buildTargetTabs(p QueryParams, lt *pb.ListTargetsResponse) []Tab {
|
||
// "all" always points at the configured aggregator default.
|
||
allTab := Tab{
|
||
Label: "all",
|
||
URL: p.buildURL(map[string]string{"target": h.defaultTarget}),
|
||
Active: p.Target == h.defaultTarget,
|
||
}
|
||
|
||
var collectorTabs []Tab
|
||
if lt != nil {
|
||
for _, t := range lt.Targets {
|
||
addr := t.Addr
|
||
if addr == "" {
|
||
addr = p.Target // collector reporting itself; addr is the current target
|
||
}
|
||
collectorTabs = append(collectorTabs, Tab{
|
||
Label: t.Name,
|
||
URL: p.buildURL(map[string]string{"target": addr}),
|
||
Active: p.Target == addr,
|
||
})
|
||
}
|
||
}
|
||
|
||
// Only render the picker when there is more than one choice.
|
||
if len(collectorTabs) == 0 {
|
||
return nil
|
||
}
|
||
return append([]Tab{allTab}, collectorTabs...)
|
||
}
|
||
|
||
func buildTableRows(entries []*pb.TopNEntry, p QueryParams) ([]TableRow, int64) {
|
||
if len(entries) == 0 {
|
||
return nil, 0
|
||
}
|
||
top := float64(entries[0].Count)
|
||
var total int64
|
||
rows := make([]TableRow, len(entries))
|
||
for i, e := range entries {
|
||
total += e.Count
|
||
pct := 0.0
|
||
if top > 0 {
|
||
pct = float64(e.Count) / top * 100
|
||
}
|
||
rows[i] = TableRow{
|
||
Rank: i + 1,
|
||
Label: e.Label,
|
||
Count: e.Count,
|
||
Pct: pct,
|
||
DrillURL: p.drillURL(e.Label),
|
||
}
|
||
}
|
||
return rows, total
|
||
}
|
||
|
||
// ServeHTTP renders the main page. Flow:
//  1. parse URL params; if a q= filter expression was submitted, parse it
//     and redirect to the canonical URL (PRG pattern) on success;
//  2. dial the selected gRPC target and issue TopN, Trend and ListTargets
//     concurrently under a shared 5-second deadline;
//  3. render JSON (raw=1) or the HTML page.
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	params := h.parseParams(r)

	// Handle filter expression box submission (q= param).
	// FilterExprString / ParseFilterExpr are defined elsewhere in this
	// package — presumably inverse serialisations of filterState; confirm there.
	var filterErr string
	filterExprInput := FilterExprString(params.Filter)
	if qVals, ok := r.URL.Query()["q"]; ok {
		q := ""
		if len(qVals) > 0 {
			q = qVals[0]
		}
		fs, err := ParseFilterExpr(q)
		if err != nil {
			filterErr = err.Error()
			filterExprInput = q // show what the user typed so they can fix it
			// fall through: render page using existing filter params
		} else {
			// Redirect so the filter lives in f_* params, not in q=.
			params.Filter = fs
			http.Redirect(w, r, params.buildURL(nil), http.StatusSeeOther)
			return
		}
	}

	filter := buildFilter(params.Filter)

	// dial is defined elsewhere in this package; it returns the gRPC
	// connection plus a client for the logtail service.
	conn, client, err := dial(params.Target)
	if err != nil {
		h.render(w, http.StatusBadGateway, h.errorPage(params,
			fmt.Sprintf("cannot connect to %s: %v", params.Target, err)))
		return
	}
	defer conn.Close()

	// Shared deadline for all three RPCs; also cancelled when the
	// incoming request is aborted (derived from r.Context()).
	ctx, cancel := context.WithTimeout(r.Context(), 5*time.Second)
	defer cancel()

	type topNResult struct {
		resp *pb.TopNResponse
		err  error
	}
	type trendResult struct {
		resp *pb.TrendResponse
		err  error
	}
	// Capacity 1 so each goroutine's single send never blocks; every
	// channel is also drained below before any early return, so the
	// goroutines cannot leak.
	topNCh := make(chan topNResult, 1)
	trendCh := make(chan trendResult, 1)
	ltCh := make(chan *pb.ListTargetsResponse, 1)

	go func() {
		resp, err := client.TopN(ctx, &pb.TopNRequest{
			Filter:  filter,
			GroupBy: params.GroupBy,
			N:       int32(params.N),
			Window:  params.Window,
		})
		topNCh <- topNResult{resp, err}
	}()
	go func() {
		resp, err := client.Trend(ctx, &pb.TrendRequest{
			Filter: filter,
			Window: params.Window,
		})
		trendCh <- trendResult{resp, err}
	}()
	go func() {
		// ListTargets failure is tolerated: a nil response just hides
		// the target picker (see buildTargetTabs).
		resp, err := client.ListTargets(ctx, &pb.ListTargetsRequest{})
		if err != nil {
			ltCh <- nil
		} else {
			ltCh <- resp
		}
	}()

	tn := <-topNCh
	tr := <-trendCh
	lt := <-ltCh

	// Only the TopN result is essential; its failure aborts the page.
	if tn.err != nil {
		h.render(w, http.StatusBadGateway, h.errorPage(params,
			fmt.Sprintf("error querying %s: %v", params.Target, tn.err)))
		return
	}

	// raw=1: return JSON for scripting
	if r.URL.Query().Get("raw") == "1" {
		writeRawJSON(w, params, tn.resp)
		return
	}

	rows, total := buildTableRows(tn.resp.Entries, params)

	// Trend is best-effort: skip the sparkline on error rather than
	// failing the whole page. renderSparkline is defined elsewhere.
	var sparkline template.HTML
	if tr.err == nil && tr.resp != nil {
		sparkline = renderSparkline(tr.resp.Points)
	}

	data := PageData{
		Params:         params,
		Source:         tn.resp.Source,
		Entries:        rows,
		TotalCount:     total,
		Sparkline:      sparkline,
		Breadcrumbs:    buildCrumbs(params),
		Windows:        buildWindowTabs(params),
		GroupBys:       buildGroupByTabs(params),
		Targets:        h.buildTargetTabs(params, lt),
		RefreshSecs:    h.refreshSecs,
		FilterExpr:     filterExprInput,
		FilterErr:      filterErr,
		ClearFilterURL: params.clearFilterURL(),
	}
	h.render(w, http.StatusOK, data)
}
|
||
|
||
func (h *Handler) render(w http.ResponseWriter, status int, data PageData) {
|
||
w.Header().Set("Content-Type", "text/html; charset=utf-8")
|
||
w.WriteHeader(status)
|
||
if err := h.tmpl.ExecuteTemplate(w, "base", data); err != nil {
|
||
log.Printf("frontend: template error: %v", err)
|
||
}
|
||
}
|
||
|
||
func (h *Handler) errorPage(params QueryParams, msg string) PageData {
|
||
return PageData{
|
||
Params: params,
|
||
Windows: buildWindowTabs(params),
|
||
GroupBys: buildGroupByTabs(params),
|
||
Breadcrumbs: buildCrumbs(params),
|
||
RefreshSecs: h.refreshSecs,
|
||
Error: msg,
|
||
FilterExpr: FilterExprString(params.Filter),
|
||
ClearFilterURL: params.clearFilterURL(),
|
||
}
|
||
}
|
||
|
||
func writeRawJSON(w http.ResponseWriter, params QueryParams, resp *pb.TopNResponse) {
|
||
type entry struct {
|
||
Label string `json:"label"`
|
||
Count int64 `json:"count"`
|
||
}
|
||
type out struct {
|
||
Source string `json:"source"`
|
||
Window string `json:"window"`
|
||
GroupBy string `json:"group_by"`
|
||
Entries []entry `json:"entries"`
|
||
}
|
||
o := out{
|
||
Source: resp.Source,
|
||
Window: params.WindowS,
|
||
GroupBy: params.GroupByS,
|
||
Entries: make([]entry, len(resp.Entries)),
|
||
}
|
||
for i, e := range resp.Entries {
|
||
o.Entries[i] = entry{Label: e.Label, Count: e.Count}
|
||
}
|
||
w.Header().Set("Content-Type", "application/json")
|
||
json.NewEncoder(w).Encode(o)
|
||
}
|