Implement filter in status, website and uri in CLI and Frontend

This commit is contained in:
2026-03-14 21:59:30 +01:00
parent 2962590a74
commit afa65a2b29
15 changed files with 1159 additions and 123 deletions

View File

@@ -52,6 +52,7 @@ nginx-logtail/
├── frontend/ ├── frontend/
│ ├── main.go │ ├── main.go
│ ├── handler.go # URL param parsing, concurrent TopN+Trend, template exec │ ├── handler.go # URL param parsing, concurrent TopN+Trend, template exec
│ ├── filter.go # ParseFilterExpr / FilterExprString mini filter language
│ ├── client.go # gRPC dial helper │ ├── client.go # gRPC dial helper
│ ├── sparkline.go # TrendPoints → inline SVG polyline │ ├── sparkline.go # TrendPoints → inline SVG polyline
│ ├── format.go # fmtCount (space thousands separator) │ ├── format.go # fmtCount (space thousands separator)
@@ -145,11 +146,16 @@ and does not change any existing interface.
## Protobuf API (`proto/logtail.proto`) ## Protobuf API (`proto/logtail.proto`)
```protobuf ```protobuf
enum StatusOp { EQ = 0; NE = 1; GT = 2; GE = 3; LT = 4; LE = 5; }
message Filter { message Filter {
optional string website = 1; optional string website = 1;
optional string client_prefix = 2; optional string client_prefix = 2;
optional string http_request_uri = 3; optional string http_request_uri = 3;
optional int32 http_response = 4; optional int32 http_response = 4;
StatusOp status_op = 5; // comparison operator for http_response
optional string website_regex = 6; // RE2 regex against website
optional string uri_regex = 7; // RE2 regex against http_request_uri
} }
enum GroupBy { WEBSITE = 0; CLIENT_PREFIX = 1; REQUEST_URI = 2; HTTP_RESPONSE = 3; } enum GroupBy { WEBSITE = 0; CLIENT_PREFIX = 1; REQUEST_URI = 2; HTTP_RESPONSE = 3; }
@@ -262,8 +268,16 @@ service LogtailService {
### handler.go ### handler.go
- All filter state in the **URL query string**: `w` (window), `by` (group_by), `f_website`, - All filter state in the **URL query string**: `w` (window), `by` (group_by), `f_website`,
`f_prefix`, `f_uri`, `f_status`, `n`, `target`. No server-side session — URLs are shareable `f_prefix`, `f_uri`, `f_status`, `f_website_re`, `f_uri_re`, `n`, `target`. No server-side
and bookmarkable; multiple operators see independent views. session — URLs are shareable and bookmarkable; multiple operators see independent views.
- **Filter expression box**: a `q=` parameter carries a mini filter language
(`status>=400 AND website~=gouda.* AND uri~=^/api/`). On submission the handler parses it
via `ParseFilterExpr` and redirects to the canonical URL with individual `f_*` params; `q=`
never appears in the final URL. Parse errors re-render the current page with an inline message.
- **Status expressions**: `f_status` accepts `200`, `!=200`, `>=400`, `<500`, etc. — parsed by
`store.ParseStatusExpr` into `(value, StatusOp)` for the filter protobuf.
- **Regex filters**: `f_website_re` and `f_uri_re` hold RE2 patterns; compiled once per request
into `store.CompiledFilter` before the query-loop iteration. Invalid regexes match nothing.
- `TopN` and `Trend` RPCs issued **concurrently** (both with a 5 s deadline); page renders with - `TopN` and `Trend` RPCs issued **concurrently** (both with a 5 s deadline); page renders with
whatever completes. Trend failure suppresses the sparkline without erroring the page. whatever completes. Trend failure suppresses the sparkline without erroring the page.
- **Drilldown**: clicking a table row adds the current dimension's filter and advances `by` through - **Drilldown**: clicking a table row adds the current dimension's filter and advances `by` through
@@ -304,7 +318,9 @@ logtail-cli stream [flags] live snapshot feed (runs until Ctrl-C, auto-reconn
| `--website` | — | Filter: website | | `--website` | — | Filter: website |
| `--prefix` | — | Filter: client prefix | | `--prefix` | — | Filter: client prefix |
| `--uri` | — | Filter: request URI | | `--uri` | — | Filter: request URI |
| `--status` | — | Filter: HTTP status code | | `--status` | — | Filter: HTTP status expression (`200`, `!=200`, `>=400`, `<500`, …) |
| `--website-re`| — | Filter: RE2 regex against website |
| `--uri-re` | — | Filter: RE2 regex against request URI |
**`topn` only**: `--n 10`, `--window 5m`, `--group-by website` **`topn` only**: `--n 10`, `--window 5m`, `--group-by website`
@@ -345,3 +361,6 @@ with a non-zero code on gRPC error.
| CLI default: human-readable table | Operator-friendly by default; `--json` opt-in for scripting | | CLI default: human-readable table | Operator-friendly by default; `--json` opt-in for scripting |
| CLI multi-target fan-out | Compare a collector vs. aggregator, or two collectors, in one command | | CLI multi-target fan-out | Compare a collector vs. aggregator, or two collectors, in one command |
| CLI uses stdlib `flag`, no framework | Four subcommands don't justify a dependency | | CLI uses stdlib `flag`, no framework | Four subcommands don't justify a dependency |
| Status filter as expression string (`!=200`, `>=400`) | Operator-friendly; parsed once at query boundary, encoded as `(int32, StatusOp)` in proto |
| Regex filters compiled once per query (`CompiledFilter`) | Up to 288 × 5 000 per-entry calls — compiling per-entry would dominate query latency |
| Filter expression box (`q=`) redirects to canonical URL | Filter state stays in individual `f_*` params; URLs remain shareable and bookmarkable |

View File

@@ -92,6 +92,7 @@ func (c *Cache) mergeFineBuckets(now time.Time) st.Snapshot {
// QueryTopN answers a TopN request from the ring buffers. // QueryTopN answers a TopN request from the ring buffers.
func (c *Cache) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window pb.Window) []st.Entry { func (c *Cache) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window pb.Window) []st.Entry {
cf := st.CompileFilter(filter)
c.mu.RLock() c.mu.RLock()
defer c.mu.RUnlock() defer c.mu.RUnlock()
@@ -101,7 +102,7 @@ func (c *Cache) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window p
idx := (buckets.Head - 1 - i + buckets.Size) % buckets.Size idx := (buckets.Head - 1 - i + buckets.Size) % buckets.Size
for _, e := range buckets.Ring[idx].Entries { for _, e := range buckets.Ring[idx].Entries {
t := st.LabelTuple(e.Label) t := st.LabelTuple(e.Label)
if !st.MatchesFilter(t, filter) { if !st.MatchesFilter(t, cf) {
continue continue
} }
grouped[st.DimensionLabel(t, groupBy)] += e.Count grouped[st.DimensionLabel(t, groupBy)] += e.Count
@@ -112,6 +113,7 @@ func (c *Cache) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window p
// QueryTrend answers a Trend request from the ring buffers. // QueryTrend answers a Trend request from the ring buffers.
func (c *Cache) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint { func (c *Cache) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint {
cf := st.CompileFilter(filter)
c.mu.RLock() c.mu.RLock()
defer c.mu.RUnlock() defer c.mu.RUnlock()
@@ -122,7 +124,7 @@ func (c *Cache) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint
snap := buckets.Ring[idx] snap := buckets.Ring[idx]
var total int64 var total int64
for _, e := range snap.Entries { for _, e := range snap.Entries {
if st.MatchesFilter(st.LabelTuple(e.Label), filter) { if st.MatchesFilter(st.LabelTuple(e.Label), cf) {
total += e.Count total += e.Count
} }
} }

View File

@@ -4,9 +4,9 @@ import (
"flag" "flag"
"fmt" "fmt"
"os" "os"
"strconv"
"strings" "strings"
st "git.ipng.ch/ipng/nginx-logtail/internal/store"
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb" pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
) )
@@ -17,7 +17,9 @@ type sharedFlags struct {
website string website string
prefix string prefix string
uri string uri string
status string // kept as string so we can tell "unset" from "0" status string // expression: "200", "!=200", ">=400", etc.
websiteRe string // RE2 regex against website
uriRe string // RE2 regex against request URI
} }
// bindShared registers the shared flags on fs and returns a pointer to the // bindShared registers the shared flags on fs and returns a pointer to the
@@ -26,10 +28,12 @@ func bindShared(fs *flag.FlagSet) (*sharedFlags, *string) {
sf := &sharedFlags{} sf := &sharedFlags{}
target := fs.String("target", "localhost:9090", "comma-separated host:port list") target := fs.String("target", "localhost:9090", "comma-separated host:port list")
fs.BoolVar(&sf.jsonOut, "json", false, "emit newline-delimited JSON") fs.BoolVar(&sf.jsonOut, "json", false, "emit newline-delimited JSON")
fs.StringVar(&sf.website, "website", "", "filter: website") fs.StringVar(&sf.website, "website", "", "filter: exact website match")
fs.StringVar(&sf.prefix, "prefix", "", "filter: client prefix") fs.StringVar(&sf.prefix, "prefix", "", "filter: exact client prefix match")
fs.StringVar(&sf.uri, "uri", "", "filter: request URI") fs.StringVar(&sf.uri, "uri", "", "filter: exact request URI match")
fs.StringVar(&sf.status, "status", "", "filter: HTTP status code (integer)") fs.StringVar(&sf.status, "status", "", "filter: HTTP status expression (200, !=200, >=400, <500, …)")
fs.StringVar(&sf.websiteRe, "website-re", "", "filter: RE2 regex against website")
fs.StringVar(&sf.uriRe, "uri-re", "", "filter: RE2 regex against request URI")
return sf, target return sf, target
} }
@@ -52,7 +56,7 @@ func parseTargets(s string) []string {
} }
func buildFilter(sf *sharedFlags) *pb.Filter { func buildFilter(sf *sharedFlags) *pb.Filter {
if sf.website == "" && sf.prefix == "" && sf.uri == "" && sf.status == "" { if sf.website == "" && sf.prefix == "" && sf.uri == "" && sf.status == "" && sf.websiteRe == "" && sf.uriRe == "" {
return nil return nil
} }
f := &pb.Filter{} f := &pb.Filter{}
@@ -66,13 +70,19 @@ func buildFilter(sf *sharedFlags) *pb.Filter {
f.HttpRequestUri = &sf.uri f.HttpRequestUri = &sf.uri
} }
if sf.status != "" { if sf.status != "" {
n, err := strconv.Atoi(sf.status) n, op, ok := st.ParseStatusExpr(sf.status)
if err != nil { if !ok {
fmt.Fprintf(os.Stderr, "--status: %v\n", err) fmt.Fprintf(os.Stderr, "--status: invalid expression %q; use e.g. 200, !=200, >=400, <500\n", sf.status)
os.Exit(1) os.Exit(1)
} }
n32 := int32(n) f.HttpResponse = &n
f.HttpResponse = &n32 f.StatusOp = op
}
if sf.websiteRe != "" {
f.WebsiteRegex = &sf.websiteRe
}
if sf.uriRe != "" {
f.UriRegex = &sf.uriRe
} }
return f return f
} }

View File

@@ -18,7 +18,9 @@ Subcommand flags (all subcommands):
--website STRING filter: exact website match --website STRING filter: exact website match
--prefix STRING filter: exact client-prefix match --prefix STRING filter: exact client-prefix match
--uri STRING filter: exact request URI match --uri STRING filter: exact request URI match
--status INT filter: exact HTTP status code --status EXPR filter: HTTP status expression (200, !=200, >=400, <500, …)
--website-re REGEX filter: RE2 regex against website
--uri-re REGEX filter: RE2 regex against request URI
topn flags: topn flags:
--n INT number of entries (default 10) --n INT number of entries (default 10)

View File

@@ -97,6 +97,7 @@ func (s *Store) mergeFineBuckets(now time.Time) st.Snapshot {
// QueryTopN answers a TopN request from the ring buffers. // QueryTopN answers a TopN request from the ring buffers.
func (s *Store) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window pb.Window) []st.Entry { func (s *Store) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window pb.Window) []st.Entry {
cf := st.CompileFilter(filter)
s.mu.RLock() s.mu.RLock()
defer s.mu.RUnlock() defer s.mu.RUnlock()
@@ -106,7 +107,7 @@ func (s *Store) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window p
idx := (buckets.Head - 1 - i + buckets.Size) % buckets.Size idx := (buckets.Head - 1 - i + buckets.Size) % buckets.Size
for _, e := range buckets.Ring[idx].Entries { for _, e := range buckets.Ring[idx].Entries {
t := st.LabelTuple(e.Label) t := st.LabelTuple(e.Label)
if !st.MatchesFilter(t, filter) { if !st.MatchesFilter(t, cf) {
continue continue
} }
grouped[st.DimensionLabel(t, groupBy)] += e.Count grouped[st.DimensionLabel(t, groupBy)] += e.Count
@@ -117,6 +118,7 @@ func (s *Store) QueryTopN(filter *pb.Filter, groupBy pb.GroupBy, n int, window p
// QueryTrend answers a Trend request from the ring buffers. // QueryTrend answers a Trend request from the ring buffers.
func (s *Store) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint { func (s *Store) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint {
cf := st.CompileFilter(filter)
s.mu.RLock() s.mu.RLock()
defer s.mu.RUnlock() defer s.mu.RUnlock()
@@ -127,7 +129,7 @@ func (s *Store) QueryTrend(filter *pb.Filter, window pb.Window) []st.TrendPoint
snap := buckets.Ring[idx] snap := buckets.Ring[idx]
var total int64 var total int64
for _, e := range snap.Entries { for _, e := range snap.Entries {
if st.MatchesFilter(st.LabelTuple(e.Label), filter) { if st.MatchesFilter(st.LabelTuple(e.Label), cf) {
total += e.Count total += e.Count
} }
} }

175
cmd/frontend/filter.go Normal file
View File

@@ -0,0 +1,175 @@
package main
import (
"fmt"
"regexp"
"strings"
st "git.ipng.ch/ipng/nginx-logtail/internal/store"
)
// andRe splits a filter expression on AND (case-insensitive, surrounded by whitespace).
// NOTE(review): splitting happens before quote stripping (see applyTerm/unquote),
// so a quoted value containing " and " is also split — confirm this is acceptable.
var andRe = regexp.MustCompile(`(?i)\s+and\s+`)
// ParseFilterExpr parses a mini filter expression into a filterState.
//
// Syntax: TERM [AND TERM ...]
//
// Terms:
//
//	status=200 status!=200 status>=400 status>400 status<=500 status<500
//	website=example.com — exact match
//	website~=gouda.* — RE2 regex
//	uri=/api/v1/ — exact match
//	uri~=^/api/.* — RE2 regex
//	prefix=1.2.3.0/24 — exact match
//
// Values may be enclosed in double or single quotes.
func ParseFilterExpr(s string) (filterState, error) {
	var fs filterState
	trimmed := strings.TrimSpace(s)
	if trimmed == "" {
		return fs, nil
	}
	// Split on AND and feed each non-empty term to applyTerm; the first
	// bad term aborts the whole parse with an empty state.
	for _, raw := range andRe.Split(trimmed, -1) {
		term := strings.TrimSpace(raw)
		if term == "" {
			continue
		}
		if err := applyTerm(term, &fs); err != nil {
			return filterState{}, err
		}
	}
	return fs, nil
}
// applyTerm parses a single "field op value" term into fs.
func applyTerm(term string, fs *filterState) error {
	// Locate the first operator character (~, !, >, <, =) so the field name
	// can be split off; opIdx == 0 would mean an empty field name.
	opIdx := strings.IndexAny(term, "~!><=")
	if opIdx <= 0 {
		return fmt.Errorf("invalid term %q: expected field=value, field>=value, field~=regex, etc.", term)
	}
	field := strings.ToLower(strings.TrimSpace(term[:opIdx]))
	rest := term[opIdx:]

	// Two-character operators are tried before their one-character prefixes
	// so ">=" is never mistaken for ">".
	var op, value string
	for _, candidate := range []string{"~=", "!=", ">=", "<=", ">", "<", "="} {
		if strings.HasPrefix(rest, candidate) {
			op, value = candidate, rest[len(candidate):]
			break
		}
	}
	if op == "" {
		return fmt.Errorf("unrecognised operator in %q", term)
	}
	value = unquote(strings.TrimSpace(value))

	switch field {
	case "status":
		if op == "~=" {
			return fmt.Errorf("status does not support ~=; use =, !=, >=, >, <=, <")
		}
		// ParseStatusExpr accepts a bare "200", so "=" drops the operator.
		expr := value
		if op != "=" {
			expr = op + value
		}
		if _, _, ok := st.ParseStatusExpr(expr); !ok {
			return fmt.Errorf("invalid status expression %q", expr)
		}
		fs.Status = expr
	case "website":
		switch op {
		case "=":
			fs.Website = value
		case "~=":
			fs.WebsiteRe = value
		default:
			return fmt.Errorf("website only supports = and ~=, not %q", op)
		}
	case "uri":
		switch op {
		case "=":
			fs.URI = value
		case "~=":
			fs.URIRe = value
		default:
			return fmt.Errorf("uri only supports = and ~=, not %q", op)
		}
	case "prefix":
		if op != "=" {
			return fmt.Errorf("prefix only supports =, not %q", op)
		}
		fs.Prefix = value
	default:
		return fmt.Errorf("unknown field %q; valid: status, website, uri, prefix", field)
	}
	return nil
}
// unquote strips surrounding double or single quotes.
//
// Inside a double-quoted value, \" escapes are collapsed back to a plain
// double quote. This is the inverse of quoteMaybe, which escapes embedded
// double quotes when serialising — without the unescape here, any value
// containing a double quote fails to round-trip through
// FilterExprString → ParseFilterExpr.
func unquote(s string) string {
	if len(s) >= 2 {
		last := len(s) - 1
		switch {
		case s[0] == '"' && s[last] == '"':
			return strings.ReplaceAll(s[1:last], `\"`, `"`)
		case s[0] == '\'' && s[last] == '\'':
			return s[1:last]
		}
	}
	return s
}
// FilterExprString serialises a filterState back into the mini filter expression language.
// Returns "" when no filters are active.
func FilterExprString(f filterState) string {
	terms := make([]string, 0, 6)
	// add appends "prefix + quoted value" for non-empty values, preserving
	// the fixed serialisation order below.
	add := func(prefix, value string) {
		if value != "" {
			terms = append(terms, prefix+quoteMaybe(value))
		}
	}
	add("website=", f.Website)
	add("website~=", f.WebsiteRe)
	add("prefix=", f.Prefix)
	add("uri=", f.URI)
	add("uri~=", f.URIRe)
	if f.Status != "" {
		terms = append(terms, statusTermStr(f.Status))
	}
	return strings.Join(terms, " AND ")
}
// statusTermStr converts a stored status expression (">=400", "200") to a
// full filter term ("status>=400", "status=200"). Returns "" for an empty
// expression.
func statusTermStr(expr string) string {
	if expr == "" {
		return ""
	}
	// The empty case is handled above, so expr[0] is always safe here
	// (the original re-checked len(expr) > 0 redundantly).
	switch expr[0] {
	case '!', '>', '<':
		// Expression already carries its comparison operator.
		return "status" + expr
	}
	return "status=" + expr
}
// quoteMaybe wraps s in double quotes when it contains spaces or quote characters.
func quoteMaybe(s string) string {
	if !strings.ContainsAny(s, " \t\"'") {
		return s
	}
	// Escape embedded double quotes before wrapping.
	escaped := strings.ReplaceAll(s, `"`, `\"`)
	return `"` + escaped + `"`
}

260
cmd/frontend/filter_test.go Normal file
View File

@@ -0,0 +1,260 @@
package main
import "testing"
// --- ParseFilterExpr ---
func TestParseEmpty(t *testing.T) {
	got, err := ParseFilterExpr("")
	if err != nil {
		t.Fatal(err)
	}
	var zero filterState
	if got != zero {
		t.Fatalf("expected empty filterState, got %+v", got)
	}
}

func TestParseStatusEQ(t *testing.T) {
	got, err := ParseFilterExpr("status=200")
	if err != nil {
		t.Fatal(err)
	}
	if want := "200"; got.Status != want {
		t.Fatalf("Status=%q", got.Status)
	}
}

func TestParseStatusGE(t *testing.T) {
	got, err := ParseFilterExpr("status>=400")
	if err != nil {
		t.Fatal(err)
	}
	if want := ">=400"; got.Status != want {
		t.Fatalf("Status=%q", got.Status)
	}
}

func TestParseStatusNE(t *testing.T) {
	got, err := ParseFilterExpr("status!=200")
	if err != nil {
		t.Fatal(err)
	}
	if want := "!=200"; got.Status != want {
		t.Fatalf("Status=%q", got.Status)
	}
}

func TestParseStatusLT(t *testing.T) {
	got, err := ParseFilterExpr("status<500")
	if err != nil {
		t.Fatal(err)
	}
	if want := "<500"; got.Status != want {
		t.Fatalf("Status=%q", got.Status)
	}
}
func TestParseWebsiteExact(t *testing.T) {
	got, err := ParseFilterExpr("website=example.com")
	if err != nil {
		t.Fatal(err)
	}
	if want := "example.com"; got.Website != want {
		t.Fatalf("Website=%q", got.Website)
	}
}

func TestParseWebsiteRegex(t *testing.T) {
	got, err := ParseFilterExpr("website~=gouda.*")
	if err != nil {
		t.Fatal(err)
	}
	if want := "gouda.*"; got.WebsiteRe != want {
		t.Fatalf("WebsiteRe=%q", got.WebsiteRe)
	}
}

func TestParseURIExact(t *testing.T) {
	got, err := ParseFilterExpr("uri=/api/v1/")
	if err != nil {
		t.Fatal(err)
	}
	if want := "/api/v1/"; got.URI != want {
		t.Fatalf("URI=%q", got.URI)
	}
}

func TestParseURIRegex(t *testing.T) {
	got, err := ParseFilterExpr(`uri~=^/api/.*`)
	if err != nil {
		t.Fatal(err)
	}
	if want := `^/api/.*`; got.URIRe != want {
		t.Fatalf("URIRe=%q", got.URIRe)
	}
}

func TestParsePrefix(t *testing.T) {
	got, err := ParseFilterExpr("prefix=1.2.3.0/24")
	if err != nil {
		t.Fatal(err)
	}
	if want := "1.2.3.0/24"; got.Prefix != want {
		t.Fatalf("Prefix=%q", got.Prefix)
	}
}
func TestParseCombinedAND(t *testing.T) {
	got, err := ParseFilterExpr(`status>=400 AND website~=gouda.* AND uri~="^/.*"`)
	if err != nil {
		t.Fatal(err)
	}
	switch {
	case got.Status != ">=400":
		t.Fatalf("Status=%q", got.Status)
	case got.WebsiteRe != "gouda.*":
		t.Fatalf("WebsiteRe=%q", got.WebsiteRe)
	case got.URIRe != `^/.*`: // quotes stripped
		t.Fatalf("URIRe=%q", got.URIRe)
	}
}

func TestParseANDCaseInsensitive(t *testing.T) {
	got, err := ParseFilterExpr("status>=400 and website=example.com")
	if err != nil {
		t.Fatal(err)
	}
	if got.Status != ">=400" || got.Website != "example.com" {
		t.Fatalf("%+v", got)
	}
}

func TestParseQuotedValue(t *testing.T) {
	got, err := ParseFilterExpr(`website="example.com"`)
	if err != nil {
		t.Fatal(err)
	}
	if want := "example.com"; got.Website != want {
		t.Fatalf("Website=%q", got.Website)
	}
}
func TestParseUnknownField(t *testing.T) {
	if _, err := ParseFilterExpr("host=foo"); err == nil {
		t.Fatal("expected error for unknown field")
	}
}

func TestParseStatusRegexRejected(t *testing.T) {
	if _, err := ParseFilterExpr("status~=4.."); err == nil {
		t.Fatal("expected error: status does not support ~=")
	}
}

func TestParseInvalidStatusExpr(t *testing.T) {
	if _, err := ParseFilterExpr("status>=abc"); err == nil {
		t.Fatal("expected error for non-numeric status")
	}
}

func TestParseMissingOperator(t *testing.T) {
	if _, err := ParseFilterExpr("status400"); err == nil {
		t.Fatal("expected error for missing operator")
	}
}

func TestParseWebsiteUnsupportedOp(t *testing.T) {
	if _, err := ParseFilterExpr("website>=example.com"); err == nil {
		t.Fatal("expected error: website does not support >=")
	}
}
// --- FilterExprString ---
func TestFilterExprStringEmpty(t *testing.T) {
	if got := FilterExprString(filterState{}); got != "" {
		t.Fatalf("expected empty, got %q", got)
	}
}

func TestFilterExprStringStatus(t *testing.T) {
	if got := FilterExprString(filterState{Status: ">=400"}); got != "status>=400" {
		t.Fatalf("got %q", got)
	}
}

func TestFilterExprStringStatusPlain(t *testing.T) {
	if got := FilterExprString(filterState{Status: "200"}); got != "status=200" {
		t.Fatalf("got %q", got)
	}
}

func TestFilterExprStringWebsite(t *testing.T) {
	if got := FilterExprString(filterState{Website: "example.com"}); got != "website=example.com" {
		t.Fatalf("got %q", got)
	}
}

func TestFilterExprStringWebsiteRegex(t *testing.T) {
	if got := FilterExprString(filterState{WebsiteRe: "gouda.*"}); got != "website~=gouda.*" {
		t.Fatalf("got %q", got)
	}
}

func TestFilterExprStringCombined(t *testing.T) {
	want := filterState{Status: ">=400", WebsiteRe: "gouda.*", URIRe: `^/api/`}
	expr := FilterExprString(want)
	// Should contain all three parts joined by AND
	if expr == "" {
		t.Fatal("expected non-empty")
	}
	// Round-trip: parse back
	got, err := ParseFilterExpr(expr)
	if err != nil {
		t.Fatalf("round-trip parse error: %v", err)
	}
	if got.Status != want.Status || got.WebsiteRe != want.WebsiteRe || got.URIRe != want.URIRe {
		t.Fatalf("round-trip mismatch: %+v vs %+v", want, got)
	}
}

func TestFilterExprStringQuotesValue(t *testing.T) {
	if got := FilterExprString(filterState{Website: "has space"}); got != `website="has space"` {
		t.Fatalf("got %q", got)
	}
}
func TestFilterExprRoundTrip(t *testing.T) {
	for _, want := range []filterState{
		{Status: "!=200"},
		{Status: "<500"},
		{Website: "example.com"},
		{WebsiteRe: "gouda.*"},
		{URI: "/api/v1/"},
		{URIRe: `^/api/`},
		{Prefix: "1.2.3.0/24"},
		{Status: ">=400", WebsiteRe: "gouda.*"},
	} {
		expr := FilterExprString(want)
		got, err := ParseFilterExpr(expr)
		if err != nil {
			t.Errorf("round-trip parse error for %+v → %q: %v", want, expr, err)
			continue
		}
		if got != want {
			t.Errorf("round-trip mismatch: %+v → %q → %+v", want, expr, got)
		}
	}
}

View File

@@ -11,6 +11,7 @@ import (
"strconv" "strconv"
"time" "time"
st "git.ipng.ch/ipng/nginx-logtail/internal/store"
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb" pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
) )
@@ -44,12 +45,14 @@ type TableRow struct {
DrillURL string DrillURL string
} }
// filterState holds the four optional filter fields parsed from URL params. // filterState holds the filter fields parsed from URL params.
type filterState struct { type filterState struct {
Website string Website string
Prefix string Prefix string
URI string URI string
Status string // kept as string so empty means "unset" Status string // expression: "200", "!=200", ">=400", etc.
WebsiteRe string // RE2 regex against website
URIRe string // RE2 regex against request URI
} }
// QueryParams holds all parsed URL parameters for one page request. // QueryParams holds all parsed URL parameters for one page request.
@@ -75,6 +78,9 @@ type PageData struct {
GroupBys []Tab GroupBys []Tab
RefreshSecs int RefreshSecs int
Error string Error string
FilterExpr string // current filter serialised to mini-language for the input box
FilterErr string // parse error from a submitted q= expression
ClearFilterURL string // URL that removes all filter params
} }
var windowSpecs = []struct{ s, label string }{ var windowSpecs = []struct{ s, label string }{
@@ -147,12 +153,14 @@ func (h *Handler) parseParams(r *http.Request) QueryParams {
Prefix: q.Get("f_prefix"), Prefix: q.Get("f_prefix"),
URI: q.Get("f_uri"), URI: q.Get("f_uri"),
Status: q.Get("f_status"), Status: q.Get("f_status"),
WebsiteRe: q.Get("f_website_re"),
URIRe: q.Get("f_uri_re"),
}, },
} }
} }
func buildFilter(f filterState) *pb.Filter { func buildFilter(f filterState) *pb.Filter {
if f.Website == "" && f.Prefix == "" && f.URI == "" && f.Status == "" { if f.Website == "" && f.Prefix == "" && f.URI == "" && f.Status == "" && f.WebsiteRe == "" && f.URIRe == "" {
return nil return nil
} }
out := &pb.Filter{} out := &pb.Filter{}
@@ -166,11 +174,17 @@ func buildFilter(f filterState) *pb.Filter {
out.HttpRequestUri = &f.URI out.HttpRequestUri = &f.URI
} }
if f.Status != "" { if f.Status != "" {
if n, err := strconv.Atoi(f.Status); err == nil { if n, op, ok := st.ParseStatusExpr(f.Status); ok {
n32 := int32(n) out.HttpResponse = &n
out.HttpResponse = &n32 out.StatusOp = op
} }
} }
if f.WebsiteRe != "" {
out.WebsiteRegex = &f.WebsiteRe
}
if f.URIRe != "" {
out.UriRegex = &f.URIRe
}
return out return out
} }
@@ -193,6 +207,12 @@ func (p QueryParams) toValues() url.Values {
if p.Filter.Status != "" { if p.Filter.Status != "" {
v.Set("f_status", p.Filter.Status) v.Set("f_status", p.Filter.Status)
} }
if p.Filter.WebsiteRe != "" {
v.Set("f_website_re", p.Filter.WebsiteRe)
}
if p.Filter.URIRe != "" {
v.Set("f_uri_re", p.Filter.URIRe)
}
return v return v
} }
@@ -210,6 +230,14 @@ func (p QueryParams) buildURL(overrides map[string]string) string {
return "/?" + v.Encode() return "/?" + v.Encode()
} }
// clearFilterURL returns a URL with all filter params removed.
func (p QueryParams) clearFilterURL() string {
return p.buildURL(map[string]string{
"f_website": "", "f_prefix": "", "f_uri": "", "f_status": "",
"f_website_re": "", "f_uri_re": "",
})
}
// nextGroupBy advances the drill-down dimension hierarchy (cycles at the end). // nextGroupBy advances the drill-down dimension hierarchy (cycles at the end).
func nextGroupBy(s string) string { func nextGroupBy(s string) string {
switch s { switch s {
@@ -273,6 +301,18 @@ func buildCrumbs(p QueryParams) []Crumb {
RemoveURL: p.buildURL(map[string]string{"f_status": ""}), RemoveURL: p.buildURL(map[string]string{"f_status": ""}),
}) })
} }
if p.Filter.WebsiteRe != "" {
crumbs = append(crumbs, Crumb{
Text: "website~=" + p.Filter.WebsiteRe,
RemoveURL: p.buildURL(map[string]string{"f_website_re": ""}),
})
}
if p.Filter.URIRe != "" {
crumbs = append(crumbs, Crumb{
Text: "uri~=" + p.Filter.URIRe,
RemoveURL: p.buildURL(map[string]string{"f_uri_re": ""}),
})
}
return crumbs return crumbs
} }
@@ -326,6 +366,27 @@ func buildTableRows(entries []*pb.TopNEntry, p QueryParams) ([]TableRow, int64)
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
params := h.parseParams(r) params := h.parseParams(r)
// Handle filter expression box submission (q= param).
var filterErr string
filterExprInput := FilterExprString(params.Filter)
if qVals, ok := r.URL.Query()["q"]; ok {
q := ""
if len(qVals) > 0 {
q = qVals[0]
}
fs, err := ParseFilterExpr(q)
if err != nil {
filterErr = err.Error()
filterExprInput = q // show what the user typed so they can fix it
// fall through: render page using existing filter params
} else {
params.Filter = fs
http.Redirect(w, r, params.buildURL(nil), http.StatusSeeOther)
return
}
}
filter := buildFilter(params.Filter) filter := buildFilter(params.Filter)
conn, client, err := dial(params.Target) conn, client, err := dial(params.Target)
@@ -399,6 +460,9 @@ func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
Windows: buildWindowTabs(params), Windows: buildWindowTabs(params),
GroupBys: buildGroupByTabs(params), GroupBys: buildGroupByTabs(params),
RefreshSecs: h.refreshSecs, RefreshSecs: h.refreshSecs,
FilterExpr: filterExprInput,
FilterErr: filterErr,
ClearFilterURL: params.clearFilterURL(),
} }
h.render(w, http.StatusOK, data) h.render(w, http.StatusOK, data)
} }
@@ -419,6 +483,8 @@ func (h *Handler) errorPage(params QueryParams, msg string) PageData {
Breadcrumbs: buildCrumbs(params), Breadcrumbs: buildCrumbs(params),
RefreshSecs: h.refreshSecs, RefreshSecs: h.refreshSecs,
Error: msg, Error: msg,
FilterExpr: FilterExprString(params.Filter),
ClearFilterURL: params.clearFilterURL(),
} }
} }

View File

@@ -34,6 +34,12 @@ a:hover { text-decoration: underline; }
.error { color: #c00; border: 1px solid #fbb; background: #fff5f5; padding: 0.7em 1em; margin: 1em 0; border-radius: 3px; } .error { color: #c00; border: 1px solid #fbb; background: #fff5f5; padding: 0.7em 1em; margin: 1em 0; border-radius: 3px; }
.nodata { color: #999; margin: 2em 0; font-style: italic; } .nodata { color: #999; margin: 2em 0; font-style: italic; }
footer { margin-top: 2em; padding-top: 0.6em; border-top: 1px solid #e0e0e0; font-size: 0.8em; color: #999; } footer { margin-top: 2em; padding-top: 0.6em; border-top: 1px solid #e0e0e0; font-size: 0.8em; color: #999; }
.filter-form { display: flex; gap: 0.4em; align-items: center; margin-bottom: 0.7em; }
.filter-input { flex: 1; font-family: monospace; font-size: 13px; padding: 0.25em 0.5em; border: 1px solid #aaa; }
.filter-form button { padding: 0.25em 0.8em; border: 1px solid #aaa; background: #f4f4f4; cursor: pointer; font-family: monospace; }
.filter-form button:hover { background: #e8e8e8; }
.filter-form .clear { color: #c00; font-size: 0.9em; white-space: nowrap; }
.filter-err { color: #c00; font-size: 0.85em; margin: -0.3em 0 0.6em; }
</style> </style>
</head> </head>
<body> <body>

View File

@@ -13,6 +13,17 @@
{{- end}} {{- end}}
</div> </div>
<form class="filter-form" method="get" action="/">
<input type="hidden" name="target" value="{{.Params.Target}}">
<input type="hidden" name="w" value="{{.Params.WindowS}}">
<input type="hidden" name="by" value="{{.Params.GroupByS}}">
<input type="hidden" name="n" value="{{.Params.N}}">
<input class="filter-input" type="text" name="q" value="{{.FilterExpr}}" placeholder="status>=400 AND website~=gouda.* AND uri~=^/api/">
<button type="submit">filter</button>
{{- if .FilterExpr}} <a class="clear" href="{{.ClearFilterURL}}">× clear</a>{{end}}
</form>
{{- if .FilterErr}}<div class="filter-err">{{.FilterErr}}</div>{{end}}
{{if .Breadcrumbs}} {{if .Breadcrumbs}}
<div class="crumbs"> <div class="crumbs">
<span class="label">Filters:</span> <span class="label">Filters:</span>

View File

@@ -266,15 +266,55 @@ to remove just that filter, keeping the others.
**Sparkline** — inline SVG trend chart showing total request count per time bucket for the **Sparkline** — inline SVG trend chart showing total request count per time bucket for the
current filter state. Useful for spotting sudden spikes or sustained DDoS ramps. current filter state. Useful for spotting sudden spikes or sustained DDoS ramps.
**Filter expression box** — a text input above the table accepts a mini filter language that
lets you type expressions directly without editing the URL:
```
status>=400
status>=400 AND website~=gouda.*
status>=400 AND website~=gouda.* AND uri~="^/api/"
website=example.com AND prefix=1.2.3.0/24
```
Supported fields and operators:
| Field | Operators | Example |
|-----------|---------------------|----------------------------|
| `status` | `=` `!=` `>` `>=` `<` `<=` | `status>=400` |
| `website` | `=` `~=` | `website~=gouda.*` |
| `uri` | `=` `~=` | `uri~=^/api/` |
| `prefix` | `=` | `prefix=1.2.3.0/24` |
`~=` means RE2 regex match. Values with spaces or quotes may be wrapped in double or single
quotes: `uri~="^/search\?q="`.
The box pre-fills with the current active filter (including filters set by drilldown clicks),
so you can see and extend what is applied. Submitting redirects to a clean URL with the
individual filter params; `× clear` removes all filters at once.
On a parse error the page re-renders with the error shown below the input and the current
data and filters unchanged.
**Status expressions** — the `f_status` URL param (and `status` in the expression box) accepts
comparison expressions: `200`, `!=200`, `>=400`, `<500`, etc.
**Regex filters** — `f_website_re` and `f_uri_re` URL params (and `~=` in the expression box)
accept RE2 regular expressions. The breadcrumb strip shows them as `website~=gouda.*` and
`uri~=^/api/` with the usual `×` remove link.
**URL sharing** — all filter state is in the URL query string (`w`, `by`, `f_website`, **URL sharing** — all filter state is in the URL query string (`w`, `by`, `f_website`,
`f_prefix`, `f_uri`, `f_status`, `n`). Copy the URL to share an exact view with another `f_prefix`, `f_uri`, `f_status`, `f_website_re`, `f_uri_re`, `n`). Copy the URL to share an
operator, or bookmark a recurring query. exact view with another operator, or bookmark a recurring query.
**JSON output** — append `&raw=1` to any URL to receive the TopN result as JSON instead of **JSON output** — append `&raw=1` to any URL to receive the TopN result as JSON instead of
HTML. Useful for scripting without the CLI binary: HTML. Useful for scripting without the CLI binary:
```bash ```bash
# All 429s by prefix
curl -s 'http://frontend:8080/?f_status=429&by=prefix&w=1m&raw=1' | jq '.entries[0]' curl -s 'http://frontend:8080/?f_status=429&by=prefix&w=1m&raw=1' | jq '.entries[0]'
# All errors (>=400) on gouda hosts
curl -s 'http://frontend:8080/?f_status=%3E%3D400&f_website_re=gouda.*&by=uri&w=5m&raw=1'
``` ```
**Target override** — append `?target=host:port` to point the frontend at a different gRPC **Target override** — append `?target=host:port` to point the frontend at a different gRPC
@@ -309,7 +349,9 @@ logtail-cli stream [flags] live snapshot feed (runs until Ctrl-C)
| `--website` | — | Filter to this website |
| `--prefix` | — | Filter to this client prefix |
| `--uri` | — | Filter to this request URI |
| `--status` | — | Filter: HTTP status expression (`200`, `!=200`, `>=400`, `<500`, …) |
| `--website-re`| — | Filter: RE2 regex against website |
| `--uri-re` | — | Filter: RE2 regex against request URI |
### `topn` flags ### `topn` flags
@@ -365,12 +407,21 @@ logtail-cli topn --target agg:9091 --window 1m --group-by prefix --status 429 --
logtail-cli topn --target agg:9091 --window 1m --group-by prefix --status 429 --n 20 \ logtail-cli topn --target agg:9091 --window 1m --group-by prefix --status 429 --n 20 \
--json | jq '.entries[0]' --json | jq '.entries[0]'
# Which website has the most errors (4xx or 5xx) over the last 24h?
logtail-cli topn --target agg:9091 --window 24h --group-by website --status '>=400'

# Which client prefixes are NOT getting 200s? (anything non-success)
logtail-cli topn --target agg:9091 --window 5m --group-by prefix --status '!=200'

# Drill: top URIs on one website over the last 60 minutes
logtail-cli topn --target agg:9091 --window 60m --group-by uri --website api.example.com

# Filter by website regex: all gouda hosts
logtail-cli topn --target agg:9091 --window 5m --website-re 'gouda.*'

# Filter by URI regex: all /api/ paths
logtail-cli topn --target agg:9091 --window 5m --group-by uri --uri-re '^/api/'

# Compare two collectors side by side in one command
logtail-cli topn --target nginx1:9090,nginx2:9090 --window 5m

View File

@@ -4,7 +4,8 @@ package store
import ( import (
"container/heap" "container/heap"
"fmt" "log"
"regexp"
"time" "time"
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb" pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
@@ -113,27 +114,101 @@ func indexOf(s string, b byte) int {
// --- filtering and grouping --- // --- filtering and grouping ---
// CompiledFilter wraps a pb.Filter with pre-compiled regular expressions.
// Use CompileFilter to construct one before a query loop, so the regex
// fields are compiled once rather than on every tuple evaluated by
// MatchesFilter.
type CompiledFilter struct {
	Proto     *pb.Filter     // the original wire-format filter; nil matches everything
	WebsiteRe *regexp.Regexp // nil if no website_regex or compilation failed
	URIRe     *regexp.Regexp // nil if no uri_regex or compilation failed
}
// CompileFilter compiles the regex fields in f once. Invalid regexes are
// logged and treated as "match nothing" for that field (the corresponding
// *Re field stays nil while the proto field remains set).
func CompileFilter(f *pb.Filter) *CompiledFilter {
	out := &CompiledFilter{Proto: f}
	if f == nil {
		return out
	}
	if f.WebsiteRegex != nil {
		if re, err := regexp.Compile(f.GetWebsiteRegex()); err != nil {
			log.Printf("store: invalid website_regex %q: %v", f.GetWebsiteRegex(), err)
		} else {
			out.WebsiteRe = re
		}
	}
	if f.UriRegex != nil {
		if re, err := regexp.Compile(f.GetUriRegex()); err != nil {
			log.Printf("store: invalid uri_regex %q: %v", f.GetUriRegex(), err)
		} else {
			out.URIRe = re
		}
	}
	return out
}
// MatchesFilter returns true if t satisfies all constraints in f. // MatchesFilter returns true if t satisfies all constraints in f.
// A nil filter matches everything. // A nil filter matches everything.
func MatchesFilter(t Tuple4, f *pb.Filter) bool { func MatchesFilter(t Tuple4, f *CompiledFilter) bool {
if f == nil { if f == nil || f.Proto == nil {
return true return true
} }
if f.Website != nil && t.Website != f.GetWebsite() { p := f.Proto
if p.Website != nil && t.Website != p.GetWebsite() {
return false return false
} }
if f.ClientPrefix != nil && t.Prefix != f.GetClientPrefix() { if f.WebsiteRe != nil && !f.WebsiteRe.MatchString(t.Website) {
return false return false
} }
if f.HttpRequestUri != nil && t.URI != f.GetHttpRequestUri() { // website_regex set but failed to compile → match nothing
if p.WebsiteRegex != nil && f.WebsiteRe == nil {
return false return false
} }
if f.HttpResponse != nil && t.Status != fmt.Sprint(f.GetHttpResponse()) { if p.ClientPrefix != nil && t.Prefix != p.GetClientPrefix() {
return false
}
if p.HttpRequestUri != nil && t.URI != p.GetHttpRequestUri() {
return false
}
if f.URIRe != nil && !f.URIRe.MatchString(t.URI) {
return false
}
if p.UriRegex != nil && f.URIRe == nil {
return false
}
if p.HttpResponse != nil && !matchesStatusOp(t.Status, p.GetHttpResponse(), p.StatusOp) {
return false return false
} }
return true return true
} }
// matchesStatusOp applies op(statusStr, want), parsing statusStr as a
// non-negative integer. Returns false if statusStr is empty, contains a
// non-digit character, or is too long to fit in an int32.
func matchesStatusOp(statusStr string, want int32, op pb.StatusOp) bool {
	// Reject empty and overlong input up front: an empty string would
	// otherwise parse as 0 and wrongly satisfy comparisons like "<500",
	// and more than 9 digits would silently wrap int32
	// (999999999 < 2^31-1, so 9 digits always fit).
	if len(statusStr) == 0 || len(statusStr) > 9 {
		return false
	}
	var got int32
	for _, c := range []byte(statusStr) {
		if c < '0' || c > '9' {
			return false
		}
		got = got*10 + int32(c-'0')
	}
	switch op {
	case pb.StatusOp_NE:
		return got != want
	case pb.StatusOp_GT:
		return got > want
	case pb.StatusOp_GE:
		return got >= want
	case pb.StatusOp_LT:
		return got < want
	case pb.StatusOp_LE:
		return got <= want
	default: // EQ, and any unknown operator value, falls back to exact match
		return got == want
	}
}
// DimensionLabel returns the string value of t for the given group-by dimension. // DimensionLabel returns the string value of t for the given group-by dimension.
func DimensionLabel(t Tuple4, g pb.GroupBy) string { func DimensionLabel(t Tuple4, g pb.GroupBy) string {
switch g { switch g {
@@ -150,6 +225,45 @@ func DimensionLabel(t Tuple4, g pb.GroupBy) string {
} }
} }
// ParseStatusExpr parses a status filter expression into a value and operator.
// Accepted syntax: 200, =200, ==200, !=200, >400, >=400, <500, <=500.
// Returns ok=false if the expression is empty, has no digits after the
// operator, contains a non-digit character, or would overflow an int32.
func ParseStatusExpr(s string) (value int32, op pb.StatusOp, ok bool) {
	if s == "" {
		return 0, pb.StatusOp_EQ, false
	}
	var digits string
	// Two-character operators must be tested before their one-character
	// prefixes (">=" before ">", "<=" before "<", "==" before "=").
	switch {
	case len(s) >= 2 && s[:2] == "!=":
		op, digits = pb.StatusOp_NE, s[2:]
	case len(s) >= 2 && s[:2] == ">=":
		op, digits = pb.StatusOp_GE, s[2:]
	case len(s) >= 2 && s[:2] == "<=":
		op, digits = pb.StatusOp_LE, s[2:]
	case len(s) >= 2 && s[:2] == "==":
		op, digits = pb.StatusOp_EQ, s[2:]
	case s[0] == '>':
		op, digits = pb.StatusOp_GT, s[1:]
	case s[0] == '<':
		op, digits = pb.StatusOp_LT, s[1:]
	case s[0] == '=':
		op, digits = pb.StatusOp_EQ, s[1:]
	default:
		op, digits = pb.StatusOp_EQ, s
	}
	// Reject empty and overlong digit strings: more than 9 digits would
	// silently wrap int32 (9 digits always fit: 999999999 < 2^31-1).
	if len(digits) == 0 || len(digits) > 9 {
		return 0, pb.StatusOp_EQ, false
	}
	var n int32
	for _, c := range []byte(digits) {
		if c < '0' || c > '9' {
			return 0, pb.StatusOp_EQ, false
		}
		n = n*10 + int32(c-'0')
	}
	return n, op, true
}
// --- heap-based top-K selection --- // --- heap-based top-K selection ---
type entryHeap []Entry type entryHeap []Entry

View File

@@ -0,0 +1,205 @@
package store
import (
"testing"
pb "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"
)
// --- ParseStatusExpr ---
// TestParseStatusExprEQ checks that a bare number parses as an EQ match.
func TestParseStatusExprEQ(t *testing.T) {
	value, operator, parsed := ParseStatusExpr("200")
	if !parsed || value != 200 || operator != pb.StatusOp_EQ {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprExplicitEQ checks that "=" and "==" both mean EQ.
func TestParseStatusExprExplicitEQ(t *testing.T) {
	exprs := []string{"=200", "==200"}
	for _, e := range exprs {
		value, operator, parsed := ParseStatusExpr(e)
		if !parsed || value != 200 || operator != pb.StatusOp_EQ {
			t.Fatalf("expr %q: got (%d,%v,%v)", e, value, operator, parsed)
		}
	}
}
// TestParseStatusExprNE checks the "!=" prefix maps to StatusOp_NE.
func TestParseStatusExprNE(t *testing.T) {
	value, operator, parsed := ParseStatusExpr("!=200")
	if !parsed || value != 200 || operator != pb.StatusOp_NE {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprGE checks the ">=" prefix maps to StatusOp_GE.
func TestParseStatusExprGE(t *testing.T) {
	value, operator, parsed := ParseStatusExpr(">=400")
	if !parsed || value != 400 || operator != pb.StatusOp_GE {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprGT checks the ">" prefix maps to StatusOp_GT.
func TestParseStatusExprGT(t *testing.T) {
	value, operator, parsed := ParseStatusExpr(">400")
	if !parsed || value != 400 || operator != pb.StatusOp_GT {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprLE checks the "<=" prefix maps to StatusOp_LE.
func TestParseStatusExprLE(t *testing.T) {
	value, operator, parsed := ParseStatusExpr("<=500")
	if !parsed || value != 500 || operator != pb.StatusOp_LE {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprLT checks the "<" prefix maps to StatusOp_LT.
func TestParseStatusExprLT(t *testing.T) {
	value, operator, parsed := ParseStatusExpr("<500")
	if !parsed || value != 500 || operator != pb.StatusOp_LT {
		t.Fatalf("got (%d,%v,%v)", value, operator, parsed)
	}
}
// TestParseStatusExprEmpty checks the empty string is rejected.
func TestParseStatusExprEmpty(t *testing.T) {
	if _, _, parsed := ParseStatusExpr(""); parsed {
		t.Fatal("expected ok=false for empty string")
	}
}
// TestParseStatusExprInvalid checks that malformed expressions — garbage,
// bare operators with no digits, and mixed digit/letter forms — are rejected.
func TestParseStatusExprInvalid(t *testing.T) {
	for _, bad := range []string{"abc", "!=", ">=", "2xx"} {
		if _, _, parsed := ParseStatusExpr(bad); parsed {
			t.Fatalf("expr %q: expected ok=false", bad)
		}
	}
}
// --- MatchesFilter ---
// compiledEQ builds a CompiledFilter that matches status codes exactly
// equal to the given value.
func compiledEQ(status int32) *CompiledFilter {
	want := status
	return CompileFilter(&pb.Filter{HttpResponse: &want, StatusOp: pb.StatusOp_EQ})
}
func TestMatchesFilterNil(t *testing.T) {
if !MatchesFilter(Tuple4{Website: "x"}, nil) {
t.Fatal("nil filter should match everything")
}
if !MatchesFilter(Tuple4{Website: "x"}, &CompiledFilter{}) {
t.Fatal("empty compiled filter should match everything")
}
}
// TestMatchesFilterExactWebsite checks exact string matching on website.
func TestMatchesFilterExactWebsite(t *testing.T) {
	site := "example.com"
	filter := CompileFilter(&pb.Filter{Website: &site})
	if !MatchesFilter(Tuple4{Website: "example.com"}, filter) {
		t.Fatal("expected match")
	}
	if MatchesFilter(Tuple4{Website: "other.com"}, filter) {
		t.Fatal("expected no match")
	}
}
// TestMatchesFilterWebsiteRegex checks regex matching against website.
func TestMatchesFilterWebsiteRegex(t *testing.T) {
	pattern := "gouda.*"
	filter := CompileFilter(&pb.Filter{WebsiteRegex: &pattern})
	if !MatchesFilter(Tuple4{Website: "gouda.example.com"}, filter) {
		t.Fatal("expected match")
	}
	if MatchesFilter(Tuple4{Website: "edam.example.com"}, filter) {
		t.Fatal("expected no match")
	}
}
// TestMatchesFilterURIRegex checks regex matching against the request URI.
func TestMatchesFilterURIRegex(t *testing.T) {
	pattern := "^/api/.*"
	filter := CompileFilter(&pb.Filter{UriRegex: &pattern})
	if !MatchesFilter(Tuple4{URI: "/api/users"}, filter) {
		t.Fatal("expected match")
	}
	if MatchesFilter(Tuple4{URI: "/health"}, filter) {
		t.Fatal("expected no match")
	}
}
// TestMatchesFilterInvalidRegexMatchesNothing checks that an uncompilable
// regex makes the filter match nothing rather than everything.
func TestMatchesFilterInvalidRegexMatchesNothing(t *testing.T) {
	pattern := "[invalid"
	filter := CompileFilter(&pb.Filter{WebsiteRegex: &pattern})
	if MatchesFilter(Tuple4{Website: "anything"}, filter) {
		t.Fatal("invalid regex should match nothing")
	}
}
// TestMatchesFilterStatusEQ checks exact status matching.
func TestMatchesFilterStatusEQ(t *testing.T) {
	filter := compiledEQ(200)
	if !MatchesFilter(Tuple4{Status: "200"}, filter) {
		t.Fatal("expected match")
	}
	if MatchesFilter(Tuple4{Status: "404"}, filter) {
		t.Fatal("expected no match")
	}
}
// TestMatchesFilterStatusNE checks the != status operator.
func TestMatchesFilterStatusNE(t *testing.T) {
	want := int32(200)
	filter := CompileFilter(&pb.Filter{HttpResponse: &want, StatusOp: pb.StatusOp_NE})
	if MatchesFilter(Tuple4{Status: "200"}, filter) {
		t.Fatal("expected no match for 200 != 200")
	}
	if !MatchesFilter(Tuple4{Status: "404"}, filter) {
		t.Fatal("expected match for 404 != 200")
	}
}
// TestMatchesFilterStatusGE checks the >= status operator at and above
// the boundary, and below it.
func TestMatchesFilterStatusGE(t *testing.T) {
	want := int32(400)
	filter := CompileFilter(&pb.Filter{HttpResponse: &want, StatusOp: pb.StatusOp_GE})
	if !MatchesFilter(Tuple4{Status: "400"}, filter) {
		t.Fatal("expected match: 400 >= 400")
	}
	if !MatchesFilter(Tuple4{Status: "500"}, filter) {
		t.Fatal("expected match: 500 >= 400")
	}
	if MatchesFilter(Tuple4{Status: "200"}, filter) {
		t.Fatal("expected no match: 200 >= 400")
	}
}
// TestMatchesFilterStatusLT checks the < status operator, including the
// exclusive boundary.
func TestMatchesFilterStatusLT(t *testing.T) {
	want := int32(400)
	filter := CompileFilter(&pb.Filter{HttpResponse: &want, StatusOp: pb.StatusOp_LT})
	if !MatchesFilter(Tuple4{Status: "200"}, filter) {
		t.Fatal("expected match: 200 < 400")
	}
	if MatchesFilter(Tuple4{Status: "400"}, filter) {
		t.Fatal("expected no match: 400 < 400")
	}
}
// TestMatchesFilterStatusNonNumeric checks that a non-numeric status
// string never satisfies a status filter.
func TestMatchesFilterStatusNonNumeric(t *testing.T) {
	filter := compiledEQ(200)
	if MatchesFilter(Tuple4{Status: "ok"}, filter) {
		t.Fatal("non-numeric status should not match")
	}
}
// TestMatchesFilterCombined checks that the website and status constraints
// are ANDed: both must hold for a tuple to match.
func TestMatchesFilterCombined(t *testing.T) {
	site := "example.com"
	want := int32(200)
	filter := CompileFilter(&pb.Filter{
		Website:      &site,
		HttpResponse: &want,
		StatusOp:     pb.StatusOp_EQ,
	})
	if !MatchesFilter(Tuple4{Website: "example.com", Status: "200"}, filter) {
		t.Fatal("expected match")
	}
	if MatchesFilter(Tuple4{Website: "other.com", Status: "200"}, filter) {
		t.Fatal("expected no match: wrong website")
	}
	if MatchesFilter(Tuple4{Website: "example.com", Status: "404"}, filter) {
		t.Fatal("expected no match: wrong status")
	}
}

View File

@@ -4,13 +4,27 @@ package logtail;
option go_package = "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb"; option go_package = "git.ipng.ch/ipng/nginx-logtail/proto/logtailpb";
// StatusOp is the comparison operator applied to http_response in a Filter.
// Defaults to EQ (exact match) for backward compatibility.
enum StatusOp {
EQ = 0; // ==
NE = 1; // !=
GT = 2; // >
GE = 3; // >=
LT = 4; // <
LE = 5; // <=
}
// Filter restricts results to entries matching all specified fields. // Filter restricts results to entries matching all specified fields.
// Unset fields match everything. // Unset fields match everything. Exact-match and regex fields are ANDed.
message Filter { message Filter {
optional string website = 1; optional string website = 1;
optional string client_prefix = 2; optional string client_prefix = 2;
optional string http_request_uri = 3; optional string http_request_uri = 3;
optional int32 http_response = 4; optional int32 http_response = 4;
StatusOp status_op = 5; // operator for http_response; ignored when unset
optional string website_regex = 6; // RE2 regex matched against website
optional string uri_regex = 7; // RE2 regex matched against http_request_uri
} }
enum GroupBy { enum GroupBy {

View File

@@ -21,6 +21,66 @@ const (
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
) )
// StatusOp is the comparison operator applied to http_response in a Filter.
// Defaults to EQ (exact match) for backward compatibility.
//
// NOTE(review): this type lives in protoc-generated code; any hand-added
// comments will be lost on the next regeneration — edit logtail.proto and
// regenerate instead of modifying this file.
type StatusOp int32

const (
	StatusOp_EQ StatusOp = 0 // ==
	StatusOp_NE StatusOp = 1 // !=
	StatusOp_GT StatusOp = 2 // >
	StatusOp_GE StatusOp = 3 // >=
	StatusOp_LT StatusOp = 4 // <
	StatusOp_LE StatusOp = 5 // <=
)

// Enum value maps for StatusOp.
var (
	StatusOp_name = map[int32]string{
		0: "EQ",
		1: "NE",
		2: "GT",
		3: "GE",
		4: "LT",
		5: "LE",
	}
	StatusOp_value = map[string]int32{
		"EQ": 0,
		"NE": 1,
		"GT": 2,
		"GE": 3,
		"LT": 4,
		"LE": 5,
	}
)

// Enum returns a pointer to a new StatusOp holding the value of x.
func (x StatusOp) Enum() *StatusOp {
	p := new(StatusOp)
	*p = x
	return p
}

// String returns the proto enum name for x (e.g. "GE").
func (x StatusOp) String() string {
	return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}

// Descriptor returns the protoreflect descriptor for the StatusOp enum.
func (StatusOp) Descriptor() protoreflect.EnumDescriptor {
	return file_logtail_proto_enumTypes[0].Descriptor()
}

// Type returns the protoreflect enum type for StatusOp.
func (StatusOp) Type() protoreflect.EnumType {
	return &file_logtail_proto_enumTypes[0]
}

// Number returns x as a protoreflect enum number.
func (x StatusOp) Number() protoreflect.EnumNumber {
	return protoreflect.EnumNumber(x)
}

// Deprecated: Use StatusOp.Descriptor instead.
func (StatusOp) EnumDescriptor() ([]byte, []int) {
	return file_logtail_proto_rawDescGZIP(), []int{0}
}
type GroupBy int32 type GroupBy int32
const ( const (
@@ -57,11 +117,11 @@ func (x GroupBy) String() string {
} }
func (GroupBy) Descriptor() protoreflect.EnumDescriptor { func (GroupBy) Descriptor() protoreflect.EnumDescriptor {
return file_logtail_proto_enumTypes[0].Descriptor() return file_logtail_proto_enumTypes[1].Descriptor()
} }
func (GroupBy) Type() protoreflect.EnumType { func (GroupBy) Type() protoreflect.EnumType {
return &file_logtail_proto_enumTypes[0] return &file_logtail_proto_enumTypes[1]
} }
func (x GroupBy) Number() protoreflect.EnumNumber { func (x GroupBy) Number() protoreflect.EnumNumber {
@@ -70,7 +130,7 @@ func (x GroupBy) Number() protoreflect.EnumNumber {
// Deprecated: Use GroupBy.Descriptor instead. // Deprecated: Use GroupBy.Descriptor instead.
func (GroupBy) EnumDescriptor() ([]byte, []int) { func (GroupBy) EnumDescriptor() ([]byte, []int) {
return file_logtail_proto_rawDescGZIP(), []int{0} return file_logtail_proto_rawDescGZIP(), []int{1}
} }
type Window int32 type Window int32
@@ -115,11 +175,11 @@ func (x Window) String() string {
} }
func (Window) Descriptor() protoreflect.EnumDescriptor { func (Window) Descriptor() protoreflect.EnumDescriptor {
return file_logtail_proto_enumTypes[1].Descriptor() return file_logtail_proto_enumTypes[2].Descriptor()
} }
func (Window) Type() protoreflect.EnumType { func (Window) Type() protoreflect.EnumType {
return &file_logtail_proto_enumTypes[1] return &file_logtail_proto_enumTypes[2]
} }
func (x Window) Number() protoreflect.EnumNumber { func (x Window) Number() protoreflect.EnumNumber {
@@ -128,17 +188,20 @@ func (x Window) Number() protoreflect.EnumNumber {
// Deprecated: Use Window.Descriptor instead. // Deprecated: Use Window.Descriptor instead.
func (Window) EnumDescriptor() ([]byte, []int) { func (Window) EnumDescriptor() ([]byte, []int) {
return file_logtail_proto_rawDescGZIP(), []int{1} return file_logtail_proto_rawDescGZIP(), []int{2}
} }
// Filter restricts results to entries matching all specified fields. // Filter restricts results to entries matching all specified fields.
// Unset fields match everything. // Unset fields match everything. Exact-match and regex fields are ANDed.
type Filter struct { type Filter struct {
state protoimpl.MessageState `protogen:"open.v1"` state protoimpl.MessageState `protogen:"open.v1"`
Website *string `protobuf:"bytes,1,opt,name=website,proto3,oneof" json:"website,omitempty"` Website *string `protobuf:"bytes,1,opt,name=website,proto3,oneof" json:"website,omitempty"`
ClientPrefix *string `protobuf:"bytes,2,opt,name=client_prefix,json=clientPrefix,proto3,oneof" json:"client_prefix,omitempty"` ClientPrefix *string `protobuf:"bytes,2,opt,name=client_prefix,json=clientPrefix,proto3,oneof" json:"client_prefix,omitempty"`
HttpRequestUri *string `protobuf:"bytes,3,opt,name=http_request_uri,json=httpRequestUri,proto3,oneof" json:"http_request_uri,omitempty"` HttpRequestUri *string `protobuf:"bytes,3,opt,name=http_request_uri,json=httpRequestUri,proto3,oneof" json:"http_request_uri,omitempty"`
HttpResponse *int32 `protobuf:"varint,4,opt,name=http_response,json=httpResponse,proto3,oneof" json:"http_response,omitempty"` HttpResponse *int32 `protobuf:"varint,4,opt,name=http_response,json=httpResponse,proto3,oneof" json:"http_response,omitempty"`
StatusOp StatusOp `protobuf:"varint,5,opt,name=status_op,json=statusOp,proto3,enum=logtail.StatusOp" json:"status_op,omitempty"` // operator for http_response; ignored when unset
WebsiteRegex *string `protobuf:"bytes,6,opt,name=website_regex,json=websiteRegex,proto3,oneof" json:"website_regex,omitempty"` // RE2 regex matched against website
UriRegex *string `protobuf:"bytes,7,opt,name=uri_regex,json=uriRegex,proto3,oneof" json:"uri_regex,omitempty"` // RE2 regex matched against http_request_uri
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
} }
@@ -201,6 +264,27 @@ func (x *Filter) GetHttpResponse() int32 {
return 0 return 0
} }
// GetStatusOp returns the status_op field, or StatusOp_EQ when x is nil
// (EQ is the zero value, preserving pre-StatusOp exact-match behavior).
func (x *Filter) GetStatusOp() StatusOp {
	if x != nil {
		return x.StatusOp
	}
	return StatusOp_EQ
}

// GetWebsiteRegex returns the website_regex field, or "" when unset or x is nil.
func (x *Filter) GetWebsiteRegex() string {
	if x != nil && x.WebsiteRegex != nil {
		return *x.WebsiteRegex
	}
	return ""
}

// GetUriRegex returns the uri_regex field, or "" when unset or x is nil.
func (x *Filter) GetUriRegex() string {
	if x != nil && x.UriRegex != nil {
		return *x.UriRegex
	}
	return ""
}
type TopNRequest struct { type TopNRequest struct {
state protoimpl.MessageState `protogen:"open.v1"` state protoimpl.MessageState `protogen:"open.v1"`
Filter *Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` Filter *Filter `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"`
@@ -629,17 +713,23 @@ var File_logtail_proto protoreflect.FileDescriptor
const file_logtail_proto_rawDesc = "" + const file_logtail_proto_rawDesc = "" +
"\n" + "\n" +
"\rlogtail.proto\x12\alogtail\"\xef\x01\n" + "\rlogtail.proto\x12\alogtail\"\x8b\x03\n" +
"\x06Filter\x12\x1d\n" + "\x06Filter\x12\x1d\n" +
"\awebsite\x18\x01 \x01(\tH\x00R\awebsite\x88\x01\x01\x12(\n" + "\awebsite\x18\x01 \x01(\tH\x00R\awebsite\x88\x01\x01\x12(\n" +
"\rclient_prefix\x18\x02 \x01(\tH\x01R\fclientPrefix\x88\x01\x01\x12-\n" + "\rclient_prefix\x18\x02 \x01(\tH\x01R\fclientPrefix\x88\x01\x01\x12-\n" +
"\x10http_request_uri\x18\x03 \x01(\tH\x02R\x0ehttpRequestUri\x88\x01\x01\x12(\n" + "\x10http_request_uri\x18\x03 \x01(\tH\x02R\x0ehttpRequestUri\x88\x01\x01\x12(\n" +
"\rhttp_response\x18\x04 \x01(\x05H\x03R\fhttpResponse\x88\x01\x01B\n" + "\rhttp_response\x18\x04 \x01(\x05H\x03R\fhttpResponse\x88\x01\x01\x12.\n" +
"\tstatus_op\x18\x05 \x01(\x0e2\x11.logtail.StatusOpR\bstatusOp\x12(\n" +
"\rwebsite_regex\x18\x06 \x01(\tH\x04R\fwebsiteRegex\x88\x01\x01\x12 \n" +
"\turi_regex\x18\a \x01(\tH\x05R\buriRegex\x88\x01\x01B\n" +
"\n" + "\n" +
"\b_websiteB\x10\n" + "\b_websiteB\x10\n" +
"\x0e_client_prefixB\x13\n" + "\x0e_client_prefixB\x13\n" +
"\x11_http_request_uriB\x10\n" + "\x11_http_request_uriB\x10\n" +
"\x0e_http_response\"\x9a\x01\n" + "\x0e_http_responseB\x10\n" +
"\x0e_website_regexB\f\n" +
"\n" +
"_uri_regex\"\x9a\x01\n" +
"\vTopNRequest\x12'\n" + "\vTopNRequest\x12'\n" +
"\x06filter\x18\x01 \x01(\v2\x0f.logtail.FilterR\x06filter\x12+\n" + "\x06filter\x18\x01 \x01(\v2\x0f.logtail.FilterR\x06filter\x12+\n" +
"\bgroup_by\x18\x02 \x01(\x0e2\x10.logtail.GroupByR\agroupBy\x12\f\n" + "\bgroup_by\x18\x02 \x01(\x0e2\x10.logtail.GroupByR\agroupBy\x12\f\n" +
@@ -665,7 +755,14 @@ const file_logtail_proto_rawDesc = "" +
"\bSnapshot\x12\x16\n" + "\bSnapshot\x12\x16\n" +
"\x06source\x18\x01 \x01(\tR\x06source\x12\x1c\n" + "\x06source\x18\x01 \x01(\tR\x06source\x12\x1c\n" +
"\ttimestamp\x18\x02 \x01(\x03R\ttimestamp\x12,\n" + "\ttimestamp\x18\x02 \x01(\x03R\ttimestamp\x12,\n" +
"\aentries\x18\x03 \x03(\v2\x12.logtail.TopNEntryR\aentries*M\n" + "\aentries\x18\x03 \x03(\v2\x12.logtail.TopNEntryR\aentries*:\n" +
"\bStatusOp\x12\x06\n" +
"\x02EQ\x10\x00\x12\x06\n" +
"\x02NE\x10\x01\x12\x06\n" +
"\x02GT\x10\x02\x12\x06\n" +
"\x02GE\x10\x03\x12\x06\n" +
"\x02LT\x10\x04\x12\x06\n" +
"\x02LE\x10\x05*M\n" +
"\aGroupBy\x12\v\n" + "\aGroupBy\x12\v\n" +
"\aWEBSITE\x10\x00\x12\x11\n" + "\aWEBSITE\x10\x00\x12\x11\n" +
"\rCLIENT_PREFIX\x10\x01\x12\x0f\n" + "\rCLIENT_PREFIX\x10\x01\x12\x0f\n" +
@@ -695,41 +792,43 @@ func file_logtail_proto_rawDescGZIP() []byte {
return file_logtail_proto_rawDescData return file_logtail_proto_rawDescData
} }
var file_logtail_proto_enumTypes = make([]protoimpl.EnumInfo, 2) var file_logtail_proto_enumTypes = make([]protoimpl.EnumInfo, 3)
var file_logtail_proto_msgTypes = make([]protoimpl.MessageInfo, 9) var file_logtail_proto_msgTypes = make([]protoimpl.MessageInfo, 9)
var file_logtail_proto_goTypes = []any{ var file_logtail_proto_goTypes = []any{
(GroupBy)(0), // 0: logtail.GroupBy (StatusOp)(0), // 0: logtail.StatusOp
(Window)(0), // 1: logtail.Window (GroupBy)(0), // 1: logtail.GroupBy
(*Filter)(nil), // 2: logtail.Filter (Window)(0), // 2: logtail.Window
(*TopNRequest)(nil), // 3: logtail.TopNRequest (*Filter)(nil), // 3: logtail.Filter
(*TopNEntry)(nil), // 4: logtail.TopNEntry (*TopNRequest)(nil), // 4: logtail.TopNRequest
(*TopNResponse)(nil), // 5: logtail.TopNResponse (*TopNEntry)(nil), // 5: logtail.TopNEntry
(*TrendRequest)(nil), // 6: logtail.TrendRequest (*TopNResponse)(nil), // 6: logtail.TopNResponse
(*TrendPoint)(nil), // 7: logtail.TrendPoint (*TrendRequest)(nil), // 7: logtail.TrendRequest
(*TrendResponse)(nil), // 8: logtail.TrendResponse (*TrendPoint)(nil), // 8: logtail.TrendPoint
(*SnapshotRequest)(nil), // 9: logtail.SnapshotRequest (*TrendResponse)(nil), // 9: logtail.TrendResponse
(*Snapshot)(nil), // 10: logtail.Snapshot (*SnapshotRequest)(nil), // 10: logtail.SnapshotRequest
(*Snapshot)(nil), // 11: logtail.Snapshot
} }
var file_logtail_proto_depIdxs = []int32{ var file_logtail_proto_depIdxs = []int32{
2, // 0: logtail.TopNRequest.filter:type_name -> logtail.Filter 0, // 0: logtail.Filter.status_op:type_name -> logtail.StatusOp
0, // 1: logtail.TopNRequest.group_by:type_name -> logtail.GroupBy 3, // 1: logtail.TopNRequest.filter:type_name -> logtail.Filter
1, // 2: logtail.TopNRequest.window:type_name -> logtail.Window 1, // 2: logtail.TopNRequest.group_by:type_name -> logtail.GroupBy
4, // 3: logtail.TopNResponse.entries:type_name -> logtail.TopNEntry 2, // 3: logtail.TopNRequest.window:type_name -> logtail.Window
2, // 4: logtail.TrendRequest.filter:type_name -> logtail.Filter 5, // 4: logtail.TopNResponse.entries:type_name -> logtail.TopNEntry
1, // 5: logtail.TrendRequest.window:type_name -> logtail.Window 3, // 5: logtail.TrendRequest.filter:type_name -> logtail.Filter
7, // 6: logtail.TrendResponse.points:type_name -> logtail.TrendPoint 2, // 6: logtail.TrendRequest.window:type_name -> logtail.Window
4, // 7: logtail.Snapshot.entries:type_name -> logtail.TopNEntry 8, // 7: logtail.TrendResponse.points:type_name -> logtail.TrendPoint
3, // 8: logtail.LogtailService.TopN:input_type -> logtail.TopNRequest 5, // 8: logtail.Snapshot.entries:type_name -> logtail.TopNEntry
6, // 9: logtail.LogtailService.Trend:input_type -> logtail.TrendRequest 4, // 9: logtail.LogtailService.TopN:input_type -> logtail.TopNRequest
9, // 10: logtail.LogtailService.StreamSnapshots:input_type -> logtail.SnapshotRequest 7, // 10: logtail.LogtailService.Trend:input_type -> logtail.TrendRequest
5, // 11: logtail.LogtailService.TopN:output_type -> logtail.TopNResponse 10, // 11: logtail.LogtailService.StreamSnapshots:input_type -> logtail.SnapshotRequest
8, // 12: logtail.LogtailService.Trend:output_type -> logtail.TrendResponse 6, // 12: logtail.LogtailService.TopN:output_type -> logtail.TopNResponse
10, // 13: logtail.LogtailService.StreamSnapshots:output_type -> logtail.Snapshot 9, // 13: logtail.LogtailService.Trend:output_type -> logtail.TrendResponse
11, // [11:14] is the sub-list for method output_type 11, // 14: logtail.LogtailService.StreamSnapshots:output_type -> logtail.Snapshot
8, // [8:11] is the sub-list for method input_type 12, // [12:15] is the sub-list for method output_type
8, // [8:8] is the sub-list for extension type_name 9, // [9:12] is the sub-list for method input_type
8, // [8:8] is the sub-list for extension extendee 9, // [9:9] is the sub-list for extension type_name
0, // [0:8] is the sub-list for field type_name 9, // [9:9] is the sub-list for extension extendee
0, // [0:9] is the sub-list for field type_name
} }
func init() { file_logtail_proto_init() } func init() { file_logtail_proto_init() }
@@ -743,7 +842,7 @@ func file_logtail_proto_init() {
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: unsafe.Slice(unsafe.StringData(file_logtail_proto_rawDesc), len(file_logtail_proto_rawDesc)), RawDescriptor: unsafe.Slice(unsafe.StringData(file_logtail_proto_rawDesc), len(file_logtail_proto_rawDesc)),
NumEnums: 2, NumEnums: 3,
NumMessages: 9, NumMessages: 9,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,