Fold tiledump into ctfetch. Add +sct, +issuer and +ctlog flags to print additional info
This commit is contained in:
@@ -5,18 +5,75 @@ package utils
|
||||
import (
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"crypto/x509"
|
||||
"encoding/asn1"
|
||||
"encoding/base64"
|
||||
"encoding/binary"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"filippo.io/sunlight"
|
||||
)
|
||||
|
||||
var (
	// ctLogCache caches parsed CT log lists keyed by the log-list URL, so
	// FetchCTLogList hits the network at most once per URL per process.
	ctLogCache   = map[string]map[string]CTLogInfo{}
	ctLogCacheMu sync.Mutex

	// issuerCache caches parsed issuer certificates keyed by the full
	// /issuer/<fingerprint> URL (populated by fetchIssuer).
	issuerCache   = map[string]*IssuerInfo{}
	issuerCacheMu sync.Mutex
)

// oidSCTList is the OID of the X.509 extension carrying the embedded
// SCT list (1.3.6.1.4.1.11129.2.4.2); parseEmbeddedSCTs matches
// certificate extensions against it.
var oidSCTList = asn1.ObjectIdentifier{1, 3, 6, 1, 4, 1, 11129, 2, 4, 2}
|
||||
|
||||
// CTLogInfo holds details about a CT log from the log list.
type CTLogInfo struct {
	Description string `json:"description"` // human-readable log name from the log list
	URL         string `json:"url"`         // the log's URL as published in the list
	Operator    string `json:"operator"`    // name of the operator entry the log appears under
	State       string `json:"state"`       // lifecycle state: the single key of the list's "state" object
}
|
||||
|
||||
// SCT represents a Signed Certificate Timestamp.
// Fields are populated by parseSCT; CTLog is optionally attached in
// convertEntry when Options.ShowCTLog is set.
type SCT struct {
	Version        int        `json:"version"`              // raw SCT version byte
	LogID          string     `json:"log_id"`               // hex encoding of the 32-byte log ID
	Timestamp      int64      `json:"timestamp"`            // milliseconds since the Unix epoch
	TimestampHuman string     `json:"timestamp_human"`      // Timestamp rendered as RFC 3339 UTC
	Extensions     string     `json:"extensions,omitempty"` // hex-encoded extensions, empty when none
	HashAlgorithm  int        `json:"hash_algorithm"`       // raw TLS hash-algorithm byte
	SigAlgorithm   int        `json:"sig_algorithm"`        // raw TLS signature-algorithm byte
	Signature      string     `json:"signature"`            // hex-encoded signature bytes
	CTLog          *CTLogInfo `json:"ctlog,omitempty"`      // log metadata looked up by LogID, if requested
}
|
||||
|
||||
// maxCompressRatio bounds the decompressed-to-compressed size ratio —
// presumably a decompression-bomb guard for Decompress; its usage is not
// visible in this view, confirm against the full file.
const maxCompressRatio = 100
|
||||
|
||||
// Options controls which optional fields are fetched and included in output.
type Options struct {
	LogURL     string               // base URL of the log; required for issuer fetching
	ShowSCT    bool                 // extract embedded SCTs from final (non-precert) certificates
	ShowIssuer bool                 // fetch issuer details for each chain fingerprint via LogURL
	ShowCTLog  bool                 // annotate each SCT with log metadata from CTLogs
	CTLogs     map[string]CTLogInfo // keyed by hex log_id
}
|
||||
|
||||
// IssuerInfo holds parsed details of an issuer certificate fetched from the log.
type IssuerInfo struct {
	Fingerprint  string `json:"fingerprint"`   // hex fingerprint as it appears in the entry's chain
	Subject      string `json:"subject"`       // RFC 2253-style subject DN (x509 Subject.String())
	Issuer       string `json:"issuer"`        // RFC 2253-style issuer DN
	NotBefore    string `json:"not_before"`    // validity start, RFC 3339 UTC
	NotAfter     string `json:"not_after"`     // validity end, RFC 3339 UTC
	SerialNumber string `json:"serial_number"` // decimal serial (big.Int String)
}
|
||||
|
||||
// Entry represents a CT log entry in JSON format.
|
||||
type Entry struct {
|
||||
EntryNumber int `json:"entry_number"`
|
||||
@@ -27,6 +84,8 @@ type Entry struct {
|
||||
CertificateSize int `json:"certificate_size"`
|
||||
PreCertificateSize *int `json:"precertificate_size,omitempty"`
|
||||
ChainFingerprints []string `json:"chain_fingerprints"`
|
||||
Issuers []IssuerInfo `json:"issuers,omitempty"`
|
||||
SCTs []SCT `json:"scts,omitempty"`
|
||||
ParsedCertInfo json.RawMessage `json:"parsed_cert_info,omitempty"`
|
||||
}
|
||||
|
||||
@@ -38,9 +97,9 @@ type HashTileOutput struct {
|
||||
|
||||
// DumpResult is the result of dumping entries or hashes from a tile.
|
||||
type DumpResult struct {
|
||||
Entries []Entry `json:"entries,omitempty"`
|
||||
HashTile *HashTileOutput `json:"hash_tile,omitempty"`
|
||||
TotalEntries int `json:"total_entries,omitempty"`
|
||||
Entries []Entry `json:"entries,omitempty"`
|
||||
HashTile *HashTileOutput `json:"hash_tile,omitempty"`
|
||||
TotalEntries int `json:"total_entries,omitempty"`
|
||||
}
|
||||
|
||||
// FetchURL fetches data from a URL.
|
||||
@@ -58,6 +117,23 @@ func FetchURL(url string) ([]byte, error) {
|
||||
return io.ReadAll(resp.Body)
|
||||
}
|
||||
|
||||
// FetchTile fetches a tile from a URL, falling back from partial to full tile on 404.
|
||||
func FetchTile(url string) ([]byte, error) {
|
||||
data, err := FetchURL(url)
|
||||
if err == nil {
|
||||
return data, nil
|
||||
}
|
||||
// On 404, try stripping the partial-tile suffix (.p/NNN)
|
||||
if err.Error() == "HTTP 404" {
|
||||
if idx := strings.Index(url, ".p/"); idx != -1 {
|
||||
fullURL := url[:idx]
|
||||
fmt.Fprintf(os.Stderr, "Partial tile not found, trying full tile: %s\n", fullURL)
|
||||
return FetchURL(fullURL)
|
||||
}
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Decompress decompresses gzip-compressed data, or returns the data as-is if not compressed.
|
||||
func Decompress(data []byte) ([]byte, error) {
|
||||
r, err := gzip.NewReader(bytes.NewReader(data))
|
||||
@@ -71,9 +147,9 @@ func Decompress(data []byte) ([]byte, error) {
|
||||
|
||||
// DumpAllEntries reads and returns all entries from tile data as JSON-serializable structures.
|
||||
// Automatically detects if the tile is a data tile or hash tile.
|
||||
func DumpAllEntries(tileData []byte) (*DumpResult, error) {
|
||||
func DumpAllEntries(tileData []byte, opts Options) (*DumpResult, error) {
|
||||
// Try to read as data tile first
|
||||
result, err := dumpDataTile(tileData)
|
||||
result, err := dumpDataTile(tileData, opts)
|
||||
if err != nil {
|
||||
// If it fails, try as hash tile
|
||||
fmt.Fprintf(os.Stderr, "Not a data tile, trying as hash tile...\n")
|
||||
@@ -82,7 +158,7 @@ func DumpAllEntries(tileData []byte) (*DumpResult, error) {
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func dumpDataTile(tileData []byte) (*DumpResult, error) {
|
||||
func dumpDataTile(tileData []byte, opts Options) (*DumpResult, error) {
|
||||
entryNum := 0
|
||||
var entries []Entry
|
||||
for len(tileData) > 0 {
|
||||
@@ -92,7 +168,7 @@ func dumpDataTile(tileData []byte) (*DumpResult, error) {
|
||||
}
|
||||
tileData = remaining
|
||||
|
||||
entry := convertEntry(e, entryNum)
|
||||
entry := convertEntry(e, entryNum, opts)
|
||||
entries = append(entries, entry)
|
||||
entryNum++
|
||||
}
|
||||
@@ -127,7 +203,7 @@ func dumpHashTile(tileData []byte) (*DumpResult, error) {
|
||||
}
|
||||
|
||||
// DumpEntryAtPosition reads and returns a specific entry at the given position.
|
||||
func DumpEntryAtPosition(tileData []byte, position int, expectedIndex int64) (*Entry, error) {
|
||||
func DumpEntryAtPosition(tileData []byte, position int, expectedIndex int64, opts Options) (*Entry, error) {
|
||||
entryNum := 0
|
||||
for len(tileData) > 0 {
|
||||
e, remaining, err := sunlight.ReadTileLeaf(tileData)
|
||||
@@ -141,7 +217,7 @@ func DumpEntryAtPosition(tileData []byte, position int, expectedIndex int64) (*E
|
||||
fmt.Fprintf(os.Stderr, "WARNING: Expected leaf index %d but found %d at position %d\n",
|
||||
expectedIndex, e.LeafIndex, position)
|
||||
}
|
||||
entry := convertEntry(e, entryNum)
|
||||
entry := convertEntry(e, entryNum, opts)
|
||||
return &entry, nil
|
||||
}
|
||||
entryNum++
|
||||
@@ -150,7 +226,195 @@ func DumpEntryAtPosition(tileData []byte, position int, expectedIndex int64) (*E
|
||||
return nil, fmt.Errorf("position %d not found in tile (only %d entries)", position, entryNum)
|
||||
}
|
||||
|
||||
func convertEntry(e *sunlight.LogEntry, entryNum int) Entry {
|
||||
// parseEmbeddedSCTs extracts SCTs from the SCT list extension of a DER-encoded certificate.
|
||||
func parseEmbeddedSCTs(certDER []byte) ([]SCT, error) {
|
||||
cert, err := x509.ParseCertificate(certDER)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse certificate: %w", err)
|
||||
}
|
||||
|
||||
for _, ext := range cert.Extensions {
|
||||
if !ext.Id.Equal(oidSCTList) {
|
||||
continue
|
||||
}
|
||||
// ext.Value is the DER encoding of the extension value, which is an OCTET STRING
|
||||
// wrapping the TLS-encoded SCTList.
|
||||
var inner []byte
|
||||
if rest, err := asn1.Unmarshal(ext.Value, &inner); err != nil || len(rest) != 0 {
|
||||
return nil, fmt.Errorf("unmarshal SCT extension: %w", err)
|
||||
}
|
||||
return parseSCTList(inner)
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// parseSCTList parses a TLS-encoded SignedCertificateTimestampList.
|
||||
func parseSCTList(data []byte) ([]SCT, error) {
|
||||
if len(data) < 2 {
|
||||
return nil, fmt.Errorf("SCT list too short")
|
||||
}
|
||||
listLen := int(binary.BigEndian.Uint16(data[:2]))
|
||||
data = data[2:]
|
||||
if len(data) < listLen {
|
||||
return nil, fmt.Errorf("SCT list truncated")
|
||||
}
|
||||
data = data[:listLen]
|
||||
|
||||
var scts []SCT
|
||||
for len(data) > 0 {
|
||||
if len(data) < 2 {
|
||||
return nil, fmt.Errorf("SCT entry length truncated")
|
||||
}
|
||||
sctLen := int(binary.BigEndian.Uint16(data[:2]))
|
||||
data = data[2:]
|
||||
if len(data) < sctLen {
|
||||
return nil, fmt.Errorf("SCT entry truncated")
|
||||
}
|
||||
sct, err := parseSCT(data[:sctLen])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
scts = append(scts, sct)
|
||||
data = data[sctLen:]
|
||||
}
|
||||
return scts, nil
|
||||
}
|
||||
|
||||
// parseSCT parses a single v1 SCT from raw bytes.
|
||||
func parseSCT(data []byte) (SCT, error) {
|
||||
// version(1) + log_id(32) + timestamp(8) + ext_len(2) = 43 bytes minimum
|
||||
if len(data) < 43 {
|
||||
return SCT{}, fmt.Errorf("SCT too short: %d bytes", len(data))
|
||||
}
|
||||
version := int(data[0])
|
||||
logID := hex.EncodeToString(data[1:33])
|
||||
ts := int64(binary.BigEndian.Uint64(data[33:41]))
|
||||
extLen := int(binary.BigEndian.Uint16(data[41:43]))
|
||||
pos := 43
|
||||
if len(data) < pos+extLen+4 {
|
||||
return SCT{}, fmt.Errorf("SCT extensions/signature truncated")
|
||||
}
|
||||
extensions := ""
|
||||
if extLen > 0 {
|
||||
extensions = hex.EncodeToString(data[pos : pos+extLen])
|
||||
}
|
||||
pos += extLen
|
||||
hashAlg := int(data[pos])
|
||||
sigAlg := int(data[pos+1])
|
||||
sigLen := int(binary.BigEndian.Uint16(data[pos+2 : pos+4]))
|
||||
pos += 4
|
||||
if len(data) < pos+sigLen {
|
||||
return SCT{}, fmt.Errorf("SCT signature truncated")
|
||||
}
|
||||
sig := hex.EncodeToString(data[pos : pos+sigLen])
|
||||
|
||||
return SCT{
|
||||
Version: version,
|
||||
LogID: logID,
|
||||
Timestamp: ts,
|
||||
TimestampHuman: time.UnixMilli(ts).UTC().Format(time.RFC3339),
|
||||
Extensions: extensions,
|
||||
HashAlgorithm: hashAlg,
|
||||
SigAlgorithm: sigAlg,
|
||||
Signature: sig,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// FetchCTLogList fetches the CT log list JSON and returns a map keyed by hex log_id.
// Results are cached by URL so the network is only hit once per process.
func FetchCTLogList(url string) (map[string]CTLogInfo, error) {
	// Fast path: serve a previously fetched list from the process-wide cache.
	ctLogCacheMu.Lock()
	if cached, ok := ctLogCache[url]; ok {
		ctLogCacheMu.Unlock()
		return cached, nil
	}
	ctLogCacheMu.Unlock()
	// NOTE(review): the lock is released during the fetch, so two concurrent
	// callers may both hit the network; the later store simply overwrites the
	// earlier one with equivalent data. Benign, but worth knowing.

	data, err := FetchURL(url)
	if err != nil {
		return nil, err
	}

	// Anonymous struct mirroring only the fields we need from the log-list
	// schema: operators, each with a name and a list of logs.
	var list struct {
		Operators []struct {
			Name string `json:"name"`
			Logs []struct {
				Description string                     `json:"description"`
				LogID       string                     `json:"log_id"`
				URL         string                     `json:"url"`
				State       map[string]json.RawMessage `json:"state"`
			} `json:"logs"`
		} `json:"operators"`
	}
	if err := json.Unmarshal(data, &list); err != nil {
		return nil, fmt.Errorf("parse log list: %w", err)
	}

	result := make(map[string]CTLogInfo)
	for _, op := range list.Operators {
		for _, log := range op.Logs {
			// log_id is base64 in the list; skip entries that do not decode
			// rather than failing the whole list.
			raw, err := base64.StdEncoding.DecodeString(log.LogID)
			if err != nil {
				continue
			}
			hexID := hex.EncodeToString(raw)
			// The "state" object uses a single key to name the state
			// (presumably e.g. "usable" — schema not visible here); take
			// whichever key is present.
			state := ""
			for k := range log.State {
				state = k
				break
			}
			result[hexID] = CTLogInfo{
				Description: log.Description,
				URL:         log.URL,
				Operator:    op.Name,
				State:       state,
			}
		}
	}
	ctLogCacheMu.Lock()
	ctLogCache[url] = result
	ctLogCacheMu.Unlock()

	return result, nil
}
|
||||
|
||||
// fetchIssuer fetches the issuer certificate at /issuer/<fingerprint> and returns parsed info.
|
||||
// Results are cached by URL so the same issuer is only fetched once per process.
|
||||
func fetchIssuer(logURL, fingerprint string) (*IssuerInfo, error) {
|
||||
url := logURL + "/issuer/" + fingerprint
|
||||
|
||||
issuerCacheMu.Lock()
|
||||
if cached, ok := issuerCache[url]; ok {
|
||||
issuerCacheMu.Unlock()
|
||||
return cached, nil
|
||||
}
|
||||
issuerCacheMu.Unlock()
|
||||
|
||||
data, err := FetchURL(url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cert, err := x509.ParseCertificate(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("parse issuer cert: %w", err)
|
||||
}
|
||||
info := &IssuerInfo{
|
||||
Fingerprint: fingerprint,
|
||||
Subject: cert.Subject.String(),
|
||||
Issuer: cert.Issuer.String(),
|
||||
NotBefore: cert.NotBefore.UTC().Format(time.RFC3339),
|
||||
NotAfter: cert.NotAfter.UTC().Format(time.RFC3339),
|
||||
SerialNumber: cert.SerialNumber.String(),
|
||||
}
|
||||
|
||||
issuerCacheMu.Lock()
|
||||
issuerCache[url] = info
|
||||
issuerCacheMu.Unlock()
|
||||
|
||||
return info, nil
|
||||
}
|
||||
|
||||
func convertEntry(e *sunlight.LogEntry, entryNum int, opts Options) Entry {
|
||||
entry := Entry{
|
||||
EntryNumber: entryNum,
|
||||
LeafIndex: e.LeafIndex,
|
||||
@@ -168,11 +432,35 @@ func convertEntry(e *sunlight.LogEntry, entryNum int) Entry {
|
||||
entry.PreCertificateSize = &size
|
||||
}
|
||||
|
||||
// Convert chain fingerprints to hex strings
|
||||
// Convert chain fingerprints to hex strings and optionally fetch issuer details.
|
||||
entry.ChainFingerprints = make([]string, len(e.ChainFingerprints))
|
||||
for i, fp := range e.ChainFingerprints {
|
||||
entry.ChainFingerprints[i] = hex.EncodeToString(fp[:])
|
||||
}
|
||||
if opts.ShowIssuer && opts.LogURL != "" {
|
||||
for _, fp := range entry.ChainFingerprints {
|
||||
info, err := fetchIssuer(opts.LogURL, fp)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "WARNING: could not fetch issuer %s: %v\n", fp, err)
|
||||
continue
|
||||
}
|
||||
entry.Issuers = append(entry.Issuers, *info)
|
||||
}
|
||||
}
|
||||
|
||||
// Optionally extract embedded SCTs from final (non-precert) certificates.
|
||||
if opts.ShowSCT && !e.IsPrecert && len(e.Certificate) > 0 {
|
||||
if scts, err := parseEmbeddedSCTs(e.Certificate); err == nil {
|
||||
if opts.ShowCTLog {
|
||||
for i := range scts {
|
||||
if info, ok := opts.CTLogs[scts[i].LogID]; ok {
|
||||
scts[i].CTLog = &info
|
||||
}
|
||||
}
|
||||
}
|
||||
entry.SCTs = scts
|
||||
}
|
||||
}
|
||||
|
||||
// Try to extract parsed certificate info
|
||||
if trimmed, err := e.TrimmedEntry(); err == nil {
|
||||
|
||||
Reference in New Issue
Block a user