Commit 88297a9f authored by ale's avatar ale
Browse files

Add a debug page with per-source summaries

parent c6fa55ac
Pipeline #20865 passed with stages
in 1 minute and 39 seconds
......@@ -2,6 +2,7 @@ package server
import (
"bytes"
"context"
"database/sql"
"fmt"
"html/template"
......@@ -68,6 +69,9 @@ var (
</tbody>
</table>
<p>
<a href="/sources">Source list</a>
</p>
</body>
</html>`
datasetsTemplate *template.Template
......@@ -139,6 +143,73 @@ var (
</html>`
atomsTemplate *template.Template
allSourcesHTML = `<!DOCTYPE html>
<html>
<head>
<title>Tabacco</title>
</head>
<body>
<h1>Tabacco</h1>
<p>Started at {{fmtDate .StartTime}}.</p>
<h3>Latest backups by source</h3>
<table>
<thead>
<tr>
<th>ID</th>
<th>Time</th>
<th>Host</th>
<th>Source</th>
</tr>
</thead>
<tbody>
{{range $idx, $b := .Backups}}
<tr>
<td><b><a href="/backup/by_id?id={{$b.BackupID}}">
{{$b.BackupID}}</a></b></td>
<td>{{fmtDate $b.BackupTimestamp}}</td>
<td><a href="/dataset/by_host?host={{$b.BackupHost}}">
{{$b.BackupHost}}</a></td>
<td><a href="/dataset/by_source?source={{$b.DatasetSource}}">
{{$b.DatasetSource}}</a></td>
</tr>
{{end}}
</tbody>
</table>
<h3>Backup size by source</h3>
// select a.dataset_source, sum(a.dataset_total_bytes) from log as a left join (select backup_id from log group by dataset_source, backup_host having backup_timestamp = max(backup_timestamp)) as b on a.backup_id = b.backup_id group by a.dataset_source;
<table>
<thead>
<tr>
<th>Source</th>
<th>Size (GB)</th>
</tr>
</thead>
<tbody>
{{range $idx, $b := .SourcesBySize}}
<tr>
<td><a href="/dataset/by_source?source={{$b.DatasetSource}}">
{{$b.DatasetSource}}</a></td>
<td><b>{{$b.Size}}</b></td>
</tr>
{{end}}
</tbody>
</table>
<p>
<a href="/">Latest backups</a>
</p>
</body>
</html>
`
	// allSourcesTemplate is the parsed form of allSourcesHTML,
	// compiled in init() with the shared template funcs.
	allSourcesTemplate *template.Template

	// startTime is when the server started; shown on the debug pages.
	startTime time.Time
)
......@@ -153,6 +224,8 @@ func init() {
template.New("latest").Funcs(funcs).Parse(datasetsHTML))
atomsTemplate = template.Must(
template.New("source").Funcs(funcs).Parse(atomsHTML))
allSourcesTemplate = template.Must(
template.New("all_sources").Funcs(funcs).Parse(allSourcesHTML))
startTime = time.Now()
}
......@@ -331,6 +404,98 @@ func (s *httpServer) handleDebugDatasetByID(w http.ResponseWriter, r *http.Reque
io.Copy(w, &buf) // nolint
}
// latestBackup is one row of the "latest backups by source" debug
// view: the most recent backup recorded for a (source, host) pair,
// as returned by the "get_latest_backups_by_source" query.
type latestBackup struct {
	BackupID        string
	BackupTimestamp time.Time
	BackupHost      string
	DatasetSource   string
}
// sourceSize is one row of the "backup size by source" debug view.
type sourceSize struct {
	DatasetSource string
	// Size is the aggregate backup size in decimal gigabytes
	// (bytes / 1e9, truncated).
	Size int64
}
// fetchLatestBackupsBySource returns the most recent backup for each
// (source, host) pair, ordered by source name, by running the
// "get_latest_backups_by_source" statement.
func (s *httpServer) fetchLatestBackupsBySource(ctx context.Context) ([]*latestBackup, error) {
	var out []*latestBackup
	err := retryBusy(ctx, func() error {
		return withTX(ctx, s.db, func(tx *sql.Tx) error {
			// The statement is registered as "get_latest_backups_by_source"
			// (plural) in the statements map; the previous singular name
			// did not match any registered statement.
			stmt := s.stmts.get(tx, "get_latest_backups_by_source")
			defer stmt.Close()
			rows, err := stmt.QueryContext(ctx)
			if err != nil {
				return err
			}
			defer rows.Close()
			for rows.Next() {
				var lb latestBackup
				// Scan destinations must follow the SELECT column order:
				// backup_id, dataset_source, backup_host, backup_timestamp.
				if err := rows.Scan(
					&lb.BackupID, &lb.DatasetSource, &lb.BackupHost, &lb.BackupTimestamp,
				); err != nil {
					return err
				}
				out = append(out, &lb)
			}
			return rows.Err()
		})
	})
	return out, err
}
// fetchSourcesSize returns the total size of the latest backups,
// aggregated by source, via the "get_sources_size" statement.
func (s *httpServer) fetchSourcesSize(ctx context.Context) ([]*sourceSize, error) {
	var out []*sourceSize
	err := retryBusy(ctx, func() error {
		return withTX(ctx, s.db, func(tx *sql.Tx) error {
			stmt := s.stmts.get(tx, "get_sources_size")
			defer stmt.Close()
			rows, err := stmt.QueryContext(ctx)
			if err != nil {
				return err
			}
			defer rows.Close()
			for rows.Next() {
				// Named "row" rather than "s" so it does not shadow
				// the method receiver.
				var row sourceSize
				var sz int64
				if err := rows.Scan(
					&row.DatasetSource, &sz,
				); err != nil {
					return err
				}
				// Convert bytes to decimal gigabytes (truncating),
				// matching the "Size (GB)" column in the template.
				row.Size = sz / 1000000000
				out = append(out, &row)
			}
			return rows.Err()
		})
	})
	return out, err
}
// handleDebugAllSources serves the /sources debug page, showing the
// latest backup and the aggregate backup size for every source.
func (s *httpServer) handleDebugAllSources(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	latest, err := s.fetchLatestBackupsBySource(ctx)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	sizes, err := s.fetchSourcesSize(ctx)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Render into a buffer first so template errors can still be
	// reported with a 500 instead of a half-written page.
	data := map[string]interface{}{
		"Backups":       latest,
		"SourcesBySize": sizes,
		"StartTime":     startTime,
	}
	var page bytes.Buffer
	if err := allSourcesTemplate.Execute(&page, data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	io.Copy(w, &page) // nolint
}
func fmtDate(t time.Time) string {
return t.Format(time.RFC3339)
}
......@@ -56,6 +56,7 @@ func (s *httpServer) Handler() http.Handler {
m.HandleFunc("/dataset/by_host", s.handleDebugDatasetsByHost)
m.HandleFunc("/dataset/by_id", s.handleDebugDatasetByID)
m.HandleFunc("/backup/by_id", s.handleDebugBackupByID)
m.HandleFunc("/sources", s.handleDebugAllSources)
m.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/" {
http.NotFound(w, r)
......
......@@ -298,6 +298,25 @@ var statements = map[string]string{
FROM log
WHERE dataset_id = ?
ORDER BY atom_name ASC
`,
"get_latest_backups_by_source": `
SELECT
backup_id, dataset_source, backup_host, backup_timestamp
FROM log
GROUP BY dataset_source, backup_host
HAVING backup_timestamp = max(backup_timestamp)
ORDER BY dataset_source ASC
`,
"get_sources_size": `
SELECT
a.dataset_source, sum(a.dataset_total_bytes)
FROM log AS a LEFT JOIN (
SELECT backup_id FROM log
GROUP BY dataset_source, backup_host
HAVING backup_timestamp = max(backup_timestamp)) AS b
ON a.backup_id = b.backup_id
GROUP BY a.dataset_source
ORDER BY a.dataset_source ASC
`,
}
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment