Commit 28d6b86b authored by ale's avatar ale

Refactor debug pages with per-dataset and per-backup views

parent 9189a5a5
Pipeline #8328 passed with stages
in 1 minute and 43 seconds
......@@ -3,6 +3,7 @@ package server
import (
"bytes"
"database/sql"
"fmt"
"html/template"
"io"
"net/http"
......@@ -12,7 +13,7 @@ import (
)
var (
latestDatasetsHTML = `<!DOCTYPE html>
datasetsHTML = `<!DOCTYPE html>
<html>
<head>
<title>Tabacco</title>
......@@ -23,7 +24,7 @@ var (
<p>Started at {{fmtDate .StartTime}}.</p>
<h3>Latest backups</h3>
<h3>{{.Title}}</h3>
<table>
<thead>
......@@ -33,6 +34,7 @@ var (
<th>Host</th>
<th>Dataset</th>
<th>Source</th>
<th>Atoms</th>
<th>Files</th>
<th>Size</th>
</tr>
......@@ -42,15 +44,18 @@ var (
{{range $idx, $d := $b.Datasets}}
<tr>
{{if eq $idx 0}}
<td><b>{{$b.BackupID}}</b></td>
<td><b><a href="/backup/by_id?id={{$b.BackupID}}">
{{$b.BackupID}}</a></b></td>
<td>{{fmtDate $b.BackupTimestamp}}</td>
<td>{{$b.BackupHost}}</td>
{{else}}
<td colspan="3"></td>
{{end}}
<td>{{$d.DatasetID}}</td>
<td><a href="/dataset?source={{$d.DatasetSource}}">
<td><a href="/dataset/by_id?id={{$d.DatasetID}}">
{{$d.DatasetID}}</a></td>
<td><a href="/dataset/by_source?source={{$d.DatasetSource}}">
{{$d.DatasetSource}}</a></td>
<td>{{$d.NumAtoms}}</td>
<td>{{$d.DatasetTotalFiles}}</td>
<td>{{humanBytes $d.DatasetTotalBytes}}
({{humanBytes $d.DatasetBytesAdded}} new)</td>
......@@ -62,9 +67,9 @@ var (
</body>
</html>`
latestDatasetsTemplate *template.Template
datasetsTemplate *template.Template
sourceDebugHTML = `<!DOCTYPE html>
atomsHTML = `<!DOCTYPE html>
<html>
<head>
<title>Tabacco</title>
......@@ -75,13 +80,14 @@ var (
<p>Started at {{fmtDate .StartTime}}.</p>
<h3>Source: {{.Source}}</h3>
<h3>Source: {{.Dataset.DatasetSource}}</h3>
<table>
<tbody>
<tr>
<td>Latest backup ID</td>
<td>{{.Backup.BackupID}}</td>
<td><a href="/backup/by_id?id={{.Backup.BackupID}}">
{{.Backup.BackupID}}</a></td>
</tr>
<tr>
<td>Latest timestamp</td>
......@@ -124,7 +130,7 @@ var (
</body>
</html>`
sourceDebugTemplate *template.Template
atomsTemplate *template.Template
startTime time.Time
)
......@@ -136,10 +142,10 @@ func init() {
return humanize.Bytes(uint64(i))
},
}
latestDatasetsTemplate = template.Must(
template.New("latest").Funcs(funcs).Parse(latestDatasetsHTML))
sourceDebugTemplate = template.Must(
template.New("source").Funcs(funcs).Parse(sourceDebugHTML))
datasetsTemplate = template.Must(
template.New("latest").Funcs(funcs).Parse(datasetsHTML))
atomsTemplate = template.Must(
template.New("source").Funcs(funcs).Parse(atomsHTML))
startTime = time.Now()
}
......@@ -151,6 +157,8 @@ type datasetDebug struct {
DatasetTotalBytes int64
DatasetBytesAdded int64
DatasetDuration int
NumAtoms int
}
type backupDebug struct {
......@@ -161,43 +169,49 @@ type backupDebug struct {
Datasets []*datasetDebug
}
func (s *httpServer) handleDebug(w http.ResponseWriter, r *http.Request) {
backupMap := make(map[string]*backupDebug)
// readDatasets converts the rows of a backup/dataset query into a list
// of backupDebug structs, grouping dataset rows that share a backup ID
// under a single backup entry. Backups are returned in order of first
// appearance in the result set.
func readDatasets(rows *sql.Rows) ([]*backupDebug, error) {
	var (
		ordered []*backupDebug
		byID    = make(map[string]*backupDebug)
	)
	for rows.Next() {
		var (
			backup  backupDebug
			dataset datasetDebug
		)
		err := rows.Scan(
			&backup.BackupID, &backup.BackupTimestamp, &backup.BackupHost,
			&dataset.DatasetID, &dataset.DatasetSnapshotID, &dataset.DatasetSource,
			&dataset.DatasetTotalFiles, &dataset.DatasetTotalBytes,
			&dataset.DatasetBytesAdded, &dataset.DatasetDuration, &dataset.NumAtoms,
		)
		if err != nil {
			return nil, err
		}
		cur, seen := byID[backup.BackupID]
		if !seen {
			cur = &backup
			byID[backup.BackupID] = cur
			ordered = append(ordered, cur)
		}
		cur.Datasets = append(cur.Datasets, &dataset)
	}
	return ordered, rows.Err()
}
func (s *httpServer) queryAndShowDatasets(w http.ResponseWriter, r *http.Request, title, queryName string, queryArgs ...interface{}) {
var backups []*backupDebug
err := retryBusy(r.Context(), func() error {
return withTX(r.Context(), s.db, func(tx *sql.Tx) error {
stmt := s.stmts.get(tx, "get_latest_datasets")
stmt := s.stmts.get(tx, queryName)
defer stmt.Close()
rows, err := stmt.Query(30)
rows, err := stmt.Query(queryArgs...)
if err != nil {
return err
}
defer rows.Close()
for rows.Next() {
var bd backupDebug
var dd datasetDebug
if err := rows.Scan(
&bd.BackupID, &bd.BackupTimestamp, &bd.BackupHost,
&dd.DatasetID, &dd.DatasetSnapshotID, &dd.DatasetSource,
&dd.DatasetTotalFiles, &dd.DatasetTotalBytes,
&dd.DatasetBytesAdded, &dd.DatasetDuration,
); err != nil {
return err
}
b, ok := backupMap[bd.BackupID]
if !ok {
b = &bd
backupMap[bd.BackupID] = b
backups = append(backups, b)
}
b.Datasets = append(b.Datasets, &dd)
}
return rows.Err()
backups, err = readDatasets(rows)
return err
})
})
if err != nil {
......@@ -206,7 +220,8 @@ func (s *httpServer) handleDebug(w http.ResponseWriter, r *http.Request) {
}
var buf bytes.Buffer
if err := latestDatasetsTemplate.Execute(&buf, map[string]interface{}{
if err := datasetsTemplate.Execute(&buf, map[string]interface{}{
"Title": title,
"Backups": backups,
"StartTime": startTime,
}); err != nil {
......@@ -216,20 +231,44 @@ func (s *httpServer) handleDebug(w http.ResponseWriter, r *http.Request) {
io.Copy(w, &buf) // nolint
}
func (s *httpServer) handleDebugSource(w http.ResponseWriter, r *http.Request) {
datasetSource := r.FormValue("source")
if datasetSource == "" {
http.Error(w, "No dataset specified", http.StatusBadRequest)
// handleDebug renders the default debug page, listing the most recent
// backups across all dataset sources.
func (s *httpServer) handleDebug(w http.ResponseWriter, r *http.Request) {
	// Cap the default view at the 30 most recent backups.
	const numLatest = 30
	s.queryAndShowDatasets(w, r, "Latest backups", "get_latest_datasets", numLatest)
}
// handleDebugDatasetsBySource renders the debug page listing the most
// recent backups of a single dataset source, taken from the "source"
// request parameter. Responds with 400 if the parameter is missing.
func (s *httpServer) handleDebugDatasetsBySource(w http.ResponseWriter, r *http.Request) {
	source := r.FormValue("source")
	if source == "" {
		http.Error(w, "No source specified", http.StatusBadRequest)
		return
	}
	title := fmt.Sprintf("Latest backups for %s", source)
	s.queryAndShowDatasets(w, r, title, "get_latest_datasets_by_source", source, 30)
}
// handleDebugBackupByID renders the debug page for a single backup,
// identified by the "id" request parameter. Responds with 400 if the
// parameter is missing.
func (s *httpServer) handleDebugBackupByID(w http.ResponseWriter, r *http.Request) {
	id := r.FormValue("id")
	if id == "" {
		http.Error(w, "No id specified", http.StatusBadRequest)
		return
	}
	title := fmt.Sprintf("Backup %s", id)
	s.queryAndShowDatasets(w, r, title, "get_backup_by_id", id)
}
func (s *httpServer) handleDebugDatasetByID(w http.ResponseWriter, r *http.Request) {
id := r.FormValue("id")
if id == "" {
http.Error(w, "No id specified", http.StatusBadRequest)
return
}
var atoms []*dbAtom
err := retryBusy(r.Context(), func() error {
return withTX(r.Context(), s.db, func(tx *sql.Tx) error {
stmt := s.stmts.get(tx, "get_latest_dataset_by_source")
stmt := s.stmts.get(tx, "get_dataset_by_id")
defer stmt.Close()
rows, err := stmt.Query(datasetSource)
rows, err := stmt.Query(id)
if err != nil {
return err
}
......@@ -263,8 +302,7 @@ func (s *httpServer) handleDebugSource(w http.ResponseWriter, r *http.Request) {
}
var buf bytes.Buffer
if err := sourceDebugTemplate.Execute(&buf, map[string]interface{}{
"Source": datasetSource,
if err := atomsTemplate.Execute(&buf, map[string]interface{}{
"Atoms": atoms,
"Backup": atoms[0],
"Dataset": atoms[0],
......
......@@ -32,13 +32,24 @@ func TestDebugPage(t *testing.T) {
}
resp.Body.Close()
// Backup debug page.
resp, err = http.Get(srv.URL + "/backup/by_id?id=1234")
if err != nil {
t.Fatalf("Get(/backup/by_id): %v", err)
}
if resp.StatusCode != 200 {
t.Errorf("Get(/backup/by_id): %s", resp.Status)
io.Copy(os.Stderr, resp.Body) // nolint
}
resp.Body.Close()
// Source debug page.
resp, err = http.Get(srv.URL + "/dataset?source=file")
resp, err = http.Get(srv.URL + "/dataset/by_source?source=file")
if err != nil {
t.Fatalf("Get(/dataset): %v", err)
t.Fatalf("Get(/dataset/by_source): %v", err)
}
if resp.StatusCode != 200 {
t.Errorf("Get(/dataset): %s", resp.Status)
t.Errorf("Get(/dataset/by_source): %s", resp.Status)
io.Copy(os.Stderr, resp.Body) // nolint
}
resp.Body.Close()
......
......@@ -52,7 +52,9 @@ func (s *httpServer) Handler() http.Handler {
m := http.NewServeMux()
m.HandleFunc("/api/add_dataset", s.handleAddDataset)
m.HandleFunc("/api/find_atoms", s.handleFindAtoms)
m.HandleFunc("/dataset", s.handleDebugSource)
m.HandleFunc("/dataset/by_source", s.handleDebugDatasetsBySource)
m.HandleFunc("/dataset/by_id", s.handleDebugDatasetByID)
m.HandleFunc("/backup/by_id", s.handleDebugBackupByID)
m.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path != "/" {
http.NotFound(w, r)
......
......@@ -226,7 +226,7 @@ var statements = map[string]string{
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration
dataset_duration, COUNT(*) AS num_atoms
FROM log
WHERE backup_id IN (
SELECT backup_id
......@@ -241,23 +241,47 @@ var statements = map[string]string{
dataset_duration
ORDER BY backup_timestamp DESC
`,
"get_latest_dataset_by_source": `
"get_latest_datasets_by_source": `
SELECT
l.backup_id, l.backup_timestamp, l.backup_host,
l.dataset_id, l.dataset_snapshot_id, l.dataset_source,
l.dataset_total_files, l.dataset_total_bytes, l.dataset_bytes_added,
l.dataset_duration,
l.atom_name, l.atom_path, l.atom_full_path
FROM (
SELECT backup_id, dataset_id, MAX(backup_timestamp)
FROM log
WHERE dataset_source = ?
) AS m
INNER JOIN log AS l
WHERE
m.backup_id = l.backup_id AND
m.dataset_id = l.dataset_id
ORDER BY l.atom_name ASC
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration, COUNT(*) AS num_atoms
FROM log
WHERE dataset_source = ?
GROUP BY
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration
ORDER BY backup_timestamp DESC
LIMIT ?
`,
"get_backup_by_id": `
SELECT
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration, COUNT(*) AS num_atoms
FROM log
WHERE backup_id = ?
GROUP BY
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration
ORDER BY dataset_source ASC
`,
"get_dataset_by_id": `
SELECT
backup_id, backup_timestamp, backup_host,
dataset_id, dataset_snapshot_id, dataset_source,
dataset_total_files, dataset_total_bytes, dataset_bytes_added,
dataset_duration,
atom_name, atom_path, atom_full_path
FROM log
WHERE dataset_id = ?
ORDER BY atom_name ASC
`,
}
......
......@@ -21,6 +21,7 @@ func addTestEntry(t *testing.T, svc *Service, backupID, host, dsName string) {
Timestamp: time.Now(),
},
tabacco.Dataset{
ID: "ds" + backupID,
Source: "file",
Atoms: []tabacco.Atom{
{
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment