Commit 3e8a4ef5 authored by Nick Groenen's avatar Nick Groenen

Support scraping of "global" scope

PR #5 (itself based on #4) introduced support to scrape the "global" scope,
however the format of these stats differs slightly from that of the
others.

As a result of this, combined with the silent return when `len(values)
!= len(columns)+1`, these metrics were being incorrectly exported as
mislabeled metrics with values of 0.

The changes in this PR accommodate the format of the "global" scope
and now properly scrape those stats as well.

It will now also display an error when column counts mismatch, reducing
the likelihood of silent errors in the future.

Global metrics will now show up as:

```
dovecot_global_auth_cache_hits 0
dovecot_global_auth_cache_misses 0
dovecot_global_auth_db_tempfails 3
dovecot_global_auth_failures 6
dovecot_global_auth_master_successes 0
dovecot_global_auth_successes 228
dovecot_global_clock_time 0
dovecot_global_disk_input 0
dovecot_global_disk_output 0
dovecot_global_invol_cs 0
dovecot_global_last_update 1.516197189175826e+09
dovecot_global_mail_cache_hits 0
dovecot_global_mail_lookup_attr 0
dovecot_global_mail_lookup_path 0
dovecot_global_mail_read_bytes 0
dovecot_global_mail_read_count 0
dovecot_global_maj_faults 0
dovecot_global_min_faults 0
dovecot_global_num_cmds 0
dovecot_global_num_connected_sessions 234
dovecot_global_num_logins 234
dovecot_global_read_bytes 0
dovecot_global_read_count 0
dovecot_global_reset_timestamp 1.516190181e+09
dovecot_global_sys_cpu 0
dovecot_global_user_cpu 0
dovecot_global_vol_cs 0
dovecot_global_write_bytes 0
dovecot_global_write_count 0
dovecot_up{scope="global"} 1
```

...with the other scopes being unaffected.
parent 9bbdcdbf
......@@ -34,9 +34,89 @@ var dovecotUpDesc = prometheus.NewDesc(
[]string{"scope"},
nil)
// Converts the output of Dovecot's EXPORT command to metrics.
func CollectFromReader(file io.Reader, ch chan<- prometheus.Metric) error {
scanner := bufio.NewScanner(file)
// CollectFromReader converts the output of Dovecot's EXPORT command to metrics.
func CollectFromReader(file io.Reader, scope string, ch chan<- prometheus.Metric) error {
	// The "global" scope uses a different layout (single value row, no
	// aggregation column), so it gets its own parser.
	switch scope {
	case "global":
		return collectGlobalMetricsFromReader(file, scope, ch)
	default:
		return collectDetailMetricsFromReader(file, scope, ch)
	}
}
// CollectFromFile collects dovecot statistics from the given file,
// emitting one metric per column on ch.
func CollectFromFile(path string, scope string, ch chan<- prometheus.Metric) error {
	conn, err := os.Open(path)
	if err != nil {
		return err
	}
	// Close the file once scraping finishes; the handle would otherwise
	// leak on every scrape.
	defer conn.Close()
	return CollectFromReader(conn, scope, ch)
}
// CollectFromSocket collects statistics from dovecot's stats socket by
// issuing an EXPORT command for the given scope and parsing the reply.
func CollectFromSocket(path string, scope string, ch chan<- prometheus.Metric) error {
	conn, err := net.Dial("unix", path)
	if err != nil {
		return err
	}
	// Close the connection once scraping finishes; it would otherwise
	// leak on every scrape.
	defer conn.Close()
	_, err = conn.Write([]byte("EXPORT\t" + scope + "\n"))
	if err != nil {
		return err
	}
	return CollectFromReader(conn, scope, ch)
}
// collectGlobalMetricsFromReader collects dovecot "global" scope metrics from
// the supplied reader. Unlike the other scopes, the global output has no
// leading aggregation column and exactly one row of values.
func collectGlobalMetricsFromReader(reader io.Reader, scope string, ch chan<- prometheus.Metric) error {
	scanner := bufio.NewScanner(reader)
	scanner.Split(bufio.ScanLines)
	// Read first line of input, containing the column names.
	if !scanner.Scan() {
		// Error strings are lowercase per Go convention (staticcheck ST1005).
		return fmt.Errorf("failed to extract columns from input")
	}
	columnNames := strings.Fields(scanner.Text())
	if len(columnNames) < 1 {
		return fmt.Errorf("input does not provide any columns")
	}
	columns := []*prometheus.Desc{}
	for _, columnName := range columnNames {
		columns = append(columns, prometheus.NewDesc(
			prometheus.BuildFQName("dovecot", scope, columnName),
			"Help text not provided by this exporter.",
			[]string{},
			nil))
	}
	// Global metrics only have a single row containing values following the
	// line with column names.
	if !scanner.Scan() {
		return scanner.Err()
	}
	values := strings.Fields(scanner.Text())
	if len(values) != len(columns) {
		// Include the counts to make mismatches diagnosable from the log.
		return fmt.Errorf("error while parsing row: value count (%d) does not match column count (%d)", len(values), len(columns))
	}
	for i, value := range values {
		f, err := strconv.ParseFloat(value, 64)
		if err != nil {
			return err
		}
		// Dovecot gives no type information, so export everything untyped.
		ch <- prometheus.MustNewConstMetric(
			columns[i],
			prometheus.UntypedValue,
			f,
		)
	}
	return scanner.Err()
}
// collectDetailMetricsFromReader collects dovecot "non-global" scope metrics
// from the supplied reader.
func collectDetailMetricsFromReader(reader io.Reader, scope string, ch chan<- prometheus.Metric) error {
scanner := bufio.NewScanner(reader)
scanner.Split(bufio.ScanLines)
// Read first line of input, containing the aggregation and column names.
......@@ -47,6 +127,7 @@ func CollectFromReader(file io.Reader, ch chan<- prometheus.Metric) error {
if len(columnNames) < 2 {
return fmt.Errorf("Input does not provide any columns")
}
columns := []*prometheus.Desc{}
for _, columnName := range columnNames[1:] {
columns = append(columns, prometheus.NewDesc(
......@@ -58,10 +139,16 @@ func CollectFromReader(file io.Reader, ch chan<- prometheus.Metric) error {
// Read successive lines, containing the values.
for scanner.Scan() {
values := strings.Fields(scanner.Text())
if len(values) != len(columns)+1 {
row := scanner.Text()
if strings.TrimSpace(row) == "" {
break
}
values := strings.Fields(row)
if len(values) != len(columns)+1 {
return fmt.Errorf("error while parsing rows: value count does not match column count")
}
for i, value := range values[1:] {
f, err := strconv.ParseFloat(value, 64)
if err != nil {
......@@ -77,26 +164,6 @@ func CollectFromReader(file io.Reader, ch chan<- prometheus.Metric) error {
return scanner.Err()
}
// CollectFromFile reads dovecot EXPORT-format statistics from the file at
// path and emits them on ch.
// NOTE(review): the opened file handle is never closed — leaks on each call.
func CollectFromFile(path string, ch chan<- prometheus.Metric) error {
conn, err := os.Open(path)
if err != nil {
return err
}
return CollectFromReader(conn, ch)
}
// CollectFromSocket dials dovecot's unix stats socket at path, requests the
// given scope via the EXPORT command, and emits the parsed reply on ch.
// NOTE(review): the connection is never closed — leaks on each call.
func CollectFromSocket(path string, scope string, ch chan<- prometheus.Metric) error {
conn, err := net.Dial("unix", path)
if err != nil {
return err
}
_, err = conn.Write([]byte("EXPORT\t" + scope + "\n"))
if err != nil {
return err
}
return CollectFromReader(conn, ch)
}
type DovecotExporter struct {
scopes []string
socketPath string
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment