PMM-7 add missing labels for pg exporter #75

Merged: 3 commits, Sep 15, 2022
19 changes: 19 additions & 0 deletions cmd/postgres_exporter/postgres_exporter.go
@@ -161,6 +161,23 @@ func dumpMaps() {
}

var builtinMetricMaps = map[string]intermediateMetricMap{
"pg_stat_bgwriter": {
map[string]ColumnMapping{
"checkpoints_timed": {COUNTER, "Number of scheduled checkpoints that have been performed", nil, nil},
"checkpoints_req": {COUNTER, "Number of requested checkpoints that have been performed", nil, nil},
"checkpoint_write_time": {COUNTER, "Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds", nil, nil},
"checkpoint_sync_time": {COUNTER, "Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk, in milliseconds", nil, nil},
"buffers_checkpoint": {COUNTER, "Number of buffers written during checkpoints", nil, nil},
"buffers_clean": {COUNTER, "Number of buffers written by the background writer", nil, nil},
"maxwritten_clean": {COUNTER, "Number of times the background writer stopped a cleaning scan because it had written too many buffers", nil, nil},
"buffers_backend": {COUNTER, "Number of buffers written directly by a backend", nil, nil},
"buffers_backend_fsync": {COUNTER, "Number of times a backend had to execute its own fsync call (normally the background writer handles those even when the backend does its own write)", nil, nil},
"buffers_alloc": {COUNTER, "Number of buffers allocated", nil, nil},
"stats_reset": {COUNTER, "Time at which these statistics were last reset", nil, nil},
},
true,
0,
},
"pg_stat_database": {
map[string]ColumnMapping{
"datid": {LABEL, "OID of a database", nil, nil},
@@ -525,6 +542,8 @@ func WithConstantLabels(s string) ExporterOpt {
e.constantLabels = parseConstLabels(s)
if e.collectorName != "" {
e.constantLabels["collector"] = e.collectorName
+} else {
+e.constantLabels["collector"] = "exporter"
Author comment: default label for compatibility with our old exporter
}
}
}
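
The hunk above is the core compatibility fix on the exporter side: when no collector name is configured, every metric now gets a constant collector="exporter" label, matching what the old exporter emitted. A minimal self-contained sketch of the option's behavior; Exporter, ExporterOpt, and the stubbed-out label parsing below are illustrative stand-ins, not the real definitions:

package main

import "fmt"

type Exporter struct {
	collectorName  string
	constantLabels map[string]string
}

type ExporterOpt func(*Exporter)

// WithConstantLabels mirrors the patched logic: fall back to
// collector="exporter" when no collector name was configured.
func WithConstantLabels(s string) ExporterOpt {
	return func(e *Exporter) {
		e.constantLabels = map[string]string{} // real code: parseConstLabels(s)
		if e.collectorName != "" {
			e.constantLabels["collector"] = e.collectorName
		} else {
			e.constantLabels["collector"] = "exporter"
		}
	}
}

func main() {
	e := &Exporter{}
	WithConstantLabels("")(e)
	fmt.Println(e.constantLabels["collector"]) // prints: exporter
}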
66 changes: 44 additions & 22 deletions collector/pg_stat_bgwriter.go
@@ -38,133 +38,133 @@ var statBGWriter = map[string]*prometheus.Desc{
"checkpoints_timed": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_timed_total"),
"Number of scheduled checkpoints that have been performed",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"checkpoints_req": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_req_total"),
"Number of requested checkpoints that have been performed",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"checkpoint_write_time": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_write_time_total"),
"Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"checkpoint_sync_time": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_sync_time_total"),
"Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk, in milliseconds",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"buffers_checkpoint": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_checkpoint_total"),
"Number of buffers written during checkpoints",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"buffers_clean": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_clean_total"),
"Number of buffers written by the background writer",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"maxwritten_clean": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "maxwritten_clean_total"),
"Number of times the background writer stopped a cleaning scan because it had written too many buffers",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"buffers_backend": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend_total"),
"Number of buffers written directly by a backend",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"buffers_backend_fsync": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend_fsync_total"),
"Number of times a backend had to execute its own fsync call (normally the background writer handles those even when the backend does its own write)",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"buffers_alloc": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_alloc_total"),
"Number of buffers allocated",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"stats_reset": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "stats_reset_total"),
"Time at which these statistics were last reset",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_checkpoints_timed": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_timed"),
"Number of scheduled checkpoints that have been performed",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_checkpoints_req": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_req"),
"Number of requested checkpoints that have been performed",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_checkpoint_write_time": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_write_time"),
"Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_checkpoint_sync_time": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_sync_time"),
"Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk, in milliseconds",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_buffers_checkpoint": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_checkpoint"),
"Number of buffers written during checkpoints",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_buffers_clean": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_clean"),
"Number of buffers written by the background writer",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_maxwritten_clean": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "maxwritten_clean"),
"Number of times the background writer stopped a cleaning scan because it had written too many buffers",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_buffers_backend": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend"),
"Number of buffers written directly by a backend",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_buffers_backend_fsync": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend_fsync"),
"Number of times a backend had to execute its own fsync call (normally the background writer handles those even when the backend does its own write)",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_buffers_alloc": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_alloc"),
"Number of buffers allocated",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
"percona_stats_reset": prometheus.NewDesc(
prometheus.BuildFQName(namespace, bgWriterSubsystem, "stats_reset"),
"Time at which these statistics were last reset",
[]string{"server"},
[]string{"collector", "server"},
prometheus.Labels{},
),
}
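
Every descriptor above changes the same way: the variable-label slice grows from {"server"} to {"collector", "server"}. With the Prometheus client library, variable labels declared in NewDesc must be matched one-to-one, in order, by the label values passed to MustNewConstMetric, which is why each emission in Update below gains a leading "exporter" argument. A small runnable illustration (the "pg" namespace and the server address are assumed values):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	desc := prometheus.NewDesc(
		prometheus.BuildFQName("pg", "stat_bgwriter", "checkpoints_timed_total"),
		"Number of scheduled checkpoints that have been performed",
		[]string{"collector", "server"}, // two variable labels declared...
		prometheus.Labels{},
	)

	// ...so exactly two label values must be supplied, in the same order.
	m := prometheus.MustNewConstMetric(
		desc,
		prometheus.CounterValue,
		42,
		"exporter",       // collector
		"127.0.0.1:5432", // server
	)
	fmt.Println(m.Desc())
}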
@@ -211,133 +211,155 @@ func (PGStatBGWriterCollector) Update(ctx context.Context, server *server, ch chan<- prometheus.Metric) error {
statBGWriter["checkpoints_timed"],
prometheus.CounterValue,
float64(cpt),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["checkpoints_req"],
prometheus.CounterValue,
float64(cpr),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["checkpoint_write_time"],
prometheus.CounterValue,
float64(cpwt),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["checkpoint_sync_time"],
prometheus.CounterValue,
float64(cpst),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["buffers_checkpoint"],
prometheus.CounterValue,
float64(bcp),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["buffers_clean"],
prometheus.CounterValue,
float64(bc),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["maxwritten_clean"],
prometheus.CounterValue,
float64(mwc),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["buffers_backend"],
prometheus.CounterValue,
float64(bb),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["buffers_backend_fsync"],
prometheus.CounterValue,
float64(bbf),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["buffers_alloc"],
prometheus.CounterValue,
float64(ba),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["stats_reset"],
prometheus.CounterValue,
float64(sr.Unix()),
"exporter",
server.GetName(),
)

ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_checkpoints_timed"],
prometheus.CounterValue,
float64(cpt),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_checkpoints_req"],
prometheus.CounterValue,
float64(cpr),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_checkpoint_write_time"],
prometheus.CounterValue,
float64(cpwt),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_checkpoint_sync_time"],
prometheus.CounterValue,
float64(cpst),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_buffers_checkpoint"],
prometheus.CounterValue,
float64(bcp),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_buffers_clean"],
prometheus.CounterValue,
float64(bc),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_maxwritten_clean"],
prometheus.CounterValue,
float64(mwc),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_buffers_backend"],
prometheus.CounterValue,
float64(bb),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_buffers_backend_fsync"],
prometheus.CounterValue,
float64(bbf),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_buffers_alloc"],
prometheus.CounterValue,
float64(ba),
"exporter",
server.GetName(),
)
ch <- prometheus.MustNewConstMetric(
statBGWriter["percona_stats_reset"],
prometheus.CounterValue,
float64(sr.Unix()),
"exporter",
server.GetName(),
)
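
If the wiring above is right, each bgwriter statistic is now exposed twice, once under the upstream name with the _total suffix and once under the suffix-less name kept for compatibility with the old exporter, both carrying the new label. Expected scrape output, assuming namespace "pg", subsystem "stat_bgwriter", and an illustrative server address:

pg_stat_bgwriter_checkpoints_timed_total{collector="exporter",server="127.0.0.1:5432"} 27
pg_stat_bgwriter_checkpoints_timed{collector="exporter",server="127.0.0.1:5432"} 27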
