Skip to content

Commit 7fdcaba

Browse files
arvenil authored and AlekSi committed
PMM-2224: Fix Exporter CPU Usage glitches.
1 parent 0a335cc commit 7fdcaba

File tree

2 files changed

+10
-8
lines changed

2 files changed

+10
-8
lines changed

collector/info_schema_auto_increment.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import (
99
)
1010

1111
const infoSchemaAutoIncrementQuery = `
12-
SELECT t.table_schema, t.table_name, column_name, `+"`auto_increment`"+`,
12+
SELECT t.table_schema, t.table_name, column_name, ` + "`auto_increment`" + `,
1313
pow(2, case data_type
1414
when 'tinyint' then 7
1515
when 'smallint' then 15

mysqld_exporter.go

+9-7
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,7 @@ func init() {
198198
prometheus.MustRegister(version.NewCollector("mysqld_exporter"))
199199
}
200200

201-
func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper, stats *collector.Stats) http.HandlerFunc {
201+
func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper, stats *collector.Stats, defaultGatherer bool) http.HandlerFunc {
202202
return func(w http.ResponseWriter, r *http.Request) {
203203
filteredScrapers := scrapers
204204
params := r.URL.Query()["collect[]"]
@@ -232,10 +232,12 @@ func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper, stats *c
232232
registry := prometheus.NewRegistry()
233233
registry.MustRegister(collector.New(db, filteredScrapers, stats))
234234

235-
gatherers := prometheus.Gatherers{
236-
prometheus.DefaultGatherer,
237-
registry,
235+
gatherers := prometheus.Gatherers{}
236+
if defaultGatherer {
237+
gatherers = append(gatherers, prometheus.DefaultGatherer)
238238
}
239+
gatherers = append(gatherers, registry)
240+
239241
// Delegate http serving to Prometheus client library, which will call collector.Collect.
240242
h := promhttp.HandlerFor(gatherers, promhttp.HandlerOpts{
241243
// mysqld_exporter has multiple collectors, if one fails,
@@ -361,9 +363,9 @@ func main() {
361363

362364
// Defines what to scrape in each resolution.
363365
hr, mr, lr := enabledScrapers(scraperFlags)
364-
mux.Handle(*metricPath+"-hr", newHandler(cfg, db, hr, collector.NewStats("hr")))
365-
mux.Handle(*metricPath+"-mr", newHandler(cfg, db, mr, collector.NewStats("mr")))
366-
mux.Handle(*metricPath+"-lr", newHandler(cfg, db, lr, collector.NewStats("lr")))
366+
mux.Handle(*metricPath+"-hr", newHandler(cfg, db, hr, collector.NewStats("hr"), true))
367+
mux.Handle(*metricPath+"-mr", newHandler(cfg, db, mr, collector.NewStats("mr"), false))
368+
mux.Handle(*metricPath+"-lr", newHandler(cfg, db, lr, collector.NewStats("lr"), false))
367369

368370
// Log which scrapers are enabled.
369371
if len(hr) > 0 {

0 commit comments

Comments (0)