Skip to content

Commit 0a335cc

Browse files
arvenil and AlekSi
authored and committed
PMM-2221: No rate of scrapes for MySQL & MySQL errors.
1 parent 74d5373 commit 0a335cc

File tree

3 files changed

+48
-35
lines changed

3 files changed

+48
-35
lines changed

collector/exporter.go

+38-27
Original file line numberDiff line numberDiff line change
@@ -34,31 +34,19 @@ var (
3434

3535
// Exporter collects MySQL metrics. It implements prometheus.Collector.
3636
type Exporter struct {
37-
db *sql.DB
38-
scrapers []Scraper
39-
error prometheus.Gauge
40-
totalScrapes prometheus.Counter
41-
scrapeErrors *prometheus.CounterVec
42-
mysqldUp prometheus.Gauge
37+
db *sql.DB
38+
scrapers []Scraper
39+
stats *Stats
40+
error prometheus.Gauge
41+
mysqldUp prometheus.Gauge
4342
}
4443

4544
// New returns a new MySQL exporter for the provided DSN.
46-
func New(db *sql.DB, scrapers []Scraper) *Exporter {
45+
func New(db *sql.DB, scrapers []Scraper, stats *Stats) *Exporter {
4746
return &Exporter{
4847
db: db,
4948
scrapers: scrapers,
50-
totalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
51-
Namespace: namespace,
52-
Subsystem: exporter,
53-
Name: "scrapes_total",
54-
Help: "Total number of times MySQL was scraped for metrics.",
55-
}),
56-
scrapeErrors: prometheus.NewCounterVec(prometheus.CounterOpts{
57-
Namespace: namespace,
58-
Subsystem: exporter,
59-
Name: "scrape_errors_total",
60-
Help: "Total number of times an error occurred scraping a MySQL.",
61-
}, []string{"collector"}),
49+
stats: stats,
6250
error: prometheus.NewGauge(prometheus.GaugeOpts{
6351
Namespace: namespace,
6452
Subsystem: exporter,
@@ -105,14 +93,14 @@ func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
10593
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
10694
e.scrape(ch)
10795

108-
ch <- e.totalScrapes
96+
ch <- e.stats.TotalScrapes
10997
ch <- e.error
110-
e.scrapeErrors.Collect(ch)
98+
e.stats.ScrapeErrors.Collect(ch)
11199
ch <- e.mysqldUp
112100
}
113101

114102
func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
115-
e.totalScrapes.Inc()
103+
e.stats.TotalScrapes.Inc()
116104
var err error
117105

118106
scrapeTime := time.Now()
@@ -122,13 +110,10 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
122110
e.error.Set(1)
123111
return
124112
}
125-
126113
e.mysqldUp.Set(1)
127-
128-
versionNum := getMySQLVersion(e.db)
129-
130114
ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(scrapeTime).Seconds(), "connection")
131115

116+
versionNum := getMySQLVersion(e.db)
132117
wg := &sync.WaitGroup{}
133118
defer wg.Wait()
134119
for _, scraper := range e.scrapers {
@@ -142,7 +127,7 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
142127
scrapeTime := time.Now()
143128
if err := scraper.Scrape(e.db, ch); err != nil {
144129
log.Errorln("Error scraping for "+label+":", err)
145-
e.scrapeErrors.WithLabelValues(label).Inc()
130+
e.stats.ScrapeErrors.WithLabelValues(label).Inc()
146131
e.error.Set(1)
147132
}
148133
ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(scrapeTime).Seconds(), label)
@@ -166,3 +151,29 @@ func getMySQLVersion(db *sql.DB) float64 {
166151
}
167152
return versionNum
168153
}
154+
155+
type Stats struct {
156+
TotalScrapes prometheus.Counter
157+
ScrapeErrors *prometheus.CounterVec
158+
}
159+
160+
func NewStats(resolution string) *Stats {
161+
subsystem := exporter
162+
if resolution != "" {
163+
subsystem = exporter + "_" + resolution
164+
}
165+
return &Stats{
166+
TotalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
167+
Namespace: namespace,
168+
Subsystem: subsystem,
169+
Name: "scrapes_total",
170+
Help: "Total number of times MySQL was scraped for metrics.",
171+
}),
172+
ScrapeErrors: prometheus.NewCounterVec(prometheus.CounterOpts{
173+
Namespace: namespace,
174+
Subsystem: subsystem,
175+
Name: "scrape_errors_total",
176+
Help: "Total number of times an error occurred scraping a MySQL.",
177+
}, []string{"collector"}),
178+
}
179+
}

collector/exporter_test.go

+5-3
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,11 @@ func TestExporter(t *testing.T) {
2323
}
2424
defer db.Close()
2525

26-
exporter := New(db, []Scraper{
27-
ScrapeGlobalStatus{},
28-
})
26+
exporter := New(
27+
db,
28+
[]Scraper{ScrapeGlobalStatus{}},
29+
NewStats(""),
30+
)
2931

3032
convey.Convey("Metrics describing", t, func() {
3133
ch := make(chan *prometheus.Desc)

mysqld_exporter.go

+5-5
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,7 @@ func init() {
198198
prometheus.MustRegister(version.NewCollector("mysqld_exporter"))
199199
}
200200

201-
func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper) http.HandlerFunc {
201+
func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper, stats *collector.Stats) http.HandlerFunc {
202202
return func(w http.ResponseWriter, r *http.Request) {
203203
filteredScrapers := scrapers
204204
params := r.URL.Query()["collect[]"]
@@ -230,7 +230,7 @@ func newHandler(cfg *webAuth, db *sql.DB, scrapers []collector.Scraper) http.Han
230230
}
231231

232232
registry := prometheus.NewRegistry()
233-
registry.MustRegister(collector.New(db, filteredScrapers))
233+
registry.MustRegister(collector.New(db, filteredScrapers, stats))
234234

235235
gatherers := prometheus.Gatherers{
236236
prometheus.DefaultGatherer,
@@ -361,9 +361,9 @@ func main() {
361361

362362
// Defines what to scrape in each resolution.
363363
hr, mr, lr := enabledScrapers(scraperFlags)
364-
mux.Handle(*metricPath+"-hr", newHandler(cfg, db, hr))
365-
mux.Handle(*metricPath+"-mr", newHandler(cfg, db, mr))
366-
mux.Handle(*metricPath+"-lr", newHandler(cfg, db, lr))
364+
mux.Handle(*metricPath+"-hr", newHandler(cfg, db, hr, collector.NewStats("hr")))
365+
mux.Handle(*metricPath+"-mr", newHandler(cfg, db, mr, collector.NewStats("mr")))
366+
mux.Handle(*metricPath+"-lr", newHandler(cfg, db, lr, collector.NewStats("lr")))
367367

368368
// Log which scrapers are enabled.
369369
if len(hr) > 0 {

0 commit comments

Comments (0)