99 "fmt"
1010 "hash"
1111 "io"
12+ "log/slog"
1213 "net/url"
1314 "os"
1415 "strconv"
@@ -17,8 +18,6 @@ import (
1718 "time"
1819
1920 "github.com/BurntSushi/toml"
20- "github.com/go-kit/log"
21- "github.com/go-kit/log/level"
2221 "github.com/prometheus/client_golang/prometheus"
2322 "sigs.k8s.io/yaml"
2423)
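
With go-kit/log removed in favor of the standard library's log/slog, callers now hand NewExporter a *slog.Logger. A minimal sketch of how a caller might wire this up (the handler choice, level, and helper name are assumptions for illustration, not part of this change):

	// Sketch only: assumes it lives alongside the exporter package's NewExporter and Config.
	func newExporterWithSlog(dsn string) (*Exporter, error) {
		// JSON handler to stderr at debug level so the new Debug calls below are visible.
		logger := slog.New(slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{
			Level: slog.LevelDebug,
		}))
		// Config carries more fields (MaxIdleConns, MaxOpenConns, QueryTimeout); only DSN is shown here.
		return NewExporter(logger, &Config{DSN: dsn})
	}
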
@@ -36,7 +35,7 @@ type Exporter struct {
	scrapeResults []prometheus.Metric
	up            prometheus.Gauge
	db            *sql.DB
-	logger        log.Logger
+	logger        *slog.Logger
}

// Config is the configuration of the exporter
@@ -85,6 +84,14 @@
	exporterName = "exporter"
)

+func getMapKeys(m map[string]string) []string {
+	keys := make([]string, 0, len(m))
+	for k := range m {
+		keys = append(keys, k)
+	}
+	return keys
+}
+
func maskDsn(dsn string) string {
	parts := strings.Split(dsn, "@")
	if len(parts) > 1 {
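
Note that getMapKeys ranges over a Go map, so the joined metricsArray value logged below can change order from one scrape to the next. If deterministic log output is preferred, a sorted variant along these lines (hypothetical, not part of this change) would provide it:

	// getSortedMapKeys is a hypothetical alternative to getMapKeys that returns
	// the keys in a stable order; requires "sort" in the import block.
	func getSortedMapKeys(m map[string]string) []string {
		keys := make([]string, 0, len(m))
		for k := range m {
			keys = append(keys, k)
		}
		sort.Strings(keys)
		return keys
	}
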
@@ -95,7 +102,7 @@ func maskDsn(dsn string) string {
}

// NewExporter creates a new Exporter instance
-func NewExporter(logger log.Logger, cfg *Config) (*Exporter, error) {
+func NewExporter(logger *slog.Logger, cfg *Config) (*Exporter, error) {
	e := &Exporter{
		mu:  &sync.Mutex{},
		dsn: cfg.DSN,
@@ -254,21 +261,21 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {

	if err = e.db.Ping(); err != nil {
		if strings.Contains(err.Error(), "sql: database is closed") {
-			level.Info(e.logger).Log("Reconnecting to DB")
+			e.logger.Info("Reconnecting to DB")
			err = e.connect()
			if err != nil {
-				level.Error(e.logger).Log("error reconnecting to DB", err.Error())
+				e.logger.Error("failed to reconnect to Oracle DB", "err", err.Error())
			}
		}
	}

	if err = e.db.Ping(); err != nil {
-		level.Error(e.logger).Log("error pinging oracle: ", err.Error())
+		e.logger.Error("failed to ping Oracle DB", "err", err.Error())
		e.up.Set(0)
		return
	}

-	level.Debug(e.logger).Log("Successfully pinged Oracle database: ", maskDsn(e.dsn))
+	e.logger.Debug("successfully pinged Oracle DB", "connstring", maskDsn(e.dsn))
	e.up.Set(1)

	if e.checkIfMetricsChanged() {
@@ -284,31 +291,28 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
		f := func() {
			defer wg.Done()

-			level.Debug(e.logger).Log("About to scrape metric: ")
-			level.Debug(e.logger).Log("- Metric MetricsDesc: ", fmt.Sprintf("%+v", metric.MetricsDesc))
-			level.Debug(e.logger).Log("- Metric Context: ", metric.Context)
-			level.Debug(e.logger).Log("- Metric MetricsType: ", fmt.Sprintf("%+v", metric.MetricsType))
-			level.Debug(e.logger).Log("- Metric MetricsBuckets: ", fmt.Sprintf("%+v", metric.MetricsBuckets), "(Ignored unless Histogram type)")
-			level.Debug(e.logger).Log("- Metric Labels: ", fmt.Sprintf("%+v", metric.Labels))
-			level.Debug(e.logger).Log("- Metric FieldToAppend: ", metric.FieldToAppend)
-			level.Debug(e.logger).Log("- Metric IgnoreZeroResult: ", fmt.Sprintf("%+v", metric.IgnoreZeroResult))
-			level.Debug(e.logger).Log("- Metric Request: ", metric.Request)
+			metricNames := strings.Join(getMapKeys(metric.MetricsDesc), ",")
+			e.logger.Debug("Scraping metrics",
+				"subsystem", metric.Context, "metricsArray", metricNames,
+				"metricstype", metric.MetricsType, "metricbuckets", metric.MetricsBuckets,
+				"labels", fmt.Sprintf("%+v", metric.Labels), "fieldToAppend", metric.FieldToAppend,
+				"ignorezeroresult", metric.IgnoreZeroResult, "query", metric.Request)

			if len(metric.Request) == 0 {
-				level.Error(e.logger).Log("Error scraping for ", metric.MetricsDesc, ". Did you forget to define request in your metrics config file?")
+				e.logger.Error("metrics request is empty. Did you forget to define request in your metrics config file?", "subsystem", metric.Context, "metricsArray", metricNames)
				return
			}

			if len(metric.MetricsDesc) == 0 {
-				level.Error(e.logger).Log("Error scraping for query", metric.Request, ". Did you forget to define metricsdesc in your metrics config file?")
+				e.logger.Error("Metric help is empty. Did you forget to define metricsdesc in your metrics config file?", "subsystem", metric.Context, "query", metric.Request)
				return
			}

			for column, metricType := range metric.MetricsType {
				if metricType == "histogram" {
					_, ok := metric.MetricsBuckets[column]
					if !ok {
-						level.Error(e.logger).Log("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
+						e.logger.Error("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
						return
					}
				}
@@ -321,10 +325,10 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
					err = err1
				}
				errmutex.Unlock()
-				level.Error(e.logger).Log("scrapeMetricContext", metric.Context, "ScrapeDuration", time.Since(scrapeStart), "msg", err1.Error())
+				e.logger.Error("scrape metric failed", "subsystem", metric.Context, "metricsArray", metricNames, "duration", time.Since(scrapeStart), "err", err1.Error())
				e.scrapeErrors.WithLabelValues(metric.Context).Inc()
			} else {
-				level.Debug(e.logger).Log("successfully scraped metric: ", metric.Context, metric.MetricsDesc, time.Since(scrapeStart))
+				e.logger.Debug("successfully scraped metric", "subsystem", metric.Context, "metricsArray", metricNames, "duration", time.Since(scrapeStart))
			}
		}
		go f()
@@ -335,20 +339,21 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
func (e *Exporter) connect() error {
	_, err := url.Parse(e.dsn)
	if err != nil {
-		level.Error(e.logger).Log("malformed DSN: ", maskDsn(e.dsn))
+		e.logger.Error("malformed DSN", "value", maskDsn(e.dsn))
		return err
	}
-	level.Debug(e.logger).Log("launching connection: ", maskDsn(e.dsn))
+	e.logger.Debug("launching connection", "connstring", maskDsn(e.dsn))
	db, err := sql.Open("oracle", e.dsn)
	if err != nil {
-		level.Error(e.logger).Log("error while connecting to", e.dsn)
+		e.logger.Error("failed to connect", "connstring", maskDsn(e.dsn))
		return err
	}
-	level.Debug(e.logger).Log("set max idle connections to ", e.config.MaxIdleConns)
+	e.logger.Debug("config", "DATABASE_MAXIDLECONNS", e.config.MaxIdleConns)
+	e.logger.Debug("config", "DATABASE_MAXOPENCONNS", e.config.MaxOpenConns)
+	e.logger.Debug("config", "QUERY_TIMEOUT", e.config.QueryTimeout)
	db.SetMaxIdleConns(e.config.MaxIdleConns)
-	level.Debug(e.logger).Log("set max open connections to ", e.config.MaxOpenConns)
	db.SetMaxOpenConns(e.config.MaxOpenConns)
-	level.Debug(e.logger).Log("successfully connected to: ", maskDsn(e.dsn))
+	e.logger.Debug("successfully connected", "connstring", maskDsn(e.dsn))
	e.db = db
	return nil
}
@@ -358,15 +363,15 @@ func (e *Exporter) checkIfMetricsChanged() bool {
		if len(_customMetrics) == 0 {
			continue
		}
-		level.Debug(e.logger).Log("checking modifications in following metrics definition file: ", _customMetrics)
+		e.logger.Debug("checking whether metrics definition file has changed", "file", _customMetrics)
		h := sha256.New()
		if err := hashFile(h, _customMetrics); err != nil {
-			level.Error(e.logger).Log("unable to get file hash", err.Error())
+			e.logger.Error("unable to get file hash", "file", _customMetrics, "err", err.Error())
			return false
		}
		// If any of the files has changed, reload metrics
		if !bytes.Equal(hashMap[i], h.Sum(nil)) {
-			level.Info(e.logger).Log(_customMetrics, " has been changed. Reloading metrics...")
+			e.logger.Info("metrics definition file has changed. Reloading metrics...", "file", _customMetrics)
			hashMap[i] = h.Sum(nil)
			return true
		}
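
checkIfMetricsChanged depends on a hashFile helper whose body is outside this diff; consistent with the "hash" and "io" imports above, it plausibly streams the file into the supplied hash.Hash, roughly like this sketch (an assumption about the existing helper, shown only for context):

	func hashFile(h hash.Hash, fn string) error {
		f, err := os.Open(fn)
		if err != nil {
			return err
		}
		defer f.Close()
		// Stream the file through the hash so h.Sum(nil) reflects its current content.
		if _, err := io.Copy(h, f); err != nil {
			return err
		}
		return nil
	}
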
@@ -406,12 +411,13 @@ func (e *Exporter) reloadMetrics() {
					panic(err)
				}
			}
-			level.Info(e.logger).Log("event", "Successfully loaded custom metrics from "+_customMetrics)
-			level.Debug(e.logger).Log("custom metrics parsed content", fmt.Sprintf("%+v", additionalMetrics))
+			e.logger.Info("successfully loaded custom metrics", "file", _customMetrics)
+			e.logger.Debug("custom metrics parsed content", "value", fmt.Sprintf("%+v", additionalMetrics))
+
			e.metricsToScrape.Metric = append(e.metricsToScrape.Metric, additionalMetrics.Metric...)
		}
	} else {
-		level.Debug(e.logger).Log("No custom metrics defined.")
+		e.logger.Debug("No custom metrics defined.")
	}
}

@@ -435,7 +441,7 @@ func loadTomlMetricsConfig(_customMetrics string, metrics *Metrics) error {

// ScrapeMetric is an interface method to call scrapeGenericValues using Metric struct values
func (e *Exporter) ScrapeMetric(db *sql.DB, ch chan<- prometheus.Metric, metricDefinition Metric) error {
-	level.Debug(e.logger).Log("calling function ScrapeGenericValues()")
+	e.logger.Debug("calling function ScrapeGenericValues()")
	return e.scrapeGenericValues(db, ch, metricDefinition.Context, metricDefinition.Labels,
		metricDefinition.MetricsDesc, metricDefinition.MetricsType, metricDefinition.MetricsBuckets,
		metricDefinition.FieldToAppend, metricDefinition.IgnoreZeroResult,
@@ -457,10 +463,10 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
			value, err := strconv.ParseFloat(strings.TrimSpace(row[metric]), 64)
			// If not a float, skip current metric
			if err != nil {
-				level.Error(e.logger).Log("msg", "Unable to convert current value to float", "metric", metric, "metricHelp", metricHelp, "value", row[metric])
+				e.logger.Error("unable to convert current value to float, skipping", "metric", metric, "value", row[metric], "err", err.Error())
				continue
			}
-			level.Debug(e.logger).Log("Query result looks like: ", value)
+			e.logger.Debug("query result", "value", value)
			// If the metric does not use a field content in the metric's name
			if strings.Compare(fieldToAppend, "") == 0 {
				desc := prometheus.NewDesc(
@@ -471,21 +477,21 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
				if metricsType[strings.ToLower(metric)] == "histogram" {
					count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
					if err != nil {
-						level.Error(e.logger).Log("Unable to convert count value to int (metric=" + metric +
+						e.logger.Error("Unable to convert count value to int (metric=" + metric +
							",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
						continue
					}
					buckets := make(map[float64]uint64)
					for field, le := range metricsBuckets[metric] {
						lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
						if err != nil {
-							level.Error(e.logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
+							e.logger.Error("Unable to convert bucket limit value to float (metric=" + metric +
								",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
							continue
						}
						counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
						if err != nil {
-							level.Error(e.logger).Log("Unable to convert ", field, " value to int (metric=" + metric +
+							e.logger.Error("Unable to convert " + field + " value to int (metric=" + metric +
								",metricHelp=" + metricHelp + ",value=<" + row[field] + ">)")
							continue
						}
@@ -505,21 +511,21 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
				if metricsType[strings.ToLower(metric)] == "histogram" {
					count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
					if err != nil {
-						level.Error(e.logger).Log("Unable to convert count value to int (metric=" + metric +
+						e.logger.Error("Unable to convert count value to int (metric=" + metric +
							",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
						continue
					}
					buckets := make(map[float64]uint64)
					for field, le := range metricsBuckets[metric] {
						lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
						if err != nil {
-							level.Error(e.logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
+							e.logger.Error("Unable to convert bucket limit value to float (metric=" + metric +
								",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
							continue
						}
						counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
						if err != nil {
-							level.Error(e.logger).Log("Unable to convert ", field, " value to int (metric=" + metric +
+							e.logger.Error("Unable to convert " + field + " value to int (metric=" + metric +
								",metricHelp=" + metricHelp + ",value=<" + row[field] + ">)")
							continue
						}
@@ -534,14 +540,15 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
		}
		return nil
	}
-	level.Debug(e.logger).Log("Calling function GeneratePrometheusMetrics()")
+	e.logger.Debug("Calling function GeneratePrometheusMetrics()")
	err := e.generatePrometheusMetrics(db, genericParser, request)
-	level.Debug(e.logger).Log("ScrapeGenericValues() - metricsCount: ", metricsCount)
+	metricNames := strings.Join(getMapKeys(metricsDesc), ",")
+	e.logger.Debug("scrapeGenericValues", "subsystem", context, "metricsArray", metricNames, "metricsCount", metricsCount)
	if err != nil {
		return err
	}
	if !ignoreZeroResult && metricsCount == 0 {
-		return errors.New("No metrics found while parsing")
+		return errors.New("no metrics found while parsing")
	}
	return err
}
@@ -557,6 +564,9 @@ func (e *Exporter) generatePrometheusMetrics(db *sql.DB, parse func(row map[stri
		return errors.New("oracle query timed out")
	}

+	if ctx.Err() != nil {
+		e.logger.Error("ctxError", "err", ctx.Err())
+	}
	if err != nil {
		return err
	}
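
The new ctx.Err() check is only meaningful when the query runs under a deadline context derived from QUERY_TIMEOUT. A sketch of that pattern (the helper name is hypothetical, it assumes QueryTimeout is a number of seconds, and it needs "context", "database/sql", "errors", and "time" imports):

	// queryWithTimeout is a hypothetical helper showing how a deadline context
	// makes ctx.Err() non-nil when the query exceeds QUERY_TIMEOUT.
	func (e *Exporter) queryWithTimeout(db *sql.DB, query string) (*sql.Rows, error) {
		timeout := time.Duration(e.config.QueryTimeout) * time.Second
		ctx, cancel := context.WithTimeout(context.Background(), timeout)
		defer cancel()

		rows, err := db.QueryContext(ctx, query)
		if errors.Is(ctx.Err(), context.DeadlineExceeded) {
			return nil, errors.New("oracle query timed out") // mirrors the error returned above
		}
		if err != nil {
			return nil, err
		}
		return rows, nil
	}
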
@@ -622,9 +632,9 @@ func cleanName(s string) string {
}

func (e *Exporter) logError(s string) {
-	_ = level.Error(e.logger).Log(s)
+	e.logger.Error(s)
}

func (e *Exporter) logDebug(s string) {
-	_ = level.Debug(e.logger).Log(s)
+	e.logger.Debug(s)
}