@@ -99,7 +99,7 @@ func getEnv(key, fallback string) string {
 func atoi(stringValue string, logger log.Logger) int {
 	intValue, err := strconv.Atoi(stringValue)
 	if err != nil {
-		level.Error(logger).Log("error while converting to int: ", err)
+		level.Error(logger).Log("msg", "error while converting to int", "err", err)
 		panic(err)
 	}
 	return intValue
@@ -115,17 +115,17 @@ func maskDsn(dsn string) string {
 }
 
 func connect(dsn string, logger log.Logger) *sql.DB {
-	level.Debug(logger).Log("Launching connection: ", maskDsn(dsn))
+	level.Debug(logger).Log("msg", "Launching connection", "dsn", maskDsn(dsn))
 	db, err := sql.Open("oci8", dsn)
 	if err != nil {
-		level.Error(logger).Log("Error while connecting to", dsn)
+		level.Error(logger).Log("msg", "Error while connecting to", "dsn", dsn)
 		panic(err)
 	}
-	level.Debug(logger).Log("set max idle connections to ", *maxIdleConns)
+	level.Debug(logger).Log("msg", "set max idle connections to", "value", *maxIdleConns)
 	db.SetMaxIdleConns(*maxIdleConns)
-	level.Debug(logger).Log("set max open connections to ", *maxOpenConns)
+	level.Debug(logger).Log("msg", "set max open connections to", "value", *maxOpenConns)
 	db.SetMaxOpenConns(*maxOpenConns)
-	level.Debug(logger).Log("Successfully connected to: ", maskDsn(dsn))
+	level.Debug(logger).Log("msg", "Successfully connected to", "dsn", maskDsn(dsn))
 	return db
 }
 
@@ -150,7 +150,7 @@ func NewExporter(dsn string, logger log.Logger) *Exporter {
 			Namespace: namespace,
 			Subsystem: exporter,
 			Name:      "scrape_errors_total",
-			Help:      "Total number of times an error occured scraping a Oracle database.",
+			Help:      "Total number of times an error occurred scraping a Oracle database.",
 		}, []string{"collector"}),
 		error: prometheus.NewGauge(prometheus.GaugeOpts{
 			Namespace: namespace,
@@ -254,17 +254,17 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
 
 	if err = e.db.Ping(); err != nil {
 		if strings.Contains(err.Error(), "sql: database is closed") {
-			level.Info(e.logger).Log("Reconnecting to DB")
+			level.Info(e.logger).Log("msg", "Reconnecting to DB")
 			e.db = connect(e.dsn, e.logger)
 		}
 	}
 	if err = e.db.Ping(); err != nil {
-		level.Error(e.logger).Log("Error pinging oracle: ", err)
+		level.Error(e.logger).Log("msg", "Error pinging oracle", "err", err)
 		//e.db.Close()
 		e.up.Set(0)
 		return
 	} else {
-		level.Debug(e.logger).Log("Successfully pinged Oracle database: ", maskDsn(e.dsn))
+		level.Debug(e.logger).Log("msg", "Successfully pinged Oracle database", "dsn", maskDsn(e.dsn))
 		e.up.Set(1)
 	}
 
@@ -281,42 +281,42 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
 		go func() {
 			defer wg.Done()
 
-			level.Debug(e.logger).Log("About to scrape metric: ")
-			level.Debug(e.logger).Log("- Metric MetricsDesc: ", metric.MetricsDesc)
-			level.Debug(e.logger).Log("- Metric Context: ", metric.Context)
-			level.Debug(e.logger).Log("- Metric MetricsType: ", metric.MetricsType)
-			level.Debug(e.logger).Log("- Metric MetricsBuckets: ", metric.MetricsBuckets, "(Ignored unless Histogram type)")
-			level.Debug(e.logger).Log("- Metric Labels: ", metric.Labels)
-			level.Debug(e.logger).Log("- Metric FieldToAppend: ", metric.FieldToAppend)
-			level.Debug(e.logger).Log("- Metric IgnoreZeroResult: ", metric.IgnoreZeroResult)
-			level.Debug(e.logger).Log("- Metric Request: ", metric.Request)
+			level.Debug(e.logger).Log("msg", "About to scrape metric")
+			level.Debug(e.logger).Log("metricsDesc", metric.MetricsDesc)
+			level.Debug(e.logger).Log("context", metric.Context)
+			level.Debug(e.logger).Log("metricsType", metric.MetricsType)
+			level.Debug(e.logger).Log("metricsBuckets", metric.MetricsBuckets) // , "(Ignored unless Histogram type)"
+			level.Debug(e.logger).Log("labels", metric.Labels)
+			level.Debug(e.logger).Log("fieldToAppend", metric.FieldToAppend)
+			level.Debug(e.logger).Log("ignoreZeroResult", metric.IgnoreZeroResult)
+			level.Debug(e.logger).Log("request", metric.Request)
 
 			if len(metric.Request) == 0 {
-				level.Error(e.logger).Log("Error scraping for ", metric.MetricsDesc, ". Did you forget to define request in your toml file?")
+				level.Error(e.logger).Log("msg", "Error scraping. Did you forget to define request in your toml file?", "metricsDesc", metric.MetricsDesc)
 				return
 			}
 
 			if len(metric.MetricsDesc) == 0 {
-				level.Error(e.logger).Log("Error scraping for query", metric.Request, ". Did you forget to define metricsdesc in your toml file?")
+				level.Error(e.logger).Log("msg", "Error scraping for query. Did you forget to define metricsdesc in your toml file?", "request", metric.Request)
 				return
 			}
 
 			for column, metricType := range metric.MetricsType {
 				if metricType == "histogram" {
 					_, ok := metric.MetricsBuckets[column]
 					if !ok {
-						level.Error(e.logger).Log("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
+						level.Error(e.logger).Log("msg", "Unable to find MetricsBuckets configuration key for metric", "metric", column)
 						return
 					}
 				}
 			}
 
 			scrapeStart := time.Now()
 			if err = ScrapeMetric(e.db, ch, metric, e.logger); err != nil {
-				level.Error(e.logger).Log("Error scraping for", metric.Context, "_", metric.MetricsDesc, time.Since(scrapeStart), ": ", err)
+				level.Error(e.logger).Log("msg", "Error scraping for", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart), "err", err)
 				e.scrapeErrors.WithLabelValues(metric.Context).Inc()
 			} else {
-				level.Debug(e.logger).Log("Successfully scraped metric: ", metric.Context, metric.MetricsDesc, time.Since(scrapeStart))
+				level.Debug(e.logger).Log("msg", "Successfully scraped metric", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart))
 			}
 		}()
 	}
@@ -343,7 +343,7 @@ func GetMetricType(metricType string, metricsType map[string]string) prometheus.
 
 // interface method to call ScrapeGenericValues using Metric struct values
 func ScrapeMetric(db *sql.DB, ch chan<- prometheus.Metric, metricDefinition Metric, logger log.Logger) error {
-	level.Debug(logger).Log("Calling function ScrapeGenericValues()")
+	level.Debug(logger).Log("msg", "Calling function ScrapeGenericValues()")
 	return ScrapeGenericValues(db, ch, metricDefinition.Context, metricDefinition.Labels,
 		metricDefinition.MetricsDesc, metricDefinition.MetricsType, metricDefinition.MetricsBuckets,
 		metricDefinition.FieldToAppend, metricDefinition.IgnoreZeroResult,
@@ -365,11 +365,11 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
 			value, err := strconv.ParseFloat(strings.TrimSpace(row[metric]), 64)
 			// If not a float, skip current metric
 			if err != nil {
-				level.Error(logger).Log("Unable to convert current value to float (metric=" + metric +
-					", metricHelp=" + metricHelp + ", value=<" + row[metric] + ">)")
+				level.Error(logger).Log("msg", "Unable to convert current value to float", "metric", metric,
+					"metricHelp", metricHelp, "value", row[metric])
 				continue
 			}
-			level.Debug(logger).Log("Query result looks like: ", value)
+			level.Debug(logger).Log("msg", "Query result looks like", "value", value)
 			// If metric do not use a field content in metric's name
 			if strings.Compare(fieldToAppend, "") == 0 {
 				desc := prometheus.NewDesc(
@@ -380,22 +380,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
 				if metricsType[strings.ToLower(metric)] == "histogram" {
 					count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
 					if err != nil {
-						level.Error(logger).Log("Unable to convert count value to int (metric=" + metric +
-							", metricHelp=" + metricHelp + ", value=<" + row["count"] + ">)")
+						level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric,
+							"metricHelp", metricHelp, "value", row["count"])
 						continue
 					}
 					buckets := make(map[float64]uint64)
 					for field, le := range metricsBuckets[metric] {
 						lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
 						if err != nil {
-							level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
-								", metricHelp=" + metricHelp + ", bucketlimit=<" + le + ">)")
+							level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric,
+								"metricHelp", metricHelp, "bucketlimit", le)
 							continue
 						}
 						counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
 						if err != nil {
-							level.Error(logger).Log("Unable to convert ", field, " value to int (metric=" + metric +
-								", metricHelp=" + metricHelp + ", value=<" + row[field] + ">)")
+							level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric,
+								"metricHelp", metricHelp, "value", row[field])
 							continue
 						}
 						buckets[lelimit] = counter
@@ -414,22 +414,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
 				if metricsType[strings.ToLower(metric)] == "histogram" {
 					count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
 					if err != nil {
-						level.Error(logger).Log("Unable to convert count value to int (metric=" + metric +
-							", metricHelp=" + metricHelp + ", value=<" + row["count"] + ">)")
+						level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric,
+							"metricHelp", metricHelp, "value", row["count"])
 						continue
 					}
 					buckets := make(map[float64]uint64)
 					for field, le := range metricsBuckets[metric] {
 						lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
 						if err != nil {
-							level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
-								", metricHelp=" + metricHelp + ", bucketlimit=<" + le + ">)")
+							level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric,
+								"metricHelp", metricHelp, "bucketlimit", le)
 							continue
 						}
 						counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
 						if err != nil {
-							level.Error(logger).Log("Unable to convert ", field, " value to int (metric=" + metric +
-								", metricHelp=" + metricHelp + ", value=<" + row[field] + ">)")
+							level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric,
+								"metricHelp", metricHelp, "value", row[field])
 							continue
 						}
 						buckets[lelimit] = counter
@@ -443,9 +443,9 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
 		}
 		return nil
 	}
-	level.Debug(logger).Log("Calling function GeneratePrometheusMetrics()")
+	level.Debug(logger).Log("msg", "Calling function GeneratePrometheusMetrics()")
 	err := GeneratePrometheusMetrics(db, genericParser, request, logger)
-	level.Debug(logger).Log("ScrapeGenericValues() - metricsCount: ", metricsCount)
+	level.Debug(logger).Log("msg", "ScrapeGenericValues()", "metricsCount", metricsCount)
 	if err != nil {
 		return err
 	}
@@ -462,7 +462,7 @@ func GeneratePrometheusMetrics(db *sql.DB, parse func(row map[string]string) err
 	// Add a timeout
 	timeout, err := strconv.Atoi(*queryTimeout)
 	if err != nil {
-		level.Error(logger).Log("error while converting timeout option value: ", err)
+		level.Error(logger).Log("msg", "error while converting timeout option", "err", err)
 		panic(err)
 	}
 	ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
@@ -538,15 +538,15 @@ func checkIfMetricsChanged(logger log.Logger) bool {
 		if len(_customMetrics) == 0 {
 			continue
 		}
-		level.Debug(logger).Log("Checking modifications in following metrics definition file: ", _customMetrics)
+		level.Debug(logger).Log("msg", "Checking modifications in following metrics definition", "file", _customMetrics)
 		h := sha256.New()
 		if err := hashFile(h, _customMetrics); err != nil {
-			level.Error(logger).Log("Unable to get file hash", err)
+			level.Error(logger).Log("msg", "Unable to get file hash", "err", err)
 			return false
 		}
 		// If any of files has been changed reload metrics
 		if !bytes.Equal(hashMap[i], h.Sum(nil)) {
-			level.Info(logger).Log(_customMetrics, "has been changed. Reloading metrics...")
+			level.Info(logger).Log("msg", "Metrics file has been changed. Reloading...", "file", _customMetrics)
 			hashMap[i] = h.Sum(nil)
 			return true
 		}
@@ -560,25 +560,25 @@ func reloadMetrics(logger log.Logger) {
 
 	// Load default metrics
 	if _, err := toml.DecodeFile(*defaultFileMetrics, &metricsToScrap); err != nil {
-		level.Error(logger).Log(err)
+		level.Error(logger).Log("msg", err)
 		panic(errors.New("Error while loading " + *defaultFileMetrics))
 	} else {
-		level.Info(logger).Log("Successfully loaded default metrics from: " + *defaultFileMetrics)
+		level.Info(logger).Log("msg", "Successfully loaded default metrics", "file", *defaultFileMetrics)
 	}
 
 	// If custom metrics, load it
 	if strings.Compare(*customMetrics, "") != 0 {
 		for _, _customMetrics := range strings.Split(*customMetrics, ",") {
 			if _, err := toml.DecodeFile(_customMetrics, &additionalMetrics); err != nil {
-				level.Error(logger).Log(err)
+				level.Error(logger).Log("msg", err)
 				panic(errors.New("Error while loading " + _customMetrics))
 			} else {
-				level.Info(logger).Log("Successfully loaded custom metrics from: " + _customMetrics)
+				level.Info(logger).Log("msg", "Successfully loaded custom metrics", "file", _customMetrics)
 			}
 			metricsToScrap.Metric = append(metricsToScrap.Metric, additionalMetrics.Metric...)
 		}
 	} else {
-		level.Info(logger).Log("No custom metrics defined.")
+		level.Info(logger).Log("msg", "No custom metrics defined")
 	}
 }
 
@@ -593,7 +593,7 @@ func main() {
 	kingpin.Parse()
 	logger := promlog.New(promlogConfig)
 
-	level.Info(logger).Log("Starting oracledb_exporter " + Version)
+	level.Info(logger).Log("msg", "Starting oracledb_exporter", "version", Version)
 	dsn := os.Getenv("DATA_SOURCE_NAME")
 
 	// Load default and custom metrics
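
For context, the convention this change standardizes on is go-kit's leveled key/value logging: every `Log` call takes alternating keys and values, with the human-readable text under the `"msg"` key, so output is parseable logfmt rather than concatenated strings. The sketch below is not part of the diff; it shows the same pattern in isolation and assumes the standalone `github.com/go-kit/log` module, whereas the exporter itself obtains an equivalent logger from `promlog.New(promlogConfig)` as seen in `main()` above.

```go
// Minimal, self-contained sketch of the structured go-kit logging pattern
// applied throughout this change. Assumes github.com/go-kit/log.
package main

import (
	"os"
	"strconv"

	"github.com/go-kit/log"
	"github.com/go-kit/log/level"
)

func main() {
	// logfmt output on stderr, e.g.:
	// level=error msg="error while converting to int" err="strconv.Atoi: ..."
	logger := log.NewLogfmtLogger(os.Stderr)
	logger = level.NewFilter(logger, level.AllowDebug())

	if _, err := strconv.Atoi("not-a-number"); err != nil {
		// Alternating key/value pairs replace string concatenation.
		level.Error(logger).Log("msg", "error while converting to int", "err", err)
	}
	level.Debug(logger).Log("msg", "set max idle connections to", "value", 10)
}
```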