@@ -19,7 +19,6 @@ import (
1919 "github.com/alecthomas/kingpin/v2"
2020 "github.com/aws/aws-msk-iam-sasl-signer-go/signer"
2121 "github.com/krallistic/kazoo-go"
22- "github.com/pkg/errors"
2322 "github.com/prometheus/client_golang/prometheus"
2423 "github.com/prometheus/client_golang/prometheus/promhttp"
2524 plog "github.com/prometheus/common/promlog"
@@ -212,7 +211,7 @@ func NewExporter(opts kafkaOpts, topicFilter string, topicExclude string, groupF
 	case "plain":
 	default:
 		return nil, fmt.Errorf(
-			`invalid sasl mechanism "%s": can only be "scram-sha256", "scram-sha512", "gssapi", "awsiam" or "plain"`,
+			`invalid sasl mechanism %q: can only be "scram-sha256", "scram-sha512", "gssapi", "awsiam" or "plain"`,
 			opts.saslMechanism,
 		)
 	}
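Switching the format verb from a hand-quoted `"%s"` to `%q` lets `fmt` supply the quoting and escape any quote characters inside the user-provided value. A minimal sketch of the difference, using a made-up mechanism string:

```go
package main

import "fmt"

func main() {
	// Hypothetical value containing a double quote, to show why %q is safer.
	m := `sc"ram`
	fmt.Printf(`invalid sasl mechanism "%s"`+"\n", m) // invalid sasl mechanism "sc"ram"
	fmt.Printf("invalid sasl mechanism %q\n", m)      // invalid sasl mechanism "sc\"ram"
}
```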
@@ -248,7 +247,7 @@ func NewExporter(opts kafkaOpts, topicFilter string, topicExclude string, groupF
 
 	canReadCertAndKey, err := CanReadCertAndKey(opts.tlsCertFile, opts.tlsKeyFile)
 	if err != nil {
-		return nil, errors.Wrap(err, "error reading cert and key")
+		return nil, fmt.Errorf("error reading cert and key: %w", err)
 	}
 	if canReadCertAndKey {
 		cert, err := tls.LoadX509KeyPair(opts.tlsCertFile, opts.tlsKeyFile)
@@ -264,23 +263,22 @@ func NewExporter(opts kafkaOpts, topicFilter string, topicExclude string, groupF
 		klog.V(DEBUG).Infoln("Using zookeeper lag, so connecting to zookeeper")
 		zookeeperClient, err = kazoo.NewKazoo(opts.uriZookeeper, nil)
 		if err != nil {
-			return nil, errors.Wrap(err, "error connecting to zookeeper")
+			return nil, fmt.Errorf("error connecting to zookeeper: %w", err)
 		}
 	}
 
 	interval, err := time.ParseDuration(opts.metadataRefreshInterval)
 	if err != nil {
-		return nil, errors.Wrap(err, "Cannot parse metadata refresh interval")
+		return nil, fmt.Errorf("Cannot parse metadata refresh interval: %w", err)
 	}
 
 	config.Metadata.RefreshFrequency = interval
 
 	config.Metadata.AllowAutoTopicCreation = opts.allowAutoTopicCreation
 
 	client, err := sarama.NewClient(opts.uri, config)
-
 	if err != nil {
-		return nil, errors.Wrap(err, "Error Init Kafka Client")
+		return nil, fmt.Errorf("Error Init Kafka Client: %w", err)
 	}
 
 	klog.V(TRACE).Infoln("Done Init Clients")
@@ -387,7 +385,7 @@ func (e *Exporter) collectChans(quit chan struct{}) {
 }
 
 func (e *Exporter) collect(ch chan<- prometheus.Metric) {
-	var wg = sync.WaitGroup{}
+	wg := sync.WaitGroup{}
 	ch <- prometheus.MustNewConstMetric(
 		clusterBrokers, prometheus.GaugeValue, float64(len(e.client.Brokers())),
 	)
@@ -508,7 +506,6 @@ func (e *Exporter) collect(ch chan<- prometheus.Metric) {
 
 	if e.useZooKeeperLag {
 		ConsumerGroups, err := e.zookeeperClient.Consumergroups()
-
 		if err != nil {
 			klog.Errorf("Cannot get consumer group %v", err)
 		}
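The remaining hunks replace `errors.Wrap` from `github.com/pkg/errors` with `fmt.Errorf` and the `%w` verb, so the wrapped cause stays reachable through the standard library's `errors.Is`, `errors.As`, and `errors.Unwrap` without the extra dependency. A minimal sketch of the equivalence, assuming a hypothetical `loadKey` helper rather than the exporter's actual code path:

```go
package main

import (
	"errors"
	"fmt"
	"os"
)

// loadKey is an illustrative stand-in for the exporter's cert/key handling;
// the function name and the path below are made up for this sketch.
func loadKey(path string) error {
	if _, err := os.Stat(path); err != nil {
		// %w wraps err much like errors.Wrap did, using only the standard library.
		return fmt.Errorf("error reading cert and key: %w", err)
	}
	return nil
}

func main() {
	err := loadKey("/nonexistent/key.pem")
	fmt.Println(err)                            // message includes the underlying stat error
	fmt.Println(errors.Is(err, os.ErrNotExist)) // true: the cause survives the wrapping
}
```

One behavioral difference worth keeping in mind: `pkg/errors.Wrap` also recorded a stack trace at the wrap site, which plain `%w` wrapping does not.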