
Commit 6c2a9e6

Author: Guy Baron
more linting fixes for goreportcard (#129)
1 parent 4c1c812 · commit 6c2a9e6

File tree: 7 files changed (+30 −3 lines)


.github/stale.yml

Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 7
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 7
+# Issues with these labels will never be considered stale
+exemptLabels:
+  - bug
+  - enhancment
+# Label to use when marking an issue as stale
+staleLabel: wontfix
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+  This issue has been automatically marked as stale because it has not had
+  recent activity. It will be closed if no further activity occurs. Thank you
+  for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false

gbus/abstractions.go

Lines changed: 1 addition & 1 deletion
@@ -127,7 +127,7 @@ type Saga interface {
 	New() Saga
 }
 
-//RegisterDeadletterHandler provides the ability to handle messages that were rejected as poision and arrive to the deadletter queue
+//Deadlettering provides the ability to handle messages that were rejected as poision and arrive to the deadletter queue
 type Deadlettering interface {
 	HandleDeadletter(handler func(tx *sql.Tx, poision amqp.Delivery) error)
 	ReturnDeadToQueue(ctx context.Context, publishing *amqp.Publishing) error
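
For readers unfamiliar with this interface, here is a minimal sketch of how a dead-letter handler matching the HandleDeadletter signature could be wired up. It is an illustration only: the example package, the registerDeadletterHandler helper, and the assumption that the amqp types come from github.com/streadway/amqp are not part of this commit.

package example

import (
	"context"
	"database/sql"
	"log"

	"github.com/streadway/amqp"
)

// Deadlettering mirrors the interface from the diff above so the sketch
// compiles on its own.
type Deadlettering interface {
	HandleDeadletter(handler func(tx *sql.Tx, poison amqp.Delivery) error)
	ReturnDeadToQueue(ctx context.Context, publishing *amqp.Publishing) error
}

// registerDeadletterHandler registers a hypothetical handler that logs each
// poison message it receives; the *sql.Tx lets the handler persist the
// message transactionally if desired.
func registerDeadletterHandler(d Deadlettering) {
	d.HandleDeadletter(func(tx *sql.Tx, poison amqp.Delivery) error {
		log.Printf("dead letter on routing key %q (%d bytes)", poison.RoutingKey, len(poison.Body))
		return nil // a non-nil error signals that handling failed
	})
}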

gbus/builder/builder.go

Lines changed: 2 additions & 0 deletions
@@ -77,7 +77,9 @@ func (builder *defaultBuilder) Build(svcName string) gbus.Bus {
 		panic(err)
 	}
 	gb.TxProvider = mysqltx
+
 	mysql.EnsureSchema(mysqltx.Database, gb.SvcName)
+
 	//TODO move purge logic into the NewSagaStore factory method
 	sagaStore = mysql.NewSagaStore(gb.SvcName, mysqltx)
 	if builder.purgeOnStartup {
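
The Build method calls mysql.EnsureSchema before creating the saga store. As a rough illustration of that "create the service's tables on startup if they are missing" idea, here is a hypothetical stand-in built only on database/sql; the table name and columns are invented for the example and are not grabbit's actual schema.

package example

import (
	"database/sql"
	"fmt"
)

// ensureSchema is a hypothetical stand-in for mysql.EnsureSchema: it creates
// the per-service saga table if it does not exist yet, so the saga store can
// assume the schema is in place.
func ensureSchema(db *sql.DB, svcName string) error {
	// Illustrative table name and columns only.
	stmt := fmt.Sprintf(`CREATE TABLE IF NOT EXISTS %s_sagas (
		saga_id   VARCHAR(255) PRIMARY KEY,
		saga_data BLOB
	)`, svcName)
	_, err := db.Exec(stmt)
	return err
}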

gbus/invocation.go

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ type defaultInvocationContext struct {
 	deliveryInfo DeliveryInfo
 }
 
+//DeliveryInfo provdes information as to the attempted deilvery of the invocation
 type DeliveryInfo struct {
 	Attempt uint
 	MaxRetryCount uint
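
The newly documented DeliveryInfo struct carries the current attempt number and the configured retry ceiling. A small sketch of how those two fields could drive a retry decision follows; the shouldRetry helper is hypothetical and only the two fields shown in the diff are assumed.

package main

import "fmt"

// DeliveryInfo mirrors the struct documented in the diff above.
type DeliveryInfo struct {
	Attempt       uint
	MaxRetryCount uint
}

// shouldRetry is a hypothetical helper: keep retrying while the attempt
// count has not yet reached the configured maximum.
func shouldRetry(d DeliveryInfo) bool {
	return d.Attempt < d.MaxRetryCount
}

func main() {
	d := DeliveryInfo{Attempt: 2, MaxRetryCount: 3}
	fmt.Println("retry?", shouldRetry(d)) // prints "retry? true"
}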

gbus/metrics/handler_metrics.go

Lines changed: 6 additions & 0 deletions
@@ -26,6 +26,7 @@ type handlerMetrics struct {
 	latency prometheus.Summary
 }
 
+//AddHandlerMetrics adds a handlere to be tracked with metrics
 func AddHandlerMetrics(handlerName string) {
 	handlerMetrics := newHandlerMetrics(handlerName)
 	_, exists := handlerMetricsByHandlerName.LoadOrStore(handlerName, handlerMetrics)
@@ -35,6 +36,7 @@ func AddHandlerMetrics(handlerName string) {
 	}
 }
 
+//RunHandlerWithMetric runs a specific handler with metrics being collected and reported to prometheus
 func RunHandlerWithMetric(handleMessage func() error, handlerName string, logger logrus.FieldLogger) error {
 	handlerMetrics := GetHandlerMetrics(handlerName)
 	defer func() {
@@ -63,6 +65,7 @@ func RunHandlerWithMetric(handleMessage func() error, handlerName string, logger
 	return err
 }
 
+//GetHandlerMetrics gets the metrics handler associated with the handlerName
 func GetHandlerMetrics(handlerName string) *handlerMetrics {
 	entry, ok := handlerMetricsByHandlerName.Load(handlerName)
 	if ok {
@@ -99,14 +102,17 @@ func trackTime(functionToTrack func() error, observer prometheus.Observer) error
 	return functionToTrack()
 }
 
+//GetSuccessCount gets the value of the handlers success value
 func (hm *handlerMetrics) GetSuccessCount() (float64, error) {
 	return hm.getLabeledCounterValue(success)
 }
 
+//GetFailureCount gets the value of the handlers failure value
 func (hm *handlerMetrics) GetFailureCount() (float64, error) {
 	return hm.getLabeledCounterValue(failure)
 }
 
+//GetLatencySampleCount gets the value of the handlers latency value
 func (hm *handlerMetrics) GetLatencySampleCount() (*uint64, error) {
 	m := &io_prometheus_client.Metric{}
 	err := hm.latency.Write(m)
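
Taken together, the functions documented in this file form a small lifecycle: register metrics for a handler, wrap each invocation so outcomes and latency are recorded, then read the counters back. The sketch below strings them together; the handler name, the log message, and the github.com/wework/grabbit module path are assumptions made for the example.

package main

import (
	"fmt"

	"github.com/sirupsen/logrus"
	"github.com/wework/grabbit/gbus/metrics"
)

func main() {
	// Register the success/failure counter and latency summary for this
	// (hypothetical) handler name; re-registering does not replace an
	// existing entry, as the LoadOrStore in the diff suggests.
	metrics.AddHandlerMetrics("CreateOrderHandler")

	// Wrap the real handler invocation so its outcome and duration are
	// recorded and exposed to Prometheus.
	err := metrics.RunHandlerWithMetric(func() error {
		// ... call the actual message handler here ...
		return nil
	}, "CreateOrderHandler", logrus.New())
	if err != nil {
		logrus.WithError(err).Error("handler failed")
	}

	// Read the success counter back, for example in a test assertion.
	if hm := metrics.GetHandlerMetrics("CreateOrderHandler"); hm != nil {
		successes, _ := hm.GetSuccessCount()
		fmt.Println("successes:", successes)
	}
}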

gbus/metrics/message_metrics.go

Lines changed: 3 additions & 1 deletion
@@ -3,17 +3,19 @@ package metrics
 import (
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
-	"github.com/prometheus/client_model/go"
+	io_prometheus_client "github.com/prometheus/client_model/go"
 )
 
 var (
 	rejectedMessages = newRejectedMessagesCounter()
 )
 
+//ReportRejectedMessage reports a message being rejected to the metrics counter
 func ReportRejectedMessage() {
 	rejectedMessages.Inc()
 }
 
+//GetRejectedMessagesValue gets the value of the rejected message counter
 func GetRejectedMessagesValue() (float64, error) {
 	m := &io_prometheus_client.Metric{}
 	err := rejectedMessages.Write(m)
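
A similarly small sketch for the rejected-message counter: report a rejection, then read the counter value back (GetRejectedMessagesValue wraps the Prometheus client's Write call to extract the current value). The module path is again an assumption.

package main

import (
	"fmt"
	"log"

	"github.com/wework/grabbit/gbus/metrics"
)

func main() {
	// Record one rejected message on the package-level counter.
	metrics.ReportRejectedMessage()

	// Read the counter back, e.g. to assert on it in a test.
	rejected, err := metrics.GetRejectedMessagesValue()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("rejected messages so far:", rejected)
}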

gbus/outbox.go

Lines changed: 0 additions & 1 deletion
@@ -53,7 +53,6 @@ func (out *AMQPOutbox) init(amqp *amqp.Channel, confirm, resendOnNack bool) erro
 //Shutdown stops the outbox
 func (out *AMQPOutbox) Shutdown() {
 	close(out.stop)
-
 }
 
 //Post implements Outbox.Send