
fixed declaration dependencies and other lint issues (#1743)

Varun · 5 years ago · commit 25aedae6c3

+ 1 - 1
api_versions_request_test.go

@@ -3,7 +3,7 @@ package sarama
 import "testing"

 var (
-	apiVersionRequest = []byte{}
+	apiVersionRequest []byte
 )

 func TestApiVersionsRequest(t *testing.T) {
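This pattern recurs throughout the commit: whichever linter was run (the message only says "lint issues"), the standard Go code-review guidance prefers `var x []T` over `x := []T{}`, declaring a nil slice instead of writing a needless empty literal. For len, range, and append the two forms behave identically, so hunks like this one are safe. A minimal standalone sketch:

package main

import "fmt"

func main() {
	var a []byte  // nil slice
	b := []byte{} // empty but non-nil slice

	fmt.Println(a == nil, b == nil) // true false
	fmt.Println(len(a), len(b))     // 0 0

	// append treats both the same, so the rewrite is safe for code
	// that only grows and reads the slice.
	a = append(a, 'x')
	b = append(b, 'x')
	fmt.Println(string(a), string(b)) // x x
}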

+ 2 - 2
async_producer_test.go

@@ -12,7 +12,7 @@ import (
 	"time"

 	"github.com/fortytw2/leaktest"
-	metrics "github.com/rcrowley/go-metrics"
+	"github.com/rcrowley/go-metrics"
 )

 const TestMessage = "ABC THE MESSAGE"
@@ -92,7 +92,7 @@ func (f flakyEncoder) Length() int {
 }

 func (f flakyEncoder) Encode() ([]byte, error) {
-	if !bool(f) {
+	if !f {
 		return nil, errors.New("flaky encoding error")
 	}
 	return []byte(TestMessage), nil
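The `!bool(f)` fix works because `flakyEncoder` is evidently a defined type whose underlying type is bool, and Go's logical operators apply to such types directly, making the conversion back to `bool` a no-op. A sketch under that assumption (the type declaration below is reconstructed, not copied from the test file):

package main

import (
	"errors"
	"fmt"
)

// Assumed shape of the test type: a defined type with underlying type bool.
type flakyEncoder bool

func (f flakyEncoder) Encode() ([]byte, error) {
	if !f { // ! works on the defined type; bool(f) added nothing
		return nil, errors.New("flaky encoding error")
	}
	return []byte("ABC THE MESSAGE"), nil
}

func main() {
	_, err := flakyEncoder(false).Encode()
	fmt.Println(err) // flaky encoding error
}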

+ 2 - 2
broker.go

@@ -13,7 +13,7 @@ import (
 	"sync/atomic"
 	"time"

-	metrics "github.com/rcrowley/go-metrics"
+	"github.com/rcrowley/go-metrics"
 )

 // Broker represents a single Kafka broker connection. All operations on this object are entirely concurrency-safe.
@@ -1027,7 +1027,7 @@ func (b *Broker) sendAndReceiveV0SASLPlainAuth() error {
 	length := len(b.conf.Net.SASL.AuthIdentity) + 1 + len(b.conf.Net.SASL.User) + 1 + len(b.conf.Net.SASL.Password)
 	authBytes := make([]byte, length+4) //4 byte length header + auth data
 	binary.BigEndian.PutUint32(authBytes, uint32(length))
-	copy(authBytes[4:], []byte(b.conf.Net.SASL.AuthIdentity+"\x00"+b.conf.Net.SASL.User+"\x00"+b.conf.Net.SASL.Password))
+	copy(authBytes[4:], b.conf.Net.SASL.AuthIdentity+"\x00"+b.conf.Net.SASL.User+"\x00"+b.conf.Net.SASL.Password)

 	requestTime := time.Now()
 	// Will be decremented in updateIncomingCommunicationMetrics (except error)
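The broker.go change relies on a built-in special case: the spec allows `copy` to take a string as its source whenever the destination is a `[]byte`, so the explicit `[]byte(...)` conversion only allocated a temporary copy of the auth payload. A standalone illustration:

package main

import "fmt"

func main() {
	buf := make([]byte, 4+5) // room for a 4-byte header plus "hello"

	// copy(dst []byte, src string) is defined by the language; no conversion needed.
	n := copy(buf[4:], "hello")
	fmt.Println(n, string(buf[4:])) // 5 hello
}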

+ 1 - 1
client.go

@@ -788,7 +788,7 @@ func (client *client) backgroundMetadataUpdater() {
 }

 func (client *client) refreshMetadata() error {
-	topics := []string{}
+	var topics []string

 	if !client.conf.Metadata.Full {
 		if specificTopics, err := client.MetadataTopics(); err != nil {
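One caveat when applying this rewrite mechanically: a nil slice and an empty literal are distinguishable via `== nil` and in serialized output, so the change is only behavior-neutral where the slice is just appended to and iterated, as appears to be the case at these call sites. For example, with encoding/json:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var nilTopics []string    // the new form
	emptyTopics := []string{} // the old form

	a, _ := json.Marshal(nilTopics)
	b, _ := json.Marshal(emptyTopics)
	fmt.Println(string(a), string(b)) // null []
}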

+ 1 - 1
consumer.go

@@ -626,7 +626,7 @@ func (child *partitionConsumer) parseResponse(response *FetchResponse) ([]*Consu
 	abortedProducerIDs := make(map[int64]struct{}, len(block.AbortedTransactions))
 	abortedTransactions := block.getAbortedTransactions()

-	messages := []*ConsumerMessage{}
+	var messages []*ConsumerMessage
 	for _, records := range block.RecordsSet {
 		switch records.recordsType {
 		case legacyRecords:

+ 0 - 2
examples/README.md

@@ -8,5 +8,3 @@ In these examples, we use `github.com/Shopify/sarama` as import path. We do this

 [http_server](./http_server) is a simple HTTP server uses both the sync producer to produce data as part of the request handling cycle, as well as the async producer to maintain an access log. It also uses the [mocks subpackage](https://godoc.org/github.com/Shopify/sarama/mocks) to test both.

-#### SASL SCRAM Authentication
-[sasl_scram_authentication](./sasl_scram_authentication) is an example of how to authenticate to a Kafka cluster using SASL SCRAM-SHA-256 or SCRAM-SHA-512 mechanisms.

+ 2 - 2
examples/sasl_scram_client/main.go

@@ -91,10 +91,10 @@ func main() {
 	conf.Net.SASL.Handshake = true
 	if *algorithm == "sha512" {
 		conf.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
-		conf.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA512)
+		conf.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
 	} else if *algorithm == "sha256" {
 		conf.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
-		conf.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA256)
+		conf.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256

 	} else {
 		log.Fatalf("invalid SHA algorithm \"%s\": can be either \"sha256\" or \"sha512\"", *algorithm)
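Dropping the `sarama.SASLMechanism(...)` wrapper is valid because the SASL type constants are untyped string constants, which take on the defined `SASLMechanism` type implicitly when assigned. A reconstructed sketch of the relevant declarations (the exact sarama source may differ slightly):

package main

import "fmt"

// Reconstructed declarations, mirroring sarama's shape.
type SASLMechanism string

const SASLTypeSCRAMSHA512 = "SCRAM-SHA-512" // untyped constant

type saslConfig struct {
	Mechanism SASLMechanism
}

func main() {
	var c saslConfig

	// Untyped constants take the target's type on assignment,
	// so no explicit SASLMechanism(...) conversion is required.
	c.Mechanism = SASLTypeSCRAMSHA512
	fmt.Println(c.Mechanism)
}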

+ 6 - 6
mockresponses.go

@@ -177,8 +177,8 @@ func (mmr *MockMetadataResponse) For(reqBody versionedDecoder) encoderWithHeader
 	}

 	// Generate set of replicas
-	replicas := []int32{}
-	offlineReplicas := []int32{}
+	var replicas []int32
+	var offlineReplicas []int32
 	for _, brokerID := range mmr.brokers {
 		replicas = append(replicas, brokerID)
 	}
@@ -772,8 +772,8 @@ func (mr *MockDescribeConfigsResponse) For(reqBody versionedDecoder) encoderWith
 		Version: req.Version,
 	}

-	includeSynonyms := (req.Version > 0)
-	includeSource := (req.Version > 0)
+	includeSynonyms := req.Version > 0
+	includeSource := req.Version > 0

 	for _, r := range req.Resources {
 		var configEntries []*ConfigEntry
@@ -1088,9 +1088,9 @@ func NewMockDescribeLogDirsResponse(t TestReporter) *MockDescribeLogDirsResponse
 }

 func (m *MockDescribeLogDirsResponse) SetLogDirs(logDirPath string, topicPartitions map[string]int) *MockDescribeLogDirsResponse {
-	topics := []DescribeLogDirsResponseTopic{}
+	var topics []DescribeLogDirsResponseTopic
 	for topic := range topicPartitions {
-		partitions := []DescribeLogDirsResponsePartition{}
+		var partitions []DescribeLogDirsResponsePartition
 		for i := 0; i < topicPartitions[topic]; i++ {
 			partitions = append(partitions, DescribeLogDirsResponsePartition{
 				PartitionID: int32(i),
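Besides the nil-slice rewrites, the middle hunk drops redundant parentheses: `:=` already binds the entire right-hand expression, so `(req.Version > 0)` parses identically to `req.Version > 0` and the linter flags the parentheses as noise. Trivial sketch:

package main

import "fmt"

func main() {
	version := int16(1)

	// The comparison is a single expression; parentheses change nothing.
	includeSynonyms := version > 0
	fmt.Println(includeSynonyms) // true
}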

+ 1 - 1
tools/kafka-console-producer/kafka-console-producer.go

@@ -102,7 +102,7 @@ func main() {
 	}

 	if *headers != "" {
-		hdrs := []sarama.RecordHeader{}
+		var hdrs []sarama.RecordHeader
 		arrHdrs := strings.Split(*headers, ",")
 		for _, h := range arrHdrs {
 			if header := strings.Split(h, ":"); len(header) != 2 {

+ 1 - 1
tools/kafka-producer-performance/main.go
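In the hunk below, note the contrast the linter draws: `gosync "sync"` keeps its alias because it genuinely renames the package, while the `metrics` alias was redundant because the go-metrics import path already declares `package metrics`. A sketch of both cases:

package main

import (
	"fmt"
	gosync "sync" // alias kept: it differs from the package's own name

	"github.com/rcrowley/go-metrics" // package is already named "metrics"
)

func main() {
	var mu gosync.Mutex
	mu.Lock()
	defer mu.Unlock()

	c := metrics.NewCounter()
	c.Inc(1)
	fmt.Println(c.Count()) // 1
}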

@@ -14,7 +14,7 @@ import (
 	gosync "sync"
 	"time"

-	metrics "github.com/rcrowley/go-metrics"
+	"github.com/rcrowley/go-metrics"

 	"github.com/Shopify/sarama"
 	"github.com/Shopify/sarama/tools/tls"