Evan Huus, 12 years ago
parent
commit
4969d24fa1
51 changed files with 52 additions and 52 deletions
  1. broker.go  +1 -1
  2. broker_test.go  +1 -1
  3. client.go  +1 -1
  4. client_test.go  +1 -1
  5. consumer.go  +1 -1
  6. consumer_test.go  +1 -1
  7. crc32_field.go  +1 -1
  8. encoder_decoder.go  +1 -1
  9. errors.go  +1 -1
  10. fetch_request.go  +1 -1
  11. fetch_request_test.go  +1 -1
  12. fetch_response.go  +1 -1
  13. fetch_response_test.go  +1 -1
  14. length_field.go  +1 -1
  15. message.go  +1 -1
  16. message_set.go  +1 -1
  17. metadata_request.go  +1 -1
  18. metadata_request_test.go  +1 -1
  19. metadata_response.go  +1 -1
  20. metadata_response_test.go  +1 -1
  21. offset_commit_request.go  +1 -1
  22. offset_commit_request_test.go  +1 -1
  23. offset_commit_response.go  +1 -1
  24. offset_commit_response_test.go  +1 -1
  25. offset_fetch_request.go  +1 -1
  26. offset_fetch_request_test.go  +1 -1
  27. offset_fetch_response.go  +1 -1
  28. offset_fetch_response_test.go  +1 -1
  29. offset_request.go  +1 -1
  30. offset_request_test.go  +1 -1
  31. offset_response.go  +1 -1
  32. offset_response_test.go  +1 -1
  33. packet_decoder.go  +1 -1
  34. packet_encoder.go  +1 -1
  35. partitioner.go  +1 -1
  36. partitioner_test.go  +1 -1
  37. prep_encoder.go  +1 -1
  38. produce_request.go  +1 -1
  39. produce_request_test.go  +1 -1
  40. produce_response.go  +1 -1
  41. produce_response_test.go  +1 -1
  42. producer.go  +1 -1
  43. producer_test.go  +1 -1
  44. real_decoder.go  +1 -1
  45. real_encoder.go  +1 -1
  46. request.go  +1 -1
  47. request_test.go  +1 -1
  48. response_header.go  +1 -1
  49. response_header_test.go  +1 -1
  50. sarama.go  +2 -2
  51. utils.go  +1 -1

+ 1 - 1
broker.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"io"

+ 1 - 1
broker_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
client.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"sort"

+ 1 - 1
client_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
consumer.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // Consumer processes Kafka messages from a given topic and partition.
 // You MUST call Close() on a consumer to avoid leaks, it will not be garbage-collected automatically when
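
The comment above makes Close() mandatory for a Consumer. A minimal sketch of that contract, assuming an illustrative import path and that Close() is callable on a *Consumer (neither is shown in this diff):

package example

import "github.com/Shopify/sarama" // import path assumed for illustration

// consume sketches the lifetime rule from consumer.go's comment: a Consumer is
// not garbage-collected on its own, so Close() must be called explicitly.
func consume(c *sarama.Consumer) {
	defer c.Close() // released even if the function returns early

	// ... fetch and process messages from the topic/partition here ...
}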

+ 1 - 1
consumer_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
crc32_field.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
encoder_decoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // Encoder is the interface that wraps the basic Encode method.
 // Anything implementing Encoder can be turned into bytes using Kafka's encoding rules.
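
For readers unfamiliar with the single-method wrapper idiom the comment describes, here is a hypothetical sketch: the method name comes from the comment, but its signature is an assumption, not the interface actually defined in encoder_decoder.go.

package example

// Encoder is a hypothetical reconstruction of the idea described above: one
// Encode method that turns the implementer into bytes. The real signature in
// encoder_decoder.go is not visible in this diff.
type Encoder interface {
	Encode() ([]byte, error)
}

// rawBytes shows how a trivial type could satisfy such an interface.
type rawBytes []byte

func (r rawBytes) Encode() ([]byte, error) { return r, nil }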

+ 1 - 1
errors.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "errors"
 

+ 1 - 1
fetch_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type fetchRequestBlock struct {
 	fetchOffset int64

+ 1 - 1
fetch_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
fetch_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type FetchResponseBlock struct {
 	Err                 KError

+ 1 - 1
fetch_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"bytes"

+ 1 - 1
length_field.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "encoding/binary"
 

+ 1 - 1
message.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"bytes"

+ 1 - 1
message_set.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type MessageBlock struct {
 	Offset int64

+ 1 - 1
metadata_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type MetadataRequest struct {
 	Topics []string

+ 1 - 1
metadata_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
metadata_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type PartitionMetadata struct {
 	Err      KError

+ 1 - 1
metadata_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_commit_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type offsetCommitRequestBlock struct {
 	offset   int64

+ 1 - 1
offset_commit_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_commit_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type OffsetCommitResponse struct {
 	ClientID string

+ 1 - 1
offset_commit_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_fetch_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type OffsetFetchRequest struct {
 	ConsumerGroup string

+ 1 - 1
offset_fetch_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_fetch_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type OffsetFetchResponseBlock struct {
 	Offset   int64

+ 1 - 1
offset_fetch_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // OffsetTime is used in Offset Requests to ask for all messages before a certain time. Any positive int64
 // value will be interpreted as milliseconds, or use the special constants defined here.
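
As a concrete illustration of the millisecond convention described above (the OffsetTime name comes from the comment; the special constants it mentions are not visible in this diff, so none are used here):

package example

import (
	"time"

	"github.com/Shopify/sarama" // import path assumed for illustration
)

// offsetsBefore converts a wall-clock time into the positive-millisecond form
// the comment above describes for Offset Requests.
func offsetsBefore(t time.Time) sarama.OffsetTime {
	// assuming OffsetTime is an int64-based type, as the comment implies
	return sarama.OffsetTime(t.UnixNano() / int64(time.Millisecond))
}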

+ 1 - 1
offset_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
offset_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type OffsetResponseBlock struct {
 	Err     KError

+ 1 - 1
offset_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
packet_decoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // PacketDecoder is the interface providing helpers for reading with Kafka's encoding rules.
 // Types implementing Decoder only need to worry about calling methods like GetString,

+ 1 - 1
packet_encoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // PacketEncoder is the interface providing helpers for writing with Kafka's encoding rules.
 // Types implementing Encoder only need to worry about calling methods like PutString,

+ 1 - 1
partitioner.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "math/rand"
 

+ 1 - 1
partitioner_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
prep_encoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "math"
 

+ 1 - 1
produce_request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // RequiredAcks is used in Produce Requests to tell the broker how many replica acknowledgements
 // it must see before responding. Any positive int16 value is valid, or the constants defined here.
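
By the same convention, a hedged sketch of picking an acknowledgement level: RequiredAcks is named in the comment, but that it converts from a plain integer is an assumption based on "any positive int16 value is valid", and the special constants are not shown in this diff.

package example

import "github.com/Shopify/sarama" // import path assumed for illustration

// threeReplicaAcks asks the broker to wait for three replica acknowledgements
// before responding, using the positive-integer form the comment above allows.
func threeReplicaAcks() sarama.RequiredAcks {
	// assuming RequiredAcks is an int16-based type, as the comment implies
	return sarama.RequiredAcks(3)
}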

+ 1 - 1
produce_request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
produce_response.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type ProduceResponseBlock struct {
 	Err    KError

+ 1 - 1
produce_response_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 1 - 1
producer.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // Producer publishes Kafka messages on a given topic. It routes messages to the correct broker, refreshing metadata as appropriate,
 // and parses responses for errors. A Producer itself does not need to be closed (thus no Close method) but you still need to close
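
A short sketch of the lifetime rule in that comment: the Producer itself exposes no Close method, so nothing producer-specific is deferred. The sentence about what does still need closing is cut off by the diff context, so this sketch deliberately leaves that part out; the import path and the *Producer receiver are assumptions.

package example

import "github.com/Shopify/sarama" // import path assumed for illustration

// publish illustrates that a Producer needs no Close() of its own; routing,
// metadata refresh and error parsing happen inside the library per the
// comment above. How a message is actually sent is not shown in this diff,
// so no send call is sketched here.
func publish(p *sarama.Producer) {
	_ = p // no defer p.Close() -- the type has no Close method
}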

+ 1 - 1
producer_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
real_decoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"encoding/binary"

+ 1 - 1
real_encoder.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "encoding/binary"
 

+ 1 - 1
request.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 type requestEncoder interface {
 	encoder

+ 1 - 1
request_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import (
 	"bytes"

+ 1 - 1
response_header.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "math"
 

+ 1 - 1
response_header_test.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 import "testing"
 

+ 2 - 2
sarama.go

@@ -1,7 +1,7 @@
 /*
-Package kafka provides client libraries for the Kafka 0.8 protocol. The Client, Producer and Consumer objects are the core of the high-level API. The Broker and Request/Response objects permit more precise control.
+Package sarama provides client libraries for the Kafka 0.8 protocol. The Client, Producer and Consumer objects are the core of the high-level API. The Broker and Request/Response objects permit more precise control.
 
 The Request/Response objects and properties are mostly undocumented, as they line up exactly with the
 protocol fields documented by Kafka at https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol
 */
-package kafka
+package sarama
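
The net effect of this commit on downstream code is only the package qualifier. A minimal before/after sketch (the import path is an assumption for illustration; MetadataRequest and its Topics field appear in the metadata_request.go hunk above):

package main

import "github.com/Shopify/sarama" // import path assumed for illustration

func main() {
	// Before this commit the same literal would have been written as
	// kafka.MetadataRequest{...}; the type and field are unchanged.
	req := sarama.MetadataRequest{Topics: []string{"my-topic"}}
	_ = req
}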

+ 1 - 1
utils.go

@@ -1,4 +1,4 @@
-package kafka
+package sarama
 
 // make []int32 sortable so we can sort partition numbers
 type int32Slice []int32
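
The comment and type declaration above are the standard sort.Interface pattern; below is a reconstructed sketch of the three methods such a helper needs. The diff shows only the type declaration, so the method bodies here are the conventional ones, not copied from the file.

package example

import "sort"

// int32Slice makes a []int32 sortable via sort.Interface, as utils.go's
// comment describes.
type int32Slice []int32

func (s int32Slice) Len() int           { return len(s) }
func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] }
func (s int32Slice) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }

// sortPartitions sorts partition numbers in ascending order.
func sortPartitions(partitions []int32) {
	sort.Sort(int32Slice(partitions))
}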