FrancoisPoinsot 6 лет назад
Родитель
Сommit
fe395fa544

+ 15 - 0
tools/uncomitted-messages/DockerFile_CustomerProducer.yaml

@@ -0,0 +1,15 @@
# Image that clones and builds the sample uncommitted-message producer, then
# runs it on container start and prints a "ready to run" marker when done.
FROM maven:3-jdk-8-slim

# Install git, clone the producer sources, and build them at image-build time
# so the Maven dependency download is cached in the image layer.
RUN apt-get update && apt-get install -y git && \
cd /opt &&  \
git clone https://github.com/FrancoisPoinsot/simplest-uncommitted-msg &&  \
cd ./simplest-uncommitted-msg &&  \
mvn clean install

  # NOTE: something is still downloaded at `mvn exec` time — presumably the exec plugin.
  # This still works, but it forces a download on each run instead of using the local
  # Docker image-layer cache.
  # TODO: add the necessary config in pom.xml so that `mvn install` also installs the `exec` plugin.
CMD  cd /opt/simplest-uncommitted-msg && mvn exec:java -Dexec.mainClass="CustomProducer.Main" && \
echo ----------- && \
echo ready to run && \
echo -----------

+ 41 - 0
tools/uncomitted-messages/README.md

@@ -0,0 +1,41 @@
+# uncommitted-messages
+
+A setup using Java clients to test how to consume uncommitted messages
+
+## Dependencies
+
+- docker
+- golang 
+
+## How to use it
+
+You need to add this line in your host file:
+`127.0.0.1	kafka`
+
+Run `docker-compose up` to load a Kafka broker.
+Wait for the "ready to run" message. At that point the broker and the messages are set up.
+
+Now you can tweak the lib and run this command as much as you want to see the output:
+`go run kafka-console-consumer.go --brokers kafka:9092 --topic topic-test`
+
+You should see something like
+```
+2019/03/03 18:52:16 test msg: 0
+2019/03/03 18:52:16 test msg: 1
+2019/03/03 18:52:16 test msg: 2
+2019/03/03 18:52:16 test msg: 3
+2019/03/03 18:52:16 test msg: 4
+2019/03/03 18:52:16 test msg: 5
+2019/03/03 18:52:16 test msg: 6
+2019/03/03 18:52:16 test msg: 7
+2019/03/03 18:52:16 test msg: 8
+2019/03/03 18:52:16 test msg: 9
+```
+
+If you see all of these messages, you are consuming some uncommitted ones.
+
+## Why add a line in host file?
+
+Because Kafka has an `ADV_HOST` variable and it cares a lot about it.
+This means you have to address the broker using the same hostname whether the request
+comes from your local computer or from inside the docker-compose network.

+ 25 - 0
tools/uncomitted-messages/docker-compose.yaml

@@ -0,0 +1,25 @@
version: '3'
services:
  # All-in-one Kafka dev stack. Advertises itself as host "kafka" (ADV_HOST),
  # which is why the README asks for a `127.0.0.1 kafka` hosts-file entry.
  kafka:
    image: landoop/fast-data-dev:1.1.1
    ports:
      - "2181:2181"            # presumably ZooKeeper — verify against image docs
      - "3030:3030"            # presumably the web UI — verify against image docs
      - "8081-8083:8081-8083"
      - "9581-9585:9581-9585"
      - "9092:9092"            # Kafka broker port used by kafka-console-consumer.go
    environment:
      - ADV_HOST=kafka
      # Disable sample data, tests and extra services to keep the broker lean.
      - SAMPLEDATA=0
      - RUNNING_SAMPLEDATA=0
      - RUNTESTS=0
      - FORWARDLOGS=0
      - DISABLE_JMX=1
      - DEBUG=1
      - SUPERVISORWEB=0
      - CONNECTORS=file

  # Builds and runs the Java producer image defined next to this file.
  custom-producer:
    build:
      dockerfile: DockerFile_CustomerProducer.yaml
      context: .

+ 71 - 0
tools/uncomitted-messages/kafka-console-consumer.go

@@ -0,0 +1,71 @@
+package main
+
+import (
+	"flag"
+	"github.com/Shopify/sarama"
+	"log"
+	"os"
+	"strings"
+	"time"
+)
+
var (
	// brokerList is the comma-separated list of Kafka brokers to connect to.
	// It defaults to the KAFKA_PEERS environment variable.
	brokerList = flag.String("brokers", os.Getenv("KAFKA_PEERS"), "The comma separated list of brokers in the Kafka cluster")
	// topic is the Kafka topic to consume. It is required.
	topic      = flag.String("topic", "", "REQUIRED: the topic to consume")
)
+
+func main() {
+	flag.Parse()
+
+	if *brokerList == "" {
+		log.Fatal("You have to provide -brokers as a comma-separated list, or set the KAFKA_PEERS environment variable.")
+	}
+	if *topic == "" {
+		log.Fatal("-topic is required")
+	}
+
+	c, err := sarama.NewConsumer(strings.Split(*brokerList, ","), nil)
+	if err != nil {
+		log.Fatalf("Failed to start consumer: %s", err)
+	}
+
+	partitionList, err := c.Partitions(*topic)
+	if err != nil {
+		log.Fatalf("Failed to get the list of partitions: %s", err)
+	}
+
+	for _, partition := range partitionList {
+		pc, err := c.ConsumePartition(*topic, partition, sarama.OffsetOldest)
+		if err != nil {
+			log.Fatalf("Failed to start consumer for partition %d: %s", partition, err)
+		}
+
+		go func() {
+			for err := range pc.Errors() {
+				log.Fatal(err)
+			}
+		}()
+
+		msgChannel := pc.Messages()
+	read1Partition:
+		for {
+			timeout := time.NewTimer(1 * time.Second)
+			select {
+			case msg, open := <-msgChannel:
+				if !open {
+					log.Println("channel message is closed")
+					break read1Partition
+				}
+				log.Println(string(msg.Value))
+			case <-timeout.C:
+				break read1Partition
+			}
+		}
+	}
+
+	log.Println("Done consuming topic", *topic)
+
+	if err := c.Close(); err != nil {
+		log.Println("Failed to close consumer: ", err)
+	}
+}