COMPOSE_PROJECT_NAME ?= kafka-connect-sandbox
CONNECT_COMPOSE_FILE = connect-compose.yml
BOOTSTRAP ?= localhost:9092
ZKROOT ?= localhost:2181/kafka
SR ?= http://localhost:8081
CONNECT_URL ?= http://localhost:8083
PARTITIONS ?= 1
TOPIC ?= clicks
CONNECT_TOPIC_NAMESPACE ?= kafka-connect
CONNECT_HEADERS = -H Content-Type:application/json
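
# Usage note: the ?= assignments above are defaults and can be overridden per
# invocation. Several target names embed $(TOPIC) or $(CONNECTOR_NAME), so the
# variable must match the target being called, for example:
#   make create-topic-orders TOPIC=orders PARTITIONS=3
#   make consume-orders TOPIC=orders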
# docker-network:
# 	docker network create kafka-backend
# 	docker network create kafka-frontend
# 	docker network create kafka-connect
# clean-docker-network:
# 	docker network rm kafka-backend
# 	docker network rm kafka-frontend
# 	docker network rm kafka-connect

.PHONY: kafka connect

kafka:
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) up -d kafka
	@echo "Waiting for Kafka"
	@sleep 10

connect: kafka create-connect-topics-$(CONNECT_TOPIC_NAMESPACE)
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) up -d kafka-connect kafka-connect-ui
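
# Sketch (not an upstream target): register a connector from a JSON config via
# the Connect REST API; the connector.json filename is a placeholder.
create-connector-example:
	curl -XPOST $(CONNECT_HEADERS) -d @connector.json $(CONNECT_URL)/connectors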

clean-connect:
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) rm -fs

list-topics:
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka \
		kafka-topics --zookeeper zookeeper:2181/kafka --list

consume-$(TOPIC):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka \
		kafka-console-consumer --topic=$(TOPIC) \
		--bootstrap-server $(BOOTSTRAP) \
		--property print.key=true \
		--from-beginning

consume-avro-$(TOPIC):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec schema-registry \
		kafka-avro-console-consumer --topic=$(TOPIC) \
		--bootstrap-server $(BOOTSTRAP) \
		--property schema.registry.url=http://schema-registry:8081 \
		--property key.deserializer=org.apache.kafka.common.serialization.StringDeserializer \
		--property print.key=true \
		--from-beginning
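
# Sketch of a matching producer (assumed, not upstream): reads key<TAB>value
# lines from stdin and writes them to $(TOPIC).
produce-$(TOPIC):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka \
		kafka-console-producer --broker-list $(BOOTSTRAP) --topic $(TOPIC) \
		--property parse.key=true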

create-topic-$(TOPIC):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka \
		kafka-topics --create --topic $(TOPIC) \
		--zookeeper zookeeper:2181/kafka \
		--partitions $(PARTITIONS) --replication-factor 1

clean-topic-$(TOPIC):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka \
		kafka-topics --zookeeper zookeeper:2181/kafka --delete --topic $(TOPIC)

get-connects:
	curl $(CONNECT_URL)/connectors
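
# Sketch (assumed helper): check a single connector's state; CONNECTOR_NAME has
# no default above and must be given on the command line.
get-connect-status-$(CONNECTOR_NAME):
	curl $(CONNECT_URL)/connectors/$(CONNECTOR_NAME)/status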

create-connect-topics-$(CONNECT_TOPIC_NAMESPACE):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka bash -c \
		"kafka-topics --create --if-not-exists --zookeeper zookeeper:2181/kafka --topic $(CONNECT_TOPIC_NAMESPACE)_connect_configs --replication-factor 1 --partitions 1 --config cleanup.policy=compact --disable-rack-aware \
		&& kafka-topics --create --if-not-exists --zookeeper zookeeper:2181/kafka --topic $(CONNECT_TOPIC_NAMESPACE)_connect_offsets --replication-factor 1 --partitions 10 --config cleanup.policy=compact --disable-rack-aware \
		&& kafka-topics --create --if-not-exists --zookeeper zookeeper:2181/kafka --topic $(CONNECT_TOPIC_NAMESPACE)_connect_status --replication-factor 1 --partitions 10 --config cleanup.policy=compact --disable-rack-aware"

clean-connect-topics-$(CONNECT_TOPIC_NAMESPACE):
	docker-compose -f $(CONNECT_COMPOSE_FILE) -p $(COMPOSE_PROJECT_NAME) exec kafka bash -c \
		"kafka-topics --zookeeper zookeeper:2181/kafka --delete --topic $(CONNECT_TOPIC_NAMESPACE)_connect_configs \
		&& kafka-topics --zookeeper zookeeper:2181/kafka --delete --topic $(CONNECT_TOPIC_NAMESPACE)_connect_offsets \
		&& kafka-topics --zookeeper zookeeper:2181/kafka --delete --topic $(CONNECT_TOPIC_NAMESPACE)_connect_status"

clean-connector-$(CONNECTOR_NAME):
	@curl -XDELETE $(CONNECT_URL)/connectors/$(CONNECTOR_NAME)

schemas-restore:
	# export SCHEMA=$$(jq tostring LogLine.avsc)
	# curl -XPOST -d"{\"schema\":$$SCHEMA}" http://localhost:8081/subjects/$$TOPIC-value/versions
	# curl -XPOST -H Content-Type:application/json -d'{"schema":"\"string\""}' http://localhost:8081/subjects/clicks-key/versions
	kafka-console-producer --broker-list $(BOOTSTRAP) --topic _schemas --property parse.key=true < schemas.txt
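
# Sketch of the inverse of schemas-restore (assumed): dump the _schemas topic
# to schemas.txt; --timeout-ms lets the consumer exit once the topic is drained.
schemas-backup:
	kafka-console-consumer --bootstrap-server $(BOOTSTRAP) --topic _schemas \
		--property print.key=true --from-beginning --timeout-ms 10000 > schemas.txt
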
include s3-minio/Makefile
include elastic/Makefile
include jdbc/mssql/Makefile
include cassandra/Makefile