cloudwatch.sh (forked from vdesabou/kafka-docker-playground)
#!/bin/bash
set -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source "${DIR}/../../scripts/utils.sh"
if [ ! -f "$HOME/.aws/config" ]
then
     logerror "ERROR: $HOME/.aws/config does not exist"
     exit 1
fi
if [ ! -f "$HOME/.aws/credentials" ]
then
     logerror "ERROR: $HOME/.aws/credentials does not exist"
     exit 1
fi
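# Note: the checks above only verify that the standard AWS CLI files exist.
# A minimal sketch of their expected layout (all values are placeholders):
#   ~/.aws/credentials:
#     [default]
#     aws_access_key_id = <YOUR_ACCESS_KEY_ID>
#     aws_secret_access_key = <YOUR_SECRET_ACCESS_KEY>
#   ~/.aws/config:
#     [default]
#     region = us-east-1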
${DIR}/../../environment/plaintext/start.sh "${PWD}/docker-compose.plaintext.yml"
# cleanup
set +e
aws logs delete-log-group --log-group-name my-log-group
set -e
log "Create a log group in AWS CloudWatch Logs."
aws logs create-log-group --log-group-name my-log-group
log "Create a log stream in AWS CloudWatch Logs."
aws logs create-log-stream --log-group-name my-log-group --log-stream-name my-log-stream
log "Insert Records into your log stream."
# If this is the first time inserting logs into a new log stream, then no sequence token is needed.
# However, after the first put, there will be a sequence token returned that will be needed as a parameter in the next put.
aws logs put-log-events --log-group-name my-log-group --log-stream-name my-log-stream --log-events timestamp=$(date +%s000),message="This is a log #0"
log "Injecting more messages"
for i in $(seq 1 10)
do
     token=$(aws logs describe-log-streams --log-group-name my-log-group | jq -r '.logStreams[0].uploadSequenceToken')
     aws logs put-log-events --log-group-name my-log-group --log-stream-name my-log-stream --log-events timestamp=$(date +%s000),message="This is a log #${i}" --sequence-token "${token}"
done
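# Alternative sketch (commented out, not part of the flow above): depending on the CLI version,
# put-log-events returns a "nextSequenceToken" field in its JSON output, so the token could be
# chained from each put instead of calling describe-log-streams on every iteration:
#
#   token=$(aws logs put-log-events --log-group-name my-log-group --log-stream-name my-log-stream \
#       --log-events timestamp=$(date +%s000),message="seed" | jq -r .nextSequenceToken)
#   for i in $(seq 1 10)
#   do
#       token=$(aws logs put-log-events --log-group-name my-log-group --log-stream-name my-log-stream \
#           --log-events timestamp=$(date +%s000),message="This is a log #${i}" --sequence-token "${token}" | jq -r .nextSequenceToken)
#   done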
log "Creating AWS CloudWatch Logs Source connector"
curl -X PUT \
     -H "Content-Type: application/json" \
     --data '{
               "connector.class": "io.confluent.connect.aws.cloudwatch.AwsCloudWatchSourceConnector",
               "tasks.max": "1",
               "aws.cloudwatch.logs.url": "https://logs.us-east-1.amazonaws.com",
               "aws.cloudwatch.log.group": "my-log-group",
               "aws.cloudwatch.log.streams": "my-log-stream",
               "confluent.license": "",
               "confluent.topic.bootstrap.servers": "broker:9092",
               "confluent.topic.replication.factor": "1"
          }' \
     http://localhost:8083/connectors/aws-cloudwatch-logs-source/config | jq .
sleep 5
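# Optional sanity check (sketch): the Connect REST endpoint used above also exposes a status
# resource, which should report the connector and its task as RUNNING once it has started.
log "Checking connector status"
curl -s http://localhost:8083/connectors/aws-cloudwatch-logs-source/status | jq .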
log "Verify we have received the data in my-log-group.my-log-stream topic"
timeout 60 docker exec broker kafka-console-consumer --bootstrap-server broker:9092 --topic my-log-group.my-log-stream --from-beginning --max-messages 10
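# Optional cleanup sketch (commented out, not part of the original flow):
# delete the test log group once the verification above has completed.
#
#   set +e
#   aws logs delete-log-group --log-group-name my-log-group
#   set -e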