-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathdocker-compose.yml
128 lines (118 loc) · 4.63 KB
/
docker-compose.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# Creates System Test env using Docker with the following containers:
# 1. Zookeeper
# 2. Kafka with topics for logs and Kafka Connect running in distributed mode
# 3. Kafka Connect with Scalyr Sink Connector installed
# 4. Filebeat configured to send logs from flog containers to Kafka
# 5. flog to generate fake log events
version: '3'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:6.1.0
    container_name: zookeeper
    environment:
      # Env values are quoted: containers receive strings regardless, and
      # quoting prevents any YAML re-typing surprises.
      ZOOKEEPER_CLIENT_PORT: "2181"
      ZOOKEEPER_TICK_TIME: "2000"
  kafka:
    image: confluentinc/cp-kafka:6.1.0
    container_name: kafka
    depends_on:
      - zookeeper
    ports:
      # Quoted to avoid the YAML 1.1 sexagesimal trap for digit:digit scalars
      # (and for consistency with the other port mappings in this file).
      - "9092:9092"
    environment:
      KAFKA_BROKER_ID: "1"
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      # Two listeners: kafka:29092 for other containers on the compose
      # network, localhost:9092 for clients on the host.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      # Single-broker test cluster, so internal topics cannot replicate beyond 1.
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
  # One-shot helper container: waits until the broker is ready, then
  # pre-creates the Kafka Connect internal topics (compacted, as Connect
  # requires) and the "logs" topic the test traffic flows through.
  kafka-setup:
    image: confluentinc/cp-kafka:6.1.0
    depends_on:
      - kafka
    command: >-
      bash -c 'echo Waiting for Kafka to be ready... &&
      cub kafka-ready -z zookeeper:2181 1 30 &&
      kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-config-storage &&
      kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-offset-storage &&
      kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --config cleanup.policy=compact --topic connect-status-storage &&
      kafka-topics --create --if-not-exists --zookeeper zookeeper:2181 --partitions 1 --replication-factor 1 --topic logs'
    environment:
      KAFKA_BROKER_ID: "1"
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
  # Kafka Connect worker (distributed mode) with the Scalyr Sink Connector
  # baked in via the custom Dockerfile.
  connect:
    build:
      context: .
      dockerfile: src/test/SystemTest/docker/kafka-connect/Dockerfile
    container_name: connect
    ports:
      - "8088:8088"
    depends_on:
      - kafka
      - kafka-setup
    environment:
      CONNECT_BOOTSTRAP_SERVERS: kafka:29092
      CONNECT_GROUP_ID: "connect-group"
      # Storage topic names match the topics pre-created by kafka-setup.
      CONNECT_CONFIG_STORAGE_TOPIC: "connect-config-storage"
      CONNECT_OFFSET_STORAGE_TOPIC: "connect-offset-storage"
      CONNECT_STATUS_STORAGE_TOPIC: "connect-status-storage"
      CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_PLUGIN_PATH: "/etc/kafka-connect/jars"
      CONNECT_REST_ADVERTISED_HOST_NAME: "connect"
      CONNECT_REST_HOST_NAME: "connect"
      CONNECT_REST_PORT: "8088"
      # Replication factor 1 to match the single-broker cluster above.
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
  # Tails container logs from the host's Docker dirs and ships them to Kafka.
  filebeat:
    build: src/test/SystemTest/docker/filebeat
    depends_on:
      - kafka
      - kafka-setup
    command: "--strict.perms=false -v -e -d autodiscover,docker"
    # Root + docker.sock access are required for Docker autodiscover.
    user: root
    volumes:
      - /var/lib/docker/containers/:/var/lib/docker/containers/:ro
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      KAFKA_SERVERS: kafka:29092
  # Fake Apache access-log generator; its stdout is picked up by filebeat.
  flog:
    image: mingrammer/flog
    command: /bin/flog --loop --format apache_combined --delay 0.02 --sleep 1
    depends_on:
      - filebeat
  custom_app:
    build: src/test/SystemTest/docker/custom-app
    depends_on:
      - kafka
      - kafka-setup
    environment:
      KAFKA_SERVERS: kafka:29092
  fluentd:
    build: src/test/SystemTest/docker/fluentd
    container_name: fluentd
    ports:
      - "24224:24224"
    depends_on:
      - kafka
      - kafka-setup
  # Second flog instance whose logs go to fluentd via the Docker fluentd
  # logging driver instead of being tailed from disk.
  flog-fluentd:
    image: mingrammer/flog
    command: /bin/flog --loop --format apache_combined --delay 0.02 --sleep 1
    depends_on:
      - fluentd
    logging:
      driver: fluentd
      options:
        # The fluentd logging driver runs on the HOST daemon, so it reaches
        # the fluentd container through the published host port.
        fluentd-address: "localhost:24224"
        tag: "apache.fluentd"
  # Fluent Bit sampling CPU metrics straight into the "logs" Kafka topic.
  fluentbit-cpu-usage:
    image: fluent/fluent-bit:1.4
    command: ["/fluent-bit/bin/fluent-bit", "-i", "cpu", "-o", "kafka", "-p", "brokers=kafka:29092", "-p", "topics=logs",
              "-F", "record_modifier", "-p", "Record=hostname fluentbit-cpu-test", "-p", "Record=tag fluentbit-cpu", "-m", "*"]
    depends_on:
      - kafka
      - kafka-setup