Added support for docker-compose.yml. Configured logging during testing. Added scripts for debugging.
jcustenborder committed Oct 22, 2016
1 parent d281f0d commit b80418b
Showing 7 changed files with 134 additions and 8 deletions.
5 changes: 5 additions & 0 deletions src/main/resources/META-INF/maven/archetype.xml
@@ -16,6 +16,11 @@
<resource>config/MySinkConnector.properties</resource>
<resource>config/MySourceConnector.properties</resource>
<resource>README.md</resource>
<resource>docker-compose.yml</resource>
<resource>connect/connect-avro-docker.properties</resource>
<resource>bin/debug.sh</resource>
<resource>bin/suspend.sh</resource>
<resource>src/test/resources/logback.xml</resource>
</resources>
<testSources>
<source>src/test/java/MySinkConnectorConfigTest.java</source>
22 changes: 22 additions & 0 deletions src/main/resources/archetype-resources/bin/debug.sh
@@ -0,0 +1,22 @@
#!/usr/bin/env bash
#
# Copyright (C) 2016 Jeremy Custenborder ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

mvn clean package

export CLASSPATH="$(find `pwd`/target/kafka-*-package/share/java/ -type f -name '*.jar' | tr '\n' ':')"

$CONFLUENT_HOME/bin/connect-standalone connect/connect-avro-docker.properties config/MySinkConnector.properties config/MySourceConnector.properties
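debug.sh rebuilds the connector package, puts every jar from the packaged share/java directory on the classpath, and starts a standalone Connect worker with the Docker-oriented worker properties and the example sink and source connector configs. A minimal sketch of running it once the docker-compose services are up (assumes CONFLUENT_HOME points at a local Confluent Platform installation, as the last line of the script requires; the install path below is only an example):

export CONFLUENT_HOME=/opt/confluent   # assumption: adjust to your Confluent Platform location
./bin/debug.sh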
23 changes: 23 additions & 0 deletions src/main/resources/archetype-resources/bin/suspend.sh
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
#
# Copyright (C) 2016 Jeremy Custenborder ([email protected])
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

mvn clean package

export KAFKA_JMX_OPTS='-Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005'
export CLASSPATH="$(find `pwd`/target/kafka-*-package/share/java/ -type f -name '*.jar' | tr '\n' ':')"

$CONFLUENT_HOME/bin/connect-standalone connect/connect-avro-docker.properties config/MySinkConnector.properties config/MySourceConnector.properties
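Because suspend.sh sets suspend=y in the JDWP options above, the worker JVM pauses at startup and waits for a debugger on port 5005. Any JDWP-capable debugger or an IDE remote-debug configuration pointed at that port will do; as a quick command-line check, a sketch with jdb (host and port taken from the options above):

jdb -connect com.sun.jdi.SocketAttach:hostname=localhost,port=5005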
25 changes: 25 additions & 0 deletions src/main/resources/archetype-resources/connect/connect-avro-docker.properties
@@ -0,0 +1,25 @@
# Sample configuration for a standalone Kafka Connect worker that uses Avro serialization and
# integrates with the Schema Registry. This sample configuration assumes a local installation of
# Confluent Platform with all services running on their default ports.
# Bootstrap Kafka servers. If multiple servers are specified, they should be comma-separated.
bootstrap.servers=confluent:9092
# The converters specify the format of data in Kafka and how to translate it into Connect data.
# Every Connect user will need to configure these based on the format they want their data in
# when loaded from or stored into Kafka
key.converter=io.confluent.connect.avro.AvroConverter
key.converter.schema.registry.url=http://confluent:8081
value.converter=io.confluent.connect.avro.AvroConverter
value.converter.schema.registry.url=http://confluent:8081
# The internal converter used for offsets and config data is configurable and must be specified,
# but most users will always want to use the built-in default. Offset and config data is never
# visible outside of Connect in this format.
internal.key.converter=org.apache.kafka.connect.json.JsonConverter
internal.value.converter=org.apache.kafka.connect.json.JsonConverter
internal.key.converter.schemas.enable=false
internal.value.converter.schemas.enable=false
# Local storage file for offset data
offset.storage.file.filename=/tmp/connect.offsets
# Confluent Control Center Integration -- uncomment these lines to enable Kafka client interceptors
# that will report audit data that can be displayed and analyzed in Confluent Control Center
# producer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor
# consumer.interceptor.classes=io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor
28 changes: 28 additions & 0 deletions src/main/resources/archetype-resources/docker-compose.yml
@@ -0,0 +1,28 @@
version: "2"
services:
zookeeper:
image: confluent/zookeeper
ports:
- "2181:2181"
environment:
zk_id: "1"
network_mode: "host"
kafka:
image: confluent/kafka
depends_on:
- zookeeper
ports:
- "9092:9092"
environment:
KAFKA_ZOOKEEPER_CONNECT: "confluent:2181"
network_mode: "host"
schema-registry:
image: confluent/schema-registry
depends_on:
- kafka
- zookeeper
ports:
- "8081:8081"
environment:
SR_KAFKASTORE_CONNECTION_URL: "confluent:2181"
network_mode: "host"
29 changes: 21 additions & 8 deletions src/main/resources/archetype-resources/pom.xml
Expand Up @@ -8,12 +8,19 @@
<packaging>jar</packaging>

<name>${artifactId}</name>
<description>A Kafka Connect Connector for ${artifactId}</description>

<properties>
<kafka.version>0.10.0.0-cp1</kafka.version>
<junit.version>4.12</junit.version>
</properties>

<repositories>
<repository>
<id>confluent</id>
<url>http://packages.confluent.io/maven/</url>
</repository>
</repositories>

<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
@@ -24,7 +31,19 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>1.10.19</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.1.7</version>
<scope>test</scope>
</dependency>
</dependencies>
@@ -79,10 +98,4 @@
</resource>
</resources>
</build>
<repositories>
<repository>
<id>confluent</id>
<url>http://packages.confluent.io/maven/</url>
</repository>
</repositories>
</project>
10 changes: 10 additions & 0 deletions src/main/resources/archetype-resources/src/test/resources/logback.xml
@@ -0,0 +1,10 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger - %msg%n</pattern>
</encoder>
</appender>
<root level="debug">
<appender-ref ref="STDOUT" />
</root>
</configuration>
