Develop kafka health check

feature/add-travis-ci-remove-gitlab-ci
Emanuel Zienecker 2019-03-28 16:35:14 +01:00
parent bbef55fadb
commit 81a6ee4ec5
11 changed files with 528 additions and 0 deletions

.gitignore vendored 100644

@@ -0,0 +1,30 @@
target/
application-default.yml
!.mvn/wrapper/maven-wrapper.jar
### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr
### NetBeans ###
nbproject/private/
build/
nbbuild/
dist/
nbdist/
.nb-gradle/
### Custom ###
pom.xml.versionsBackup
kafka-health-check/config

.gitlab-ci.yml 100644

@@ -0,0 +1,37 @@
image: docker-proxy.device-insight.com/library/maven:alpine

variables:
  DOCKER_DRIVER: overlay2

services:
  - docker:stable-dind

mvn_test_job:
  stage: build
  script: >
    mvn clean install -B -P gitlab
  artifacts:
    paths:
      - kafka-health-check/target/*.log
    when: on_failure
    expire_in: 1 week
  except:
    - master
    - develop
  tags:
    - dind

mvn_deploy_job:
  stage: build
  script: >
    mvn clean deploy -B -P gitlab
  artifacts:
    paths:
      - kafka-health-check/target/*.log
    when: on_failure
    expire_in: 1 week
  only:
    - master
    - develop
  tags:
    - dind

README.adoc 100644

@@ -0,0 +1,3 @@
= KafkaHealthCheck
Spring Kafka health check.
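
A minimal wiring sketch (illustrative only, not part of this commit; the configuration class name, the `localhost:9092` broker address and the shared property map are assumptions):

[source,java]
----
import com.deviceinsight.kafka.health.KafkaConsumingHealthIndicator;
import com.deviceinsight.kafka.health.KafkaHealthProperties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.util.Collections;
import java.util.Map;

@Configuration
public class KafkaHealthCheckConfiguration {

	@Bean
	public KafkaConsumingHealthIndicator kafkaConsumingHealthIndicator() {
		// Illustrative broker address; a real application would reuse its existing
		// Kafka consumer and producer configuration maps here.
		Map<String, Object> kafkaProperties =
				Collections.singletonMap(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

		KafkaHealthProperties healthProperties = new KafkaHealthProperties();
		healthProperties.setTopic("health-checks");

		return new KafkaConsumingHealthIndicator(healthProperties, kafkaProperties, kafkaProperties);
	}
}
----

Because `KafkaConsumingHealthIndicator` extends Spring Boot's `AbstractHealthIndicator`, registering it as a bean is enough for it to contribute to the actuator health endpoint.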

changelog.adoc 100644

@@ -0,0 +1,6 @@
= KafkaHealthCheck
:icons: font
== Version 0.1.0
* Develop kafka health check

pom.xml 100644

@@ -0,0 +1,127 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
		xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

	<modelVersion>4.0.0</modelVersion>

	<groupId>com.deviceinsight.kafka</groupId>
	<artifactId>kafka-health-check</artifactId>
	<version>0.1.0-SNAPSHOT</version>
	<packaging>jar</packaging>

	<properties>
		<!-- Java -->
		<maven.compiler.source>1.8</maven.compiler.source>
		<maven.compiler.target>1.8</maven.compiler.target>

		<!-- Encoding -->
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>

		<!-- Versions -->
		<junit.jupiter.version>5.3.2</junit.jupiter.version>
		<docker-maven-plugin.version>0.27.2</docker-maven-plugin.version>
		<jgitflow-maven-plugin.version>1.0-m5.1</jgitflow-maven-plugin.version>
		<spring-boot-starter.version>2.1.1.RELEASE</spring-boot-starter.version>
		<spring.kafka.version>2.1.7.RELEASE</spring.kafka.version>
		<maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
		<maven-failsafe-plugin.version>2.22.1</maven-failsafe-plugin.version>
	</properties>

	<dependencies>
		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-actuator</artifactId>
			<version>${spring-boot-starter.version}</version>
			<scope>provided</scope>
		</dependency>
		<dependency>
			<groupId>org.springframework.kafka</groupId>
			<artifactId>spring-kafka</artifactId>
			<version>${spring.kafka.version}</version>
			<scope>provided</scope>
		</dependency>
		<dependency>
			<groupId>org.junit.jupiter</groupId>
			<artifactId>junit-jupiter-api</artifactId>
			<version>${junit.jupiter.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.junit.jupiter</groupId>
			<artifactId>junit-jupiter-params</artifactId>
			<version>${junit.jupiter.version}</version>
			<scope>test</scope>
		</dependency>
		<dependency>
			<groupId>org.junit.jupiter</groupId>
			<artifactId>junit-jupiter-engine</artifactId>
			<version>${junit.jupiter.version}</version>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
		<pluginManagement>
			<plugins>
				<plugin>
					<artifactId>maven-source-plugin</artifactId>
					<executions>
						<execution>
							<id>attach-sources</id>
							<goals>
								<goal>jar-no-fork</goal>
							</goals>
						</execution>
					</executions>
				</plugin>
				<plugin>
					<groupId>external.atlassian.jgitflow</groupId>
					<artifactId>jgitflow-maven-plugin</artifactId>
					<version>${jgitflow-maven-plugin.version}</version>
					<configuration>
						<autoVersionSubmodules>true</autoVersionSubmodules>
						<noDeploy>true</noDeploy>
						<useReleaseProfile>false</useReleaseProfile>
					</configuration>
				</plugin>
				<plugin>
					<groupId>ch.acanda.maven</groupId>
					<artifactId>spring-banner-plugin</artifactId>
					<version>1.0</version>
					<executions>
						<execution>
							<id>generate-spring-banner</id>
							<phase>generate-resources</phase>
							<goals>
								<goal>generate</goal>
							</goals>
						</execution>
					</executions>
					<configuration>
						<text>${project.name}</text>
						<outputDirectory>${project.build.outputDirectory}</outputDirectory>
						<filename>banner.txt</filename>
						<info>v${project.version}</info>
						<includeInfo>true</includeInfo>
						<color>bright cyan</color>
					</configuration>
				</plugin>
			</plugins>
		</pluginManagement>
	</build>

	<distributionManagement>
		<repository>
			<id>releases</id>
			<url>http://nexus.device-insight.de/nexus/content/repositories/releases</url>
		</repository>
		<snapshotRepository>
			<id>snapshots</id>
			<url>http://nexus.device-insight.de/nexus/content/repositories/snapshots</url>
		</snapshotRepository>
	</distributionManagement>

</project>

src/main/java/com/deviceinsight/kafka/health/KafkaCommunicationResult.java 100644

@@ -0,0 +1,43 @@
package com.deviceinsight.kafka.health;

final class KafkaCommunicationResult {

	private final String topic;

	private final Exception exception;

	private KafkaCommunicationResult() {
		this.topic = null;
		this.exception = null;
	}

	private KafkaCommunicationResult(String topic, Exception exception) {
		this.topic = topic;
		this.exception = exception;
	}

	static KafkaCommunicationResult success(String topic) {
		return new KafkaCommunicationResult();
	}

	static KafkaCommunicationResult failure(String topic, Exception exception) {
		return new KafkaCommunicationResult(topic, exception);
	}

	String getTopic() {
		return topic;
	}

	Exception getException() {
		return exception;
	}

	@Override
	public String toString() {
		return "KafkaCommunication{topic='" + topic + "', exception=" + exception + '}';
	}

	public boolean isFailure() {
		return exception != null;
	}
}

src/main/java/com/deviceinsight/kafka/health/KafkaConsumingHealthIndicator.java 100644

@@ -0,0 +1,192 @@
package com.deviceinsight.kafka.health;

import static java.util.concurrent.TimeUnit.MILLISECONDS;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeoutException;
import java.util.stream.StreamSupport;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

public class KafkaConsumingHealthIndicator extends AbstractHealthIndicator {

	private static final Logger logger = LoggerFactory.getLogger(KafkaConsumingHealthIndicator.class);

	private final Consumer<String, String> consumer;
	private final Producer<String, String> producer;
	private final String topic;
	private final long sendReceiveTimeoutMs;
	private final long pollTimeoutMs;
	private final long subscriptionTimeoutMs;
	private final ExecutorService executor;

	public KafkaConsumingHealthIndicator(KafkaHealthProperties kafkaHealthProperties,
			Map<String, Object> kafkaConsumerProperties, Map<String, Object> kafkaProducerProperties) {

		this.topic = kafkaHealthProperties.getTopic();
		this.sendReceiveTimeoutMs = kafkaHealthProperties.getSendReceiveTimeoutMs();
		this.pollTimeoutMs = kafkaHealthProperties.getPollTimeoutMs();
		this.subscriptionTimeoutMs = kafkaHealthProperties.getSubscriptionTimeoutMs();

		Map<String, Object> kafkaConsumerPropertiesCopy = new HashMap<>(kafkaConsumerProperties);
		setConsumerGroup(kafkaConsumerPropertiesCopy);

		StringDeserializer deserializer = new StringDeserializer();
		StringSerializer serializer = new StringSerializer();

		this.consumer = new KafkaConsumer<>(kafkaConsumerPropertiesCopy, deserializer, deserializer);
		this.producer = new KafkaProducer<>(kafkaProducerProperties, serializer, serializer);

		this.executor = new ThreadPoolExecutor(0, 1, 0L, MILLISECONDS, new SynchronousQueue<>(),
				new ThreadPoolExecutor.AbortPolicy());
	}

	@PostConstruct
	void subscribeAndSendMessage() throws InterruptedException {
		subscribeToTopic();
		KafkaCommunicationResult kafkaCommunicationResult = sendAndReceiveMessage();

		if (kafkaCommunicationResult.isFailure()) {
			throw new RuntimeException("Kafka health check failed", kafkaCommunicationResult.getException());
		}
	}

	@PreDestroy
	void shutdown() {
		executor.shutdown();
		producer.close();
		consumer.close();
	}

	private void setConsumerGroup(Map<String, Object> kafkaConsumerProperties) {
		try {
			kafkaConsumerProperties.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG,
					"health-check-" + InetAddress.getLocalHost().getHostAddress());
		} catch (UnknownHostException e) {
			throw new IllegalStateException(e);
		}
	}

	private void subscribeToTopic() throws InterruptedException {

		final CountDownLatch subscribed = new CountDownLatch(1);

		logger.info("Subscribe to health check topic={}", topic);

		consumer.subscribe(Collections.singleton(topic), new ConsumerRebalanceListener() {

			@Override
			public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
				// nothing to do here
			}

			@Override
			public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
				logger.debug("Got partitions = {}", partitions);

				if (!partitions.isEmpty()) {
					subscribed.countDown();
				}
			}
		});

		consumer.poll(pollTimeoutMs);
		if (!subscribed.await(subscriptionTimeoutMs, MILLISECONDS)) {
			throw new RuntimeException("Subscription to kafka failed, topic=" + topic);
		}
	}

	private KafkaCommunicationResult sendAndReceiveMessage() {

		Future<Void> sendReceiveTask = null;

		try {
			sendReceiveTask = executor.submit(() -> {
				sendAndReceiveKafkaMessage();
				return null;
			});

			sendReceiveTask.get(sendReceiveTimeoutMs, MILLISECONDS);

		} catch (ExecutionException e) {
			logger.warn("Kafka health check execution failed.", e);
			return KafkaCommunicationResult.failure(topic, e);
		} catch (TimeoutException | InterruptedException e) {
			logger.warn("Kafka health check timed out.", e);
			sendReceiveTask.cancel(true);
			return KafkaCommunicationResult.failure(topic, e);
		} catch (RejectedExecutionException e) {
			logger.debug("Ignore health check, already running...");
		}

		return KafkaCommunicationResult.success(topic);
	}

	private void sendAndReceiveKafkaMessage() throws Exception {

		String message = UUID.randomUUID().toString();
		long startTime = System.currentTimeMillis();

		logger.debug("Send health check message = {}", message);
		producer.send(new ProducerRecord<>(topic, message, message)).get(sendReceiveTimeoutMs, MILLISECONDS);

		while (messageNotReceived(message)) {
			logger.debug("Waiting for message={}", message);
		}

		logger.debug("Kafka health check succeeded. took= {} msec", System.currentTimeMillis() - startTime);
	}

	private boolean messageNotReceived(String message) {
		return StreamSupport.stream(consumer.poll(pollTimeoutMs).spliterator(), false)
				.noneMatch(msg -> msg.key().equals(message) && msg.value().equals(message));
	}

	@Override
	protected void doHealthCheck(Health.Builder builder) {
		KafkaCommunicationResult kafkaCommunicationResult = sendAndReceiveMessage();

		if (kafkaCommunicationResult.isFailure()) {
			builder.down(kafkaCommunicationResult.getException())
					.withDetail("topic", kafkaCommunicationResult.getTopic());
		} else {
			builder.up();
		}
	}
}

src/main/java/com/deviceinsight/kafka/health/KafkaHealthProperties.java 100644

@@ -0,0 +1,41 @@
package com.deviceinsight.kafka.health;

public class KafkaHealthProperties {

	private String topic = "health-checks";
	private long sendReceiveTimeoutMs = 2500;
	private long pollTimeoutMs = 200;
	private long subscriptionTimeoutMs = 5000;

	public String getTopic() {
		return topic;
	}

	public void setTopic(String topic) {
		this.topic = topic;
	}

	public long getSendReceiveTimeoutMs() {
		return sendReceiveTimeoutMs;
	}

	public void setSendReceiveTimeoutMs(long sendReceiveTimeoutMs) {
		this.sendReceiveTimeoutMs = sendReceiveTimeoutMs;
	}

	public long getPollTimeoutMs() {
		return pollTimeoutMs;
	}

	public void setPollTimeoutMs(long pollTimeoutMs) {
		this.pollTimeoutMs = pollTimeoutMs;
	}

	public long getSubscriptionTimeoutMs() {
		return subscriptionTimeoutMs;
	}

	public void setSubscriptionTimeoutMs(long subscriptionTimeoutMs) {
		this.subscriptionTimeoutMs = subscriptionTimeoutMs;
	}
}
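
The properties above default to topic `health-checks`, a 2500 ms send/receive timeout, a 200 ms poll timeout and a 5000 ms subscription timeout. A hedged sketch of binding them to externalized configuration follows; the `kafka.health` prefix and the configuration class are assumptions, since this commit ships no auto-configuration:

import com.deviceinsight.kafka.health.KafkaHealthProperties;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class KafkaHealthPropertiesConfiguration {

	// Assumed prefix "kafka.health": binds kafka.health.topic, kafka.health.send-receive-timeout-ms,
	// kafka.health.poll-timeout-ms and kafka.health.subscription-timeout-ms onto the setters above.
	@Bean
	@ConfigurationProperties(prefix = "kafka.health")
	public KafkaHealthProperties kafkaHealthProperties() {
		return new KafkaHealthProperties();
	}
}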


@@ -0,0 +1,28 @@
spring:
  jackson:
    serialization:
      write-dates-as-timestamps: false

management:
  server:
    port: 9090
  metrics:
    export:
      prometheus:
        enabled: true
  endpoints:
    web:
      exposure:
        include:
          - health
          - info
          - prometheus
          - loggers
          - httptrace
          - configprops
          - metrics
          - heapdump
          - threaddump
  endpoint:
    health:
      show-details: always


@@ -0,0 +1,5 @@
logging:
  level:
    com.deviceinsight.kafkahealthcheck.kafkahealthcheck: DEBUG
    org.springframework: DEBUG


@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>

	<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
		<encoder>
			<pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
		</encoder>
	</appender>

	<logger name="ch.qos.logback" level="WARN" />

	<root level="WARN">
		<appender-ref ref="STDOUT" />
	</root>

</configuration>