Remove Guava usages in favor of built-in JDK methods to avoid potential conflicts with newer versions of Curator (#32)

* Remove Guava usages in favor of built-in JDK methods to avoid potential conflicts with newer versions of Curator

Curator appears to have shaded Guava a long time ago, and any transitive update could, and probably will, break consumers.
This replaces the existing usages with comparable built-in JDK methods.

* Remove commented-out code

* Update version to 3.1.1 and include a changelog entry describing the reasoning
mkobit authored and Crim committed Mar 23, 2019
1 parent 7ae67ca commit 5fa08e5
Showing 9 changed files with 36 additions and 28 deletions.
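
Both replacements are mechanical, and the same pattern repeats through every file below: Guava's Charsets gives way to java.nio.charset.StandardCharsets, and Guava's Files.createTempDir() to java.nio.file.Files.createTempDirectory(). A minimal sketch of the two swaps (the class and method names here are illustrative only, not code from the repository):

    import java.io.File;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;

    // Hypothetical helper showing the Guava-free equivalents used in this commit.
    class GuavaFreeSketch {
        static byte[] utf8(String s) {
            // was: s.getBytes(com.google.common.base.Charsets.UTF_8)
            return s.getBytes(StandardCharsets.UTF_8);
        }

        static File tempDir() throws IOException {
            // was: com.google.common.io.Files.createTempDir()
            return Files.createTempDirectory("kafka-unit").toFile();
        }
    }
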
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -2,6 +2,9 @@
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

+ ## 3.1.1 (03/22/2019)
+ - Replace internal uses of Guava with comparable JDK methods so that, if a transitive dependency on Curator resolves to a more recent version that shades Guava, this library will not break.

## 3.1.0 (12/13/2018)
- Officially support Kafka 2.0.x
- KafkaTestUtils.produceRecords() and its variants now set producer configuration "acks" to "all"
4 changes: 2 additions & 2 deletions kafka-junit-core/pom.xml
@@ -5,12 +5,12 @@
<parent>
<artifactId>kafka-junit</artifactId>
<groupId>com.salesforce.kafka.test</groupId>
- <version>3.1.0</version>
+ <version>3.1.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>kafka-junit-core</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>

<!-- defined properties -->
<properties>
@@ -25,7 +25,6 @@

package com.salesforce.kafka.test;

- import com.google.common.base.Charsets;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.DescribeClusterResult;
@@ -51,6 +50,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

+ import java.nio.charset.StandardCharsets;
import java.time.Clock;
import java.util.ArrayList;
import java.util.Collection;
@@ -169,7 +169,7 @@ public List<ProducedKafkaRecord<byte[], byte[]>> produceRecords(
final String value = "value" + timeStamp;

// Add to map
- keysAndValues.put(key.getBytes(Charsets.UTF_8), value.getBytes(Charsets.UTF_8));
+ keysAndValues.put(key.getBytes(StandardCharsets.UTF_8), value.getBytes(StandardCharsets.UTF_8));
}

return produceRecords(keysAndValues, topicName, partitionId);
@@ -542,4 +542,4 @@ private Map<String, Object> buildDefaultClientConfig() {

return defaultClientConfig;
}
- }
+ }
@@ -25,9 +25,9 @@

package com.salesforce.kafka.test;

- import com.google.common.io.Files;
-
import java.io.File;
+ import java.io.IOException;
+ import java.nio.file.Files;

/**
* Collection of Utilities.
@@ -39,11 +39,16 @@ class Utils {
*/
static File createTempDirectory() {
// Create temp path to store logs
- final File logDir = Files.createTempDir();
+ final File logDir;
+ try {
+     logDir = Files.createTempDirectory("kafka-unit").toFile();
+ } catch (IOException e) {
+     throw new RuntimeException(e);
+ }

// Ensure it's removed on termination.
logDir.deleteOnExit();

return logDir;
}
- }
+ }
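
One nuance in the new Utils.createTempDirectory() above: Guava's Files.createTempDir() threw no checked exception, while the JDK's Files.createTempDirectory(String, FileAttribute<?>...) declares IOException, hence the try/catch that rethrows it as a RuntimeException and keeps the helper's signature unchanged. Callers stay as simple as before (hypothetical usage, not part of the diff):

    // Failures surface as an unchecked RuntimeException; callers need no throws clause.
    final File logDir = Utils.createTempDirectory();
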
6 changes: 3 additions & 3 deletions kafka-junit4/pom.xml
@@ -32,13 +32,13 @@
<parent>
<artifactId>kafka-junit</artifactId>
<groupId>com.salesforce.kafka.test</groupId>
- <version>3.1.0</version>
+ <version>3.1.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<!-- Module Definition & Version -->
<artifactId>kafka-junit4</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>

<!-- defined properties -->
<properties>
@@ -50,7 +50,7 @@
<dependency>
<groupId>com.salesforce.kafka.test</groupId>
<artifactId>kafka-junit-core</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>
</dependency>

<!-- JUnit is Required -->
@@ -25,7 +25,6 @@

package com.salesforce.kafka.test.junit4;

- import com.google.common.base.Charsets;
import com.salesforce.kafka.test.KafkaTestUtils;
import com.salesforce.kafka.test.ProducedKafkaRecord;
import org.apache.kafka.clients.admin.AdminClient;
@@ -42,6 +41,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

+ import java.nio.charset.StandardCharsets;
import java.time.Clock;
import java.util.Collections;
import java.util.Iterator;
@@ -196,10 +196,10 @@ public void testProducerAndConsumerUtils() {
// You can get details about what got produced into Kafka, including the partition and offset for each message.
for (final ProducedKafkaRecord<byte[], byte[]> producedKafkaRecord: producedRecordsList) {
// This is the key of the message that was produced.
- final String key = new String(producedKafkaRecord.getKey(), Charsets.UTF_8);
+ final String key = new String(producedKafkaRecord.getKey(), StandardCharsets.UTF_8);

// This is the value of the message that was produced.
- final String value = new String(producedKafkaRecord.getValue(), Charsets.UTF_8);
+ final String value = new String(producedKafkaRecord.getValue(), StandardCharsets.UTF_8);

// Other details about topic, partition, and offset it was written onto.
final String topic = producedKafkaRecord.getTopic();
@@ -228,8 +228,8 @@ public void testProducerAndConsumerUtils() {
final ConsumerRecord<String, String> consumerRecord = consumerRecordIterator.next();
final ProducedKafkaRecord<byte[], byte[]> producedKafkaRecord = producedKafkaRecordIterator.next();

- final String expectedKey = new String(producedKafkaRecord.getKey(), Charsets.UTF_8);
- final String expectedValue = new String(producedKafkaRecord.getValue(), Charsets.UTF_8);
+ final String expectedKey = new String(producedKafkaRecord.getKey(), StandardCharsets.UTF_8);
+ final String expectedValue = new String(producedKafkaRecord.getValue(), StandardCharsets.UTF_8);
final String actualKey = consumerRecord.key();
final String actualValue = consumerRecord.value();

@@ -262,4 +262,4 @@ public void testCreatingTopicMultipleTimes() {
private KafkaTestUtils getKafkaTestUtils() {
return sharedKafkaTestResource.getKafkaTestUtils();
}
- }
+ }
6 changes: 3 additions & 3 deletions kafka-junit5/pom.xml
@@ -31,12 +31,12 @@
<parent>
<artifactId>kafka-junit</artifactId>
<groupId>com.salesforce.kafka.test</groupId>
- <version>3.1.0</version>
+ <version>3.1.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>

<artifactId>kafka-junit5</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>

<!-- defined properties -->
<properties>
@@ -48,7 +48,7 @@
<dependency>
<groupId>com.salesforce.kafka.test</groupId>
<artifactId>kafka-junit-core</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>
</dependency>

<!-- JUnit is Required -->
@@ -25,7 +25,6 @@

package com.salesforce.kafka.test.junit5;

- import com.google.common.base.Charsets;
import com.salesforce.kafka.test.KafkaTestUtils;
import com.salesforce.kafka.test.ProducedKafkaRecord;
import org.apache.kafka.clients.admin.AdminClient;
@@ -43,6 +42,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

+ import java.nio.charset.StandardCharsets;
import java.time.Clock;
import java.util.Collections;
import java.util.Iterator;
@@ -196,10 +196,10 @@ void testProducerAndConsumerUtils() {
// You can get details about what got produced into Kafka, including the partition and offset for each message.
for (final ProducedKafkaRecord<byte[], byte[]> producedKafkaRecord: producedRecordsList) {
// This is the key of the message that was produced.
- final String key = new String(producedKafkaRecord.getKey(), Charsets.UTF_8);
+ final String key = new String(producedKafkaRecord.getKey(), StandardCharsets.UTF_8);

// This is the value of the message that was produced.
- final String value = new String(producedKafkaRecord.getValue(), Charsets.UTF_8);
+ final String value = new String(producedKafkaRecord.getValue(), StandardCharsets.UTF_8);

// Other details about topic, partition, and offset it was written onto.
final String topic = producedKafkaRecord.getTopic();
@@ -228,8 +228,8 @@ void testProducerAndConsumerUtils() {
final ConsumerRecord<String, String> consumerRecord = consumerRecordIterator.next();
final ProducedKafkaRecord<byte[], byte[]> producedKafkaRecord = producedKafkaRecordIterator.next();

- final String expectedKey = new String(producedKafkaRecord.getKey(), Charsets.UTF_8);
- final String expectedValue = new String(producedKafkaRecord.getValue(), Charsets.UTF_8);
+ final String expectedKey = new String(producedKafkaRecord.getKey(), StandardCharsets.UTF_8);
+ final String expectedValue = new String(producedKafkaRecord.getValue(), StandardCharsets.UTF_8);
final String actualKey = consumerRecord.key();
final String actualValue = consumerRecord.value();

@@ -262,4 +262,4 @@ void testCreatingTopicMultipleTimes() {
private KafkaTestUtils getKafkaTestUtils() {
return sharedKafkaTestResource.getKafkaTestUtils();
}
- }
+ }
2 changes: 1 addition & 1 deletion pom.xml
@@ -32,7 +32,7 @@

<groupId>com.salesforce.kafka.test</groupId>
<artifactId>kafka-junit</artifactId>
- <version>3.1.0</version>
+ <version>3.1.1</version>

<!-- Submodules -->
<modules>
