Commit c581474

Refresh Java samples to support Gradle 7 (#75)
Update build.gradle files to support Gradle 7.
Small code cleanups.
1 parent 439d9ce commit c581474
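
The heart of the refresh, repeated in both build.gradle diffs below, is moving off the dependency configurations that Gradle 7 removed (`compile`, `runtime`, `testRuntime`) onto their current counterparts (`implementation`, `runtimeClasspath`, `testRuntimeClasspath`). A minimal sketch of the migrated pieces, assembled from the diffs — the `repositories` block and the `application.mainClass` value are illustrative additions here, not part of this commit:

```groovy
// Gradle 7-style sketch assembled from this commit's build.gradle changes
// (illustrative only, not the full sample build file).
plugins {
    id 'application'
    id 'java'
}

repositories {
    mavenCentral() // assumption: any repository hosting the artifacts works here
}

application {
    // assumption: illustrative main class; the samples configure this elsewhere in their build files
    mainClass = 'com.eventstreams.samples.EventStreamsConsoleSample'
}

dependencies {
    // 'compile' was removed in Gradle 7; 'implementation' is the replacement.
    implementation 'org.apache.kafka:kafka-clients:2.7.+'
    implementation 'log4j:log4j:1.2.17'
    implementation 'org.slf4j:slf4j-log4j12:1.7.25'
    implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+'
}

// 'testRuntime' no longer exists; resolve the test runtime classpath instead.
task downloadDependencies(type: Exec) {
    configurations.testRuntimeClasspath.files
    commandLine 'echo', 'Downloaded all dependencies'
}
```

In the console sample the new `jar` block bundles the compile classpath into the archive itself, so the shadow plugin, `uploadArchives`, and the `distZip`/`distTar` customisations could be dropped; the schema sample keeps the `Class-Path` manifest approach but points it at `runtimeClasspath`.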

22 files changed: +53 −166 lines

kafka-java-console-sample/build.gradle

Lines changed: 11 additions & 75 deletions
```diff
@@ -20,7 +20,6 @@
 plugins {
     id 'application'
     id 'java'
-    id 'com.github.johnrengelman.shadow' version '4.0.2'
     id 'eclipse'
 }
 
@@ -36,81 +35,18 @@ repositories {
 }
 
 dependencies {
-    compile 'org.apache.kafka:kafka-clients:2.4.+'
-    compile 'log4j:log4j:1.2.17'
-    compile 'org.slf4j:slf4j-log4j12:1.7.25'
-    compile 'com.fasterxml.jackson.core:jackson-databind:2.9.7'
-}
-
-task downloadDependencies(type: Exec) {
-    configurations.testRuntime.files
-    commandLine 'echo', 'Downloaded all dependencies'
-}
-
-// Compile source code
-compileJava {
-    doFirst {
-        copy {
-            from configurations.runtime
-            into 'lib'
-        }
-    }
-
-    source = 'src'
-    options.encoding = 'ISO-8859-1'
+    implementation 'org.apache.kafka:kafka-clients:2.7.+'
+    implementation 'log4j:log4j:1.2.17'
+    implementation 'org.slf4j:slf4j-log4j12:1.7.25'
+    implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+'
 }
 
 jar {
-    from "resources/log4j.properties"
-
-    doLast {
-        copy {
-            from 'resources'
-            into 'build/libs/resources'
-        }
-
-        copy {
-            from 'lib'
-            into 'build/libs/lib'
-        }
-    }
-
-    // Provide Main-Class so the JAR executes properly, along with the required classpath data.
-    manifest {
-        attributes('Main-Class': mainClassName,
-            'Class-Path': configurations.runtime.files.collect { './lib/' + it.name }.join(' '))
-    }
-}
-
-// Save JAR in build directory.
-uploadArchives {
-    repositories {
-        flatDir {
-            dirs 'build'
-        }
-    }
-}
-
-['Zip', 'Tar'].each { suffix ->
-    "dist$suffix" {
-        def basePath = baseName + '-' + version
-
-        from('resources') {
-            into { basePath + '/resources' }
-            exclude 'log4j.properties'
-        }
-
-        from('resources') {
-            into { basePath + '/lib' }
-            include 'log4j.properties'
-        }
-    }
+    duplicatesStrategy = DuplicatesStrategy.INCLUDE
+    manifest {
+        attributes('Main-Class': mainClassName)
+    }
+    from {
+        configurations.compileClasspath.filter{ it.exists() }.collect { it.isDirectory() ? it : zipTree(it) }
+    }
 }
-
-sourceSets {
-    main {
-        java {
-            srcDirs = ["src/main/java", "src/main/resources"]
-        }
-    }
-}
```

kafka-java-console-sample/docs/Docker_Local.md

Lines changed: 0 additions & 1 deletion
````diff
@@ -21,7 +21,6 @@ To build and run the sample, you must have the done the following:
    ```shell
    export VCAP_SERVICES='{
      "instance_id": "...",
-     "mqlight_lookup_url": "...",
      "api_key": "...",
      "kafka_admin_url": "....",
      "kafka_rest_url": "...",
````

kafka-java-console-sample/src/main/java/com/eventstreams/samples/ConsumerRunnable.java

Lines changed: 2 additions & 2 deletions
```diff
@@ -76,13 +76,13 @@ public void run() {
                 }
 
             } catch (final WakeupException e) {
-                logger.warn("Consumer closing - caught exception: {}", e);
+                logger.warn("Consumer closing - caught exception: {}", e, e);
             } catch (final KafkaException e) {
                 logger.error("Sleeping for 5s - Consumer has caught: {}", e, e);
                 try {
                     Thread.sleep(5000); // Longer sleep before retrying
                 } catch (InterruptedException e1) {
-                    logger.warn("Consumer closing - caught exception: {}", e);
+                    logger.warn("Consumer closing - caught exception: {}", e, e);
                 }
             }
         }
```
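
The `e, e` change here (and the matching one in ProducerRunnable.java below) is an SLF4J idiom rather than a typo: when the exception is the only argument it is consumed by the `{}` placeholder and logged via `toString()` with no stack trace, while a trailing `Throwable` that is not matched by a placeholder is printed with its full stack trace. A standalone sketch of the difference (class name hypothetical, not part of the samples):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical demo class, not part of the samples.
public class LoggingThrowableDemo {
    private static final Logger logger = LoggerFactory.getLogger(LoggingThrowableDemo.class);

    public static void main(String[] args) {
        Exception e = new IllegalStateException("boom");

        // Single argument: 'e' only fills the {} placeholder, so no stack trace is emitted.
        logger.warn("Consumer closing - caught exception: {}", e);

        // Two arguments: the first fills the placeholder, the trailing Throwable is logged with its stack trace.
        logger.warn("Consumer closing - caught exception: {}", e, e);
    }
}
```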

kafka-java-console-sample/src/main/java/com/eventstreams/samples/EventStreamsConsoleSample.java

Lines changed: 0 additions & 6 deletions
```diff
@@ -240,7 +240,6 @@ static final Map<String, Object> getProducerConfigs(String bootstrapServers, Str
         configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
         configs.put(ProducerConfig.CLIENT_ID_CONFIG, "kafka-java-console-sample-producer");
         configs.put(ProducerConfig.ACKS_CONFIG, "all");
-        configs.put(ProducerConfig.CLIENT_DNS_LOOKUP_CONFIG,"use_all_dns_ips");
         configs.putAll(getCommonConfigs(bootstrapServers, apikey));
         return configs;
     }
@@ -252,7 +251,6 @@ static final Map<String, Object> getConsumerConfigs(String bootstrapServers, Str
         configs.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-java-console-sample-consumer");
         configs.put(ConsumerConfig.GROUP_ID_CONFIG, "kafka-java-console-sample-group");
         configs.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
-        configs.put(ConsumerConfig.CLIENT_DNS_LOOKUP_CONFIG,"use_all_dns_ips");
         configs.putAll(getCommonConfigs(bootstrapServers, apikey));
         return configs;
     }
@@ -263,16 +261,12 @@ static final Map<String, Object> getCommonConfigs(String boostrapServers, String
         configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_SSL");
         configs.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
         configs.put(SaslConfigs.SASL_JAAS_CONFIG, "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"token\" password=\"" + apikey + "\";");
-        configs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2");
-        configs.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, "TLSv1.2");
-        configs.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "HTTPS");
         return configs;
     }
 
     static final Properties getAdminConfigs(String bootstrapServers, String apikey) {
         Properties configs = new Properties();
         configs.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafka-java-console-sample-admin");
-        configs.put(AdminClientConfig.CLIENT_DNS_LOOKUP_CONFIG, "use_all_dns_ips");
         configs.putAll(getCommonConfigs(bootstrapServers, apikey));
         return configs;
     }
```

kafka-java-console-sample/src/main/java/com/eventstreams/samples/ProducerRunnable.java

Lines changed: 2 additions & 2 deletions
```diff
@@ -91,13 +91,13 @@ public void run() {
                 Thread.sleep(2000L);
 
             } catch (final InterruptedException e) {
-                logger.warn("Producer closing - caught exception: {}", e);
+                logger.warn("Producer closing - caught exception: {}", e, e);
             } catch (final Exception e) {
                 logger.error("Sleeping for 5s - Producer has caught : {}", e, e);
                 try {
                     Thread.sleep(5000L); // Longer sleep before retrying
                 } catch (InterruptedException e1) {
-                    logger.warn("Producer closing - caught exception: {}", e);
+                    logger.warn("Producer closing - caught exception: {}", e, e);
                 }
             }
         }
```

kafka-java-console-sample/src/main/resources/.gitignore

Lines changed: 0 additions & 1 deletion
This file was deleted.

kafka-java-console-schema-sample/README.md

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,5 +1,5 @@
 # IBM Event Streams for IBM Cloud Kafka Java console sample application
-This Java console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [Kafka](https://kafka.apache.org) Java API, and using the a third party Avro Serializer and Deserializer for the schema registry. It also shows how to create topics using the Kafka Admin API.
+This Java console application demonstrates how to connect to [IBM Event Streams for IBM Cloud](https://cloud.ibm.com/docs/services/EventStreams?topic=eventstreams-getting_started), send and receive messages using the [Kafka](https://kafka.apache.org) Java API, and using a third party Avro Serializer and Deserializer for the schema registry. It also shows how to create topics using the Kafka Admin API.
 
 It can be run locally on your machine or deployed into [IBM Cloud](https://cloud.ibm.com/).
 
```

kafka-java-console-schema-sample/build.gradle

Lines changed: 9 additions & 34 deletions
```diff
@@ -41,16 +41,16 @@ repositories {
 }
 
 dependencies {
-    compile 'org.apache.kafka:kafka-clients:2.4.+'
-    compile 'log4j:log4j:1.2.17'
-    compile 'org.slf4j:slf4j-log4j12:1.7.25'
-    compile 'com.fasterxml.jackson.core:jackson-databind:2.9.7'
-    compile 'io.confluent:kafka-avro-serializer:5.3.1'
-    compile "org.apache.avro:avro:1.10.0"
+    implementation 'org.apache.kafka:kafka-clients:2.7.+'
+    implementation 'log4j:log4j:1.2.17'
+    implementation 'org.slf4j:slf4j-log4j12:1.7.25'
+    implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.+'
+    implementation 'io.confluent:kafka-avro-serializer:5.3.+'
+    implementation 'org.apache.avro:avro:1.10.+'
 }
 
 task downloadDependencies(type: Exec) {
-    configurations.testRuntime.files
+    configurations.testRuntimeClasspath.files
     commandLine 'echo', 'Downloaded all dependencies'
 }
 
@@ -69,7 +69,7 @@ avro {
 compileJava {
     doFirst {
         copy {
-            from configurations.runtime
+            from configurations.runtimeClasspath
             into 'lib'
         }
     }
@@ -96,32 +96,7 @@ jar {
     // Provide Main-Class so the JAR executes properly, along with the required classpath data.
     manifest {
         attributes('Main-Class': mainClassName,
-            'Class-Path': configurations.runtime.files.collect { './lib/' + it.name }.join(' '))
-    }
-}
-
-// Save JAR in build directory.
-uploadArchives {
-    repositories {
-        flatDir {
-            dirs 'build'
-        }
-    }
-}
-
-['Zip', 'Tar'].each { suffix ->
-    "dist$suffix" {
-        def basePath = baseName + '-' + version
-
-        from('resources') {
-            into { basePath + '/resources' }
-            exclude 'log4j.properties'
-        }
-
-        from('resources') {
-            into { basePath + '/lib' }
-            include 'log4j.properties'
-        }
+            'Class-Path': configurations.runtimeClasspath.files.collect { './lib/' + it.name }.join(' '))
     }
 }
 
```
kafka-java-console-schema-sample/docs/Docker_Local.md

Lines changed: 2 additions & 3 deletions
````diff
@@ -12,7 +12,7 @@ To build and run the sample, you must have the done the following:
 
 1. Build the container image from the `Dockerfile`:
    ```shell
-   docker build -t java-console-sample .
+   docker build -t java-console-schema-sample .
    ```
 
 2. Export the Event Streams for IBM Cloud instance credentials:
@@ -21,7 +21,6 @@ To build and run the sample, you must have the done the following:
    ```shell
    export VCAP_SERVICES='{
      "instance_id": "...",
-     "mqlight_lookup_url": "...",
      "api_key": "...",
      "kafka_admin_url": "....",
      "kafka_rest_url": "...",
@@ -35,7 +34,7 @@ To build and run the sample, you must have the done the following:
 
 3. Run the container image
    ```shell
-   docker run -e VCAP_SERVICES="$VCAP_SERVICES" java-console-sample
+   docker run -e VCAP_SERVICES="$VCAP_SERVICES" java-console-schema-sample
    ```
 
 ## Further references
````

kafka-java-console-schema-sample/settings.gradle

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,7 +1,7 @@
 pluginManagement {
     repositories {
         gradlePluginPortal()
-        jcenter()
+        mavenCentral()
         maven {
             name "JCenter Gradle Plugins"
             url "https://dl.bintray.com/gradle/gradle-plugins"
```
