Skip to content

Commit e75ac3c

Browse files
upgrade-jetty-version: removed deprecated dependency on monitoring-interceptors
1 parent 67af53a commit e75ac3c

15 files changed

+0
-155
lines changed

pom.xml

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -178,17 +178,6 @@
178178
<artifactId>jakarta.xml.bind-api</artifactId>
179179
<version>4.0.1</version>
180180
</dependency>
181-
<!--
182-
This dependency is included to demonstrate the use of
183-
Confluent Monitoring Interceptors in conjunction with
184-
Kafka Streams but isn't a required
185-
dependency for running Kafka Streams.
186-
-->
187-
<dependency>
188-
<groupId>io.confluent</groupId>
189-
<artifactId>monitoring-interceptors</artifactId>
190-
<version>${io.confluent.blueway.version}</version>
191-
</dependency>
192181

193182
<dependency>
194183
<groupId>com.twitter</groupId>

src/main/java/io/confluent/examples/streams/WikipediaFeedAvroExample.java

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
package io.confluent.examples.streams;
1717

1818
import io.confluent.examples.streams.avro.WikiFeed;
19-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
2019
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
2120
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
2221
import org.apache.kafka.clients.consumer.ConsumerConfig;
@@ -124,11 +123,6 @@ static KafkaStreams buildWikipediaFeed(final String bootstrapServers,
124123
// in order to keep this example interactive.
125124
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 10 * 1000);
126125

127-
// If Confluent monitoring interceptors are on the classpath,
128-
// then the producer and consumer interceptors are added to the
129-
// streams application.
130-
MonitoringInterceptorUtils.maybeConfigureInterceptorsStreams(streamsConfiguration);
131-
132126
final Serde<String> stringSerde = Serdes.String();
133127
final Serde<Long> longSerde = Serdes.Long();
134128

src/main/java/io/confluent/examples/streams/microservices/AddInventory.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import io.confluent.examples.streams.avro.microservices.Product;
44
import io.confluent.examples.streams.microservices.domain.Schemas;
55
import io.confluent.examples.streams.microservices.domain.Schemas.Topics;
6-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
76
import org.apache.commons.cli.*;
87
import org.apache.kafka.clients.producer.KafkaProducer;
98
import org.apache.kafka.clients.producer.ProducerConfig;
@@ -35,7 +34,6 @@ private static void sendInventory(final List<KeyValue<Product, Integer>> invento
3534
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
3635
producerConfig.put(ProducerConfig.RETRIES_CONFIG, 1);
3736
producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "inventory-generator");
38-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(producerConfig);
3937

4038
final ProductTypeSerde productSerde = new ProductTypeSerde();
4139

src/main/java/io/confluent/examples/streams/microservices/OrderDetailsService.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@
1212
import io.confluent.examples.streams.avro.microservices.OrderValidation;
1313
import io.confluent.examples.streams.avro.microservices.OrderValidationResult;
1414
import io.confluent.examples.streams.microservices.domain.Schemas;
15-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
1615

1716
import java.io.IOException;
1817
import java.time.Duration;
@@ -133,7 +132,6 @@ private void startProducer(final String bootstrapServers, final Properties defau
133132
producerConfig.put(ProducerConfig.RETRIES_CONFIG, String.valueOf(Integer.MAX_VALUE));
134133
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
135134
producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "order-details-service-producer");
136-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(producerConfig);
137135

138136
producer = new KafkaProducer<>(producerConfig,
139137
Topics.ORDER_VALIDATIONS.keySerde().serializer(),
@@ -148,7 +146,6 @@ private void startConsumer(final String bootstrapServers, final Properties defau
148146
consumerConfig.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
149147
consumerConfig.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, !eosEnabled);
150148
consumerConfig.put(ConsumerConfig.CLIENT_ID_CONFIG, "order-details-service-consumer");
151-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(consumerConfig);
152149

153150
consumer = new KafkaConsumer<>(consumerConfig,
154151
Topics.ORDERS.keySerde().deserializer(),

src/main/java/io/confluent/examples/streams/microservices/PostOrdersAndPayments.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@
66
import io.confluent.examples.streams.microservices.domain.Schemas;
77
import io.confluent.examples.streams.microservices.domain.beans.OrderBean;
88
import io.confluent.examples.streams.microservices.util.Paths;
9-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
109
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
1110
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
1211
import org.apache.commons.cli.*;
@@ -53,7 +52,6 @@ private static KafkaProducer<String, Payment> buildPaymentProducer(final String
5352
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
5453
producerConfig.put(ProducerConfig.RETRIES_CONFIG, 1);
5554
producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "payment-generator");
56-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(producerConfig);
5755

5856
return new KafkaProducer<>(producerConfig, new StringSerializer(), paymentSerializer);
5957
}

src/main/java/io/confluent/examples/streams/microservices/util/ConsumeCustomers.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.microservices.domain.Schemas.Topics;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import org.apache.commons.cli.CommandLine;
76
import org.apache.commons.cli.DefaultParser;
@@ -60,7 +59,6 @@ public static void main(final String[] args) throws Exception {
6059
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
6160
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Topics.CUSTOMERS.keySerde().deserializer().getClass());
6261
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Topics.CUSTOMERS.valueSerde().deserializer().getClass());
63-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(props);
6462

6563
try (final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
6664
consumer.subscribe(Collections.singletonList("customers"));

src/main/java/io/confluent/examples/streams/microservices/util/ConsumeOrders.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.microservices.domain.Schemas.Topics;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import org.apache.commons.cli.CommandLine;
76
import org.apache.commons.cli.DefaultParser;
@@ -60,7 +59,6 @@ public static void main(final String[] args) throws Exception {
6059
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
6160
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Topics.ORDERS.keySerde().deserializer().getClass());
6261
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Topics.ORDERS.valueSerde().deserializer().getClass());
63-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(props);
6462

6563
try (final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
6664
consumer.subscribe(Collections.singletonList("orders"));

src/main/java/io/confluent/examples/streams/microservices/util/ConsumePayments.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.microservices.domain.Schemas.Topics;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import org.apache.commons.cli.CommandLine;
76
import org.apache.commons.cli.DefaultParser;
@@ -59,7 +58,6 @@ public static void main(final String[] args) throws Exception {
5958
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
6059
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Topics.PAYMENTS.keySerde().deserializer().getClass());
6160
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Topics.PAYMENTS.valueSerde().deserializer().getClass());
62-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(props);
6361

6462
try (final KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
6563
consumer.subscribe(Collections.singletonList("payments"));

src/main/java/io/confluent/examples/streams/microservices/util/MicroserviceUtils.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import io.confluent.examples.streams.avro.microservices.Product;
44
import io.confluent.examples.streams.microservices.Service;
55
import io.confluent.examples.streams.microservices.domain.Schemas;
6-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
76

87
import org.apache.kafka.clients.consumer.ConsumerConfig;
98
import org.apache.kafka.clients.producer.KafkaProducer;
@@ -85,7 +84,6 @@ public static Properties baseStreamsConfig(final String bootstrapServers,
8584
enableEOS ? "exactly_once" : "at_least_once");
8685
config.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 1); //commit as fast as possible
8786
config.put(StreamsConfig.consumerPrefix(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG), 30000);
88-
MonitoringInterceptorUtils.maybeConfigureInterceptorsStreams(config);
8987
return config;
9088
}
9189

@@ -198,7 +196,6 @@ public static <T> KafkaProducer startProducer(final String bootstrapServers,
198196
producerConfig.put(ProducerConfig.RETRIES_CONFIG, String.valueOf(Integer.MAX_VALUE));
199197
producerConfig.put(ProducerConfig.ACKS_CONFIG, "all");
200198
producerConfig.put(ProducerConfig.CLIENT_ID_CONFIG, "order-sender");
201-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(producerConfig);
202199

203200
return new KafkaProducer<>(producerConfig,
204201
topic.keySerde().serializer(),

src/main/java/io/confluent/examples/streams/microservices/util/ProduceCustomers.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.avro.microservices.Customer;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
76
import org.apache.commons.cli.CommandLine;
@@ -62,7 +61,6 @@ public static void main(final String[] args) throws Exception {
6261
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
6362
props.put(ProducerConfig.ACKS_CONFIG, "all");
6463
props.put(ProducerConfig.RETRIES_CONFIG, 1);
65-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(props);
6664

6765
try (final KafkaProducer<Long, Customer> producer = new KafkaProducer<>(props, new LongSerializer(), mySerializer)) {
6866
while (true) {

src/main/java/io/confluent/examples/streams/microservices/util/ProduceOrders.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.avro.microservices.Order;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
76
import org.apache.commons.cli.CommandLine;
@@ -64,7 +63,6 @@ public static void main(final String[] args) throws Exception {
6463
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
6564
props.put(ProducerConfig.ACKS_CONFIG, "all");
6665
props.put(ProducerConfig.RETRIES_CONFIG, 1);
67-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(props);
6866

6967
try (final KafkaProducer<String, Order> producer = new KafkaProducer<>(props, new StringSerializer(), mySerializer)) {
7068
while (true) {

src/main/java/io/confluent/examples/streams/microservices/util/ProducePayments.java

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
package io.confluent.examples.streams.microservices.util;
22

33
import io.confluent.examples.streams.avro.microservices.Payment;
4-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
54
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
65
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer;
76
import org.apache.commons.cli.CommandLine;
@@ -62,7 +61,6 @@ public static void main(final String[] args) throws Exception {
6261
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
6362
props.put(ProducerConfig.ACKS_CONFIG, "all");
6463
props.put(ProducerConfig.RETRIES_CONFIG, 1);
65-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(props);
6664

6765
try (final KafkaProducer<String, Payment> producer = new KafkaProducer<>(props, new StringSerializer(), mySerializer)) {
6866
while (true) {

src/main/java/io/confluent/examples/streams/utils/MonitoringInterceptorUtils.java

Lines changed: 0 additions & 108 deletions
This file was deleted.

src/test/java/io/confluent/examples/streams/AbstractGlobalStoresAndTablesTest.java

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
import io.confluent.examples.streams.avro.Order;
2121
import io.confluent.examples.streams.avro.Product;
2222
import io.confluent.examples.streams.kafka.EmbeddedSingleNodeKafkaCluster;
23-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
2423
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
2524
import io.confluent.kafka.serializers.KafkaAvroDeserializer;
2625
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
@@ -114,8 +113,6 @@ public void shouldDemonstrateGlobalStoreJoins() throws Exception {
114113
consumerProps.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
115114
consumerProps.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
116115

117-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(consumerProps);
118-
119116
// receive the enriched orders
120117
final List<EnrichedOrder>
121118
enrichedOrders =

src/test/java/io/confluent/examples/streams/TopArticlesLambdaExampleTest.java

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
package io.confluent.examples.streams;
1717

1818
import io.confluent.examples.streams.kafka.EmbeddedSingleNodeKafkaCluster;
19-
import io.confluent.examples.streams.utils.MonitoringInterceptorUtils;
2019
import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
2120
import org.apache.avro.generic.GenericRecord;
2221
import org.apache.avro.generic.GenericRecordBuilder;
@@ -90,8 +89,6 @@ public void shouldProduceTopNArticles() throws Exception {
9089
io.confluent.kafka.serializers.KafkaAvroSerializer.class);
9190
props.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
9291

93-
MonitoringInterceptorUtils.maybeConfigureInterceptorsProducer(props);
94-
9592
final KafkaProducer<String, GenericRecord> producer = new KafkaProducer<>(props);
9693

9794
final GenericRecordBuilder pageViewBuilder =
@@ -122,8 +119,6 @@ public void shouldProduceTopNArticles() throws Exception {
122119
final Deserializer<Windowed<String>> windowedDeserializer =
123120
WindowedSerdes.timeWindowedSerdeFrom(String.class, TopArticlesLambdaExample.windowSize.toMillis()).deserializer();
124121

125-
MonitoringInterceptorUtils.maybeConfigureInterceptorsConsumer(consumerProperties);
126-
127122
final KafkaConsumer<Windowed<String>, String> consumer = new KafkaConsumer<>(consumerProperties,
128123
windowedDeserializer,
129124
Serdes.String().deserializer());

0 commit comments

Comments
 (0)