|
import org.junit.Test;

import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
|
10 | 11 |
|
11 | 12 | /**
|
|
14 | 15 | * 生产者
|
15 | 16 | */
|
16 | 17 | public class MsgProducerTest {
|
17 |
| - KafkaProducer<String, String> producer; |
18 |
| - |
19 |
| - @Before |
20 |
| - public void init() { |
21 |
| - Properties properties = new Properties(); |
22 |
| - properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.211.55.4:9092"); |
23 |
| - //0 不需要回复,1等待leader写入log 不需要follower -1 等待集群全部写入log |
24 |
| - properties.put(ProducerConfig.ACKS_CONFIG, "1"); |
25 |
| - //失败重试时间 |
26 |
| - properties.put(ProducerConfig.RETRIES_CONFIG, "3"); |
27 |
| - //本地缓存区时间 |
28 |
| - properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432"); |
29 |
| - //批量发送大小,批量满16k就发送 |
30 |
| - properties.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); |
31 |
| - //100毫秒批量未填充完成也发送 |
32 |
| - properties.put(ProducerConfig.LINGER_MS_CONFIG, "100"); |
33 |
| - //发送key从字符串转字节数组 |
34 |
| - properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); |
35 |
| - properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); |
36 |
| - producer = new KafkaProducer<>(properties); |
37 |
| - } |
38 |
| - |
39 |
| - @Test |
40 |
| - public void sendMsg() { |
41 |
| - for (int i = 0; i < 10; i++) { |
42 |
| - producer.send(new ProducerRecord<String, String>("test-topic", "test-key" + i, "key-value" + i)); |
43 |
| - } |
44 |
| - producer.close(); |
45 |
| - } |
| 18 | + KafkaProducer<String, String> producer; |
46 | 19 |
|
| 20 | + @Before |
| 21 | + public void init() { |
| 22 | + Properties properties = new Properties(); |
| 23 | + properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.211.55.22:9092,10.211.55.22:9093,10.211.55.22:9094"); |
| 24 | + /** |
| 25 | + * 0 不需要回复 |
| 26 | + * 1等待leader写入log不需要follower |
| 27 | + * -1 等待集群全部写入log |
| 28 | + */ |
| 29 | + properties.put(ProducerConfig.ACKS_CONFIG, "1"); |
| 30 | + // 失败重试时间 |
| 31 | + properties.put(ProducerConfig.RETRIES_CONFIG, "3"); |
| 32 | + // 本地缓存区时间 |
| 33 | + properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432"); |
| 34 | + // 批量发送大小,批量满16k就发送 |
| 35 | + properties.put(ProducerConfig.BATCH_SIZE_CONFIG, "16384"); |
| 36 | + //100毫秒批量未填充完成也发送 |
| 37 | + properties.put(ProducerConfig.LINGER_MS_CONFIG, "10"); |
| 38 | + //发送key从字符串转字节数组 |
| 39 | + properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); |
| 40 | + properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); |
| 41 | + producer = new KafkaProducer<>(properties); |
| 42 | + } |
47 | 43 |
|
48 |
| - @Test |
49 |
| - public void sendMsg1() throws ExecutionException, InterruptedException { |
50 |
| - for (int i = 0; i < 100; i++) { |
51 |
| - ProducerRecord<String, String> stringStringProducerRecord = new ProducerRecord<>("order-topic", 0, "test" + i, "test" + i); |
52 |
| - RecordMetadata recordMetadata = producer.send(stringStringProducerRecord).get(); |
53 |
| - System.out.println("发送成功:" + "topic:" + recordMetadata.topic() + "partition:" + recordMetadata.partition() + "offset:" + recordMetadata.offset()); |
54 |
| - } |
| 44 | + @Test |
| 45 | + public void sendMsg() { |
| 46 | + for (int i = 0; i < 10; i++) { |
| 47 | + producer.send(new ProducerRecord<String, String>("test-topic", "test-key" + i, "key-value" + i)); |
| 48 | + } |
| 49 | + producer.close(); |
| 50 | + } |
55 | 51 |
|
56 |
| - } |
57 | 52 |
|
| 53 | + @Test |
| 54 | + public void sendMsg1() throws ExecutionException, InterruptedException { |
| 55 | + for (int i = 0; i < 100; i++) { |
| 56 | + ProducerRecord<String, String> stringStringProducerRecord = new ProducerRecord<>("order-topic", 0, "test" + i, "test" + i); |
| 57 | + RecordMetadata recordMetadata = producer.send(stringStringProducerRecord).get(); |
| 58 | + System.out.println("发送成功:" + "topic:" + recordMetadata.topic() + "partition:" + recordMetadata.partition() + "offset:" + recordMetadata.offset()); |
| 59 | + } |
58 | 60 |
|
| 61 | + } |
59 | 62 |
|
60 | 63 |
|
61 |
| - @Test |
62 |
| - public void sendMsg2() throws InterruptedException { |
| 64 | + @Test |
| 65 | + public void sendMsg2() throws InterruptedException { |
| 66 | + final CountDownLatch countDownLatch = new CountDownLatch(1); |
63 | 67 | ProducerRecord<String, String> stringStringProducerRecord = new ProducerRecord<>("yibu-topic", 0, "test", "test");
|
64 |
| - producer.send(stringStringProducerRecord, new Callback() { |
65 |
| - @Override |
66 |
| - public void onCompletion(RecordMetadata recordMetadata, Exception e) { |
67 |
| - if (e!=null){ |
| 68 | + producer.send(stringStringProducerRecord, (recordMetadata, e) -> { |
| 69 | + if (e != null) { |
68 | 70 | System.err.println("发送信息失败");
|
69 | 71 | }
|
70 |
| - if (recordMetadata!=null){ |
71 |
| - System.out.println("topic:"+recordMetadata.topic()+"partition"+recordMetadata.partition()+"offset:"+recordMetadata.offset()); |
72 |
| - } |
| 72 | + if (recordMetadata != null) { |
| 73 | + System.out.println("topic:" + recordMetadata.topic() + "partition" + recordMetadata.partition() + "offset:" + recordMetadata.offset()); |
73 | 74 | }
|
| 75 | + countDownLatch.countDown(); |
74 | 76 | });
|
75 |
| - Thread.sleep(Integer.MAX_VALUE); |
| 77 | + countDownLatch.await(); |
| 78 | + producer.close(); |
76 | 79 | }
|
77 | 80 | }
|
0 commit comments