import io.confluent.parallelconsumer.ParallelConsumerOptions;
import io.confluent.parallelconsumer.integrationTests.KafkaTest;
import lombok.AllArgsConstructor;
- import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
- import org.assertj.core.api.Assertions;
- import org.awaitility.Awaitility;
+ import org.awaitility.core.ConditionTimeoutException;
+ import org.junit.jupiter.api.RepeatedTest;
import org.junit.jupiter.api.Test;

import java.time.Duration;
+ import java.util.ArrayList;
+ import java.util.List;
import java.util.Properties;

import static io.confluent.parallelconsumer.ParallelConsumerOptions.ProcessingOrder.KEY;
import static org.assertj.core.api.Assertions.assertThat;
+ import static org.assertj.core.api.Assertions.fail;
import static org.awaitility.Awaitility.waitAtMost;

@Slf4j
public class Bug25AppTest extends KafkaTest<String, String> {

-     int DEAFULT_MAX_POLL_RECORDS_CONFIG = 500;
+     int LOW_MAX_POLL_RECORDS_CONFIG = 1;
+     int DEFAULT_MAX_POLL_RECORDS_CONFIG = 500;
+     int HIGH_MAX_POLL_RECORDS_CONFIG = 10_000;

-     @Test
+     @RepeatedTest(5)
    public void testTransactionalDefaultMaxPoll() {
        boolean tx = true;
-         runTest(tx, DEAFULT_MAX_POLL_RECORDS_CONFIG);
+         runTest(tx, DEFAULT_MAX_POLL_RECORDS_CONFIG);
    }

    @Test
    public void testNonTransactionalDefaultMaxPoll() {
        boolean tx = false;
-         runTest(tx, DEAFULT_MAX_POLL_RECORDS_CONFIG);
+         runTest(tx, DEFAULT_MAX_POLL_RECORDS_CONFIG);
    }

    @Test
-     public void testTransactional() {
+     public void testTransactionalLowMaxPoll() {
        boolean tx = true;
-         runTest(tx, 1); // Sometimes causes test to fail (default 500)
+         runTest(tx, LOW_MAX_POLL_RECORDS_CONFIG);
    }

    @Test
-     public void testNonTransactional() {
+     public void testNonTransactionalLowMaxPoll() {
        boolean tx = false;
-         runTest(tx, 1); // Sometimes causes test to fail (default 500)
+         runTest(tx, LOW_MAX_POLL_RECORDS_CONFIG);
+     }
+
+     @Test
+     public void testTransactionalHighMaxPoll() {
+         boolean tx = true;
+         runTest(tx, HIGH_MAX_POLL_RECORDS_CONFIG);
+     }
+
+     @Test
+     public void testNonTransactionalHighMaxPoll() {
+         boolean tx = false;
+         runTest(tx, HIGH_MAX_POLL_RECORDS_CONFIG);
    }

    @SneakyThrows
    private void runTest(boolean tx, int maxPoll) {
-         AppUnderTest coreApp = new AppUnderTest(tx, ParallelConsumerOptions.builder().ordering(KEY).usingTransactionalProducer(tx).build(), maxPoll);
+         AppUnderTest coreApp = new AppUnderTest(tx, ParallelConsumerOptions.builder()
+                 .ordering(KEY)
+                 .usingTransactionalProducer(tx)
+                 .build(),
+                 maxPoll);

        ensureTopic(coreApp.inputTopic, 1);
        ensureTopic(coreApp.outputTopic, 1);

-         log.info("Producing 1000 messages before starting application");
+         // pre-produce messages to input-topic
+         List<String> expectedKeys = new ArrayList<>();
+         int expectedMessageCount = 1000;
+         log.info("Producing {} messages before starting application", expectedMessageCount);
        try (Producer<String, String> kafkaProducer = kcu.createNewProducer(false)) {
-             for (int i = 0; i < 1000; i++) {
-                 kafkaProducer.send(new ProducerRecord<>(coreApp.inputTopic, "key-" + i, "value-" + i));
+             for (int i = 0; i < expectedMessageCount; i++) {
+                 String key = "key-" + i;
+                 kafkaProducer.send(new ProducerRecord<>(coreApp.inputTopic, key, "value-" + i));
+                 expectedKeys.add(key);
            }
        }

+         // run parallel-consumer
        log.info("Starting application...");
        coreApp.runPollAndProduce();

-         waitAtMost(Duration.ofSeconds(30)).untilAsserted(() -> {
-             log.info("Processed-count: " + coreApp.messagesProcessed.get());
-             log.info("Produced-count: " + coreApp.messagesProduced.get());
-             assertThat(coreApp.messagesProcessed.get()).isEqualTo(1000);
-             assertThat(coreApp.messagesProduced.get()).isEqualTo(1000);
-         });
+         // wait for all pre-produced messages to be processed and produced
+         try {
+             waitAtMost(Duration.ofSeconds(30)).untilAsserted(() -> {
+                 log.debug("Processed-count: " + coreApp.processedCount.get());
+                 log.debug("Produced-count: " + coreApp.producedCount.get());
+                 List<String> processedAndProducedKeys = new ArrayList<>(coreApp.processedAndProducedKeys); // avoid concurrent-modification in assert
+                 assertThat(processedAndProducedKeys).contains(expectedKeys.toArray(new String[0]));
+             });
+         } catch (ConditionTimeoutException e) {
+             String failureMessage = "All keys sent to input-topic should be processed and produced";
+             log.warn(failureMessage);
+             log.debug("Expected keys=" + expectedKeys + "");
+             log.debug("Processed and produced keys=" + coreApp.processedAndProducedKeys + "");
+             log.debug("Missing keys=" + expectedKeys.removeAll(coreApp.processedAndProducedKeys));
+             fail(failureMessage);
+         }
+
+
+         assertThat(coreApp.processedCount.get())
+                 .as("messages processed and produced by parallel-consumer should be equal")
+                 .isEqualTo(coreApp.producedCount.get());
+

        coreApp.close();
    }
@@ -93,7 +135,6 @@ class AppUnderTest extends CoreApp {
    Consumer<String, String> getKafkaConsumer() {
        Properties props = kcu.props;
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, MAX_POLL_RECORDS_CONFIG);
-         props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10000);
        return new KafkaConsumer<>(props);
    }
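Note: the new assertions read processedCount, producedCount, and processedAndProducedKeys off the app under test, but those fields live in CoreApp/AppUnderTest outside the hunks shown here. Below is a minimal, illustrative sketch (not the actual CoreApp code) of what such tracking state might look like, assuming the record handler and produce callback run concurrently on parallel-consumer worker threads, so the counters and the key set must be thread-safe:

import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

// Illustrative tracking state; field names mirror what the test asserts on,
// but the actual wiring into CoreApp/AppUnderTest is not part of this diff.
class ProcessingTracker {
    final AtomicInteger processedCount = new AtomicInteger();
    final AtomicInteger producedCount = new AtomicInteger();
    // concurrent set, since keys are recorded from multiple worker threads
    final Set<String> processedAndProducedKeys = ConcurrentHashMap.newKeySet();

    // record that an input record with this key has been processed
    void onProcessed(String key) {
        processedCount.incrementAndGet();
        processedAndProducedKeys.add(key);
    }

    // record that the corresponding output record reached the output topic
    void onProduced() {
        producedCount.incrementAndGet();
    }
}

With state like this, the test's waitAtMost block can compare the key set against the pre-produced keys, and the final assertion can check that the processed and produced counts match.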