@@ -125,22 +125,37 @@ def append(self, data):
125
125
pass
126
126
127
127
128
- # Method for sending events to the queue.
129
- def send_to (handle , server_address , route ):
130
- def append ():
131
- while True :
128
class QueueReadThread(threading.Thread):
    """Background worker that polls an HTTP queue endpoint for events.

    Repeatedly issues GET requests against ``server_address + route`` and
    appends each successful response body to the Ray actor ``handle`` until
    ``stop_flag`` is set.
    """

    def __init__(self, handle, server_address, route):
        super().__init__()
        self.handle = handle
        self.server_address = server_address
        self.route = route
        # Cooperative shutdown signal: checked at the top of every loop pass.
        self.stop_flag = threading.Event()
        # Set once the polling loop has exited, so callers can await it.
        self.run_flag = threading.Event()

    def run(self):
        while not self.stop_flag.is_set():
            try:
                reply = requests.get(f'{self.server_address}{self.route}',
                                     timeout=(5, None))
                # Anything but 200 means no event is available yet; back off.
                if reply.status_code != 200:
                    time.sleep(1)
                    continue
                reply.encoding = 'latin-1'
                # Forward the event payload into the rayvens stream actor.
                self.handle.append.remote(reply.text)
            except requests.exceptions.ConnectionError:
                # Endpoint not reachable yet; retry after a short pause.
                time.sleep(1)

        self.run_flag.set()
151
+
152
+
153
# Method for sending events to the queue.
def send_to(handle, server_address, route):
    """Start a daemon reader that drains the HTTP queue into ``handle``.

    Returns the started ``QueueReadThread`` so the caller can signal it to
    stop (via its ``stop_flag``) or wait on it later.
    """
    reader = QueueReadThread(handle, server_address, route)
    # Daemonize so this background reader never blocks interpreter shutdown.
    reader.daemon = True
    reader.start()
    return reader
144
159
145
160
146
161
# Method for sending events to the sink.
@@ -222,30 +237,63 @@ def close_consumer(self):
222
237
self .kafka_consumer .close ()
223
238
224
239
240
class ConsumerEnablingThread(threading.Thread):
    """One-shot worker that enables a pool of Kafka consumer actors.

    Fetches the stream's subscribers and operator from ``handle``, blocks
    until every actor in ``consumers`` finishes consuming, then closes them.
    """

    def __init__(self, handle, consumers):
        super().__init__()
        self.handle = handle
        self.consumers = consumers
        # Present for symmetry with the other transport threads; run() blocks
        # inside ray.get and does not itself poll stop_flag.
        self.stop_flag = threading.Event()
        # Set once all consumers have been closed.
        self.run_flag = threading.Event()

    def run(self):
        # Snapshot the processors once so every consumer sees the same set.
        subscribers, operator = ray.get(self.handle._fetch_processors.remote())
        try:
            ray.get([
                consumer.enable_consumer.remote(subscribers, operator)
                for consumer in self.consumers
            ])
        except KeyboardInterrupt:
            # NOTE(review): KeyboardInterrupt is normally delivered only to
            # the main thread — confirm this handler is ever reached here.
            for consumer in self.consumers:
                consumer.disable_consumer.remote()
        finally:
            for consumer in self.consumers:
                consumer.close_consumer.remote()

        self.run_flag.set()
264
+
265
+
266
class ConsumerThread(threading.Thread):
    """Forwards messages from one Kafka consumer into a rayvens stream actor.

    Polls ``consumer`` in a loop and appends each message value (decoded as
    UTF-8) to ``handle`` until ``stop_flag`` is set.
    """

    def __init__(self, handle, consumer):
        super().__init__()
        self.handle = handle
        self.consumer = consumer
        # Checked between polls; a blocking poll() delays shutdown until the
        # next message (or consumer event) arrives.
        self.stop_flag = threading.Event()
        # Signals that the forwarding loop has terminated.
        self.run_flag = threading.Event()

    def run(self):
        while not self.stop_flag.is_set():
            msg = self.consumer.poll()
            if msg.error():
                # Report consumer-level errors but keep the loop alive.
                print(f'consumer error: {msg.error()}')
                continue
            self.handle.append.remote(msg.value().decode('utf-8'))

        self.run_flag.set()
283
+
284
+
225
285
def kafka_send_to (kafka_transport_topic , kafka_transport_partitions , handle ):
286
+ # If source uses a scaling transport start multiple Kafka Consumers.
226
287
if kafka_transport_partitions > 1 :
227
288
consumers = [
228
289
KafkaConsumer .remote (kafka_transport_topic , handle )
229
290
for _ in range (kafka_transport_partitions )
230
291
]
231
292
232
- def append ():
233
- subscribers , operator = ray .get (handle ._fetch_processors .remote ())
234
- try :
235
- consumer_references = [
236
- consumer .enable_consumer .remote (subscribers , operator )
237
- for consumer in consumers
238
- ]
239
- ray .get (consumer_references )
240
- except KeyboardInterrupt :
241
- for consumer in consumers :
242
- consumer .disable_consumer .remote ()
243
- finally :
244
- for consumer in consumers :
245
- consumer .close_consumer .remote ()
246
-
247
- threading .Thread (target = append ).start ()
248
- return
293
+ consumer_enabling_thread = ConsumerEnablingThread (handle , consumers )
294
+ consumer_enabling_thread .daemon = True
295
+ consumer_enabling_thread .start ()
296
+ return consumer_enabling_thread
249
297
250
298
# Use kafka consumer thread to push from camel source to rayvens stream
251
299
consumer = Consumer ({
@@ -256,15 +304,10 @@ def append():
256
304
257
305
consumer .subscribe ([kafka_transport_topic ])
258
306
259
- def append ():
260
- while True :
261
- msg = consumer .poll ()
262
- if msg .error ():
263
- print (f'consumer error: { msg .error ()} ' )
264
- else :
265
- handle .append .remote (msg .value ().decode ('utf-8' ))
266
-
267
- threading .Thread (target = append ).start ()
307
+ consumer_thread = ConsumerThread (handle , consumer )
308
+ consumer_thread .daemon = True
309
+ consumer_thread .start ()
310
+ return consumer_thread
268
311
269
312
270
313
def kafka_recv_from (integration_name , kafka_transport_topic , handle ):
0 commit comments