@@ -166,68 +166,71 @@
     end
   end
 
-  describe '#each_batch' do
-    it 'traces each_batch call' do
-      skip "#{Rdkafka::VERSION} is not supported" unless instrumentation.compatible?
-
-      rand_hash = SecureRandom.hex(10)
-      topic_name = "consumer-patch-batch-trace-#{rand_hash}"
-      config = { 'bootstrap.servers': "#{host}:#{port}" }
-
-      producer = Rdkafka::Config.new(config).producer
-      delivery_handles = []
-
-      delivery_handles << producer.produce(
-        topic: topic_name,
-        payload: 'wow',
-        key: 'Key 1'
-      )
-
-      delivery_handles << producer.produce(
-        topic: topic_name,
-        payload: 'super',
-        key: 'Key 2'
-      )
-
-      delivery_handles.each(&:wait)
-
-      consumer_config = config.merge(
-        'group.id': 'me',
-        'auto.offset.reset': 'smallest' # https://stackoverflow.com/a/51081649
-      )
-      consumer = Rdkafka::Config.new(config.merge(consumer_config)).consumer
-      consumer.subscribe(topic_name)
-
-      begin
-        consumer.each_batch(max_items: 2) do |messages|
-          raise 'oops' unless messages.empty?
+  # each_batch method is deleted in rdkafka 0.20.0
+  if Gem::Version.new(Rdkafka::VERSION) < Gem::Version.new('0.20.0')
+    describe '#each_batch' do
+      it 'traces each_batch call' do
+        skip "#{Rdkafka::VERSION} is not supported" unless instrumentation.compatible?
+
+        rand_hash = SecureRandom.hex(10)
+        topic_name = "consumer-patch-batch-trace-#{rand_hash}"
+        config = { 'bootstrap.servers': "#{host}:#{port}" }
+
+        producer = Rdkafka::Config.new(config).producer
+        delivery_handles = []
+
+        delivery_handles << producer.produce(
+          topic: topic_name,
+          payload: 'wow',
+          key: 'Key 1'
+        )
+
+        delivery_handles << producer.produce(
+          topic: topic_name,
+          payload: 'super',
+          key: 'Key 2'
+        )
+
+        delivery_handles.each(&:wait)
+
+        consumer_config = config.merge(
+          'group.id': 'me',
+          'auto.offset.reset': 'smallest' # https://stackoverflow.com/a/51081649
+        )
+        consumer = Rdkafka::Config.new(config.merge(consumer_config)).consumer
+        consumer.subscribe(topic_name)
+
+        begin
+          consumer.each_batch(max_items: 2) do |messages|
+            raise 'oops' unless messages.empty?
+          end
+        rescue StandardError
         end
-      rescue StandardError
-      end
-
-      span = spans.find { |s| s.name == 'batch process' }
-      _(span.kind).must_equal(:consumer)
-      _(span.attributes['messaging.kafka.message_count']).must_equal(2)
 
-      event = span.events.first
-      _(event.name).must_equal('exception')
-      _(event.attributes['exception.type']).must_equal('RuntimeError')
-      _(event.attributes['exception.message']).must_equal('oops')
-
-      first_link = span.links[0]
-      linked_span_context = first_link.span_context
-      _(linked_span_context.trace_id).must_equal(spans[0].trace_id)
-      _(linked_span_context.span_id).must_equal(spans[0].span_id)
-
-      second_link = span.links[1]
-      linked_span_context = second_link.span_context
-      _(linked_span_context.trace_id).must_equal(spans[1].trace_id)
-      _(linked_span_context.span_id).must_equal(spans[1].span_id)
-
-      _(spans.size).must_equal(3)
-    ensure
-      begin; producer&.close; rescue StandardError; end
-      begin; consumer&.close; rescue StandardError; end
+        span = spans.find { |s| s.name == 'batch process' }
+        _(span.kind).must_equal(:consumer)
+        _(span.attributes['messaging.kafka.message_count']).must_equal(2)
+
+        event = span.events.first
+        _(event.name).must_equal('exception')
+        _(event.attributes['exception.type']).must_equal('RuntimeError')
+        _(event.attributes['exception.message']).must_equal('oops')
+
+        first_link = span.links[0]
+        linked_span_context = first_link.span_context
+        _(linked_span_context.trace_id).must_equal(spans[0].trace_id)
+        _(linked_span_context.span_id).must_equal(spans[0].span_id)
+
+        second_link = span.links[1]
+        linked_span_context = second_link.span_context
+        _(linked_span_context.trace_id).must_equal(spans[1].trace_id)
+        _(linked_span_context.span_id).must_equal(spans[1].span_id)
+
+        _(spans.size).must_equal(3)
+      ensure
+        begin; producer&.close; rescue StandardError; end
+        begin; consumer&.close; rescue StandardError; end
+      end
     end
   end
 end
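
Side note, not part of the change itself: a minimal sketch of how the Gem::Version guard added above behaves. The version strings below are hypothetical examples rather than values taken from the rdkafka gem; Gem::Version ships with RubyGems and orders prerelease and multi-segment versions correctly, which a plain string comparison would not.

# Illustration only: evaluate the same guard used in the spec for a few
# hypothetical Rdkafka::VERSION values.
require 'rubygems'

['0.12.0', '0.19.5', '0.20.0.rc1', '0.20.0'].each do |version|
  gated_in = Gem::Version.new(version) < Gem::Version.new('0.20.0')
  puts "#{version}: #each_batch specs #{gated_in ? 'defined' : 'skipped'}"
end

Because the whole describe block is wrapped in the guard rather than relying on skip, the #each_batch examples are never even defined on rdkafka 0.20.0 and later, where the method no longer exists.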