
Commit 9709183

yard remarks (karafka#334)
1 parent 7fc07fa commit 9709183

8 files changed: 50 additions, 87 deletions


lib/rdkafka.rb

Lines changed: 5 additions & 1 deletion
@@ -1,7 +1,11 @@
 # frozen_string_literal: true

-require "rdkafka/version"
+require "logger"
+require "objspace"
+require "ffi"
+require "json"

+require "rdkafka/version"
 require "rdkafka/helpers/time"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"

lib/rdkafka/abstract_handle.rb

Lines changed: 0 additions & 2 deletions
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "ffi"
-
 module Rdkafka
   # This class serves as an abstract base class to represent handles within the Rdkafka module.
   # As a subclass of `FFI::Struct`, this class provides a blueprint for other specific handle

lib/rdkafka/admin.rb

Lines changed: 6 additions & 7 deletions
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "objspace"
-
 module Rdkafka
   class Admin
     # @private
@@ -30,11 +28,12 @@ def closed?

     # Create a topic with the given partition count and replication factor
     #
+    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of
+    #   creating the topic
+    #
     # @raise [ConfigError] When the partition count or replication factor are out of valid range
     # @raise [RdkafkaError] When the topic name is invalid or the topic already exists
     # @raise [RdkafkaError] When the topic configuration is invalid
-    #
-    # @return [CreateTopicHandle] Create topic handle that can be used to wait for the result of creating the topic
     def create_topic(topic_name, partition_count, replication_factor, topic_config={})
       closed_admin_check(__method__)

@@ -107,11 +106,11 @@ def create_topic(topic_name, partition_count, replication_factor, topic_config={
       create_topic_handle
     end

-    # Delete the named topic
+    # Deletes the named topic
     #
+    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of
+    #   deleting the topic
     # @raise [RdkafkaError] When the topic name is invalid or the topic does not exist
-    #
-    # @return [DeleteTopicHandle] Delete topic handle that can be used to wait for the result of deleting the topic
     def delete_topic(topic_name)
       closed_admin_check(__method__)

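The reordered YARD tags above document that `create_topic` and `delete_topic` return handles rather than blocking. A minimal usage sketch; the broker address, topic name, and timeouts below are illustrative placeholders, not part of this commit:

require "rdkafka"

# Assumes a Kafka broker reachable at localhost:9092 (placeholder).
config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
admin  = config.admin

# create_topic returns a CreateTopicHandle; wait blocks until the broker responds.
handle = admin.create_topic("example-topic", 3, 1)
handle.wait(max_wait_timeout: 15)

# delete_topic works the same way, returning a DeleteTopicHandle.
admin.delete_topic("example-topic").wait(max_wait_timeout: 15)

admin.close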

lib/rdkafka/bindings.rb

Lines changed: 0 additions & 4 deletions
@@ -1,9 +1,5 @@
 # frozen_string_literal: true

-require "ffi"
-require "json"
-require "logger"
-
 module Rdkafka
   # @private
   module Bindings

lib/rdkafka/config.rb

Lines changed: 8 additions & 10 deletions
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "logger"
-
 module Rdkafka
   # Configuration for a Kafka consumer or producer. You can create an instance and use
   # the consumer and producer methods to create a client. Documentation of the available
@@ -142,12 +140,12 @@ def consumer_rebalance_listener=(listener)
       @consumer_rebalance_listener = listener
     end

-    # Create a consumer with this configuration.
+    # Creates a consumer with this configuration.
+    #
+    # @return [Consumer] The created consumer
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Consumer] The created consumer
     def consumer
       opaque = Opaque.new
       config = native_config(opaque)
@@ -175,10 +173,10 @@ def consumer

     # Create a producer with this configuration.
     #
+    # @return [Producer] The created producer
+    #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Producer] The created producer
     def producer
       # Create opaque
       opaque = Opaque.new
@@ -196,12 +194,12 @@ def producer
       end
     end

-    # Create an admin instance with this configuration.
+    # Creates an admin instance with this configuration.
+    #
+    # @return [Admin] The created admin instance
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    #
-    # @return [Admin] The created admin instance
     def admin
       opaque = Opaque.new
       config = native_config(opaque)
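The three factory methods touched above (`consumer`, `producer`, `admin`) all build clients from the same `Config`. A rough sketch of typical use; the connection settings are placeholder assumptions, not part of this commit:

require "rdkafka"

config = Rdkafka::Config.new(
  "bootstrap.servers" => "localhost:9092",  # placeholder broker address
  "group.id"          => "example-group"    # only needed for the consumer
)

consumer = config.consumer   # => Rdkafka::Consumer
producer = config.producer   # => Rdkafka::Producer
admin    = config.admin      # => Rdkafka::Admin

# Each call raises ConfigError for invalid options or ClientCreationError when
# the native librdkafka client cannot be created, as the updated docs state.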

lib/rdkafka/consumer.rb

Lines changed: 23 additions & 49 deletions
@@ -48,13 +48,11 @@ def closed?
       @native_kafka.closed?
     end

-    # Subscribe to one or more topics letting Kafka handle partition assignments.
+    # Subscribes to one or more topics letting Kafka handle partition assignments.
     #
     # @param topics [Array<String>] One or more topic names
-    #
-    # @raise [RdkafkaError] When subscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When subscribing fails
     def subscribe(*topics)
       closed_consumer_check(__method__)

@@ -78,9 +76,8 @@ def subscribe(*topics)

     # Unsubscribe from all subscribed topics.
     #
-    # @raise [RdkafkaError] When unsubscribing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When unsubscribing fails
     def unsubscribe
       closed_consumer_check(__method__)

@@ -95,10 +92,8 @@ def unsubscribe
     # Pause producing or consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaTopicPartitionListError] When pausing subscription fails.
     def pause(list)
       closed_consumer_check(__method__)

@@ -122,13 +117,11 @@ def pause(list)
       end
     end

-    # Resume producing consumption for the provided list of partitions
+    # Resumes producing consumption for the provided list of partitions
     #
     # @param list [TopicPartitionList] The topic with partitions to pause
-    #
-    # @raise [RdkafkaError] When resume subscription fails.
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When resume subscription fails.
     def resume(list)
       closed_consumer_check(__method__)

@@ -150,11 +143,10 @@ def resume(list)
       end
     end

-    # Return the current subscription to topics and partitions
-    #
-    # @raise [RdkafkaError] When getting the subscription fails.
+    # Returns the current subscription to topics and partitions
     #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the subscription fails.
     def subscription
       closed_consumer_check(__method__)

@@ -179,7 +171,6 @@ def subscription
     # Atomic assignment of partitions to consume
     #
     # @param list [TopicPartitionList] The topic with partitions to assign
-    #
     # @raise [RdkafkaError] When assigning fails
     def assign(list)
       closed_consumer_check(__method__)
@@ -204,9 +195,8 @@ def assign(list)

     # Returns the current partition assignment.
     #
-    # @raise [RdkafkaError] When getting the assignment fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the assignment fails.
     def assignment
       closed_consumer_check(__method__)

@@ -232,14 +222,14 @@ def assignment
     end

     # Return the current committed offset per partition for this consumer group.
-    # The offset field of each requested partition will either be set to stored offset or to -1001 in case there was no stored offset for that partition.
+    # The offset field of each requested partition will either be set to stored offset or to -1001
+    # in case there was no stored offset for that partition.
     #
-    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil to use the current subscription.
+    # @param list [TopicPartitionList, nil] The topic with partitions to get the offsets for or nil
+    #   to use the current subscription.
     # @param timeout_ms [Integer] The timeout for fetching this information.
-    #
-    # @raise [RdkafkaError] When getting the committed positions fails.
-    #
     # @return [TopicPartitionList]
+    # @raise [RdkafkaError] When getting the committed positions fails.
     def committed(list=nil, timeout_ms=1200)
       closed_consumer_check(__method__)

@@ -269,10 +259,8 @@ def committed(list=nil, timeout_ms=1200)
     # @param topic [String] The topic to query
     # @param partition [Integer] The partition to query
    # @param timeout_ms [Integer] The timeout for querying the broker
-    #
-    # @raise [RdkafkaError] When querying the broker fails.
-    #
     # @return [Integer] The low and high watermark
+    # @raise [RdkafkaError] When querying the broker fails.
     def query_watermark_offsets(topic, partition, timeout_ms=200)
       closed_consumer_check(__method__)

@@ -306,10 +294,9 @@ def query_watermark_offsets(topic, partition, timeout_ms=200)
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
     # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    #
+    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    #
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag per partition
     def lag(topic_partition_list, watermark_timeout_ms=100)
       out = {}

@@ -358,10 +345,8 @@ def member_id
     # When using this `enable.auto.offset.store` should be set to `false` in the config.
     #
     # @param message [Rdkafka::Consumer::Message] The message which offset will be stored
-    #
-    # @raise [RdkafkaError] When storing the offset fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When storing the offset fails
     def store_offset(message)
       closed_consumer_check(__method__)

@@ -392,10 +377,8 @@ def store_offset(message)
     # message at the given offset.
     #
     # @param message [Rdkafka::Consumer::Message] The message to which to seek
-    #
-    # @raise [RdkafkaError] When seeking fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When seeking fails
     def seek(message)
       closed_consumer_check(__method__)

@@ -434,10 +417,8 @@ def seek(message)
     #
     # @param list [TopicPartitionList,nil] The topic with partitions to commit
     # @param async [Boolean] Whether to commit async or wait for the commit to finish
-    #
-    # @raise [RdkafkaError] When committing fails
-    #
     # @return [nil]
+    # @raise [RdkafkaError] When committing fails
     def commit(list=nil, async=false)
       closed_consumer_check(__method__)

@@ -462,10 +443,8 @@ def commit(list=nil, async=false)
     # Poll for the next message on one of the subscribed topics
     #
     # @param timeout_ms [Integer] Timeout of this poll
-    #
-    # @raise [RdkafkaError] When polling fails
-    #
     # @return [Message, nil] A message or nil if there was no new message within the timeout
+    # @raise [RdkafkaError] When polling fails
     def poll(timeout_ms)
       closed_consumer_check(__method__)

@@ -494,14 +473,11 @@ def poll(timeout_ms)
     # Poll for new messages and yield for each received one. Iteration
     # will end when the consumer is closed.
     #
-    # If `enable.partition.eof` is turned on in the config this will raise an
-    # error when an eof is reached, so you probably want to disable that when
-    # using this method of iteration.
+    # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
+    # reached, so you probably want to disable that when using this method of iteration.
     #
     # @raise [RdkafkaError] When polling fails
-    #
     # @yieldparam message [Message] Received message
-    #
     # @return [nil]
     def each
       loop do
@@ -554,9 +530,7 @@ def each
     # that you may or may not see again.
     #
     # @param max_items [Integer] Maximum size of the yielded array of messages
-    #
     # @param bytes_threshold [Integer] Threshold number of total message bytes in the yielded array of messages
-    #
     # @param timeout_ms [Integer] max time to wait for up to max_items
     #
     # @raise [RdkafkaError] When polling fails
lib/rdkafka/consumer/headers.rb

Lines changed: 2 additions & 4 deletions
@@ -18,13 +18,11 @@ def [](key)

      # Reads a librdkafka native message's headers and returns them as a Ruby Hash
      #
-      # @param [librdkakfa message] native_message
+      # @private
      #
+      # @param [librdkakfa message] native_message
      # @return [Hash<String, String>] headers Hash for the native_message
-      #
      # @raise [Rdkafka::RdkafkaError] when fail to read headers
-      #
-      # @private
      def self.from_native(native_message)
        headers_ptrptr = FFI::MemoryPointer.new(:pointer)
        err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
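`Headers.from_native` is now tagged `@private`; application code normally reads headers through the message object instead. A small illustration, reusing the consumer from the previous sketch; the header name is hypothetical:

# Assumes `consumer` is subscribed as shown earlier.
consumer.each do |message|
  headers = message.headers            # Ruby Hash, e.g. { "request-id" => "abc123" }
  puts headers["request-id"] if headers.key?("request-id")
end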

lib/rdkafka/producer.rb

Lines changed: 6 additions & 10 deletions
@@ -1,7 +1,5 @@
 # frozen_string_literal: true

-require "objspace"
-
 module Rdkafka
   # A producer for Kafka messages. To create a producer set up a {Config} and call {Config#producer producer} on that.
   class Producer
@@ -108,18 +106,16 @@ def flush(timeout_ms=5_000)
     end

     # Partition count for a given topic.
-    # NOTE: If 'allow.auto.create.topics' is set to true in the broker, the topic will be auto-created after returning nil.
     #
     # @param topic [String] The topic name.
+    # @return [Integer] partition count for a given topic
     #
-    # @return partition count [Integer,nil]
-    #
-    # We cache the partition count for a given topic for given time
-    # This prevents us in case someone uses `partition_key` from querying for the count with
-    # each message. Instead we query once every 30 seconds at most
+    # @note If 'allow.auto.create.topics' is set to true in the broker, the topic will be
+    #   auto-created after returning nil.
     #
-    # @param [String] topic name
-    # @return [Integer] partition count for a given topic
+    # @note We cache the partition count for a given topic for given time.
+    #   This prevents us in case someone uses `partition_key` from querying for the count with
+    #   each message. Instead we query once every 30 seconds at most
     def partition_count(topic)
       closed_producer_check(__method__)

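The rewritten `partition_count` docs call out both the auto-create caveat and the 30-second cache. A sketch of how it is typically used alongside `produce`; the broker address, topic name, and payload are placeholders, not part of this commit:

require "rdkafka"

producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# Cached lookup: repeated calls within ~30 seconds do not query the broker again.
puts producer.partition_count("example-topic")

# produce returns a DeliveryHandle; wait blocks until the broker acknowledges the message.
handle = producer.produce(
  topic:   "example-topic",
  payload: "hello",
  key:     "key-1"
)
handle.wait(max_wait_timeout: 10)

producer.close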
