| | Usage |
| | ***** |
| |
|
| |
|
| | KafkaConsumer |
| | ============= |
| |
|
| | .. code:: python |
| |
|
| | from kafka import KafkaConsumer |
| |
|
| | # consume messages from 'my-topic' as part of consumer group 'my-group' |
| | consumer = KafkaConsumer('my-topic', |
| | group_id='my-group', |
| | bootstrap_servers=['localhost:9092']) |
| | for message in consumer: |
| | # message value and key are raw bytes -- decode if necessary |
| | # e.g., for unicode: message.value.decode('utf-8') |
| | print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, |
| | message.offset, message.key, |
| | message.value)) |
| |
|
| | # consume earliest available messages, don't auto-commit offsets |
| | KafkaConsumer(auto_offset_reset='earliest', enable_auto_commit=False) |
| |
|
| | # deserialize message values as json |
| | KafkaConsumer(value_deserializer=lambda m: json.loads(m.decode('ascii'))) |
| |
|
| | # deserialize message values with msgpack |
| | KafkaConsumer(value_deserializer=msgpack.unpackb) |
| |
|
| | # stop iterating if no message is received within 1 second |
| | KafkaConsumer(consumer_timeout_ms=1000) |
| |
|
| | # subscribe to topics matching a regex pattern |
| | consumer = KafkaConsumer() |
| | consumer.subscribe(pattern='^awesome.*') |
| |
|
| | # multiple consumers in the same group can share consumption of |
| | # 'my-topic' (typically each runs in a separate process or server) |
| | consumer1 = KafkaConsumer('my-topic', |
| | group_id='my-group', |
| | bootstrap_servers='my.server.com') |
| | consumer2 = KafkaConsumer('my-topic', |
| | group_id='my-group', |
| | bootstrap_servers='my.server.com') |
| |
|
| |
|
| | There are many configuration options for the consumer class. See |
| | :class:`~kafka.KafkaConsumer` API documentation for more details. |
| |
|
| |
|
| | KafkaProducer |
| | ============= |
| |
|
| | .. code:: python |
| |
|
| | from kafka import KafkaProducer |
| | from kafka.errors import KafkaError |
| |
|
| | producer = KafkaProducer(bootstrap_servers=['broker1:1234']) |
| |
|
| | # sending is asynchronous: send() returns a future |
| | future = producer.send('my-topic', b'raw_bytes') |
| |
|
| | # block for 'synchronous' sends by waiting on the future |
| | try: |
| | record_metadata = future.get(timeout=10) |
| | except KafkaError: |
| | # decide what to do if the produce request failed |
| | log.exception() |
| | pass |
| |
|
| | # a successful result returns the assigned partition and offset |
| | print (record_metadata.topic) |
| | print (record_metadata.partition) |
| | print (record_metadata.offset) |
| |
|
| | # produce a keyed message |
| | producer.send('my-topic', key=b'foo', value=b'bar') |
| |
|
| | # serialize values with msgpack before sending |
| | producer = KafkaProducer(value_serializer=msgpack.dumps) |
| | producer.send('msgpack-topic', {'key': 'value'}) |
| |
|
| | # serialize values as json before sending |
| | producer = KafkaProducer(value_serializer=lambda m: json.dumps(m).encode('ascii')) |
| | producer.send('json-topic', {'key': 'value'}) |
| |
|
| | # produce asynchronously without waiting on each future |
| | for _ in range(100): |
| | producer.send('my-topic', b'msg') |
| |
|
| | def on_send_success(record_metadata): |
| | print(record_metadata.topic) |
| | print(record_metadata.partition) |
| | print(record_metadata.offset) |
| |
|
| | def on_send_error(excp): |
| | log.error('I am an errback', exc_info=excp) |
| | |
| |
|
| | # attach success/error callbacks to the returned future |
| | producer.send('my-topic', b'raw_bytes').add_callback(on_send_success).add_errback(on_send_error) |
| |
|
| | # block until all pending async messages are sent |
| | producer.flush() |
| |
|
| | # configure the producer to retry failed produce requests up to 5 times |
| | producer = KafkaProducer(retries=5) |
| |
|