#!/usr/bin/env python
# -*- coding: utf-8 -*-

import pytest
from confluent_kafka import Producer, Consumer, KafkaError, KafkaException, \
    TopicPartition, libversion
from struct import pack


def error_cb(err):
    print('error_cb', err)


def test_basic_api():
    """ Basic API tests; these won't really do anything since there is no
        broker configured. """

    with pytest.raises(TypeError) as ex:
        p = Producer()
    assert ex.match('expected configuration dict')

    p = Producer({'socket.timeout.ms': 10,
                  'error_cb': error_cb,
                  'message.timeout.ms': 10})

    p.produce('mytopic')
    p.produce('mytopic', value='somedata', key='a key')

    def on_delivery(err, msg):
        print('delivery', err, msg)
        # Since there is no broker, produced messages should time out.
        assert err.code() == KafkaError._MSG_TIMED_OUT
        print('message latency', msg.latency())

    p.produce(topic='another_topic', value='testing', partition=9,
              callback=on_delivery)
    p.poll(0.001)
    p.flush(0.002)
    p.flush()

    try:
        p.list_topics(timeout=0.2)
    except KafkaException as e:
        assert e.args[0].code() in (KafkaError._TIMED_OUT, KafkaError._TRANSPORT)
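

# A minimal sketch, not part of the original suite: it assumes that, with no
# broker reachable, len(p) counts the queued message until flush() lets it
# fail with a timeout, after which the queue is empty.
def test_producer_len_sketch():
    p = Producer({'socket.timeout.ms': 10,
                  'message.timeout.ms': 10})
    p.produce('mytopic', value='somedata')
    # At least the one message we just produced is waiting in the queue.
    assert len(p) >= 1
    # flush() blocks until delivery reports fire; with message.timeout.ms=10
    # the message fails fast and the queue drains.
    p.flush()
    assert len(p) == 0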


def test_produce_timestamp():
    """ Test produce() with timestamp arg """
    p = Producer({'socket.timeout.ms': 10,
                  'error_cb': error_cb,
                  'message.timeout.ms': 10})

    # Requires librdkafka >=v0.9.4
    try:
        p.produce('mytopic', timestamp=1234567)
    except NotImplementedError:
        # Should only fail on non-supporting librdkafka
        if libversion()[1] >= 0x00090400:
            raise

    p.flush()
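

# Note: libversion() returns a (version string, version int) tuple; the int
# uses librdkafka's 0xMMmmrrpp hex layout (major, minor, revision,
# pre-release id), which is why the version gates in this file compare
# against values such as 0x00090400 (v0.9.4) and 0x000b0400 (v0.11.4).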


@pytest.mark.skipif(libversion()[1] < 0x000b0400,
                    reason="requires librdkafka >=0.11.4")
def test_produce_headers():
    """ Test produce() with headers arg """
    p = Producer({'socket.timeout.ms': 10,
                  'error_cb': error_cb,
                  'message.timeout.ms': 10})

    binval = pack('hhl', 1, 2, 3)

    headers_to_test = [
        [('headerkey', 'headervalue')],
        [('dupkey', 'dupvalue'), ('empty', ''), ('dupkey', 'dupvalue')],
        [('dupkey', 'dupvalue'), ('dupkey', 'diffvalue')],
        [('key_with_null_value', None)],
        [('binaryval', binval)],
        [('alreadyutf8', u'Småland'.encode('utf-8'))],
        [('isunicode', 'Jämtland')],

        {'headerkey': 'headervalue'},
        {'dupkey': 'dupvalue', 'empty': '', 'dupkey': 'dupvalue'},  # noqa: F601
        {'dupkey': 'dupvalue', 'dupkey': 'diffvalue'},  # noqa: F601
        {'key_with_null_value': None},
        {'binaryval': binval},
        {'alreadyutf8': u'Småland'.encode('utf-8')},
        {'isunicode': 'Jämtland'}
    ]

    for headers in headers_to_test:
        print('headers', type(headers), headers)
        p.produce('mytopic', value='somedata', key='a key', headers=headers)
        p.produce('mytopic', value='somedata', headers=headers)

    with pytest.raises(TypeError):
        p.produce('mytopic', value='somedata', key='a key', headers=('a', 'b'))

    with pytest.raises(TypeError):
        p.produce('mytopic', value='somedata', key='a key', headers=[('malformed_header')])

    with pytest.raises(TypeError):
        p.produce('mytopic', value='somedata', headers={'anint': 1234})

    p.flush()
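

# For reference: on the consuming side, Message.headers() returns the headers
# as a list of (key, value) two-tuples with values as bytes (or None),
# regardless of whether they were produced from a list or a dict.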


@pytest.mark.skipif(libversion()[1] >= 0x000b0400,
                    reason="Old versions should fail when using headers")
def test_produce_headers_should_fail():
    """ Test produce() with headers arg on older, non-supporting librdkafka """
    p = Producer({'socket.timeout.ms': 10,
                  'error_cb': error_cb,
                  'message.timeout.ms': 10})

    with pytest.raises(NotImplementedError) as ex:
        p.produce('mytopic', value='somedata', key='a key', headers=[('headerkey', 'headervalue')])
    assert ex.match('Producer message headers requires confluent-kafka-python built for librdkafka version >=v0.11.4')


def test_subclassing():
    class SubProducer(Producer):
        def __init__(self, conf, topic):
            super(SubProducer, self).__init__(conf)
            self.topic = topic

        def produce_hi(self):
            super(SubProducer, self).produce(self.topic, value='hi')

    sp = SubProducer(dict(), 'atopic')
    assert type(sp) == SubProducer

    # Invalid config should fail
    with pytest.raises(KafkaException):
        sp = SubProducer({'should.fail': False}, 'mytopic')

    sp = SubProducer({'log.thread.name': True}, 'mytopic')
    sp.produce('someother', value='not hello')
    sp.produce_hi()


def test_dr_msg_errstr():
    """
    Test that the error string for failed messages works (issue #129).
    The underlying problem is that librdkafka reuses the message payload
    for the error value on Consumer messages, while on Producer messages the
    payload is the original payload and no rich error string exists.
    """
    p = Producer({"message.timeout.ms": 10})

    def handle_dr(err, msg):
        # The message payload must not affect the error string.
        assert err is not None
        assert err.code() == KafkaError._MSG_TIMED_OUT
        assert "Message timed out" in err.str()

    # Unicode safe string
    p.produce('mytopic', "This is the message payload", on_delivery=handle_dr)

    # Invalid unicode sequence
    p.produce('mytopic', "\xc2\xc2", on_delivery=handle_dr)

    p.flush()


def test_set_partitioner_murmur2():
    """
    Test ability to set built-in partitioner type murmur2
    """
    Producer({'partitioner': 'murmur2'})


def test_set_partitioner_murmur2_random():
    """
    Test ability to set built-in partitioner type murmur2_random
    """
    Producer({'partitioner': 'murmur2_random'})


def test_set_invalid_partitioner_murmur():
    """
    Assert that an invalid partitioner name raises KafkaException
    """
    with pytest.raises(KafkaException) as ex:
        Producer({'partitioner': 'murmur'})
    assert ex.match('Invalid value for configuration property "partitioner": murmur')
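

# For context (availability varies by librdkafka version): the built-in
# partitioner names accepted here include random, consistent,
# consistent_random, murmur2, murmur2_random, fnv1a and fnv1a_random;
# anything else is rejected as above.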


def test_transaction_api():
    """ Exercise the transactional API """
    p = Producer({"transactional.id": "test"})

    with pytest.raises(KafkaException) as ex:
        p.init_transactions(0.5)
    assert ex.value.args[0].code() == KafkaError._TIMED_OUT
    assert ex.value.args[0].retriable() is True
    assert ex.value.args[0].fatal() is False
    assert ex.value.args[0].txn_requires_abort() is False

    # Any subsequent API call will fail since init did not succeed.
    with pytest.raises(KafkaException) as ex:
        p.begin_transaction()
    assert ex.value.args[0].code() == KafkaError._CONFLICT
    assert ex.value.args[0].retriable() is True
    assert ex.value.args[0].fatal() is False
    assert ex.value.args[0].txn_requires_abort() is False

    consumer = Consumer({"group.id": "testgroup"})
    group_metadata = consumer.consumer_group_metadata()
    consumer.close()

    with pytest.raises(KafkaException) as ex:
        p.send_offsets_to_transaction([TopicPartition("topic", 0, 123)],
                                      group_metadata)
    assert ex.value.args[0].code() == KafkaError._CONFLICT
    assert ex.value.args[0].retriable() is True
    assert ex.value.args[0].fatal() is False
    assert ex.value.args[0].txn_requires_abort() is False

    with pytest.raises(KafkaException) as ex:
        p.commit_transaction(0.5)
    assert ex.value.args[0].code() == KafkaError._CONFLICT
    assert ex.value.args[0].retriable() is True
    assert ex.value.args[0].fatal() is False
    assert ex.value.args[0].txn_requires_abort() is False

    with pytest.raises(KafkaException) as ex:
        p.abort_transaction(0.5)
    assert ex.value.args[0].code() == KafkaError._CONFLICT
    assert ex.value.args[0].retriable() is True
    assert ex.value.args[0].fatal() is False
    assert ex.value.args[0].txn_requires_abort() is False
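

# For reference, the intended happy-path transactional flow against a real
# broker (not exercised here, since none is configured) is:
#
#   p.init_transactions()
#   p.begin_transaction()
#   p.produce(...)
#   p.send_offsets_to_transaction(offsets, group_metadata)
#   p.commit_transaction()   # or p.abort_transaction() on failure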


def test_purge():
    """
    Verify that when we have a longer message.timeout.ms, we can use purge()
    to stop waiting for messages and get delivery reports
    """
    p = Producer(
        {"socket.timeout.ms": 10, "error_cb": error_cb, "message.timeout.ms": 30000}
    )  # 30 seconds

    # Hack to detect on_delivery was called because inner functions can modify nonlocal objects.
    # When python2 support is dropped, we can use the "nonlocal" keyword instead
    cb_detector = {"on_delivery_called": False}

    def on_delivery(err, msg):
        cb_detector["on_delivery_called"] = True

        # Because we are purging messages, we should see a PURGE_QUEUE kafka error
        assert err.code() == KafkaError._PURGE_QUEUE

    # Our message won't be delivered, but also won't time out yet because our timeout is 30s.
    p.produce(topic="some_topic", value="testing", partition=9, callback=on_delivery)
    p.flush(0.002)
    assert not cb_detector["on_delivery_called"]

    # With in_queue set to False, the queued message is not purged and no
    # delivery callback fires yet
    p.purge(in_queue=False)
    p.flush(0.002)
    assert not cb_detector["on_delivery_called"]

    # When we purge including the queue, the message should produce a delivery
    # report with a PURGE_QUEUE error
    p.purge()
    p.flush(0.002)
    assert cb_detector["on_delivery_called"]
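

# Once python2 support is dropped, the cb_detector dict in test_purge can be
# replaced with a plain boolean and the "nonlocal" keyword, e.g.:
#
#   on_delivery_called = False
#
#   def on_delivery(err, msg):
#       nonlocal on_delivery_called
#       on_delivery_called = True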