Skip to content

Commit 29383a8

Browse files
authored
Merge pull request #247 from PeterJCLaw/more-types
More typing improvements
2 parents (e48ecbc and 27929a4) — merge commit 29383a8

File tree

11 files changed

+52
-48
lines changed

11 files changed

+52
-48
lines changed

.github/workflows/python-app.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ jobs:
4747
4848
- name: Typecheck with mypy
4949
run: |
50-
poetry run mypy mockafka tests
50+
poetry run mypy
5151
5252
- name: Test with pytest
5353
run: |

mockafka/admin_client.py

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
from __future__ import annotations
22

3+
from typing import Any
4+
35
from confluent_kafka.cimpl import ( # type: ignore[import-untyped]
46
NewPartitions,
57
NewTopic,
@@ -35,7 +37,7 @@ class FakeAdminClientImpl:
3537
store (not implemented).
3638
"""
3739

38-
def __init__(self, clean: bool = False, *args, **kwargs):
40+
def __init__(self, clean: bool = False, *args: Any, **kwargs) -> None:
3941
"""
4042
Initialize the FakeAdminClientImpl.
4143
@@ -49,7 +51,7 @@ def create_partitions(self, partitions: list[NewPartitions]) -> dict[str, NewPar
4951
Create partitions in the in-memory Kafka store.
5052
5153
Parameters:
52-
- partitions (List[NewPartitions]): List of partition objects to be created.
54+
- partitions (List[NewPartitions]): List of partition objects to be created.
5355
5456
Returns:
5557
- dict[str, NewPartitions]: Dictionary of created partitions.
@@ -60,7 +62,7 @@ def create_partitions(self, partitions: list[NewPartitions]) -> dict[str, NewPar
6062
result[partition.topic] = partition
6163
return result
6264

63-
def create_partition(self, partition: NewPartitions):
65+
def create_partition(self, partition: NewPartitions) -> None:
6466
"""
6567
Create a single partition in the in-memory Kafka store.
6668
@@ -71,7 +73,7 @@ def create_partition(self, partition: NewPartitions):
7173
topic=partition.topic, partitions=partition.new_total_count
7274
)
7375

74-
def create_topics(self, topics: list[NewTopic]):
76+
def create_topics(self, topics: list[NewTopic]) -> None:
7577
"""
7678
Create topics in the in-memory Kafka store.
7779
@@ -81,7 +83,7 @@ def create_topics(self, topics: list[NewTopic]):
8183
for topic in topics:
8284
self.create_topic(topic=topic)
8385

84-
def create_topic(self, topic: NewTopic):
86+
def create_topic(self, topic: NewTopic) -> None:
8587
"""
8688
Create a single topic in the in-memory Kafka store.
8789
@@ -106,7 +108,7 @@ def delete_topics(
106108
for topic in topics:
107109
self.delete_topic(topic=topic)
108110

109-
def delete_topic(self, topic: NewTopic):
111+
def delete_topic(self, topic: NewTopic) -> None:
110112
"""
111113
Delete a single topic from the in-memory Kafka store.
112114

mockafka/aiokafka/aiokafka_admin_client.py

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
from __future__ import annotations
22

3-
from typing import Dict
3+
from typing import Any, Dict
44

55
from aiokafka.admin import NewPartitions, NewTopic # type: ignore[import-untyped]
66

@@ -31,13 +31,13 @@ class FakeAIOKafkaAdmin:
3131
Calls _create_partition() for each topic.
3232
"""
3333

34-
def __init__(self, clean: bool = False, *args, **kwargs):
34+
def __init__(self, clean: bool = False, *args: Any, **kwargs: Any) -> None:
3535
self.kafka = KafkaStore(clean=clean)
3636

37-
async def close(self):
37+
async def close(self) -> None:
3838
pass
3939

40-
async def start(self):
40+
async def start(self) -> None:
4141
pass
4242

4343
async def _create_topic(self, topic: NewTopic) -> None:
@@ -46,23 +46,26 @@ async def _create_topic(self, topic: NewTopic) -> None:
4646
topic=topic.name, partition_count=topic.num_partitions
4747
)
4848

49-
async def _remove_topic(self, topic: str):
49+
async def _remove_topic(self, topic: str) -> None:
5050
self.kafka.remove_topic(topic=topic)
5151

52-
async def create_topics(self, new_topics: list[NewTopic], *args, **kwargs):
52+
async def create_topics(self, new_topics: list[NewTopic], *args: Any, **kwargs: Any) -> None:
5353
for topic in new_topics:
5454
await self._create_topic(topic=topic)
5555

56-
async def delete_topics(self, topics: list[str], **kwargs) -> None:
56+
async def delete_topics(self, topics: list[str], **kwargs: Any) -> None:
5757
for topic in topics:
5858
await self._remove_topic(topic=topic)
5959

60-
async def _create_partition(self, topic: str, partition_count: int):
60+
async def _create_partition(self, topic: str, partition_count: int) -> None:
6161
self.kafka.create_partition(topic=topic, partitions=partition_count)
6262

6363
async def create_partitions(
64-
self, topic_partitions: Dict[str, NewPartitions], *args, **kwargs
65-
):
64+
self,
65+
topic_partitions: Dict[str, NewPartitions],
66+
*args: Any,
67+
**kwargs: Any,
68+
) -> None:
6669
for topic, partition in topic_partitions.items():
6770
await self._create_partition(
6871
topic=topic, partition_count=partition.total_count

mockafka/aiokafka/aiokafka_consumer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ def subscription(self) -> Set[str]:
160160
def unsubscribe(self) -> None:
161161
self.subscribed_topic = []
162162

163-
def _get_key(self, topic, partition) -> str:
163+
def _get_key(self, topic: str, partition: int) -> str:
164164
return f"{topic}*{partition}"
165165

166166
def _fetch_one(self, topic: str, partition: int) -> Optional[ConsumerRecord[bytes, bytes]]:

mockafka/aiokafka/aiokafka_producer.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
from __future__ import annotations
22

33
import asyncio
4-
from typing import Optional
4+
from typing import Any, Optional
55

66
from aiokafka.util import create_future # type: ignore[import-untyped]
77
from typing_extensions import LiteralString, Self
@@ -39,7 +39,7 @@ class FakeAIOKafkaProducer:
3939
- send_and_wait(): Call send().
4040
"""
4141

42-
def __init__(self, *args, **kwargs) -> None:
42+
def __init__(self, *args: Any, **kwargs: Any) -> None:
4343
self.kafka = KafkaStore()
4444

4545
async def _produce(
@@ -88,7 +88,7 @@ async def send(
8888
headers=headers,
8989
timestamp_ms=timestamp_ms,
9090
)
91-
future = create_future()
91+
future: asyncio.Future[None] = create_future()
9292
future.set_result(None)
9393
return future
9494

@@ -99,7 +99,7 @@ async def send_and_wait(
9999
key: Optional[bytes] = None,
100100
partition: int = 0,
101101
timestamp_ms: Optional[int] = None,
102-
headers=None,
102+
headers: Optional[list[tuple[str, Optional[bytes]]]] = None,
103103
) -> None:
104104
future = await self.send(
105105
topic=topic,

mockafka/broker_metadata.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,16 +8,16 @@ class BrokerMetadata(object):
88
This class is typically not user instantiated.
99
"""
1010

11-
def __init__(self):
11+
def __init__(self) -> None:
1212
self.id = 1
1313
"""Broker id"""
1414
self.host = "fakebroker"
1515
"""Broker hostname"""
1616
self.port = 9091
1717
"""Broker port"""
1818

19-
def __repr__(self):
19+
def __repr__(self) -> str:
2020
return "BrokerMetadata({}, {}:{})".format(self.id, self.host, self.port)
2121

22-
def __str__(self):
22+
def __str__(self) -> str:
2323
return "{}:{}/{}".format(self.host, self.port, self.id)

mockafka/cluster_metadata.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,10 +36,10 @@ def __init__(self, topic: str | None = None):
3636
self.orig_broker_id = -1
3737
self.orig_broker_name = None
3838

39-
def __repr__(self):
39+
def __repr__(self) -> str:
4040
return "ClusterMetadata({})".format(self.cluster_id)
4141

42-
def __str__(self):
42+
def __str__(self) -> str:
4343
return str(self.cluster_id)
4444

4545

mockafka/kafka_store.py

Lines changed: 15 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
from __future__ import annotations
2020

2121
from copy import deepcopy
22+
from typing import Any
2223

2324
from confluent_kafka import KafkaException # type: ignore[import-untyped]
2425

@@ -32,9 +33,9 @@
3233

3334

3435
class SingletonMeta(type):
35-
_instances: dict[type[SingletonMeta], SingletonMeta] = {}
36+
_instances: dict[SingletonMeta, Any] = {}
3637

37-
def __call__(cls, *args, **kwargs):
38+
def __call__(cls, *args: Any, **kwargs: Any) -> Any:
3839
if cls not in cls._instances or "clean" in kwargs.keys():
3940
instance = super().__call__(*args, **kwargs)
4041
cls._instances[cls] = instance
@@ -49,7 +50,7 @@ class KafkaStore(metaclass=SingletonMeta):
4950
FIRST_OFFSET = "first_offset"
5051
NEXT_OFFSET = "next_offset"
5152

52-
def __init__(self, clean: bool = False):
53+
def __init__(self, clean: bool = False) -> None:
5354
if clean:
5455
mock_topics.clear()
5556
offset_store.clear()
@@ -70,13 +71,13 @@ def get_number_of_partition(topic: str) -> int:
7071
return len(mock_topics[topic].keys())
7172

7273
@staticmethod
73-
def create_topic(topic: str):
74+
def create_topic(topic: str) -> None:
7475
if mock_topics.get(topic, None) is not None:
7576
raise KafkaException(f"{topic} exist is fake kafka")
7677

7778
mock_topics[topic] = {}
7879

79-
def create_partition(self, topic: str, partitions: int):
80+
def create_partition(self, topic: str, partitions: int) -> None:
8081
if not self.is_topic_exist(topic=topic):
8182
self.create_topic(topic=topic)
8283

@@ -92,7 +93,7 @@ def create_partition(self, topic: str, partitions: int):
9293
else:
9394
raise KafkaException("can not decrease partition of topic")
9495

95-
def remove_topic(self, topic: str):
96+
def remove_topic(self, topic: str) -> None:
9697
if not self.is_topic_exist(topic=topic):
9798
return
9899

@@ -103,22 +104,22 @@ def remove_topic(self, topic: str):
103104
if topic in offset_key:
104105
offset_store.pop(offset_key)
105106

106-
def set_first_offset(self, topic: str, partition: int, value: int):
107+
def set_first_offset(self, topic: str, partition: int, value: int) -> None:
107108
offset_store_key = self.get_offset_store_key(topic=topic, partition=partition)
108109
first_offset = self.get_partition_first_offset(topic=topic, partition=partition)
109110
next_offset = self.get_partition_next_offset(topic=topic, partition=partition)
110111

111112
if first_offset < value <= next_offset:
112113
offset_store[offset_store_key][self.FIRST_OFFSET] = value
113114

114-
def _add_next_offset(self, topic: str, partition: int):
115+
def _add_next_offset(self, topic: str, partition: int) -> None:
115116
offset_store_key = self.get_offset_store_key(topic=topic, partition=partition)
116117
offset_store[offset_store_key][self.NEXT_OFFSET] += 1
117118

118-
def get_offset_store_key(self, topic: str, partition: int):
119+
def get_offset_store_key(self, topic: str, partition: int) -> str:
119120
return f"{topic}*{partition}"
120121

121-
def produce(self, message: Message, topic: str, partition: int):
122+
def produce(self, message: Message, topic: str, partition: int) -> None:
122123
if not topic:
123124
return
124125

@@ -174,15 +175,15 @@ def number_of_message_in_topic(self, topic: str) -> int:
174175

175176
return count_of_messages
176177

177-
def clear_topic_messages(self, topic: str):
178+
def clear_topic_messages(self, topic: str) -> None:
178179
for partition in self.partition_list(topic=topic):
179180
self.clear_partition_messages(topic=topic, partition=partition)
180181

181182
@staticmethod
182-
def clear_partition_messages(topic: str, partition: int):
183+
def clear_partition_messages(topic: str, partition: int) -> None:
183184
mock_topics[topic][partition] = []
184185

185-
def reset_offset(self, topic: str, strategy: str = "latest"):
186+
def reset_offset(self, topic: str, strategy: str = "latest") -> None:
186187
for partition in self.partition_list(topic=topic):
187188
key = self.get_offset_store_key(topic, partition)
188189

@@ -195,6 +196,6 @@ def reset_offset(self, topic: str, strategy: str = "latest"):
195196
offset_store[key][self.FIRST_OFFSET] = 0
196197

197198
@staticmethod
198-
def fresh():
199+
def fresh() -> None:
199200
mock_topics.clear()
200201
offset_store.clear()

mockafka/topic_metadata.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,16 +16,16 @@ class TopicMetadata(object):
1616
# Sphinx issue where it tries to reference the same instance variable
1717
# on other classes which raises a warning/error.
1818

19-
def __init__(self, topic_name: str, partition_num: Collection[int] = ()):
19+
def __init__(self, topic_name: str, partition_num: Collection[int] = ()) -> None:
2020
self.topic = topic_name
2121
"""Topic name"""
2222
self.partitions = {num: PartitionMetadata(id=num) for num in partition_num}
2323
"""Map of partitions indexed by partition id. Value is a PartitionMetadata object."""
2424
self.error = None
2525
"""Topic error, or None. Value is a KafkaError object."""
2626

27-
def __str__(self):
27+
def __str__(self) -> str:
2828
return self.topic
2929

30-
def __len__(self):
30+
def __len__(self) -> int:
3131
return len(self.partitions)

pyproject.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,8 @@ profile = "black"
4848
float_to_top = true
4949

5050
[tool.mypy]
51+
files = ["mockafka", "tests"]
52+
5153
warn_unused_ignores = true
5254

5355
no_implicit_optional = true

0 commit comments

Comments
 (0)