The counter increments for each datum and emits a message every 10 items,
plus a final message at the end.
"""
10+
1011import asyncio
1112import signal
1213from collections .abc import AsyncIterable , AsyncIterator
1718class ReduceCounter (reducestreamer .ReduceStreamer ):
1819 """
1920 A reduce streaming counter that emits intermediate results.
20-
21+
2122 This demonstrates the key difference from regular Reducer:
2223 - Regular Reducer: waits for all data, then returns Messages
2324 - ReduceStreamer: yields Message objects incrementally as an async iterator
2425 """
25-
26+
2627 def __init__ (self , initial : int = 0 ) -> None :
2728 self .counter = initial
2829
@@ -34,21 +35,21 @@ async def handler(
3435 ) -> AsyncIterator [reducestreamer .Message ]:
3536 """
3637 Process datums and yield messages incrementally.
37-
38+
3839 Args:
3940 keys: List of keys for this window
4041 datums: Async iterable of incoming data
4142 md: Metadata containing window information
42-
43+
4344 Yields:
4445 Message objects to send to the next vertex
4546 """
4647 iw = md .interval_window
4748 print (f"Handler started for keys={ keys } , window=[{ iw .start } , { iw .end } ]" )
48-
49+
4950 async for _ in datums :
5051 self .counter += 1
51-
52+
5253 # Emit intermediate result every 10 items
5354 if self .counter % 10 == 0 :
5455 msg = (
@@ -59,7 +60,7 @@ async def handler(
5960 print (f"Yielding intermediate result: counter={ self .counter } " )
6061 # Early release of data - this is the key feature of reduce streaming!
6162 yield reducestreamer .Message (msg , keys = keys )
62-
63+
6364 # Emit final result
6465 msg = (
6566 f"counter:{ self .counter } (FINAL) "
@@ -105,4 +106,3 @@ async def start(creator: type, init_args: tuple):
105106
106107if __name__ == "__main__" :
107108 asyncio .run (start (ReduceCounter , (0 ,)))