
Commit 9c5c33b

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 7d1a40b commit 9c5c33b

File tree

6 files changed: +88 -134 lines changed


integration_tests/base_routes.py

+27 -48

@@ -1091,30 +1091,25 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam
 
 # --- Streaming responses ---
 
+
 @app.get("/stream/sync", streaming=True)
 async def sync_stream():
     def generator():
         for i in range(5):
             yield f"Chunk {i}\n".encode()
-
+
     headers = Headers({"Content-Type": "text/plain"})
-    return Response(
-        status_code=200,
-        description=generator(),
-        headers=headers
-    )
+    return Response(status_code=200, description=generator(), headers=headers)
+
 
 @app.get("/stream/async", streaming=True)
 async def async_stream():
     async def generator():
         for i in range(5):
             yield f"Async Chunk {i}\n".encode()
-
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())
+
 
 @app.get("/stream/mixed", streaming=True)
 async def mixed_stream():
@@ -1123,86 +1118,70 @@ async def generator():
         yield "String chunk\n".encode()
         yield str(42).encode() + b"\n"
         yield json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-
-    return Response(
-        status_code=200,
-        headers={"Content-Type": "text/plain"},
-        description=generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=generator())
+
 
 @app.get("/stream/events", streaming=True)
 async def server_sent_events():
     async def event_generator():
         import asyncio
         import json
         import time
-
+
         # Regular event
         yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
         await asyncio.sleep(1)
-
+
         # Event with ID
         yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
         await asyncio.sleep(1)
-
+
         # Multiple data lines
-        data = json.dumps({'status': 'complete', 'results': [1, 2, 3]}, indent=2)
+        data = json.dumps({"status": "complete", "results": [1, 2, 3]}, indent=2)
        yield f"event: complete\ndata: {data}\n\n".encode()
-
+
     return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/event-stream",
-            "Cache-Control": "no-cache",
-            "Connection": "keep-alive"
-        },
-        description=event_generator()
+        status_code=200, headers={"Content-Type": "text/event-stream", "Cache-Control": "no-cache", "Connection": "keep-alive"}, description=event_generator()
     )
 
+
 @app.get("/stream/large-file", streaming=True)
 async def stream_large_file():
     async def file_generator():
         # Simulate streaming a large file in chunks
         chunk_size = 1024  # 1KB chunks
         total_size = 10 * chunk_size  # 10KB total
-
+
         for offset in range(0, total_size, chunk_size):
             # Simulate reading file chunk
             chunk = b"X" * min(chunk_size, total_size - offset)
             yield chunk
-
+
     return Response(
         status_code=200,
-        headers={
-            "Content-Type": "application/octet-stream",
-            "Content-Disposition": "attachment; filename=large-file.bin"
-        },
-        description=file_generator()
+        headers={"Content-Type": "application/octet-stream", "Content-Disposition": "attachment; filename=large-file.bin"},
+        description=file_generator(),
     )
 
+
 @app.get("/stream/csv", streaming=True)
 async def stream_csv():
     async def csv_generator():
         # CSV header
         yield "id,name,value\n".encode()
-
+
         import asyncio
         import random
-
+
         # Generate rows
         for i in range(5):
             await asyncio.sleep(0.5)  # Simulate data processing
             row = f"{i},item-{i},{random.randint(1, 100)}\n"
             yield row.encode()
-
-    return Response(
-        status_code=200,
-        headers={
-            "Content-Type": "text/csv",
-            "Content-Disposition": "attachment; filename=data.csv"
-        },
-        description=csv_generator()
-    )
+
+    return Response(status_code=200, headers={"Content-Type": "text/csv", "Content-Disposition": "attachment; filename=data.csv"}, description=csv_generator())
+
 
 def main():
     app.set_response_header("server", "robyn")
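
For context only (this snippet is not part of the commit), a minimal sketch of consuming one of these streaming endpoints from a client, using the same aiohttp pattern the integration tests rely on; it assumes the integration-test server is running locally on 127.0.0.1:8080:

import asyncio

import aiohttp


async def read_sync_stream() -> None:
    # Read /stream/sync chunk by chunk instead of buffering the whole body,
    # mirroring `async for chunk in response.content` from the test suite.
    async with aiohttp.ClientSession() as client:
        async with client.get("http://127.0.0.1:8080/stream/sync") as response:
            assert response.status == 200
            async for chunk in response.content:
                print(chunk.decode(), end="")


if __name__ == "__main__":
    asyncio.run(read_sync_stream())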

integration_tests/conftest.py

+1 -4

@@ -8,12 +8,10 @@
 from typing import List
 
 import pytest
-import pytest_asyncio
-from robyn import Robyn
-from integration_tests.base_routes import app
 
 from integration_tests.helpers.network_helpers import get_network_host
 
+
 def spawn_process(command: List[str]) -> subprocess.Popen:
     if platform.system() == "Windows":
         command[0] = "python"
@@ -129,4 +127,3 @@ def env_file():
     env_path.unlink()
     del os.environ["ROBYN_PORT"]
     del os.environ["ROBYN_HOST"]
-
integration_tests/test_streaming_responses.py

+10 -9

@@ -19,6 +19,7 @@
 # Mark all tests in this module as async
 pytestmark = pytest.mark.asyncio
 
+
 async def test_sync_stream():
     """Test basic synchronous streaming response."""
     async with aiohttp.ClientSession() as client:
@@ -34,6 +35,7 @@ async def test_sync_stream():
         for i, chunk in enumerate(chunks):
             assert chunk == f"Chunk {i}\n"
 
+
 async def test_async_stream():
     """Test asynchronous streaming response."""
     async with aiohttp.ClientSession() as client:
@@ -49,19 +51,15 @@ async def test_async_stream():
         for i, chunk in enumerate(chunks):
             assert chunk == f"Async Chunk {i}\n"
 
+
 async def test_mixed_stream():
     """Test streaming of mixed content types."""
     async with aiohttp.ClientSession() as client:
         async with client.get("http://127.0.0.1:8080/stream/mixed") as response:
             assert response.status == 200
             assert response.headers["Content-Type"] == "text/plain"
 
-            expected = [
-                b"Binary chunk\n",
-                b"String chunk\n",
-                b"42\n",
-                json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
-            ]
+            expected = [b"Binary chunk\n", b"String chunk\n", b"42\n", json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"]
 
             chunks = []
             async for chunk in response.content:
@@ -71,6 +69,7 @@ async def test_mixed_stream():
             for chunk, expected_chunk in zip(chunks, expected):
                 assert chunk == expected_chunk
 
+
 async def test_server_sent_events():
     """Test Server-Sent Events (SSE) streaming."""
     async with aiohttp.ClientSession() as client:
@@ -103,6 +102,7 @@ async def test_server_sent_events():
             assert event_data["status"] == "complete"
             assert event_data["results"] == [1, 2, 3]
 
+
 async def test_large_file_stream():
     """Test streaming of large files in chunks."""
     async with aiohttp.ClientSession() as client:
@@ -118,6 +118,7 @@ async def test_large_file_stream():
 
             assert total_size == 10 * 1024  # 10KB total
 
+
 async def test_csv_stream():
     """Test streaming of CSV data."""
     async with aiohttp.ClientSession() as client:
@@ -132,11 +133,11 @@ async def test_csv_stream():
 
             # Verify header
             assert lines[0] == "id,name,value"
-
+
             # Verify data rows
             assert len(lines) == 6  # Header + 5 data rows
             for i, line in enumerate(lines[1:], 0):
-                id_, name, value = line.split(',')
+                id_, name, value = line.split(",")
                 assert int(id_) == i
                 assert name == f"item-{i}"
-                assert 1 <= int(value) <= 100
+                assert 1 <= int(value) <= 100