Hello, here is my code (Sanic request/response middleware, a recurring signal, and a blueprint view):
# middlewares.py
from icecream import ic  # debug printer used below

# `app` is the shared Sanic instance created elsewhere

@app.middleware('request')
async def request_mid(request):
    # count one more in-flight request for this route
    key = request.route.uri
    connections = getattr(request.app.ctx, 'connections', dict())
    connections[key] = connections.get(key, 0) + 1
    setattr(request.app.ctx, 'connections', connections)
    ic(connections, 'req')
@app.middleware('response')
async def response_mid(request, response):
    # the request has finished, so decrement the counter for this route
    key = request.route.uri
    connections = getattr(request.app.ctx, 'connections', dict())
    connections[key] = connections.get(key, 1) - 1
    setattr(request.app.ctx, 'connections', connections)
    ic(connections, 'resp')
# signals.py
from asyncio import sleep

from icecream import ic

# a self-rescheduling signal that prints the counters once per second
@app.signal('sys.server.ping')
async def timer():
    c = getattr(app.ctx, 'connections', dict())
    ic(c)
    await sleep(1)
    await app.dispatch('sys.server.ping')
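The recurring signal has to be dispatched once to get the loop going; I do that at startup, roughly like this (a sketch, the exact startup code isn't shown above):

@app.after_server_start
async def start_ping(app, _loop):
    # fire the first 'sys.server.ping'; the handler above re-dispatches itself every second
    await app.dispatch('sys.server.ping')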
# views.py
from asyncio import sleep

from sanic import Blueprint
from sanic.response import text

status_bp = Blueprint("status", "status")  # url_prefix "status" -> route /status/123

@status_bp.get("/123")
async def status(request):
    print("statu")
    await sleep(10)  # simulate a slow but non-blocking handler
    return text("ok")
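For completeness, the pieces above are wired to one shared app roughly like this (a minimal sketch; the module and app names are assumptions, the port matches the log below):

# server.py (sketch)
from sanic import Sanic

app = Sanic("demo")  # app name assumed for illustration

# middlewares.py, signals.py and views.py all attach to this `app`
app.blueprint(status_bp)

if __name__ == "__main__":
    app.run(host="127.0.0.1", port=9998)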
# output
ic| connections: {'/status/123': 1}, 'req' ①
statu
ic| c: {'/status/123': 1}
ic| c: {'/status/123': 1}
ic| c: {'/status/123': 1}
ic| connections: {'/status/123': 0}, 'resp' ②
[2023-10-28 22:25:42 +0800] - (sanic.access)[INFO][127.0.0.1:12625]: GET http://127.0.0.1:9998/status/123 200 2
ic| connections: {'/status/123': 1}, 'req' ③
statu
ic| c: {'/status/123': 1}
ic| c: {'/status/123': 1}
ic| c: {'/status/123': 1}
ic| connections: {'/status/123': 0}, 'resp' ④
[2023-10-28 22:25:52 +0800] - (sanic.access)[INFO][127.0.0.1:12624]: GET http://127.0.0.1:9998/status/123 200 2
I expected that while one request is blocked in await sleep(10), another request could still be handled concurrently, so the markers in the output should appear in the order ① req1, ② req2, ③ resp1, ④ resp2. The actual output, however, is ① req1, ② resp1, ③ req2, ④ resp2: the second request is not processed until the first one has finished. Is this an issue with my code, or something else?
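For reference, the overlap I expected can be reproduced with any two concurrent clients, for example something like this (httpx and asyncio.gather here are assumptions for illustration, not part of my setup):

# client sketch: fire two requests at once so both handlers sit in await sleep(10) together
import asyncio
import httpx

async def main():
    async with httpx.AsyncClient() as client:
        await asyncio.gather(
            client.get("http://127.0.0.1:9998/status/123", timeout=30),
            client.get("http://127.0.0.1:9998/status/123", timeout=30),
        )

asyncio.run(main())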
Thank you for reviewing my code.