Franz Weckesser
2018-07-05 21:35:21 UTC
I have a service that normally runs well but can sometimes get backed up
depending on the complexity of the input. I've been given the requirement
to let at most N requests back up before rejecting further requests with a
503 Service Unavailable. I decided to use a Tornado queue to hold the
backlog and process the actual work asynchronously. I'm curious whether
anyone is aware of any significant flaws with this approach. Sample code
is below; any feedback is appreciated.
from tornado.ioloop import IOLoop
from tornado.queues import Queue, QueueFull
from tornado import gen
from tornado import web
import random
import time

requestQueue = Queue(maxsize=10)


class MainHandler(web.RequestHandler):
    @web.asynchronous
    def get(self):
        try:
            requestQueue.put_nowait(self)
            print("ENQDEPTH: " + str(requestQueue.qsize()))
        except QueueFull:
            IOLoop.instance().add_callback(responder, self, "Full\n", 503)
            return


@gen.coroutine
def worker():
    while True:
        request = yield requestQueue.get()
        print("DEQDEPTH: " + str(requestQueue.qsize()))
        try:
            # simulate slow request
            time.sleep(random.randint(5, 15))
            IOLoop.instance().add_callback(responder, request, "Done\n", 200)
            yield gen.sleep(0.01)
        finally:
            requestQueue.task_done()


@gen.coroutine
def responder(req, text, code):
    req.write(text)
    req.set_status(code)
    req.finish()


def make_app():
    return web.Application([
        (r"/", MainHandler),
    ])


@gen.coroutine
def main():
    yield worker()


if __name__ == '__main__':
    main()
    app = make_app()
    app.listen(8888)
    IOLoop.current().start()
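
For anyone who wants to try it, here is a rough client sketch for exercising
the endpoint (not part of the service itself; it assumes the server above is
already listening on localhost:8888, and it raises max_clients so the client
doesn't throttle concurrency below the queue's maxsize):

from collections import Counter

from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPError
from tornado.ioloop import IOLoop


@gen.coroutine
def hammer(n=15):
    # max_clients must exceed the server queue's maxsize, otherwise the
    # client itself limits in-flight requests and the 503 path never fires
    client = AsyncHTTPClient(max_clients=n)

    @gen.coroutine
    def one():
        try:
            # generous timeout: queued requests are processed one at a time
            resp = yield client.fetch("http://localhost:8888/",
                                      request_timeout=300)
            raise gen.Return(resp.code)
        except HTTPError as e:
            raise gen.Return(e.code)

    # yield a list of futures to run all n fetches concurrently
    codes = yield [one() for _ in range(n)]
    print(Counter(codes))


if __name__ == '__main__':
    IOLoop.current().run_sync(hammer)

With maxsize=10 and 15 concurrent requests I'd expect roughly ten 200s and a
handful of 503s, though the exact split depends on timing.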