Can two aiohttp.web.Application() objects run in the same process, e.g. on different ports?
I see a bunch of examples of aiohttp code like:
from aiohttp import web
app = web.Application()
app.router.add_get('/foo', foo_view, name='foo')
web.run_app(app, host='0.0.0.0', port=10000)
I'm wondering if there's some equivalent where multiple web.Application() instances can be configured to run at the same time. Something like:
from aiohttp import web
app1 = web.Application()
app1.router.add_get('/foo', foo_view, name='foo')
app2 = web.Application()
app2.router.add_get('/bar', bar_view, name='bar')
# This is the wishful thinking code:
web.configure_app(app1, host='0.0.0.0', port=10000)
web.configure_app(app2, host='0.0.0.0', port=10001)
web.run_apps()
My use case is that I have an existing Python web framework that does this kind of thing, and I'm building an analogous prototype with aiohttp on Python 3.6.
I understand that multiple Python servers can run behind e.g. nginx (see also http://aiohttp.readthedocs.io/en/stable/deployment.html); that's not what I'm after. I want to explore the possibility of two aiohttp web servers sharing the same asyncio event loop, running in the same Python process and serving on two different ports.
Yes, you can - just write a small wrapper that re-implements run_app.
Here is a simple example. All app-specific parts of run_app are moved into a dedicated AppWrapper class. MultiApp is responsible only for initializing all configured apps, keeping the loop running, and cleaning up.
import asyncio

from aiohttp import web


class AppWrapper:
    def __init__(self, aioapp, port, loop):
        self.port = port
        self.aioapp = aioapp
        self.loop = loop
        self.uris = []
        self.servers = []

    def initialize(self):
        # Run the app's startup signals, then create one listening server
        # per URI reported by the (internal) _make_server_creators helper.
        self.loop.run_until_complete(self.aioapp.startup())
        handler = self.aioapp.make_handler(loop=self.loop)
        server_creations, self.uris = web._make_server_creators(
            handler, loop=self.loop, ssl_context=None,
            host=None, port=self.port, path=None, sock=None,
            backlog=128)
        self.servers = self.loop.run_until_complete(
            asyncio.gather(*server_creations, loop=self.loop)
        )

    def shutdown(self):
        # Close the listening sockets first, then fire the app's shutdown signals.
        server_closures = []
        for srv in self.servers:
            srv.close()
            server_closures.append(srv.wait_closed())
        self.loop.run_until_complete(
            asyncio.gather(*server_closures, loop=self.loop))
        self.loop.run_until_complete(self.aioapp.shutdown())

    def cleanup(self):
        self.loop.run_until_complete(self.aioapp.cleanup())

    def show_info(self):
        print("======== Running on {} ========\n".format(', '.join(self.uris)))


class MultiApp:
    def __init__(self, loop=None):
        self._apps = []
        self.user_supplied_loop = loop is not None
        if loop is None:
            self.loop = asyncio.get_event_loop()
        else:
            self.loop = loop

    def configure_app(self, app, port):
        # Attach the shared loop to the app and queue it for start-up.
        app._set_loop(self.loop)
        self._apps.append(
            AppWrapper(app, port, self.loop)
        )

    def run_all(self):
        # Initialize every app, serve until Ctrl+C, then shut everything down.
        try:
            for app in self._apps:
                app.initialize()
            try:
                for app in self._apps:
                    app.show_info()
                print("(Press CTRL+C to quit)")
                self.loop.run_forever()
            except KeyboardInterrupt:  # pragma: no cover
                pass
            finally:
                for app in self._apps:
                    app.shutdown()
        finally:
            for app in self._apps:
                app.cleanup()
            if not self.user_supplied_loop:
                self.loop.close()
Note: be aware that this relies on internal aiohttp helpers (web._make_server_creators and Application._set_loop), which may change between releases; a sketch using the public aiohttp 3.x API follows the usage example below.
Now let's use it:
from aiohttp import web
async def handle1(request):
    return web.Response(text='SERVER 1')

async def handle2(request):
    return web.Response(text='SERVER 2')
app1 = web.Application()
app1.router.add_get('/', handle1)
app2 = web.Application()
app2.router.add_get('/', handle2)
ma = MultiApp()
ma.configure_app(app1, port=8081)
ma.configure_app(app2, port=8071)
ma.run_all()
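If you are on a newer aiohttp release (3.x), the same result can be achieved without touching internals: the public web.AppRunner / web.TCPSite API lets you start any number of applications on one event loop. A minimal sketch under that assumption (the start_app and run_multiple helper names are mine, not part of aiohttp):

import asyncio
from aiohttp import web

async def start_app(app, port):
    # AppRunner/TCPSite are the public aiohttp 3.x way to serve an Application
    # without web.run_app (roughly what make_handler + create_server did before).
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, '0.0.0.0', port)
    await site.start()
    return runner

def run_multiple(apps_with_ports):
    loop = asyncio.get_event_loop()
    runners = [
        loop.run_until_complete(start_app(app, port))
        for app, port in apps_with_ports
    ]
    print("(Press CTRL+C to quit)")
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        pass
    finally:
        # Release the sockets and run shutdown/cleanup signals for every app.
        for runner in runners:
            loop.run_until_complete(runner.cleanup())
        loop.close()

# usage with the two apps defined above:
# run_multiple([(app1, 8081), (app2, 8071)])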
As a side note, think again about why you need this. In almost all cases, decoupling is the better choice: serving many endpoints from the same process makes them depend on each other. One case that comes to mind with a "good" reason is an internal stats / debug endpoint.
Though the above answer has been accepted, here is another approach:
Create test.py:
from aiohttp import web
import asyncio
import sys


async def status1(request):
    return web.json_response('App1 OK')


async def status2(request):
    return web.json_response('App2 OK')


def start():
    try:
        loop = asyncio.get_event_loop()

        # App1
        app1 = web.Application()
        app1.router.add_get('/status', status1)
        handler1 = app1.make_handler()
        coroutine1 = loop.create_server(handler1, '0.0.0.0', 8081)
        server1 = loop.run_until_complete(coroutine1)
        address1, port1 = server1.sockets[0].getsockname()
        print('App1 started on http://{}:{}'.format(address1, port1))

        # App2
        app2 = web.Application()
        app2.router.add_get('/status', status2)
        handler2 = app2.make_handler()
        coroutine2 = loop.create_server(handler2, '0.0.0.0', 8082)
        server2 = loop.run_until_complete(coroutine2)
        address2, port2 = server2.sockets[0].getsockname()
        print('App2 started on http://{}:{}'.format(address2, port2))

        try:
            loop.run_forever()
        except KeyboardInterrupt:
            pass
        finally:
            server1.close()
            loop.run_until_complete(app1.shutdown())
            loop.run_until_complete(handler1.shutdown(60.0))
            loop.run_until_complete(handler1.finish_connections(1.0))
            loop.run_until_complete(app1.cleanup())

            server2.close()
            loop.run_until_complete(app2.shutdown())
            loop.run_until_complete(handler2.shutdown(60.0))
            loop.run_until_complete(handler2.finish_connections(1.0))
            loop.run_until_complete(app2.cleanup())

            loop.close()
    except Exception as e:
        sys.stderr.write('Error: ' + format(str(e)) + "\n")
        sys.exit(1)


if __name__ == '__main__':
    start()
At a terminal, open two tabs. In one tab, run
python test.py
In the other tab, run
curl -X GET http://localhost:8081/status
curl -X GET http://localhost:8082/status
You will get the responses
"App1 OK"
"App2 OK"