Browse Source

[aiohttp] - disable reset query and remove explicit call prepare (#9829)

* [aiohttp] - disable reset query and remove explicit call prepare

* revert import change

Co-authored-by: Sam Bull <[email protected]>

---------

Co-authored-by: Sam Bull <[email protected]>
Sergey Kovalev 3 months ago
parent
commit
1ebc727481
2 changed files with 9 additions and 6 deletions
  1. 6 1
      frameworks/Python/aiohttp/app/main.py
  2. 3 5
      frameworks/Python/aiohttp/app/views.py

+ 6 - 1
frameworks/Python/aiohttp/app/main.py

@@ -37,6 +37,11 @@ def pg_dsn(dialect=None) -> str:
     )
     return url.render_as_string(hide_password=False)
 
 
+class NoResetConnection(asyncpg.Connection):
+    __slots__ = ()
+
+    def get_reset_query(self):
+        return ''
 
 
 async def db_ctx(app: web.Application):
     # number of gunicorn workers = multiprocessing.cpu_count() as per gunicorn_conf.py
@@ -52,7 +57,7 @@ async def db_ctx(app: web.Application):
         app['db_session'] = async_sessionmaker(engine)
     else:
         dsn = pg_dsn()
-        app['pg'] = await asyncpg.create_pool(dsn=dsn, min_size=min_size, max_size=max_size, loop=app.loop)
+        app['pg'] = await asyncpg.create_pool(dsn=dsn, min_size=min_size, max_size=max_size, loop=app.loop, connection_class=NoResetConnection)
 
     yield
 

+ 3 - 5
frameworks/Python/aiohttp/app/views.py

@@ -66,7 +66,7 @@ async def single_database_query_raw(request):
     id_ = randint(1, 10000)
 
     async with request.app['pg'].acquire() as conn:
-        r = await conn.fetchval('SELECT id,randomnumber FROM world WHERE id = $1', id_)
+        r = await conn.fetchval(READ_ROW_SQL, id_)
     return json_response({'id': id_, 'randomNumber': r})
 
 
@@ -96,11 +96,10 @@ async def multiple_database_queries_raw(request):
 
     result = []
     async with request.app['pg'].acquire() as conn:
-        stmt = await conn.prepare(READ_ROW_SQL)
         for id_ in ids:
             result.append({
                 'id': id_,
-                'randomNumber': await stmt.fetchval(id_),
+                'randomNumber': await conn.fetchval(READ_ROW_SQL, id_),
             })
     return json_response(result)
 
@@ -160,10 +159,9 @@ async def updates_raw(request):
     worlds = [{'id': row_id, 'randomNumber': number} for row_id, number in updates]
 
     async with request.app['pg'].acquire() as conn:
-        stmt = await conn.prepare(READ_ROW_SQL)
         for id_, _ in updates:
             # the result of this is the int previous random number which we don't actually use
-            await stmt.fetchval(id_)
+            await conn.fetchval(READ_ROW_SQL, id_)
         await conn.executemany(WRITE_ROW_SQL, updates)
 
     return json_response(worlds)