0.2.14

2023-11-22 19:38:39 +03:00
parent e2082b48d3
commit db76ba3733
22 changed files with 271 additions and 314 deletions

View File

@@ -1,35 +1,41 @@
from contextlib import contextmanager
import logging
from typing import TypeVar, Any, Dict, Generic, Callable
from sqlalchemy import create_engine, Column, Integer
# from contextlib import contextmanager
from typing import Any, Callable, Dict, TypeVar
# from psycopg2.errors import UniqueViolation
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
Session = sessionmaker(bind=engine, expire_on_commit=False)
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
@contextmanager
def local_session():
    session = Session()
    try:
        yield session
        session.commit()
    except Exception as e:
        print(f"[services.db] Error session: {e}")
        session.rollback()
        raise
    finally:
        session.close()
# @contextmanager
def local_session(src=""):
    return Session(bind=engine, expire_on_commit=False)
    # try:
    #     yield session
    #     session.commit()
    # except Exception as e:
    #     if not (src == "create_shout" and isinstance(e, UniqueViolation)):
    #         import traceback
    #         session.rollback()
    #         print(f"[services.db] {src}: {e}")
    #         traceback.print_exc()
    #     raise Exception("[services.db] exception")
    # finally:
    #     session.close()
class Base(declarative_base()):
@@ -46,38 +52,17 @@ class Base(declarative_base()):
    def __init_subclass__(cls, **kwargs):
        REGISTRY[cls.__name__] = cls

    @classmethod
    def create(cls: Generic[T], **kwargs) -> Generic[T]:
        try:
            instance = cls(**kwargs)
            return instance.save()
        except Exception as e:
            print(f"[services.db] Error create: {e}")
            return None

    def save(self) -> Generic[T]:
        with local_session() as session:
            try:
                session.add(self)
            except Exception as e:
                print(f"[services.db] Error save: {e}")
        return self

    def update(self, input):
        column_names = self.__table__.columns.keys()
        for name, value in input.items():
            if name in column_names:
                setattr(self, name, value)
        with local_session() as session:
            try:
                session.commit()
            except Exception as e:
                print(f"[services.db] Error update: {e}")

    def dict(self) -> Dict[str, Any]:
        column_names = self.__table__.columns.keys()
        if "_sa_instance_state" in column_names:
            column_names.remove("_sa_instance_state")
        try:
            return {c: getattr(self, c) for c in column_names}
        except Exception as e:
            print(f"[services.db] Error dict: {e}")
            return {}

    def update(self, values: Dict[str, Any]) -> None:
        for key, value in values.items():
            if hasattr(self, key):
                setattr(self, key, value)

View File

@@ -1,6 +1,7 @@
import redis.asyncio as aredis
from settings import REDIS_URL
class RedisCache:
    def __init__(self, uri=REDIS_URL):
        self._uri: str = uri
@@ -11,28 +12,25 @@ class RedisCache:
        self._client = aredis.Redis.from_url(self._uri, decode_responses=True)

    async def disconnect(self):
        await self._client.aclose()
        if self._client:
            await self._client.close()

    async def execute(self, command, *args, **kwargs):
        if not self._client:
            await self.connect()
        try:
            print(f"[redis] {command} {args}")
            return await self._client.execute_command(command, *args, **kwargs)
        except Exception as e:
            print(f"[redis] ERROR: {e} with: {command} {args}")
            import traceback
            traceback.print_exc()
        if self._client:
            try:
                print("[redis] " + command + " " + " ".join(args))
                r = await self._client.execute_command(command, *args, **kwargs)
                return r
            except Exception as e:
                print(f"[redis] error: {e}")
                return None

    async def subscribe(self, *channels):
        if not self._client:
            await self.connect()
        async with self._client.pubsub() as pubsub:
            for channel in channels:
                await pubsub.subscribe(channel)
                self.pubsub_channels.append(channel)
        if self._client:
            async with self._client.pubsub() as pubsub:
                for channel in channels:
                    await pubsub.subscribe(channel)
                    self.pubsub_channels.append(channel)

    async def unsubscribe(self, *channels):
        if not self._client:
@@ -48,12 +46,15 @@ class RedisCache:
        await self._client.publish(channel, data)

    async def lrange(self, key, start, stop):
        print(f"[redis] LRANGE {key} {start} {stop}")
        return await self._client.lrange(key, start, stop)
        if self._client:
            print(f"[redis] LRANGE {key} {start} {stop}")
            return await self._client.lrange(key, start, stop)

    async def mget(self, key, *keys):
        print(f"[redis] MGET {key} {keys}")
        return await self._client.mget(key, *keys)
        if self._client:
            print(f"[redis] MGET {key} {keys}")
            return await self._client.mget(key, *keys)
redis = RedisCache()
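
A short usage sketch for the guarded client above, assuming the singleton is importable from this service module and a Redis instance is reachable at REDIS_URL; execute() now returns None instead of raising when a command fails.

import asyncio

from services.rediscache import redis  # assumed import path for the singleton


async def main():
    await redis.connect()  # the client must exist before execute() will run a command
    await redis.execute("SET", "greeting", "hello")
    value = await redis.execute("GET", "greeting")
    print(value)  # "hello" thanks to decode_responses=True, or None on error
    await redis.disconnect()


asyncio.run(main())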

View File

@@ -11,13 +11,14 @@ def serialize_datetime(value):
    return value.isoformat()
@query.field("_service")
def resolve_service(*_):
    # Load the full SDL from your SDL file
    with open("schemas/core.graphql", "r") as file:
        full_sdl = file.read()
    return {"sdl": full_sdl}
# NOTE: was used by studio
# @query.field("_service")
# def resolve_service(*_):
#     # Load the full SDL from your SDL file
#     with open("schemas/core.graphql", "r") as file:
#         full_sdl = file.read()
#
#     return {"sdl": full_sdl}
resolvers = [query, mutation, datetime_scalar]
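
The resolvers list is all the schema setup needs from this module; a hedged sketch of how it would typically be wired up with Ariadne (which the query/mutation bindables and datetime_scalar suggest), reading the same schemas/core.graphql file the removed resolver used to serve:

from ariadne import load_schema_from_path, make_executable_schema

type_defs = load_schema_from_path("schemas/core.graphql")
schema = make_executable_schema(type_defs, *resolvers)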