Compare commits

...

2 Commits

Author SHA1 Message Date
4890df9876 Merge pull request 'Finally fix setting delivery' (#3) from settings-refactoring into dev
Some checks failed
CI/CD Pipeline / lint-sast (push) Failing after 13m3s
CI/CD Pipeline / deploy (push) Has been skipped
Reviewed-on: http://79.137.198.26/danyadjan/db_kyc_project/pulls/3
2025-05-31 16:19:52 +03:00
269847c6a2 Finally fix setting delivery 2025-05-31 16:15:06 +03:00
18 changed files with 68 additions and 53 deletions

View File

@@ -5,8 +5,7 @@ from misc.models import Setting
@app.task(autoretry_for=(Exception,), retry_backoff=True)
def deliver_setting(setting_name):
setting = Setting.objects.get(name=setting_name)
def deliver_setting(setting):
rabbitmq_conf = settings.RABBITMQ
dsn = f'{rabbitmq_conf["PROTOCOL"]}://{rabbitmq_conf["USER"]}:{rabbitmq_conf["PASSWORD"]}@{rabbitmq_conf["HOST"]}:{rabbitmq_conf["PORT"]}/'
queue = Queue(settings.SETTINGS_QUEUE_NAME, exchange='', routing_key=settings.SETTINGS_QUEUE_NAME, durable=True)
@@ -14,7 +13,7 @@ def deliver_setting(setting_name):
with conn.channel() as channel:
producer = Producer(channel)
producer.publish(
{setting.name: setting.value['value']},
setting,
exchange='',
routing_key=settings.SETTINGS_QUEUE_NAME,
declare=[queue],

View File

@@ -64,5 +64,27 @@
"value": 30
}
}
},
{
"model": "misc.setting",
"pk": 7,
"fields": {
"name": "DEFAULT_TOP_LIMIT",
"description": "Количество пользователей в топе",
"value": {
"value": 10
}
}
},
{
"model": "misc.setting",
"pk": 8,
"fields": {
"name": "DEFAULT_NEIGHBOUR_LIMIT",
"description": "Количество соседей",
"value": {
"value": 3
}
}
}
]

View File

@@ -7,4 +7,4 @@ class Command(BaseCommand):
def handle(self, *args, **options):
for setting in Setting.objects.all():
deliver_setting.delay(setting.name)
deliver_setting.delay({setting.name: setting.value['value']})

View File

@@ -6,4 +6,4 @@ from misc.celery import deliver_setting as deliver_setting_celery
@receiver(post_save, sender=Setting, dispatch_uid='deliver_setting')
def deliver_setting(sender, instance, **kwargs):
deliver_setting_celery.delay(instance.name)
deliver_setting_celery.delay({instance.name: instance.value['value']})

View File

@@ -3,6 +3,7 @@ from fastapi import Depends, FastAPI, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from functools import partial
from starlette.exceptions import HTTPException
from contextlib import asynccontextmanager
from app.src.routers.api import router as router_api
from app.src.routers.handlers import http_error_handler
@@ -10,8 +11,23 @@ from app.src.domain.setting import launch_consumer
from app.src.db import connect_pg, get_connection, get_channel, get_rmq, get_pg
@asynccontextmanager
async def lifespan(app: FastAPI):
launch_consumer(connect_pg, get_connection)
app.state.pg_pool = await connect_pg()
rmq_conn_pool = aio_pika.pool.Pool(get_connection, max_size=2)
rmq_chan_pool = aio_pika.pool.Pool(partial(get_channel, conn_pool=rmq_conn_pool), max_size=10)
app.state.rmq_chan_pool = rmq_chan_pool
yield
await app.state.pg_pool.close()
def get_application() -> FastAPI:
application = FastAPI()
application = FastAPI(lifespan=lifespan)
application.include_router(router_api, prefix='/api')
@@ -28,19 +44,3 @@ def get_application() -> FastAPI:
return application
app = get_application()
@app.on_event("startup")
async def startup():
launch_consumer(connect_pg, get_connection)
app.state.pg_pool = await connect_pg()
rmq_conn_pool = aio_pika.pool.Pool(get_connection, max_size=2)
rmq_chan_pool = aio_pika.pool.Pool(partial(get_channel, conn_pool=rmq_conn_pool), max_size=10)
app.state.rmq_chan_pool = rmq_chan_pool
@app.on_event("shutdown")
async def shutdown():
await app.state.pg_pool.close()

View File

@@ -1,7 +1,7 @@
CREATE TABLE users(
id BIGINT PRIMARY KEY,
energy INTEGER NOT NULL CONSTRAINT non_negative_energy CHECK (energy >= 0),
session VARCHAR(255) NOT NULL
session TEXT NOT NULL
);
CREATE TABLE clicks(

View File

@@ -1,10 +1,7 @@
import asyncio
from app.src.config import PG_HOST, PG_PORT, PG_USER, PG_PASSWORD, PG_DB
from pathlib import Path
from starlette.requests import Request
import asyncpg
import logging
from asyncpg_trek import plan, execute, Direction
from asyncpg_trek.asyncpg import AsyncpgBackend
@@ -12,8 +9,6 @@ from asyncpg_trek.asyncpg import AsyncpgBackend
DB_URL = f'postgresql://{PG_USER}:{str(PG_PASSWORD)}@{PG_HOST}:{PG_PORT}/{PG_DB}'
MIGRATIONS_DIR = Path(__file__).parent.resolve() / "migrations"
logger = logging.getLogger("uvicorn")
async def connect_pg() -> asyncpg.Pool:
return await asyncpg.create_pool(DB_URL)

View File

@@ -1,6 +1,4 @@
import asyncio
import aio_pika
import logging
from starlette.requests import Request
from aio_pika.abc import AbstractRobustConnection

View File

@@ -4,6 +4,7 @@ import aio_pika
from typing import Callable
import asyncpg
SETTING_QUEUE_NAME = "settings"

View File

@@ -1,4 +1,3 @@
import logging
from aiogram import Bot, types
from aiogram.dispatcher import Dispatcher
from create_bot import bot, token, WEBHOOK_URL

8
bot/scripts/dev.sh Executable file
View File

@@ -0,0 +1,8 @@
#!/usr/bin/env bash
set -o errexit
set -o pipefail
set -o nounset
set -o xtrace
uvicorn main_api:app --host 0.0.0.0 --port "${HTTP_PORT}" --log-config /app/log.ini

View File

@@ -1,7 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o pipefail
set -o nounset
gunicorn main_api:app -w 17 -b 0.0.0.0:7313 -k uvicorn.workers.UvicornWorker --timeout 600

7
bot/scripts/prod.sh Executable file
View File

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -o errexit
set -o pipefail
set -o nounset
gunicorn main_api:app -w "${WORKER_COUNT}" -b "0.0.0.0:${HTTP_PORT}" -k uvicorn.workers.UvicornWorker --timeout 600

View File

@@ -1,8 +0,0 @@
#!/usr/bin/env bash
set -o errexit
set -o pipefail
set -o nounset
set -o xtrace
uvicorn main_api:app --host 0.0.0.0 --port 7313 --log-config /app/log.ini

View File

@@ -11,7 +11,7 @@ services:
volumes:
- backend_media:/app/media
- backend_static:/app/static
command: /app/scripts/gunicorn.sh
command: /app/scripts/prod.sh
restart: on-failure
depends_on:
postgres: &healthy-dependency
@@ -21,7 +21,7 @@ services:
- .env/prod/pg
- .env/prod/back
- .env/prod/rmq
- .env/prod/bot
- .env/prod/bot_token
- .env/prod/web
bot:
@@ -32,8 +32,9 @@ services:
batcher: *started-dependency
env_file:
- .env/prod/bot
- .env/prod/bot_token
- .env/prod/web
command: /app/scripts/gunicorn.sh
command: /app/scripts/prod.sh
restart: on-failure
postgres: &pg-conf
@@ -92,7 +93,7 @@ services:
- .env/prod/rmq
- .env/prod/batcher-pg
- .env/prod/batcher
- .env/prod/bot
- .env/prod/bot_token
batcher-postgres:
<<: *pg-conf

View File

@@ -14,13 +14,13 @@ services:
volumes:
- backend_media:/app/media
- backend_static:/app/static
command: /app/scripts/start.sh
command: /app/scripts/dev.sh
restart: on-failure
env_file: &backend-env-files
- .env/dev/pg
- .env/dev/back
- .env/dev/rmq
- .env/dev/bot
- .env/dev/bot_token
- .env/dev/web
ports:
- '8000:8000'
@@ -74,7 +74,7 @@ services:
- .env/dev/rmq
- .env/dev/batcher-pg
- .env/dev/batcher
- .env/dev/bot
- .env/dev/bot_token
- .env/dev/web
ports:
- '8080:8080'