From c6e60fab39db1f199561a08b6405cb7090a67ce0 Mon Sep 17 00:00:00 2001 From: Michail Kostochka Date: Fri, 13 Dec 2024 16:45:21 +0300 Subject: [PATCH] Fix backend & batcher --- .gitignore | 2 + backend/clicker/settings.py | 5 ++- backend/misc/apps.py | 4 -- backend/misc/management/__init__.py | 0 backend/misc/management/commands/__init__.py | 0 .../misc/management/commands/send_settings.py | 10 +++++ backend/scripts/gunicorn.sh | 1 + backend/scripts/start.sh | 1 + backend/users/authentication.py | 4 +- batcher/Dockerfile | 4 +- batcher/Pipfile | 11 ------ batcher/app/main.py | 28 ++++++++++++-- batcher/app/migrate.py | 2 +- batcher/app/src/config.py | 6 +-- batcher/app/src/db/__init__.py | 6 +-- batcher/app/src/db/pg/__init__.py | 2 +- ...ro.sql => 20241023_first_down_initial.sql} | 0 ...init.sql => 20241023_initial_up_first.sql} | 0 batcher/app/src/db/pg/pg.py | 23 +++++------ batcher/app/src/db/redis.py | 9 +++-- batcher/app/src/db/rmq.py | 26 +++++++------ batcher/app/src/dependencies.py | 4 +- batcher/app/src/domain/click/models.py | 6 +-- batcher/app/src/domain/click/repos/pg.py | 6 +-- batcher/app/src/domain/click/repos/redis.py | 31 ++++++++------- batcher/app/src/domain/click/repos/rmq.py | 29 ++++++++++---- batcher/app/src/domain/click/usecase.py | 11 +++--- batcher/app/src/domain/setting/repos/rmq.py | 7 +--- batcher/app/src/domain/setting/usecase.py | 16 ++++++-- batcher/app/src/routers/click.py | 18 ++++----- batcher/migrate.sh | 0 batcher/requirements.txt | 1 + bot/.DS_Store | Bin 6148 -> 0 bytes bot/__pycache__/create_bot.cpython-312.pyc | Bin 1265 -> 0 bytes bot/__pycache__/memcached_def.cpython-312.pyc | Bin 875 -> 0 bytes bot/__pycache__/messages.cpython-312.pyc | Bin 736 -> 0 bytes bot/__pycache__/req.cpython-312.pyc | Bin 601 -> 0 bytes bot/__pycache__/wrapper.cpython-312.pyc | Bin 1134 -> 0 bytes bot/create_bot.py | 2 +- .../__pycache__/__init__.cpython-312.pyc | Bin 209 -> 0 bytes .../__pycache__/instruction.cpython-312.pyc | Bin 6149 -> 0 bytes .../register_handlers.cpython-312.pyc | Bin 1941 -> 0 bytes .../__pycache__/start_handler.cpython-312.pyc | Bin 7414 -> 0 bytes bot/pictures/.DS_Store | Bin 6148 -> 0 bytes docker-compose-prod.yml | 2 - docker-compose.yml | 2 - frontend/.DS_Store | Bin 6148 -> 0 bytes frontend/public/.DS_Store | Bin 6148 -> 0 bytes frontend/public/assets/.DS_Store | Bin 6148 -> 0 bytes frontend/src/.DS_Store | Bin 6148 -> 0 bytes frontend/src/assets/.DS_Store | Bin 6148 -> 0 bytes .../shared/Pages/StoragePage/StoragePage.tsx | 2 +- .../Pages/WrongSourcePage/WrongSourcePage.tsx | 2 +- nginx/nginx.conf | 36 +++++++++--------- 54 files changed, 182 insertions(+), 137 deletions(-) create mode 100644 backend/misc/management/__init__.py create mode 100644 backend/misc/management/commands/__init__.py create mode 100644 backend/misc/management/commands/send_settings.py delete mode 100644 batcher/Pipfile rename batcher/app/src/db/pg/migrations/{20241023_init_down_zero.sql => 20241023_first_down_initial.sql} (100%) rename batcher/app/src/db/pg/migrations/{20241023_zero_up_init.sql => 20241023_initial_up_first.sql} (100%) mode change 100644 => 100755 batcher/migrate.sh delete mode 100644 bot/.DS_Store delete mode 100644 bot/__pycache__/create_bot.cpython-312.pyc delete mode 100644 bot/__pycache__/memcached_def.cpython-312.pyc delete mode 100644 bot/__pycache__/messages.cpython-312.pyc delete mode 100644 bot/__pycache__/req.cpython-312.pyc delete mode 100644 bot/__pycache__/wrapper.cpython-312.pyc delete mode 100644 
bot/handlers/__pycache__/__init__.cpython-312.pyc delete mode 100644 bot/handlers/__pycache__/instruction.cpython-312.pyc delete mode 100644 bot/handlers/__pycache__/register_handlers.cpython-312.pyc delete mode 100644 bot/handlers/__pycache__/start_handler.cpython-312.pyc delete mode 100644 bot/pictures/.DS_Store delete mode 100644 frontend/.DS_Store delete mode 100644 frontend/public/.DS_Store delete mode 100644 frontend/public/assets/.DS_Store delete mode 100644 frontend/src/.DS_Store delete mode 100644 frontend/src/assets/.DS_Store diff --git a/.gitignore b/.gitignore index 7617c11..2b0c955 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,5 @@ .DS_Store __pycache__/ *.py[cod] +celerybeat-schedule +backend/static diff --git a/backend/clicker/settings.py b/backend/clicker/settings.py index 6379648..ca18d57 100644 --- a/backend/clicker/settings.py +++ b/backend/clicker/settings.py @@ -25,7 +25,10 @@ SECRET_KEY = os.getenv('SECRET_KEY', 'django-insecure-4nww@d-th@7^(chggt5q+$e*d_ DEBUG = int(os.getenv('DEBUG', 0)) PROD = 1 - DEBUG -ALLOWED_HOSTS = ['crowngame.ru', 'backend', '127.0.0.1'] +ALLOWED_HOSTS = ['backend', '127.0.0.1'] +if app_url := os.getenv('APP_URL', None): + ALLOWED_HOSTS.append(app_url) + SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') USE_X_FORWARDED_HOST = True USE_X_FORWARDED_PORT = True diff --git a/backend/misc/apps.py b/backend/misc/apps.py index 2528402..a9c3da5 100644 --- a/backend/misc/apps.py +++ b/backend/misc/apps.py @@ -6,9 +6,5 @@ class MiscConfig(AppConfig): name = "misc" def ready(self): - from .celery import deliver_setting as deliver_setting_celery from .signals import deliver_setting - from misc.models import Setting - for setting in Setting.objects.all(): - deliver_setting_celery.delay(setting.name) diff --git a/backend/misc/management/__init__.py b/backend/misc/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/misc/management/commands/__init__.py b/backend/misc/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/misc/management/commands/send_settings.py b/backend/misc/management/commands/send_settings.py new file mode 100644 index 0000000..ab49748 --- /dev/null +++ b/backend/misc/management/commands/send_settings.py @@ -0,0 +1,10 @@ +from django.core.management.base import BaseCommand, CommandError +from misc.celery import deliver_setting +from misc.models import Setting + +class Command(BaseCommand): + help = 'Sends all settings to rmq for batcher to consume' + + def handle(self, *args, **options): + for setting in Setting.objects.all(): + deliver_setting.delay(setting.name) diff --git a/backend/scripts/gunicorn.sh b/backend/scripts/gunicorn.sh index b8a9f9e..a48058a 100644 --- a/backend/scripts/gunicorn.sh +++ b/backend/scripts/gunicorn.sh @@ -5,5 +5,6 @@ set -o pipefail set -o nounset python manage.py migrate +python manage.py send_settings python manage.py collectstatic --noinput --verbosity 0 gunicorn clicker.wsgi -b 0.0.0.0:8000 -w 17 --timeout 600 --chdir=/app --access-logfile - diff --git a/backend/scripts/start.sh b/backend/scripts/start.sh index e8171a5..11001f4 100644 --- a/backend/scripts/start.sh +++ b/backend/scripts/start.sh @@ -6,5 +6,6 @@ set -o nounset set -o xtrace python manage.py migrate +python manage.py send_settings python manage.py collectstatic --noinput --verbosity 0 python manage.py runserver 0.0.0.0:8000 diff --git a/backend/users/authentication.py b/backend/users/authentication.py index 4d426b5..93eded2 100644 --- 
a/backend/users/authentication.py +++ b/backend/users/authentication.py @@ -33,7 +33,7 @@ class TelegramValidationAuthentication(authentication.BaseAuthentication): split_res = base64.b64decode(token).decode('utf-8').split(':') try: data_check_string = ':'.join(split_res[:-1]).strip().replace('/', '\\/') - hash = split_res[-1] + _hash = split_res[-1] except IndexError: raise exceptions.AuthenticationFailed('Invalid token format') secret = hmac.new( @@ -46,7 +46,7 @@ class TelegramValidationAuthentication(authentication.BaseAuthentication): msg=data_check_string.encode('utf-8'), digestmod=hashlib.sha256 ).hexdigest() - if hash != actual_hash: + if _hash != actual_hash: raise exceptions.AuthenticationFailed('Invalid token (hash check failed)') data_dict = dict([x.split('=') for x in data_check_string.split('\n')])
diff --git a/batcher/Dockerfile b/batcher/Dockerfile index 7432fed..7c5b6e8 100644 --- a/batcher/Dockerfile +++ b/batcher/Dockerfile @@ -8,4 +8,6 @@ RUN pip install --no-cache-dir --upgrade -r /batcher/requirements.txt COPY ./app /batcher/app -CMD ["fastapi", "run", "app/main.py", "--port", "$HTTP_PORT"] \ No newline at end of file +ENV PYTHONPATH="${PYTHONPATH}:/batcher/app" + +CMD uvicorn app.main:app --host 0.0.0.0 --port "${HTTP_PORT}" \ No newline at end of file
diff --git a/batcher/Pipfile b/batcher/Pipfile deleted file mode 100644 index 645a67e..0000000 --- a/batcher/Pipfile +++ /dev/null @@ -1,11 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] - -[dev-packages] - -[requires] -python_version = "3.12"
diff --git a/batcher/app/main.py b/batcher/app/main.py index 9e6f5bd..76f5593 100644 --- a/batcher/app/main.py +++ b/batcher/app/main.py @@ -1,9 +1,13 @@ +import aio_pika from fastapi import Depends, FastAPI, Request, Response from fastapi.middleware.cors import CORSMiddleware +from functools import partial from starlette.exceptions import HTTPException -from .src.routers.api import router as router_api -from .src.routers.handlers import http_error_handler +from app.src.routers.api import router as router_api +from app.src.routers.handlers import http_error_handler +from app.src.domain.setting import launch_consumer +from app.src.db import connect_pg, connect_redis, get_connection, get_channel, get_rmq def get_application() -> FastAPI: @@ -23,5 +27,23 @@ def get_application() -> FastAPI: return application +app = get_application() + +@app.on_event("startup") +async def startup(): + launch_consumer(get_connection) + + app.state.pg_pool = await connect_pg() + + app.state.redis_pool = connect_redis() + + rmq_conn_pool = aio_pika.pool.Pool(get_connection, max_size=2) + rmq_chan_pool = aio_pika.pool.Pool(partial(get_channel, conn_pool=rmq_conn_pool), max_size=10) + app.state.rmq_chan_pool = rmq_chan_pool + + +@app.on_event("shutdown") +async def shutdown(): + await app.state.pg_pool.close() + await app.state.redis_pool.disconnect() -app = get_application() \ No newline at end of file
diff --git a/batcher/app/migrate.py b/batcher/app/migrate.py index 7643a71..b97289b 100644 --- a/batcher/app/migrate.py +++ b/batcher/app/migrate.py @@ -1,6 +1,6 @@ import sys import asyncio -from .src.db.pg import migrate +from app.src.db.pg import migrate if __name__ == '__main__':
diff --git a/batcher/app/src/config.py b/batcher/app/src/config.py index dce7c4f..bb2ec0d 100644 --- a/batcher/app/src/config.py +++ b/batcher/app/src/config.py @@ -2,7 +2,7 @@ from starlette.config import Config from starlette.datastructures import Secret from functools import
lru_cache -config = Config('.env') +config = Config() REDIS_USER = config('REDIS_USER') @@ -11,8 +11,6 @@ REDIS_PORT = config('REDIS_PORT', cast=int) REDIS_HOST = config('REDIS_HOST') REDIS_DB = config('REDIS_DB') -HTTP_PORT = config('HTTP_PORT', cast=int) - PG_HOST = config('POSTGRES_HOST') PG_PORT = config('POSTGRES_PORT', cast=int) PG_USER = config('POSTGRES_USER') @@ -22,7 +20,7 @@ PG_DB = config('POSTGRES_DB') RMQ_HOST = config('RABBITMQ_HOST') RMQ_PORT = config('RABBITMQ_PORT', cast=int) RMQ_USER = config('RABBITMQ_DEFAULT_USER') -RMQ_PASSWORD = config('RABBITMQ_DEFAULT_PASSWORD', cast=Secret) +RMQ_PASSWORD = config('RABBITMQ_DEFAULT_PASS', cast=Secret) TG_TOKEN = config('TG_TOKEN', cast=Secret) diff --git a/batcher/app/src/db/__init__.py b/batcher/app/src/db/__init__.py index ac05ff3..ce5d7cb 100644 --- a/batcher/app/src/db/__init__.py +++ b/batcher/app/src/db/__init__.py @@ -1,3 +1,3 @@ -from .pg import get_pg -from .redis import get_redis -from .rmq import get_rmq \ No newline at end of file +from .pg import get_pg, connect_pg +from .redis import get_redis, connect_redis +from .rmq import get_rmq, get_channel, get_connection \ No newline at end of file diff --git a/batcher/app/src/db/pg/__init__.py b/batcher/app/src/db/pg/__init__.py index 1e976f8..54858c6 100644 --- a/batcher/app/src/db/pg/__init__.py +++ b/batcher/app/src/db/pg/__init__.py @@ -1 +1 @@ -from .pg import get_pg, migrate +from .pg import get_pg, migrate, connect_pg diff --git a/batcher/app/src/db/pg/migrations/20241023_init_down_zero.sql b/batcher/app/src/db/pg/migrations/20241023_first_down_initial.sql similarity index 100% rename from batcher/app/src/db/pg/migrations/20241023_init_down_zero.sql rename to batcher/app/src/db/pg/migrations/20241023_first_down_initial.sql diff --git a/batcher/app/src/db/pg/migrations/20241023_zero_up_init.sql b/batcher/app/src/db/pg/migrations/20241023_initial_up_first.sql similarity index 100% rename from batcher/app/src/db/pg/migrations/20241023_zero_up_init.sql rename to batcher/app/src/db/pg/migrations/20241023_initial_up_first.sql diff --git a/batcher/app/src/db/pg/pg.py b/batcher/app/src/db/pg/pg.py index f998230..757fdc2 100644 --- a/batcher/app/src/db/pg/pg.py +++ b/batcher/app/src/db/pg/pg.py @@ -1,32 +1,29 @@ -from batcher.app.src.config import PG_HOST, PG_PORT, PG_USER, PG_PASSWORD, PG_DB +from app.src.config import PG_HOST, PG_PORT, PG_USER, PG_PASSWORD, PG_DB from pathlib import Path -import asyncio +from starlette.requests import Request import asyncpg from asyncpg_trek import plan, execute, Direction from asyncpg_trek.asyncpg import AsyncpgBackend DB_URL = f'postgresql://{PG_USER}:{str(PG_PASSWORD)}@{PG_HOST}:{PG_PORT}/{PG_DB}' -MIGRATIONS_DIR = Path(__file__) / "migrations" +MIGRATIONS_DIR = Path(__file__).parent.resolve() / "migrations" -async def connect_db() -> asyncpg.Pool: +async def connect_pg() -> asyncpg.Pool: return await asyncpg.create_pool(DB_URL) -pool = asyncio.run(connect_db()) - - -async def get_pg() -> asyncpg.Connection: - async with pool.acquire() as conn: +async def get_pg(request: Request) -> asyncpg.Connection: + async with request.app.state.pg_pool.acquire() as conn: yield conn async def migrate( - target_revision: str, + target_revision: str, ) -> None: + pool = await connect_pg() async with pool.acquire() as conn: backend = AsyncpgBackend(conn) - async with backend.connect() as conn: - planned = await plan(conn, backend, MIGRATIONS_DIR, target_revision=target_revision, direction=Direction.up) - await execute(conn, backend, planned) + planned = 
await plan(backend, MIGRATIONS_DIR, target_revision=target_revision, direction=Direction.up) + await execute(backend, planned) diff --git a/batcher/app/src/db/redis.py b/batcher/app/src/db/redis.py index 4430e3f..10a0577 100644 --- a/batcher/app/src/db/redis.py +++ b/batcher/app/src/db/redis.py @@ -1,11 +1,14 @@ -import asyncio +from starlette.requests import Request import redis.asyncio as redis from ..config import REDIS_HOST, REDIS_PORT, REDIS_USER, REDIS_PASSWORD, REDIS_DB -r = asyncio.run(redis.Redis(host=REDIS_HOST, port=REDIS_PORT, username=REDIS_USER, password=REDIS_PASSWORD, db=REDIS_DB)) +def connect_redis() -> redis.ConnectionPool: + return redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT, username=REDIS_USER, password=str(REDIS_PASSWORD), db=REDIS_DB) -def get_redis() -> redis.Redis: +async def get_redis(request: Request) -> redis.Redis: + r = redis.Redis(connection_pool=request.app.state.redis_pool) yield r + await r.aclose() diff --git a/batcher/app/src/db/rmq.py b/batcher/app/src/db/rmq.py index eac9504..4b86267 100644 --- a/batcher/app/src/db/rmq.py +++ b/batcher/app/src/db/rmq.py @@ -1,26 +1,28 @@ -import aio_pika -from aio_pika.abc import AbstractRobustConnection import asyncio +import aio_pika +from starlette.requests import Request +from aio_pika.abc import AbstractRobustConnection from ..config import RMQ_HOST, RMQ_PORT, RMQ_USER, RMQ_PASSWORD +fqdn = f'amqp://{RMQ_USER}:{str(RMQ_PASSWORD)}@{RMQ_HOST}:{RMQ_PORT}/' + async def get_connection() -> AbstractRobustConnection: - return await aio_pika.connect_robust(f'amqp://{RMQ_USER}:{RMQ_PASSWORD}@{RMQ_HOST}:{RMQ_PORT}/') + while True: + try: + conn = await aio_pika.connect_robust(fqdn) + return conn + except ConnectionError: + await asyncio.sleep(2) -conn_pool = aio_pika.pool.Pool(get_connection, max_size=2) - - -async def get_channel() -> aio_pika.Channel: +async def get_channel(conn_pool: AbstractRobustConnection) -> aio_pika.Channel: async with conn_pool.acquire() as connection: return await connection.channel() -chan_pool = aio_pika.pool.Pool(get_channel, max_size=10) - - -async def get_rmq() -> aio_pika.Channel: - async with chan_pool.acquire() as chan: +async def get_rmq(request: Request) -> aio_pika.Channel: + async with request.app.state.rmq_chan_pool.acquire() as chan: yield chan diff --git a/batcher/app/src/dependencies.py b/batcher/app/src/dependencies.py index faee168..b1356ff 100644 --- a/batcher/app/src/dependencies.py +++ b/batcher/app/src/dependencies.py @@ -25,7 +25,7 @@ async def get_token_header(authorization: str = Header()) -> (int, str): raise HTTPException(status_code=403, detail='Unauthorized') secret = hmac.new( 'WebAppData'.encode(), - TG_TOKEN.encode('utf-8'), + str(TG_TOKEN).encode('utf-8'), digestmod=hashlib.sha256 ).digest() actual_hash = hmac.new( @@ -33,7 +33,7 @@ async def get_token_header(authorization: str = Header()) -> (int, str): msg=data_check_string.encode('utf-8'), digestmod=hashlib.sha256 ).hexdigest() - if hash != actual_hash: + if _hash != actual_hash: raise HTTPException(status_code=403, detail='Unauthorized') data_dict = dict([x.split('=') for x in data_check_string.split('\n')]) diff --git a/batcher/app/src/domain/click/models.py b/batcher/app/src/domain/click/models.py index 300b121..bc2c90c 100644 --- a/batcher/app/src/domain/click/models.py +++ b/batcher/app/src/domain/click/models.py @@ -4,6 +4,6 @@ import pydantic class Click(pydantic.BaseModel): - UserID: int - DateTime: datetime.datetime - Value: decimal.Decimal + userId: int + dateTime: datetime.datetime + value: 
decimal.Decimal diff --git a/batcher/app/src/domain/click/repos/pg.py b/batcher/app/src/domain/click/repos/pg.py index b1ad83c..0c1ae28 100644 --- a/batcher/app/src/domain/click/repos/pg.py +++ b/batcher/app/src/domain/click/repos/pg.py @@ -33,13 +33,13 @@ async def store(conn: Connection, click: Click) -> int: RETURNING id ; ''' - return await conn.fetchval(query, click.UserID, click.DateTime, click.Value) + return await conn.fetchval(query, click.userId, click.dateTime, click.value) async def bulk_store_copy(conn: Connection, click: Click, count: int) -> None: - args = [(click.UserID, click.DateTime. click.Value) for _ in range(count)] + args = [(click.userId, click.dateTime, click.value) for _ in range(count)] query = ''' - INSERT INTO clicks(user_id, time, values, expiry_info) + INSERT INTO clicks(user_id, time, value, expiry_info) VALUES($1, $2, $3, '{"period_24": false, "period_168": false}') ; ''' diff --git a/batcher/app/src/domain/click/repos/redis.py b/batcher/app/src/domain/click/repos/redis.py index 5a64365..68b67c3 100644 --- a/batcher/app/src/domain/click/repos/redis.py +++ b/batcher/app/src/domain/click/repos/redis.py @@ -5,10 +5,10 @@ import redis.asyncio as redis async def get_period_sum(r: redis.Redis, user_id: int, period: int) -> decimal.Decimal: - sum_str = await r.get(f'period_{period}_user_{user_id}') - if sum_str is None: + sum_bytes = await r.get(f'period_{period}_user_{user_id}') + if sum_bytes is None: return decimal.Decimal(0) - return decimal.Decimal(sum_str) + return decimal.Decimal(sum_bytes.decode()) async def incr_period_sum(r: redis.Redis, user_id: int, _period: int, value: decimal.Decimal) -> decimal.Decimal: @@ -16,10 +16,10 @@ async def incr_period_sum(r: redis.Redis, user_id: int, _period: int, value: dec async def get_max_period_sum(r: redis.Redis, _period: int) -> decimal.Decimal: - max_sum_str = await r.get(f'max_period_{_period}') - if max_sum_str is None: + max_sum_bytes = await r.get(f'max_period_{_period}') + if max_sum_bytes is None: return decimal.Decimal(0) - return decimal.Decimal(max_sum_str) + return decimal.Decimal(max_sum_bytes.decode()) async def compare_max_period_sum(r: redis.Redis, _period: int, _sum: decimal.Decimal) -> None: @@ -64,10 +64,10 @@ async def decr_energy(r: redis.Redis, user_id: int, amount: int) -> (int, int): async def get_global_average(r: redis.Redis) -> decimal.Decimal: - avg_str = await r.get('global_average') - if avg_str is None: + avg_bytes = await r.get('global_average') + if avg_bytes is None: return decimal.Decimal(0) - return decimal.Decimal(avg_str) + return decimal.Decimal(avg_bytes.decode()) async def update_global_average(r: redis.Redis, value_to_add: decimal.Decimal) -> decimal.Decimal: @@ -75,14 +75,14 @@ async def update_global_average(r: redis.Redis, value_to_add: decimal.Decimal) - local delta = tonumber(ARGV[1]) / tonumber(redis.call('GET', KEYS[1])) return redis.call('INCRBYFLOAT', KEYS[2], delta) ''') - return decimal.Decimal(await _script(keys=["user_count", "global_average"], args=[float(value_to_add)])) + return decimal.Decimal((await _script(keys=["user_count", "global_average"], args=[float(value_to_add)])).decode()) async def get_user_total(r: redis.Redis, user_id: int) -> decimal.Decimal: - total_str = await r.get(f'total_{user_id}') - if total_str is None: + total_bytes = await r.get(f'total_{user_id}') + if total_bytes is None: return decimal.Decimal(0) - return decimal.Decimal(total_str) + return decimal.Decimal(total_bytes.decode()) async def incr_user_count_if_no_clicks(r: 
redis.Redis, user_id: int) -> int: @@ -111,7 +111,10 @@ async def incr_user_total(r: redis.Redis, user_id: int, value: decimal.Decimal) async def get_user_session(r: redis.Redis, user_id: int) -> Optional[str]: - return await r.get(f'session_{user_id}') + session_bytes = await r.get(f'session_{user_id}') + if session_bytes is None: + return None + return session_bytes.decode() async def set_user_session(r: redis.Redis, user_id: int, token: str) -> None: diff --git a/batcher/app/src/domain/click/repos/rmq.py b/batcher/app/src/domain/click/repos/rmq.py index a016a0d..598bc16 100644 --- a/batcher/app/src/domain/click/repos/rmq.py +++ b/batcher/app/src/domain/click/repos/rmq.py @@ -1,6 +1,7 @@ import json import aio_pika import uuid +from datetime import datetime from ..models import Click @@ -9,14 +10,28 @@ CELERY_QUEUE_NAME = "celery" CLICK_TASK_NAME = "clicks.celery.click.handle_click" -def send_click_batch_copy(chan: aio_pika.Channel, click: Click, count: int): +async def send_click_batch_copy(chan: aio_pika.Channel, click: Click, count: int): + args = (click.userId, int(click.dateTime.timestamp() * 1e3), str(click.value), count) await chan.default_exchange.publish( - message=aio_pika.Message(json.dumps({ - 'id': str(uuid.uuid4()), - 'task': CLICK_TASK_NAME, - 'args': [click.UserID, int(click.DateTime.timestamp() * 1e3), str(click.Value), count], - 'kwargs': dict(), - }).encode('utf-8')), + message=aio_pika.Message( + body=json.dumps([ + args, + {}, + {"callbacks": None, "errbacks": None, "chain": None, "chord": None}, + ]).encode('utf-8'), + headers={ + 'task': CLICK_TASK_NAME, + 'lang': 'py', + 'argsrepr': repr(args), + 'kwargsrepr': '{}', + 'id': str(uuid.uuid4()), + 'eta': datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f'), + 'retries': 0, + }, + content_type='application/json', + content_encoding='utf-8', + ), routing_key=CELERY_QUEUE_NAME, mandatory=False, + immediate=False, ) diff --git a/batcher/app/src/domain/click/usecase.py b/batcher/app/src/domain/click/usecase.py index 2158dc8..ae65cf7 100644 --- a/batcher/app/src/domain/click/usecase.py +++ b/batcher/app/src/domain/click/usecase.py @@ -6,7 +6,7 @@ import redis.asyncio as redis import aio_pika import asyncpg -from batcher.app.src.domain.setting import get_setting +from app.src.domain.setting import get_setting from .repos.redis import ( get_period_sum, incr_period_sum, get_max_period_sum, get_user_total, get_global_average, incr_user_count_if_no_clicks, update_global_average, incr_user_total, compare_max_period_sum, @@ -35,17 +35,16 @@ async def add_click_batch_copy(r: redis.Redis, pg: asyncpg.Connection, rmq: aio await compare_max_period_sum(r, period, new_period_sum) click = Click( - UserID=user_id, - DateTime=datetime.now(), - Value=_click_value, - + userId=user_id, + dateTime=datetime.now(), + value=_click_value, ) # insert click await bulk_store_copy(pg, click, count) # send click to backend - send_click_batch_copy(rmq, click, count) + await send_click_batch_copy(rmq, click, count) return click diff --git a/batcher/app/src/domain/setting/repos/rmq.py b/batcher/app/src/domain/setting/repos/rmq.py index 52d9de8..bbdd515 100644 --- a/batcher/app/src/domain/setting/repos/rmq.py +++ b/batcher/app/src/domain/setting/repos/rmq.py @@ -1,15 +1,12 @@ import decimal import json - import aio_pika from typing import Callable SETTING_QUEUE_NAME = "settings" -SETTING_TASK_NAME = "misc.celery.deliver_setting.deliver_setting" - -async def consume_setting_updates(update_setting_func: Callable[[str, decimal.Decimal], None], chan: 
aio_pika.Channel): +async def consume_setting_updates(set_setting_func: Callable[[str, decimal.Decimal], None], chan: aio_pika.abc.AbstractChannel): queue = await chan.get_queue(SETTING_QUEUE_NAME) async with queue.iterator() as queue_iter: @@ -17,4 +14,4 @@ async def consume_setting_updates(update_setting_func: Callable[[str, decimal.De async with msg.process(): settings = json.loads(msg.body.decode('utf-8')) for name, value in settings.items(): - update_setting_func(name, value) + set_setting_func(name, decimal.Decimal(value)) diff --git a/batcher/app/src/domain/setting/usecase.py b/batcher/app/src/domain/setting/usecase.py index 11865a4..babb8a1 100644 --- a/batcher/app/src/domain/setting/usecase.py +++ b/batcher/app/src/domain/setting/usecase.py @@ -1,16 +1,24 @@ import decimal import threading - +import asyncio +from collections.abc import Callable, Awaitable import aio_pika -from .repos.in_memory_storage import get_setting as ims_get_setting +from .repos.in_memory_storage import set_setting, get_setting as ims_get_setting from .repos.rmq import consume_setting_updates + def get_setting(name: str) -> decimal.Decimal: return ims_get_setting(name) +async def start_thread(rmq_connect_func: Callable[[], Awaitable[aio_pika.abc.AbstractRobustConnection]], *args): + conn = await rmq_connect_func() + async with conn: + chan = await conn.channel() + await consume_setting_updates(set_setting, chan) -def launch_consumer(rmq: aio_pika.Connection): - t = threading.Thread(target=consume_setting_updates, args=(ims_get_setting, rmq)) + +def launch_consumer(rmq_connect_func: Callable[[], Awaitable[aio_pika.abc.AbstractRobustConnection]]): + t = threading.Thread(target=asyncio.run, args=(start_thread(rmq_connect_func),)) t.start() diff --git a/batcher/app/src/routers/click.py b/batcher/app/src/routers/click.py index 65a2412..9d66857 100644 --- a/batcher/app/src/routers/click.py +++ b/batcher/app/src/routers/click.py @@ -2,7 +2,7 @@ import aio_pika import asyncpg import redis from fastapi import APIRouter, Depends, HTTPException -from typing import Annotated +from typing import Annotated, Tuple from ..domain.click import ( ClickResponse, BatchClickRequest, EnergyResponse, ClickValueResponse, add_click_batch_copy, check_registration, check_energy, get_energy, click_value, delete_user_info @@ -22,9 +22,9 @@ router = APIRouter( @router.post("/batch-click/", response_model=ClickResponse, status_code=200) -async def batch_click(req: BatchClickRequest, auth_info: Annotated[(int, str), Depends(get_token_header)], pg: Annotated[asyncpg.Connection, Depends(get_pg)], r: Annotated[redis.Redis, Depends(get_redis)], rmq: Annotated[aio_pika.Channel, Depends(get_rmq)]): +async def batch_click(req: BatchClickRequest, auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], pg: Annotated[asyncpg.Connection, Depends(get_pg)], r: Annotated[redis.Redis, Depends(get_redis)], rmq: Annotated[aio_pika.Channel, Depends(get_rmq)]): user_id, token = auth_info - if not check_registration(r, user_id, token, BACKEND_URL): + if not await check_registration(r, user_id, token, BACKEND_URL): raise HTTPException(status_code=403, detail='Unauthorized') _energy, spent = await check_energy(r, user_id, req.count, token) @@ -39,9 +39,9 @@ async def batch_click(req: BatchClickRequest, auth_info: Annotated[(int, str), D @router.get("/energy", response_model=EnergyResponse, status_code=200) -async def energy(auth_info: Annotated[(int, str), Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)]): +async def 
energy(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)]): user_id, token = auth_info - if not check_registration(r, user_id, token, BACKEND_URL): + if not await check_registration(r, user_id, token, BACKEND_URL): raise HTTPException(status_code=403, detail='Unauthorized') _energy = await get_energy(r, user_id, token) @@ -51,9 +51,9 @@ async def energy(auth_info: Annotated[(int, str), Depends(get_token_header)], r: @router.get('/coefficient', response_model=ClickValueResponse, status_code=200) -async def coefficient(auth_info: Annotated[(int, str), Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]): +async def coefficient(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]): user_id, token = auth_info - if not check_registration(r, user_id, token, BACKEND_URL): + if not await check_registration(r, user_id, token, BACKEND_URL): raise HTTPException(status_code=403, detail='Unauthorized') value = await click_value(r, pg, user_id) @@ -63,9 +63,9 @@ async def coefficient(auth_info: Annotated[(int, str), Depends(get_token_header) @router.delete('/internal/user', status_code=204) -async def delete_user(auth_info: Annotated[(int, str), Depends(get_token_header())], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]): +async def delete_user(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]): user_id, token = auth_info - if not check_registration(r, user_id, token, BACKEND_URL): + if not await check_registration(r, user_id, token, BACKEND_URL): raise HTTPException(status_code=403, detail='Unauthorized') await delete_user_info(r, pg, user_id) \ No newline at end of file
diff --git a/batcher/migrate.sh b/batcher/migrate.sh old mode 100644 new mode 100755
diff --git a/batcher/requirements.txt b/batcher/requirements.txt index 84debec..9849d19 100644 --- a/batcher/requirements.txt +++ b/batcher/requirements.txt @@ -27,5 +27,6 @@ starlette==0.40.0 typing_extensions==4.12.2 tzdata==2024.2 urllib3==2.2.3 +uvicorn==0.32.1 vine==5.1.0 yarl==1.15.5
diff --git a/bot/.DS_Store b/bot/.DS_Store deleted file mode 100644 index c7b068cba8aa93467ae4988ebbca3f4fc25177fb..0000000000000000000000000000000000000000 Binary files a/bot/.DS_Store and /dev/null differ
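The batcher is now served by uvicorn (added to requirements.txt above) instead of the fastapi CLI. A minimal local-run sketch equivalent to the new Dockerfile CMD, assuming only the HTTP_PORT variable that CMD already reads; the 8000 fallback is an assumption for local use:

    # run_local.py -- hypothetical helper mirroring:
    #   uvicorn app.main:app --host 0.0.0.0 --port "${HTTP_PORT}"
    import os

    import uvicorn

    if __name__ == "__main__":
        # HTTP_PORT is supplied by the deployment environment; 8000 is only a local fallback.
        uvicorn.run("app.main:app", host="0.0.0.0", port=int(os.getenv("HTTP_PORT", "8000")))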
[binary diff for bot/__pycache__/create_bot.cpython-312.pyc omitted]
diff --git a/bot/__pycache__/memcached_def.cpython-312.pyc b/bot/__pycache__/memcached_def.cpython-312.pyc deleted file mode 100644 index 7f834b70c8c2cc4682ae22845d7d9bf8a6db907c..0000000000000000000000000000000000000000 Binary files a/bot/__pycache__/memcached_def.cpython-312.pyc and /dev/null differ
diff --git a/bot/__pycache__/messages.cpython-312.pyc b/bot/__pycache__/messages.cpython-312.pyc deleted file mode 100644 index 3ff6d4d71a99a58ad2bd992c0e0ec4f5090e0965..0000000000000000000000000000000000000000 Binary files a/bot/__pycache__/messages.cpython-312.pyc and /dev/null differ
diff --git a/bot/__pycache__/req.cpython-312.pyc b/bot/__pycache__/req.cpython-312.pyc deleted file mode 100644 index 767513fdb7d3b313641eda4ba85ef767a0799ecd..0000000000000000000000000000000000000000 Binary files a/bot/__pycache__/req.cpython-312.pyc and /dev/null differ
diff --git a/bot/__pycache__/wrapper.cpython-312.pyc b/bot/__pycache__/wrapper.cpython-312.pyc deleted file mode 100644 index ee3a9b6439f2d41be7796ac09d5b520223bfb00f..0000000000000000000000000000000000000000 Binary files a/bot/__pycache__/wrapper.cpython-312.pyc and /dev/null differ
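For context on the new message format in batcher/app/src/domain/click/repos/rmq.py above: the body and headers now follow Celery's message protocol 2, so the backend worker can decode it as an ordinary task call. A sketch of what the receiving task's signature is expected to look like; only the task name and the positional argument order (user id, millisecond timestamp, value as a string, count) come from the patch, the parameter names are assumptions:

    # Hypothetical backend-side counterpart of CLICK_TASK_NAME.
    from celery import shared_task

    @shared_task(name="clicks.celery.click.handle_click")
    def handle_click(user_id: int, timestamp_ms: int, value: str, count: int) -> None:
        ...  # the real implementation lives in the backend's clicks app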
diff --git a/bot/create_bot.py b/bot/create_bot.py index a17ec01..a0f79fd 100644 --- a/bot/create_bot.py +++ b/bot/create_bot.py @@ -5,7 +5,7 @@ token = os.getenv('TG_TOKEN', '7748003961:AAEIXu8NFICPabNaQP5JQ3AcY79nZdUbKdI') api_token = os.getenv('API_TOKEN', 'b43fa8ccea5b6dd5e889a8ad3890ce14ce36a8bc') # TODO: remove backend_url = os.getenv('BACKEND_URL', 'http://backend:8000') request_url = f'{backend_url}/api' -url = os.getenv('URL', 'https://google.com') +url = os.getenv('APP_URL', 'https://google.com') bot_name = os.getenv('BOT_NAME', 'https://t.me/danyadjan_test_bot') bucket_name = 'brawny-basket'
diff --git a/bot/handlers/__pycache__/__init__.cpython-312.pyc b/bot/handlers/__pycache__/__init__.cpython-312.pyc deleted file mode 100644 index f3499953a23fe5c8dc06adf2943fce24509a8f53..0000000000000000000000000000000000000000 Binary files a/bot/handlers/__pycache__/__init__.cpython-312.pyc and /dev/null differ
diff --git a/bot/handlers/__pycache__/instruction.cpython-312.pyc b/bot/handlers/__pycache__/instruction.cpython-312.pyc deleted file mode 100644 index 3970864c5cf6c52fac56f33858e838900877549a..0000000000000000000000000000000000000000 Binary files a/bot/handlers/__pycache__/instruction.cpython-312.pyc and /dev/null differ
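The settings consumer in batcher/app/src/domain/setting/repos/rmq.py above expects each message on the settings queue to be a JSON object mapping setting names to values that decimal.Decimal can parse. A sketch of a compatible test publisher, assuming the queue already exists; the URL and the example setting name/value are made up:

    # Hypothetical manual publisher for the "settings" queue consumed by the batcher.
    import asyncio
    import json

    import aio_pika

    async def publish_setting(amqp_url: str) -> None:
        conn = await aio_pika.connect_robust(amqp_url)
        async with conn:
            chan = await conn.channel()
            await chan.default_exchange.publish(
                aio_pika.Message(body=json.dumps({"click_value": "0.01"}).encode("utf-8")),
                routing_key="settings",
            )

    if __name__ == "__main__":
        asyncio.run(publish_setting("amqp://guest:guest@localhost:5672/"))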
zRgccJ|3nRTPq+7|w#sQ+B+W&pxX6M`v35Kx@Zp%;Z_#(FWh z4IAre1)RQ>cXQw0r5O?oX9mvFZM+4fcnmtbwZ z0zI5U4}XE`?lK4;V(tR(A?Dwx;x<=+J-62tLRIU=56o{y&ay%4Z4>7x8MMqhOgMqZ z_NHrhrfPTILG)?9kiilj6W^2GmF5wpKW6Xf?YzZ>E63NT>vpH=c7KiFLD%ztoEl~X diff --git a/bot/handlers/__pycache__/register_handlers.cpython-312.pyc b/bot/handlers/__pycache__/register_handlers.cpython-312.pyc deleted file mode 100644 index efb8663f2b8f0a493eb1f8d733e6f1b39c0dfcc3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1941 zcmbtUO=ufe5Pna3TFH`S=f^hKPD+yyx1wP6)Ywi5E-emrsHoF~ggtb--d!uJ?2q$y zl^_Tug+k9gxrZKnY)E>K!PFEAHS`i;8I?zT%BeRO2Ld^D=IySm$UembJ8$01?3;P> zzL~#^#XJG}{kJpKi;R$$xJf7#3;Qb|tP+!$)FCcaDMdNuq};TcmU0^OOl4ALPR3PK z#m%Z&H>c*LF5~pMc{T4A)B@}ki!P_k><>)skL4W5#aQkGd7zOm75>HWB`TEbem%4} zNZHSQ*VR1}L^@YWi=q+jwII}aD2A$5sJXiBX|CnfHQfsySX_+vXqVwEZN)aiI=4zG zaoYBRkk^gS_VK?U(5sdhHgv~X(v7=;)@2q7mzfweE(|W|_}l*&gjEuDY!AtkSf(UI zFG=0n2HdU!DV)6ncXBL^TU9&2?BsPi$&AJ3u1pf!W6BF~Y=D8@f3sM=8CX0ho4VJ~ z&6@6&zwnJZ)FKSZUva+*mL=_O!_Zc^U$cy`yyS=F=y3R}6Ft#Pt~A7OM+*hf5(_^G z@dhyPDPWJt3wCa8qQy!t*n203w{Fk#e|(k!%x5-k$+hJ z$IADWHXCoU@n61tdgsw5 z%eDjKGLSHd5HXUBsL5!C#@h%@5*&ygG1PJEf@6dmc_VE52r-!VV)N9pulK1DxRgTV;PUY=2%{t6ph R_xdTlN!RYbBDl&x{{tZJl7|2Q diff --git a/bot/handlers/__pycache__/start_handler.cpython-312.pyc b/bot/handlers/__pycache__/start_handler.cpython-312.pyc deleted file mode 100644 index 88a2c465347a8fb61c4975a06320ac80f82d7a96..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7414 zcmdTpYitzPxihn~Z|@pEz`+jo7;IpTW8>yQln|)bJmNT*BtU3}rt9_2c)jbL-OZWV z>zXX8F|$(nwXMYDR3e+zC;xnq2ATSKSb)lwbY6 zGdt@CCTewCMLpU*&+j{r?|jcQzX=3<45T$bX^g*F#V{XX!%keOawE$!%yC9yBsRr} zSaK;=;Y3bxhz^ApdDa?picSjS5?!=(i*8zaL=P>!q8Ca>%BT26zfvJqC;>5`1jV3I zDOM^=#3f3VSfx~p)k=+6L+9jEwaQX)X=(mtVjbW+Q+FxL#pSfmm8w@7#0FY-r|wo( zh%1yvv5~etsU}4b1zPu}nw6E}N@bO}N@)>Wlvc4-SuL(s)`)B9xG%L<35g+EXH#ur z8?56`ty9{?cIc}(%!v0$wNl_PD|SdhXmv`JGM^l<2{l^|anh2*yx8?Q<5e*JC8-+l zhi38Y8sOK!_72a^4EVO~+R>9qYx38%5NA4f zX;Do!{j^ZGlT0oPFy1GkM3r}BBst_VYo3y`sj)tJY%mig(vBz@$z{!;-45q8Et6gV z^pvb*{zW!xd(){zTDCz;s0(^a6ZrPZgWcI|Z+a*L8*rsE@i;w)3-&sch{tTwW68tc z0sk9chU_?_0pD9mVkPdBsR!%f__Oqt6w%=Hoavcp5#m($z}#((Nt`n!jdz=M8{mVzz$4 zh6nzX@;9L}pYpRb*6@C4`X1<@Jo`Q72M)NE64inJ_Wf{kZq6yWPPxk?jyWTq*_kEp zqVw=`pnZ~m&OFXM7iaSh`18OCjsvbdL#p$xq}$%%Y?*h)S$mHxqj~L-yz5kj-G9KH zcLT=+`g5R!nVDB#xgzM_1-3b_U)0YU?9@QJbi^`B4sS{Sk^b-c5A`3J71(XdKUoPwA*aQi z^-!lfG+nSQ&?pwadQ4A3jYK6Skv2VWo=6Pz0BHqan~}d{SMIed_mR~&*&4`HTpie& zCW7o{-c{B`Ht*ZVx*vw`Mx;ZKmDuv(cg4b9QGa@$zHeAhJf{y6eYaZJsuhB{@$Hmx zx9WQmggNPdUY9A*1^u!B zdJN6q&p{U*0&`bE(DZ{FL2bS-i(WxQLASt9P>Y#-0_?Tv9U>Vef^Le*B_vXTBW~NT z@lrlSyGfS706~!_ctX>&qx-48Ej#!2pJd6y(4nH-2^KHbmQ@CIUo3M*9tD7Qg!~3% z|Hs?}p5%Qm^d0P*yzls?S2h)ET8x?&y}EU3_iHV0be!xcwsjkA-FhetEZ4(^k@Kp0 zkQtp@xNE-PvG$kRi$1~d35Dhdt~Nhd*dDpMJz7YQ{)u5SY&Z8utI&M{SVDi8gKlS- z`;_LlRJ^Z=up=B`-*&QLKmWGN1@*;Q_wYY0>)PyjJKN1NbrLUjLArxjEh2*O4e@g$fq9Y^Udo3fYD@ z+aa4PBMl2=WC*5;WD_whM`ZFi0Ix@cpFs9Ei^|uis6?o!49tqk3d6VJs;{ZYH|c!S zbzh+9t2cc0C+>T7(|>F#HnbZJ?S+n>s~uYlyHi(pr$Ns0>{gVTJll`Lgavf^cT!2( ziCxZ}+>C>93O8pstlv`6&zR_L?qS$pu42O*`CqnlS3~3N2iWi?{_PDetZ!l=rzD&w zZRpfdLvanb!7>yQ7+l~)7+9DjPGahrWJ#&c9hJ(iDyh5!E|0ii%AGbIz%9br$wiEa zZSW^qHLLk;{3J5Pg7nwV35E%rjYW45Trep#0opa!jLn*&WZ4AGV%=fhMNTHUZ&S5&TcWznWUGCH*Gz3935Ni5^$v6KYSWQL~-C zi{Qdtg13yz5IIZ>)(Zjnc>;8RsXB+I3LZDkK(s(ZPbkL$<1}LA(2zo4djx`4oG!Nn zJQrxcBIs{`W7+pi&%B6N(BA|bIMFQV$LXxo(4K}^7hr=as3 
zEO_2p)SBo`<4EarXJHQGARd0&coEKh0a`HOS&X)Ecg9f|x?*)xiU7h}ls(;?dxsL~Wtj8?oeMX2R6hqX{QM)I7GTX%U=Nr!`2|=S9+%(?1%%4E)}NK^pQw3C;!1d5P}fjD7{_M!~6L59gT%fJ>BahhUEgj;_cAqT<1TNG~J3 zAb`|Q8;53Zfu7^H)@8pT=>LhNMS>v|u4r9^ahS#lbE!nvzoMUkUEoc^Gg&)?6MzUC z$7`N*RWxXArNp-{AC?s$vc>0)bJo$|b}f#a$2$}i_Dv~^*H&gF=)x(io0(7D=?)>+ zNRRZd<8UMZnucS+0)S8=fqu3*haM#Qev63-uzeQ{TmD<6^>E{(Pi*RFLK`L+$j$NP zFbJ~zJ1T|f1CTzEwk3s1A(M42@U4>G>6M%;s1>W=`bTP6EhA>i6Hvy|w!Lyt@_(J|*ezJQqth?)q?v;jn<8~`r{j4**L{bIrN&A7d_Yg%TKH+ z)~_?_*A?qKjQWn>`8z-M1dsV&@=uNy>(&}|Ym0U3jJkFDvi8@uOxG2=!bVrP*wtrr z_37&$hd^lu`#3s*OLw5l=I!UM2dj#~MkCmGVzk(_-e_81Y`WKIx_A2N!eh@~eJoN4 z4ir5Dx@X|y#o=J82Of38RC}FCCxlabohfWpDo*eZy^0gYA^#lY=N$n482>B}$dB<6 z9+3xlL>}M~d4T^M z7;oiYOGo{2Nw1X)o}rJ+&hiG!Q7$%g8V#L=u5DMl9xd$6zzOz4fC{_(3QHr@C*VZ| z4)+8%h;A|#Fy@a@&pAdt=NLEBz<8tVpJ#X^9W4fUT8!{F|Hrfc=8TVV#=xgmfRQG8 ze7$Y#I~8ni1OHCoVW_|BVS5|-cfDBu70>oI^S^Rp{XK^5ZQtG z@av^)?^^!X%UoD*WHE21c?-=~(|j%FA#PiL=zh~3Rmak?L{WwCZaH^`g zBOsa(Vxf~vYH%=M55gA~MYSF^%6;_X55Y*5VDM^b1XMkvIw-hIwGjp$5 ziA5|!WZVdR`5@>-(D!@m=>#9;%s@}+3&fK-nT*kQ5gPHC?j6?WB&tA8GW1Q2zNp;t zY=noKTuRQyFSGn3rs4x;<)=K)*50V_vbCQsbFj5DJ`Q+#YEQBC vL8JA-8w|E)JPvjz_uAHC`(~qk^9=@DGrn3j#!a>U_?{o$^BIG6y6?XLl+pH1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0^-w1f4rmY`RwTgfUh`DkC(@Qfii3jTn~YjJL|`OUKkKht=?5^<=9F#p3C_zePH% zPt_^{B5+Hk>asy_nGMJ57&LEs9DjT~A4 diff --git a/frontend/public/.DS_Store b/frontend/public/.DS_Store deleted file mode 100644 index 0331f6a994252c7b465586ec92ebe760de72709f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKyG{c^3>-s1k!VsT1 z)lG4Z0oeL|e+R4pEa{H;@Gv)j?mn}N$~an_@rH3fJ{=y1;V`Q{pK$Iq9x(Fvj3Yh| z#1&pJ;Pp6t{uE`UfE17dQa}nwf!`|NEta;tNmP^qQa}oPE8yRUMtAImQ(}BN7-9q< zE|?DEI%WxC^8~RMPKnIWEUCn#T8$W%bmm*t^};DJ>9Dw&=hV$s6N<&{%(p0q^+ZJ} zAO%hpxXkU!`~N-thxz}Mq@5Iy0{=<@TWq$QHD9TE>+I#c*Eaf&?lqruH?D)i5bc;4 j?U);H$G1_GbH1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0}{ zJ9`SR7l3R|`#WF_U`1ENrw((|b@iEDMWiB%?a`y#chASCra8{Ce-0>jiAQw24jA#d zAu7C}!Rt7zyDTlY#XA}8_`He>JsB+y({PUuyfOQ)Pp7A)r2?ryDv%1K0+*$LmRQ>A z#xciKAQeajz7^2#L!m3yfrF!cIyhJhKwL0v#(V1}h(!a$I&g4ggeFcUI#r^@5T`R< zBCie{9Gwn{=0oCSi4H~Vbnag)9a1~ymH1@V-^m;4Wg<&0T*E43hX&L&p$$qDprKhvt+--jT7}7np#A3 zem<@ulZcFPQ@L2!n>{z**++&mCkOWA81W14cNZlEfg7;MkzE(HCqgga^y>{tEnwC%0;vJ&^%eQ zLs35+`xjp>T0(state => state.userTg.id); const [page, setPage] = useState('storage'); - const refLink = `https://t.me/sapphirecrown_bot?start=user_${userId}`; + const refLink = `https://t.me/kyc_clicker_bot?start=user_${userId}`; const [showNotif, setShow] = useState(false); const navigate = useNavigate(); diff --git a/frontend/src/shared/Pages/WrongSourcePage/WrongSourcePage.tsx b/frontend/src/shared/Pages/WrongSourcePage/WrongSourcePage.tsx index 2f6737d..4842093 100644 --- a/frontend/src/shared/Pages/WrongSourcePage/WrongSourcePage.tsx +++ b/frontend/src/shared/Pages/WrongSourcePage/WrongSourcePage.tsx @@ -11,7 +11,7 @@ export function WrongSourcePage() {

Похоже вы вошли не по той ссылке...

-
); diff --git a/nginx/nginx.conf b/nginx/nginx.conf index f3a20bb..0bac9ec 100644 --- a/nginx/nginx.conf +++ b/nginx/nginx.conf @@ -42,32 +42,32 @@ http { access_log /var/log/nginx/access.log upstreamlog; error_log /var/log/nginx/error.log; listen 80; - listen 443 ssl http2; +# listen 443 ssl http2; charset utf-8; - server_name crowngame.ru www.crowngame.ru; +# server_name kyc_clicker.ru www.kyc_clicker.ru; root /dist/; index index.html; - ssl_certificate /etc/letsencrypt/live/crowngame.ru/fullchain.pem; - ssl_certificate_key /etc/letsencrypt/live/crowngame.ru/privkey.pem; +# ssl_certificate /etc/letsencrypt/live/kyc_clicker.ru/fullchain.pem; +# ssl_certificate_key /etc/letsencrypt/live/kyc_clicker.ru/privkey.pem; - include /etc/letsencrypt/options-ssl-nginx.conf; - ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; +# include /etc/letsencrypt/options-ssl-nginx.conf; +# ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; - if ($server_port = 80) { - set $https_redirect 1; - } - if ($host ~ '^www\.') { - set $https_redirect 1; - } - if ($https_redirect = 1) { - return 301 https://crowngame.ru$request_uri; - } +# if ($server_port = 80) { +# set $https_redirect 1; +# } +# if ($host ~ '^www\.') { +# set $https_redirect 1; +# } +# if ($https_redirect = 1) { +# return 301 https://crowngame.ru$request_uri; +# } - location /.well-known/acme-challenge/ { - root /var/www/certbot; - } +# location /.well-known/acme-challenge/ { +# root /var/www/certbot; +# } # frontend location / {