Compare commits

4 Commits

- 44e2d34be8
- ed66b2a3d8
- c1869e90c2
- faa240dff0
.gitignore (vendored) · 169 lines
|
@ -1,2 +1,169 @@
|
|||
.idea
|
||||
.DS_Store
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# UV
|
||||
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
#uv.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
||||
.pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
architecture.png · BIN · new file · 35 KiB (binary file not shown)

@@ -1,11 +0,0 @@
FROM python:3.12

WORKDIR /batcher

COPY ./requirements.txt /batcher/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /batcher/requirements.txt

COPY ./app /batcher/app

CMD fastapi run app/main.py --port ${HTTP_PORT}
@ -1,11 +0,0 @@
|
|||
[[source]]
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
|
||||
[dev-packages]
|
||||
|
||||
[requires]
|
||||
python_version = "3.12"
|
|
@ -1,27 +0,0 @@
|
|||
from fastapi import Depends, FastAPI, Request, Response
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from starlette.exceptions import HTTPException
|
||||
|
||||
from .src.routers.api import router as router_api
|
||||
from .src.routers.handlers import http_error_handler
|
||||
|
||||
|
||||
def get_application() -> FastAPI:
|
||||
application = FastAPI()
|
||||
|
||||
application.include_router(router_api, prefix='/api')
|
||||
|
||||
application.add_exception_handler(HTTPException, http_error_handler)
|
||||
|
||||
application.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
return application
|
||||
|
||||
|
||||
app = get_application()
|
|
@ -1,9 +0,0 @@
|
|||
import sys
|
||||
import asyncio
|
||||
from .src.db.pg import migrate
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if len(sys.argv) < 2:
|
||||
raise RuntimeError('you need to specify target revision')
|
||||
asyncio.run(migrate(sys.argv[1]))
|
|
@ -1,29 +0,0 @@
|
|||
from starlette.config import Config
|
||||
from starlette.datastructures import Secret
|
||||
from functools import lru_cache
|
||||
|
||||
config = Config('.env')
|
||||
|
||||
|
||||
REDIS_USER = config('REDIS_USER')
|
||||
REDIS_PASSWORD = config('REDIS_PASSWORD', cast=Secret)
|
||||
REDIS_PORT = config('REDIS_PORT', cast=int)
|
||||
REDIS_HOST = config('REDIS_HOST')
|
||||
REDIS_DB = config('REDIS_DB')
|
||||
|
||||
HTTP_PORT = config('HTTP_PORT', cast=int)
|
||||
|
||||
PG_HOST = config('POSTGRES_HOST')
|
||||
PG_PORT = config('POSTGRES_PORT', cast=int)
|
||||
PG_USER = config('POSTGRES_USER')
|
||||
PG_PASSWORD = config('POSTGRES_PASSWORD', cast=Secret)
|
||||
PG_DB = config('POSTGRES_DB')
|
||||
|
||||
RMQ_HOST = config('RABBITMQ_HOST')
|
||||
RMQ_PORT = config('RABBITMQ_PORT', cast=int)
|
||||
RMQ_USER = config('RABBITMQ_DEFAULT_USER')
|
||||
RMQ_PASSWORD = config('RABBITMQ_DEFAULT_PASSWORD', cast=Secret)
|
||||
|
||||
TG_TOKEN = config('TG_TOKEN', cast=Secret)
|
||||
|
||||
BACKEND_URL = config('BACKEND_URL', default='http://backend:8000')
|
|
@ -1,3 +0,0 @@
|
|||
from .pg import get_pg
|
||||
from .redis import get_redis
|
||||
from .rmq import get_rmq
|
|
@@ -1 +0,0 @@
from .pg import get_pg, migrate
|
@ -1,2 +0,0 @@
|
|||
DROP INDEX clicks_user_id_time_idx;
|
||||
DROP TABLE clicks;
|
|
@ -1,8 +0,0 @@
|
|||
CREATE TABLE IF NOT EXISTS clicks(
|
||||
id BIGSERIAL PRIMARY KEY,
|
||||
user_id BIGINT,
|
||||
time TIMESTAMP,
|
||||
value DECIMAL(100, 2),
|
||||
expiry_info JSONB
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS clicks_user_id_time_idx ON clicks(user_id, time);
|
|
@ -1,32 +0,0 @@
|
|||
from batcher.app.src.config import PG_HOST, PG_PORT, PG_USER, PG_PASSWORD, PG_DB
|
||||
from pathlib import Path
|
||||
import asyncio
|
||||
import asyncpg
|
||||
from asyncpg_trek import plan, execute, Direction
|
||||
from asyncpg_trek.asyncpg import AsyncpgBackend
|
||||
|
||||
|
||||
DB_URL = f'postgresql://{PG_USER}:{str(PG_PASSWORD)}@{PG_HOST}:{PG_PORT}/{PG_DB}'
|
||||
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
|
||||
|
||||
|
||||
async def connect_db() -> asyncpg.Pool:
|
||||
return await asyncpg.create_pool(DB_URL)
|
||||
|
||||
|
||||
pool = asyncio.run(connect_db())
|
||||
|
||||
|
||||
async def get_pg() -> asyncpg.Connection:
|
||||
async with pool.acquire() as conn:
|
||||
yield conn
|
||||
|
||||
|
||||
async def migrate(
|
||||
target_revision: str,
|
||||
) -> None:
|
||||
async with pool.acquire() as conn:
|
||||
backend = AsyncpgBackend(conn)
|
||||
async with backend.connect() as conn:
|
||||
planned = await plan(conn, backend, MIGRATIONS_DIR, target_revision=target_revision, direction=Direction.up)
|
||||
await execute(conn, backend, planned)
|
|
@ -1,11 +0,0 @@
|
|||
import asyncio
|
||||
import redis.asyncio as redis
|
||||
|
||||
from ..config import REDIS_HOST, REDIS_PORT, REDIS_USER, REDIS_PASSWORD, REDIS_DB
|
||||
|
||||
|
||||
r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, username=REDIS_USER, password=str(REDIS_PASSWORD), db=REDIS_DB)  # redis.Redis() is not a coroutine, so asyncio.run() is not needed here
|
||||
|
||||
|
||||
def get_redis() -> redis.Redis:
|
||||
yield r
|
|
@ -1,26 +0,0 @@
|
|||
import aio_pika
|
||||
from aio_pika.abc import AbstractRobustConnection
|
||||
import asyncio
|
||||
|
||||
from ..config import RMQ_HOST, RMQ_PORT, RMQ_USER, RMQ_PASSWORD
|
||||
|
||||
|
||||
async def get_connection() -> AbstractRobustConnection:
|
||||
return await aio_pika.connect_robust(f'amqp://{RMQ_USER}:{RMQ_PASSWORD}@{RMQ_HOST}:{RMQ_PORT}/')
|
||||
|
||||
|
||||
conn_pool = aio_pika.pool.Pool(get_connection, max_size=2)
|
||||
|
||||
|
||||
async def get_channel() -> aio_pika.Channel:
|
||||
async with conn_pool.acquire() as connection:
|
||||
return await connection.channel()
|
||||
|
||||
|
||||
chan_pool = aio_pika.pool.Pool(get_channel, max_size=10)
|
||||
|
||||
|
||||
async def get_rmq() -> aio_pika.Channel:
|
||||
async with chan_pool.acquire() as chan:
|
||||
yield chan
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
import time
|
||||
import hmac
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
from fastapi import Header, HTTPException
|
||||
|
||||
from .config import TG_TOKEN
|
||||
|
||||
|
||||
async def get_token_header(authorization: str = Header()) -> (int, str):
|
||||
if not authorization:
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
if not authorization.startswith('TelegramToken '):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
token = ' '.join(authorization.split()[1:])
|
||||
|
||||
split_res = base64.b64decode(token).decode('utf-8').split(':')
|
||||
try:
|
||||
data_check_string = ':'.join(split_res[:-1]).strip().replace('/', '\\/')
|
||||
_hash = split_res[-1]
|
||||
except IndexError:
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
secret = hmac.new(
|
||||
'WebAppData'.encode(),
|
||||
str(TG_TOKEN).encode('utf-8'),
|
||||
digestmod=hashlib.sha256
|
||||
).digest()
|
||||
actual_hash = hmac.new(
|
||||
secret,
|
||||
msg=data_check_string.encode('utf-8'),
|
||||
digestmod=hashlib.sha256
|
||||
).hexdigest()
|
||||
if _hash != actual_hash:
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
data_dict = dict([x.split('=') for x in data_check_string.split('\n')])
|
||||
try:
|
||||
auth_date = int(data_dict['auth_date'])
|
||||
except KeyError:
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
if auth_date + 60 * 30 < int(time.time()):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
user_info = json.loads(data_dict['user'])
|
||||
return user_info['id'], authorization
|
||||
|
|
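The dependency above implements Telegram's WebApp initData check: the expected hash is HMAC-SHA256 of the data-check-string, keyed with a secret that is itself HMAC-SHA256 of the bot token under the constant key "WebAppData". In this service the client ships a base64-encoded "data_check_string:hash" pair; a minimal standalone sketch of the same verification starting from a raw initData query string (the bot token and parsing are illustrative) looks like this:

```python
import hashlib
import hmac
from urllib.parse import parse_qsl

BOT_TOKEN = "1234567890:TEST_TOKEN"  # placeholder; the service reads TG_TOKEN from config


def verify_init_data(init_data: str) -> bool:
    """Check Telegram WebApp initData: HMAC of the data-check-string must match 'hash'."""
    fields = dict(parse_qsl(init_data))
    received_hash = fields.pop("hash", "")

    # Data-check-string: key=value pairs sorted by key, joined with newlines, 'hash' excluded.
    data_check_string = "\n".join(f"{k}={v}" for k, v in sorted(fields.items()))

    # Secret key = HMAC-SHA256 over the bot token, keyed with the constant "WebAppData".
    secret = hmac.new(b"WebAppData", BOT_TOKEN.encode(), hashlib.sha256).digest()
    expected = hmac.new(secret, data_check_string.encode(), hashlib.sha256).hexdigest()
    return hmac.compare_digest(expected, received_hash)
```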
@ -1,2 +0,0 @@
|
|||
from .schemas import ClickResponse, BatchClickRequest, EnergyResponse, ClickValueResponse
|
||||
from .usecase import add_click_batch_copy, check_registration, check_energy, get_energy, click_value, delete_user_info
|
|
@ -1,9 +0,0 @@
|
|||
import datetime
|
||||
import decimal
|
||||
import pydantic
|
||||
|
||||
|
||||
class Click(pydantic.BaseModel):
|
||||
UserID: int
|
||||
DateTime: datetime.datetime
|
||||
Value: decimal.Decimal
|
|
@ -1,50 +0,0 @@
|
|||
from datetime import datetime, timedelta
|
||||
import decimal
|
||||
from asyncpg import Connection
|
||||
|
||||
from ..models import Click
|
||||
|
||||
|
||||
async def update_click_expiry(conn: Connection, user_id: int, period: int) -> decimal.Decimal:
|
||||
cur_time = datetime.now()
|
||||
cutoff_time = cur_time - timedelta(hours=period)
|
||||
query = '''
|
||||
WITH expired_values AS(
|
||||
UPDATE clicks
|
||||
SET expiry_info=jsonb_set(expiry_info, $1, 'true')
|
||||
WHERE 1=1
|
||||
AND time < $2
|
||||
AND user_id =$3
|
||||
AND not (expiry_info->>$4)::bool
|
||||
RETURNING value
|
||||
)
|
||||
SELECT COALESCE(SUM(value), 0)
|
||||
FROM expired_values
|
||||
;
|
||||
'''
|
||||
period_key = f'period_{period}'
|
||||
return await conn.fetchval(query, [period_key], cutoff_time, user_id, period_key)
|
||||
|
||||
|
||||
async def store(conn: Connection, click: Click) -> int:
|
||||
query = '''
|
||||
INSERT INTO clicks(user_id, time, value, expiry_info)
|
||||
VALUES($1, $2, $3, '{"period_24": false, "period_168": false}')
|
||||
RETURNING id
|
||||
;
|
||||
'''
|
||||
return await conn.fetchval(query, click.UserID, click.DateTime, click.Value)
|
||||
|
||||
|
||||
async def bulk_store_copy(conn: Connection, click: Click, count: int) -> None:
|
||||
args = [(click.UserID, click.DateTime, click.Value) for _ in range(count)]
|
||||
query = '''
|
||||
INSERT INTO clicks(user_id, time, value, expiry_info)
|
||||
VALUES($1, $2, $3, '{"period_24": false, "period_168": false}')
|
||||
;
|
||||
'''
|
||||
await conn.executemany(query, args)
|
||||
|
||||
|
||||
async def delete_by_user_id(conn: Connection, user_id: int):
|
||||
await conn.execute('DELETE FROM clicks WHERE user_id=$1', user_id)
|
|
@ -1,169 +0,0 @@
|
|||
import decimal
|
||||
from typing import Optional, List
|
||||
|
||||
import redis.asyncio as redis
|
||||
|
||||
|
||||
async def get_period_sum(r: redis.Redis, user_id: int, period: int) -> decimal.Decimal:
|
||||
sum_str = await r.get(f'period_{period}_user_{user_id}')
|
||||
if sum_str is None:
|
||||
return decimal.Decimal(0)
|
||||
return decimal.Decimal(sum_str)
|
||||
|
||||
|
||||
async def incr_period_sum(r: redis.Redis, user_id: int, _period: int, value: decimal.Decimal) -> decimal.Decimal:
|
||||
return await r.incrbyfloat(f'period_{_period}_user_{user_id}', float(value))
|
||||
|
||||
|
||||
async def get_max_period_sum(r: redis.Redis, _period: int) -> decimal.Decimal:
|
||||
max_sum_str = await r.get(f'max_period_{_period}')
|
||||
if max_sum_str is None:
|
||||
return decimal.Decimal(0)
|
||||
return decimal.Decimal(max_sum_str)
|
||||
|
||||
|
||||
async def compare_max_period_sum(r: redis.Redis, _period: int, _sum: decimal.Decimal) -> None:
|
||||
_script = r.register_script('''
|
||||
local currentValue = tonumber(redis.call('GET', KEYS[1]))
|
||||
local cmpValue = tonumber(ARGV[1])
|
||||
if not currentValue or cmpValue > currentValue then
|
||||
redis.call('SET', KEYS[1], ARGV[1])
|
||||
return cmpValue
|
||||
else
|
||||
return currentValue
|
||||
end
|
||||
''')
|
||||
await _script(keys=[f'max_period_{_period}'], args=[str(_sum)])
|
||||
|
||||
|
||||
async def get_energy(r: redis.Redis, user_id: int) -> int:
|
||||
energy_str = await r.get(f'energy_{user_id}')
|
||||
if energy_str is None:
|
||||
return 0
|
||||
return int(energy_str)
|
||||
|
||||
|
||||
async def set_energy(r: redis.Redis, user_id: int, energy: int) -> None:
|
||||
await r.set(f'energy_{user_id}', energy)
|
||||
|
||||
|
||||
async def decr_energy(r: redis.Redis, user_id: int, amount: int) -> (int, int):
|
||||
_script = r.register_script('''
|
||||
local energy = tonumber(redis.call('GET', KEYS[1]))
|
||||
local delta = tonumber(ARGV[1])
|
||||
if energy < delta then
|
||||
redis.call('SET', KEYS[1], 0)
|
||||
return {0, energy}
|
||||
else
|
||||
local newEnergy = tonumber(redis.call('DECRBY', KEYS[1], ARGV[1]))
|
||||
return {newEnergy, delta}
|
||||
end
|
||||
''')
|
||||
new_energy, spent = map(int, await _script(keys=[f'energy_{user_id}'], args=[amount]))
|
||||
return new_energy, spent
|
||||
|
||||
|
||||
async def get_global_average(r: redis.Redis) -> decimal.Decimal:
|
||||
avg_str = await r.get('global_average')
|
||||
if avg_str is None:
|
||||
return decimal.Decimal(0)
|
||||
return decimal.Decimal(avg_str)
|
||||
|
||||
|
||||
async def update_global_average(r: redis.Redis, value_to_add: decimal.Decimal) -> decimal.Decimal:
|
||||
_script = r.register_script('''
|
||||
local delta = tonumber(ARGV[1]) / tonumber(redis.call('GET', KEYS[1]))
|
||||
return redis.call('INCRBYFLOAT', KEYS[2], delta)
|
||||
''')
|
||||
return decimal.Decimal(await _script(keys=["user_count", "global_average"], args=[float(value_to_add)]))
|
||||
|
||||
|
||||
async def get_user_total(r: redis.Redis, user_id: int) -> decimal.Decimal:
|
||||
total_str = await r.get(f'total_{user_id}')
|
||||
if total_str is None:
|
||||
return decimal.Decimal(0)
|
||||
return decimal.Decimal(total_str)
|
||||
|
||||
|
||||
async def incr_user_count_if_no_clicks(r: redis.Redis, user_id: int) -> int:
|
||||
_script = r.register_script('''
|
||||
local clickCount = tonumber(redis.call('GET', KEYS[1]))
|
||||
local userCount = tonumber(redis.call('GET', KEYS[2]))
|
||||
if (not clickCount) then
|
||||
local oldUserCount = redis.call('GET', KEYS[2])
|
||||
if (not oldUserCount) then
|
||||
redis.call('SET', KEYS[2], 1)
|
||||
redis.call('SET', KEYS[3], 0)
|
||||
return 1
|
||||
end
|
||||
userCount = tonumber(redis.call('INCR', KEYS[2]))
|
||||
oldUserCount = tonumber(oldUserCount)
|
||||
local globalAverage = tonumber(redis.call('GET', KEYS[3]))
|
||||
redis.call('SET', KEYS[3], globalAverage / userCount * oldUserCount)
|
||||
end
|
||||
return userCount
|
||||
''')
|
||||
return int(await _script(keys=[f'total_{user_id}', 'user_count', 'global_average'], args=[]))
|
||||
|
||||
|
||||
async def incr_user_total(r: redis.Redis, user_id: int, value: decimal.Decimal) -> decimal.Decimal:
|
||||
return await r.incrbyfloat(f'total_{user_id}', float(value))
|
||||
|
||||
|
||||
async def get_user_session(r: redis.Redis, user_id: int) -> Optional[str]:
|
||||
return await r.get(f'session_{user_id}')
|
||||
|
||||
|
||||
async def set_user_session(r: redis.Redis, user_id: int, token: str) -> None:
|
||||
await r.set(f'session_{user_id}', token, ex=30 * 60)
|
||||
|
||||
|
||||
async def get_user_count(r: redis.Redis) -> int:
|
||||
user_count_str = await r.get('user_count')
|
||||
if user_count_str is None:
|
||||
return 0
|
||||
return int(user_count_str)
|
||||
|
||||
|
||||
async def incr_user_count(r: redis.Redis) -> int:
|
||||
_script = r.register_script('''
|
||||
local oldCount = redis.call('GET', KEYS[1])
|
||||
if (not oldCount) then
|
||||
redis.call('SET', KEYS[1], 1)
|
||||
redis.call('SET', KEYS[2], 0)
|
||||
return 1
|
||||
end
|
||||
local newCount = tonumber(redis.call('INCR', KEYS[1]))
|
||||
local globalAverage = tonumber(redis.call('GET', KEYS[2]))
|
||||
redis.call('SET', KEYS[2], globalAverage / newCount * oldCount)
|
||||
return newCount
|
||||
''')
|
||||
return int(await _script(keys=['user_count', 'global_average'], args=[]))
|
||||
|
||||
|
||||
async def delete_user_info(r: redis.Redis, user_id: int, periods: List[int]):
|
||||
_script = r.register_script('''
|
||||
local userTotal = redis.call('GET', KEYS[3])
|
||||
if (not userTotal) then
|
||||
return
|
||||
end
|
||||
local oldUserCount = tonumber(redis.call('GET', KEYS[1]))
|
||||
local newUserCount = tonumber(redis.call('DECR', KEYS[1]))
|
||||
local globalAverage = tonumber(redis.call('GET', KEYS[2]))
|
||||
redis.call('SET', KEYS[2], (globalAverage * oldUserCount - userTotal) / newUserCount)
|
||||
for i, v in ipairs(KEYS) do
|
||||
if (i > 2) then
|
||||
redis.call('DEL', v)
|
||||
end
|
||||
end
|
||||
''')
|
||||
keys = [
|
||||
'user_count',
|
||||
'global_average',
|
||||
f'total_{user_id}',
|
||||
f'energy_{user_id}',
|
||||
f'session_{user_id}',
|
||||
]
|
||||
for period in periods:
|
||||
keys.append(f'period_{period}_user_{user_id}')
|
||||
await _script(keys=keys, args=[])
|
|
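All of the read-modify-write counters above (running maximums, the global average, user counts) run as server-side Lua through register_script, so concurrent batch requests cannot interleave between the read and the write. A minimal sketch of the same register-then-await pattern with redis.asyncio, using an illustrative key and a local connection:

```python
import asyncio

import redis.asyncio as redis


async def main() -> None:
    r = redis.Redis()  # illustrative local instance; the app wires host/port/credentials from config

    # Keep a running maximum atomically: the compare-and-set happens inside Redis,
    # so two concurrent callers cannot both read the old value and overwrite each other.
    keep_max = r.register_script("""
        local current = tonumber(redis.call('GET', KEYS[1]))
        local candidate = tonumber(ARGV[1])
        if not current or candidate > current then
            redis.call('SET', KEYS[1], ARGV[1])
            return candidate
        end
        return current
    """)

    print(await keep_max(keys=["max_period_24"], args=["41.50"]))
    await r.aclose()


if __name__ == "__main__":
    asyncio.run(main())
```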
@ -1,22 +0,0 @@
|
|||
import json
|
||||
import aio_pika
|
||||
import uuid
|
||||
|
||||
from ..models import Click
|
||||
|
||||
|
||||
CELERY_QUEUE_NAME = "celery"
|
||||
CLICK_TASK_NAME = "clicks.celery.click.handle_click"
|
||||
|
||||
|
||||
async def send_click_batch_copy(chan: aio_pika.Channel, click: Click, count: int):
|
||||
await chan.default_exchange.publish(
|
||||
message=aio_pika.Message(json.dumps({
|
||||
'id': str(uuid.uuid4()),
|
||||
'task': CLICK_TASK_NAME,
|
||||
'args': [click.UserID, int(click.DateTime.timestamp() * 1e3), str(click.Value), count],
|
||||
'kwargs': dict(),
|
||||
}).encode('utf-8')),
|
||||
routing_key=CELERY_QUEUE_NAME,
|
||||
mandatory=False,
|
||||
)
|
|
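The publisher above writes a Celery-style body (id, task, args, kwargs) straight to the "celery" queue for the task clicks.celery.click.handle_click, with the timestamp in milliseconds and the click value serialized as a string to preserve decimal precision. Assuming the backend's worker accepts that body, the matching task registration on the consuming side would look roughly like this (broker URL and the handler body are illustrative):

```python
# Hypothetical consumer-side task; the real definition lives in the backend service.
from celery import Celery

app = Celery("clicker", broker="amqp://guest:guest@rabbitmq:5672//")  # illustrative broker URL


@app.task(name="clicks.celery.click.handle_click")
def handle_click(user_id: int, timestamp_ms: int, value: str, count: int) -> None:
    # Persist or aggregate the batched clicks on the backend side.
    print(user_id, timestamp_ms, value, count)
```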
@ -1,22 +0,0 @@
|
|||
import decimal
|
||||
|
||||
import pydantic
|
||||
|
||||
from .models import Click
|
||||
|
||||
|
||||
class ClickResponse(pydantic.BaseModel):
|
||||
click: Click
|
||||
energy: int
|
||||
|
||||
|
||||
class ClickValueResponse(pydantic.BaseModel):
|
||||
value: decimal.Decimal
|
||||
|
||||
|
||||
class EnergyResponse(pydantic.BaseModel):
|
||||
energy: int
|
||||
|
||||
|
||||
class BatchClickRequest(pydantic.BaseModel):
|
||||
count: int
|
|
@ -1,125 +0,0 @@
|
|||
from datetime import datetime
|
||||
import decimal
|
||||
from typing import Tuple
|
||||
import aiohttp
|
||||
import redis.asyncio as redis
|
||||
import aio_pika
|
||||
import asyncpg
|
||||
|
||||
from batcher.app.src.domain.setting import get_setting
|
||||
from .repos.redis import (
|
||||
get_period_sum, incr_period_sum, get_max_period_sum, get_user_total, get_global_average,
|
||||
incr_user_count_if_no_clicks, update_global_average, incr_user_total, compare_max_period_sum,
|
||||
delete_user_info as r_delete_user_info, get_user_session, set_user_session, set_energy, get_energy as r_get_energy,
|
||||
decr_energy,
|
||||
)
|
||||
from .repos.pg import update_click_expiry, bulk_store_copy, delete_by_user_id
|
||||
from .repos.rmq import send_click_batch_copy
|
||||
from .models import Click
|
||||
|
||||
|
||||
PRECISION = 2
|
||||
|
||||
|
||||
async def add_click_batch_copy(r: redis.Redis, pg: asyncpg.Connection, rmq: aio_pika.Channel, user_id: int, count: int) -> Click:
|
||||
_click_value = await click_value(r, pg, user_id)
|
||||
click_value_sum = _click_value * count
|
||||
|
||||
# update variables
|
||||
await incr_user_count_if_no_clicks(r, user_id)
|
||||
await update_global_average(r, click_value_sum)
|
||||
await incr_user_total(r, user_id, click_value_sum)
|
||||
|
||||
for period in (24, 24*7):
|
||||
new_period_sum = await incr_period_sum(r, user_id, period, click_value_sum)
|
||||
await compare_max_period_sum(r, period, new_period_sum)
|
||||
|
||||
click = Click(
|
||||
UserID=user_id,
|
||||
DateTime=datetime.now(),
|
||||
Value=_click_value,
|
||||
|
||||
)
|
||||
|
||||
# insert click
|
||||
await bulk_store_copy(pg, click, count)
|
||||
|
||||
# send click to backend
|
||||
await send_click_batch_copy(rmq, click, count)
|
||||
|
||||
return click
|
||||
|
||||
|
||||
async def delete_user_info(r: redis.Redis, pg: asyncpg.Connection, user_id: int) -> None:
|
||||
await r_delete_user_info(r, user_id, [24, 168])
|
||||
await delete_by_user_id(pg, user_id)
|
||||
|
||||
|
||||
async def click_value(r: redis.Redis, pg: asyncpg.Connection, user_id: int) -> decimal.Decimal:
|
||||
price_per_click = get_setting('PRICE_PER_CLICK')
|
||||
day_multiplier = get_setting('DAY_MULT')
|
||||
week_multiplier = get_setting('WEEK_MULT')
|
||||
progress_multiplier = get_setting('PROGRESS_MULT')
|
||||
|
||||
# period coefficients
|
||||
day_coef = await period_coefficient(r, pg, user_id, 24, day_multiplier)
|
||||
week_coef = await period_coefficient(r, pg, user_id, 24*7, week_multiplier)
|
||||
|
||||
# progress coefficient
|
||||
user_total = await get_user_total(r, user_id)
|
||||
global_avg = await get_global_average(r)
|
||||
progress_coef = progress_coefficient(user_total, global_avg, progress_multiplier)
|
||||
|
||||
return round(price_per_click * day_coef * week_coef * progress_coef, PRECISION)
|
||||
|
||||
|
||||
async def period_coefficient(r: redis.Redis, pg: asyncpg.Connection, user_id: int, period: int, multiplier: decimal.Decimal) -> decimal.Decimal:
|
||||
current_sum = await get_period_sum(r, user_id, period)
|
||||
expired_sum = await update_click_expiry(pg, user_id, period)
|
||||
new_sum = current_sum - expired_sum
|
||||
await incr_period_sum(r, user_id, period, -expired_sum)
|
||||
max_period_sum = await get_max_period_sum(r, period)
|
||||
if max_period_sum == decimal.Decimal(0):
|
||||
return decimal.Decimal(1)
|
||||
return new_sum * multiplier / max_period_sum + 1
|
||||
|
||||
|
||||
def progress_coefficient(user_total: decimal.Decimal, global_avg: decimal.Decimal, multiplier: decimal.Decimal) -> decimal.Decimal:
|
||||
if user_total == decimal.Decimal(0):
|
||||
return decimal.Decimal(1)
|
||||
return min(global_avg * multiplier / user_total + 1, decimal.Decimal(2))
|
||||
|
||||
|
||||
async def check_registration(r: redis.Redis, user_id: int, _token: str, backend_url: str) -> bool:
|
||||
if await _has_any_clicks(r, user_id):
|
||||
return True
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(f'{backend_url}/api/v1/users/{user_id}', headers={'Authorization': _token}) as resp:
|
||||
return resp.status == 200
|
||||
|
||||
|
||||
async def _has_any_clicks(r: redis.Redis, user_id: int) -> bool:
|
||||
total_value = await get_user_total(r, user_id)
|
||||
return total_value > decimal.Decimal(0)
|
||||
|
||||
|
||||
async def _get_refresh_energy(r: redis.Redis, user_id: int, req_token: str) -> int:
|
||||
current_token = await get_user_session(r, user_id)
|
||||
if current_token != req_token:
|
||||
session_energy = int(get_setting('SESSION_ENERGY'))
|
||||
await set_user_session(r, user_id, req_token)
|
||||
await set_energy(r, user_id, session_energy)
|
||||
return session_energy
|
||||
else:
|
||||
return await r_get_energy(r, user_id)
|
||||
|
||||
|
||||
async def check_energy(r: redis.Redis, user_id: int, amount: int, _token: str) -> Tuple[int, int]:
|
||||
_energy = await _get_refresh_energy(r, user_id, _token)
|
||||
if _energy == 0:
|
||||
return 0, 0
|
||||
return await decr_energy(r, user_id, amount)
|
||||
|
||||
|
||||
async def get_energy(r: redis.Redis, user_id: int, _token: str) -> int:
|
||||
return await _get_refresh_energy(r, user_id, _token)
|
|
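click_value above is the product of the configured base price, two period coefficients of the form new_sum * multiplier / max_period_sum + 1 (falling back to 1 when no maximum exists yet), and a progress coefficient capped at 2, rounded to two places. A worked example with made-up setting values and sums:

```python
from decimal import Decimal

# Illustrative settings; the real values arrive via the settings consumer.
price_per_click = Decimal("1.00")
day_mult = week_mult = progress_mult = Decimal("0.5")

# Period coefficients: the user's recent (non-expired) sum relative to the best sum seen.
day_coef = Decimal("30") * day_mult / Decimal("100") + 1     # new_sum=30,  max=100 -> 1.15
week_coef = Decimal("100") * week_mult / Decimal("400") + 1  # new_sum=100, max=400 -> 1.125

# Progress coefficient: capped at 2 for users far below the global average.
user_total, global_avg = Decimal("50"), Decimal("200")
progress_coef = min(global_avg * progress_mult / user_total + 1, Decimal(2))  # -> 2 (capped)

print(round(price_per_click * day_coef * week_coef * progress_coef, 2))  # Decimal('2.59')
```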
@ -1 +0,0 @@
|
|||
from .usecase import get_setting, launch_consumer
|
|
@ -1,21 +0,0 @@
|
|||
import decimal
|
||||
import threading
|
||||
|
||||
_settings = dict()
|
||||
mx = threading.Lock()
|
||||
|
||||
|
||||
def get_setting(name: str) -> decimal.Decimal:
|
||||
try:
|
||||
mx.acquire()
|
||||
return _settings[name]
|
||||
finally:
|
||||
mx.release()
|
||||
|
||||
|
||||
def set_setting(name: str, value: decimal.Decimal):
|
||||
try:
|
||||
mx.acquire()
|
||||
_settings[name] = value
|
||||
finally:
|
||||
mx.release()
|
|
@ -1,20 +0,0 @@
|
|||
import decimal
|
||||
import json
|
||||
|
||||
import aio_pika
|
||||
from typing import Callable
|
||||
|
||||
|
||||
SETTING_QUEUE_NAME = "settings"
|
||||
SETTING_TASK_NAME = "misc.celery.deliver_setting.deliver_setting"
|
||||
|
||||
|
||||
async def consume_setting_updates(update_setting_func: Callable[[str, decimal.Decimal], None], chan: aio_pika.Channel):
|
||||
queue = await chan.get_queue(SETTING_QUEUE_NAME)
|
||||
|
||||
async with queue.iterator() as queue_iter:
|
||||
async for msg in queue_iter:
|
||||
async with msg.process():
|
||||
settings = json.loads(msg.body.decode('utf-8'))
|
||||
for name, value in settings.items():
|
||||
update_setting_func(name, value)
|
|
@ -1,16 +0,0 @@
|
|||
import decimal
|
||||
import threading
|
||||
|
||||
import aio_pika
|
||||
|
||||
from .repos.in_memory_storage import get_setting as ims_get_setting
|
||||
from .repos.rmq import consume_setting_updates
|
||||
|
||||
|
||||
def get_setting(name: str) -> decimal.Decimal:
|
||||
return ims_get_setting(name)
|
||||
|
||||
|
||||
def launch_consumer(rmq: aio_pika.Connection):
|
||||
t = threading.Thread(target=consume_setting_updates, args=(ims_get_setting, rmq))
|
||||
t.start()
|
|
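launch_consumer above hands the coroutine consume_setting_updates directly to threading.Thread, so the coroutine object is created but never awaited, and it passes the getter rather than a setter, so no setting would be updated even if it ran. One possible corrected shape, a sketch assuming set_setting from the in-memory storage module and a dedicated event loop in the background thread:

```python
import asyncio
import threading

import aio_pika

from .repos.in_memory_storage import set_setting as ims_set_setting
from .repos.rmq import consume_setting_updates


def launch_consumer(rmq: aio_pika.Channel) -> None:
    # Give the background thread its own event loop so the async consumer can actually run.
    t = threading.Thread(
        target=lambda: asyncio.run(consume_setting_updates(ims_set_setting, rmq)),
        daemon=True,
    )
    t.start()
```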
@ -1,2 +0,0 @@
|
|||
from .api import router
|
||||
from .handlers import http_error_handler
|
|
@ -1,11 +0,0 @@
|
|||
from fastapi import APIRouter
|
||||
from . import click
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
def include_api_routes():
|
||||
router.include_router(click.router, prefix='/v1')
|
||||
|
||||
|
||||
include_api_routes()
|
|
@ -1,71 +0,0 @@
|
|||
import aio_pika
|
||||
import asyncpg
|
||||
import redis
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from typing import Annotated, Tuple
|
||||
from ..domain.click import (
|
||||
ClickResponse, BatchClickRequest, EnergyResponse, ClickValueResponse,
|
||||
add_click_batch_copy, check_registration, check_energy, get_energy, click_value, delete_user_info
|
||||
)
|
||||
|
||||
from ..dependencies import get_token_header
|
||||
from ..db import get_pg, get_redis, get_rmq
|
||||
from ..config import BACKEND_URL
|
||||
|
||||
|
||||
router = APIRouter(
|
||||
prefix="",
|
||||
tags=['click'],
|
||||
dependencies=[],
|
||||
responses={404: {'description': 'Not found'}},
|
||||
)
|
||||
|
||||
|
||||
@router.post("/batch-click/", response_model=ClickResponse, status_code=200)
|
||||
async def batch_click(req: BatchClickRequest, auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], pg: Annotated[asyncpg.Connection, Depends(get_pg)], r: Annotated[redis.Redis, Depends(get_redis)], rmq: Annotated[aio_pika.Channel, Depends(get_rmq)]):
|
||||
user_id, token = auth_info
|
||||
if not await check_registration(r, user_id, token, BACKEND_URL):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
_energy, spent = await check_energy(r, user_id, req.count, token)
|
||||
if spent == 0:
|
||||
raise HTTPException(status_code=400, detail='No energy')
|
||||
|
||||
click = await add_click_batch_copy(r, pg, rmq, user_id, spent)
|
||||
return ClickResponse(
|
||||
click=click,
|
||||
energy=_energy
|
||||
)
|
||||
|
||||
|
||||
@router.get("/energy", response_model=EnergyResponse, status_code=200)
|
||||
async def energy(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)]):
|
||||
user_id, token = auth_info
|
||||
if not await check_registration(r, user_id, token, BACKEND_URL):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
_energy = await get_energy(r, user_id, token)
|
||||
return EnergyResponse(
|
||||
energy=_energy
|
||||
)
|
||||
|
||||
|
||||
@router.get('/coefficient', response_model=ClickValueResponse, status_code=200)
|
||||
async def coefficient(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]):
|
||||
user_id, token = auth_info
|
||||
if not await check_registration(r, user_id, token, BACKEND_URL):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
value = await click_value(r, pg, user_id)
|
||||
return ClickValueResponse(
|
||||
value=value
|
||||
)
|
||||
|
||||
|
||||
@router.delete('/internal/user', status_code=204)
|
||||
async def delete_user(auth_info: Annotated[Tuple[int, str], Depends(get_token_header)], r: Annotated[redis.Redis, Depends(get_redis)], pg: Annotated[asyncpg.Connection, Depends(get_pg)]):
|
||||
user_id, token = auth_info
|
||||
if not await check_registration(r, user_id, token, BACKEND_URL):
|
||||
raise HTTPException(status_code=403, detail='Unauthorized')
|
||||
|
||||
await delete_user_info(r, pg, user_id)
|
|
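End to end, a batch-click call authenticates with an Authorization: TelegramToken <base64> header and posts the click count as JSON to /api/v1/batch-click/. A hypothetical client call with httpx (the token value is a placeholder; crowngame.ru is the host the nginx config serves):

```python
import asyncio

import httpx

TOKEN = "PLACEHOLDER_BASE64_INIT_DATA"  # base64("data_check_string:hash") built by the WebApp client


async def main() -> None:
    async with httpx.AsyncClient(base_url="https://crowngame.ru") as client:
        resp = await client.post(
            "/api/v1/batch-click/",
            json={"count": 10},
            headers={"Authorization": f"TelegramToken {TOKEN}"},
        )
        resp.raise_for_status()
        data = resp.json()  # {"click": {...}, "energy": <remaining energy>}
        print(data["energy"])


if __name__ == "__main__":
    asyncio.run(main())
```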
@ -1 +0,0 @@
|
|||
from .http_error_handler import http_error_handler
|
|
@ -1,7 +0,0 @@
|
|||
from fastapi import HTTPException
|
||||
from starlette.requests import Request
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
|
||||
async def http_error_handler(_: Request, exc: HTTPException) -> JSONResponse:
|
||||
return JSONResponse({"error": exc.detail}, status_code=exc.status_code)
|
|
@@ -1 +0,0 @@
docker exec -it $1 python /batcher/app/migrate.py $2
|
@ -1,31 +0,0 @@
|
|||
aio-pika==9.4.3
|
||||
aiohappyeyeballs==2.4.3
|
||||
aiohttp==3.10.10
|
||||
aiormq==6.8.1
|
||||
aiosignal==1.3.1
|
||||
amqp==5.2.0
|
||||
annotated-types==0.7.0
|
||||
anyio==4.6.2.post1
|
||||
async-timeout==4.0.3
|
||||
asyncpg==0.29.0
|
||||
asyncpg-trek==0.4.0
|
||||
attrs==24.2.0
|
||||
certifi==2024.8.30
|
||||
charset-normalizer==3.4.0
|
||||
fastapi==0.115.2
|
||||
frozenlist==1.4.1
|
||||
idna==3.10
|
||||
multidict==6.1.0
|
||||
pamqp==3.3.0
|
||||
propcache==0.2.0
|
||||
pydantic==2.9.2
|
||||
pydantic_core==2.23.4
|
||||
redis==5.1.1
|
||||
requests==2.32.3
|
||||
sniffio==1.3.1
|
||||
starlette==0.40.0
|
||||
typing_extensions==4.12.2
|
||||
tzdata==2024.2
|
||||
urllib3==2.2.3
|
||||
vine==5.1.0
|
||||
yarl==1.15.5
|
|
@ -1,143 +0,0 @@
|
|||
version: '3.9'
|
||||
|
||||
volumes:
|
||||
db_data: {}
|
||||
batcher_db_data: {}
|
||||
redis_data: {}
|
||||
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
command: /gunicorn.sh
|
||||
entrypoint: /entrypoint.sh
|
||||
restart: on-failure
|
||||
depends_on:
|
||||
- postgres
|
||||
- rabbitmq
|
||||
env_file:
|
||||
- .env/prod/pg
|
||||
- .env/prod/back
|
||||
- .env/prod/rmq
|
||||
- .env/prod/bot
|
||||
|
||||
bot:
|
||||
build:
|
||||
context: ./clicker_bot
|
||||
depends_on:
|
||||
- backend
|
||||
volumes:
|
||||
- ./clicker_bot:/app
|
||||
environment:
|
||||
PROD: 1
|
||||
env_file:
|
||||
- .env/prod/bot
|
||||
command:
|
||||
- /gunicorn.sh
|
||||
restart: on-failure
|
||||
#
|
||||
# memcached:
|
||||
# container_name: memcached
|
||||
# image: memcached:latest
|
||||
|
||||
postgres:
|
||||
image: postgres:14.5-alpine
|
||||
volumes:
|
||||
- db_data:/var/lib/postgresql/data
|
||||
env_file:
|
||||
- .env/prod/pg
|
||||
|
||||
nginx:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: nginx/Dockerfile
|
||||
ports:
|
||||
- '80:80'
|
||||
- '443:443'
|
||||
depends_on:
|
||||
- backend
|
||||
- bot
|
||||
- rabbitmq
|
||||
- batcher
|
||||
volumes:
|
||||
- ./backend/static/:/static/
|
||||
- ./nginx/certbot/conf:/etc/letsencrypt
|
||||
- ./nginx/certbot/www:/var/www/certbot
|
||||
restart: unless-stopped
|
||||
command: '/bin/sh -c ''while :; do sleep 6h & wait $${!}; nginx -s reload; done & nginx -g "daemon off;"'''
|
||||
|
||||
certbot:
|
||||
container_name: certbot
|
||||
image: certbot/certbot
|
||||
volumes:
|
||||
- ./nginx/certbot/conf:/etc/letsencrypt
|
||||
- ./nginx/certbot/www:/var/www/certbot
|
||||
restart: unless-stopped
|
||||
entrypoint: "/bin/sh -c 'trap exit TERM; while :; do certbot renew; sleep 12h & wait $${!}; done;'"
|
||||
|
||||
celery:
|
||||
build: ./backend
|
||||
command: /start_celery.sh
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
env_file:
|
||||
- .env/prod/back
|
||||
- .env/prod/rmq
|
||||
- .env/prod/pg
|
||||
- .env/prod/bot
|
||||
depends_on:
|
||||
- backend
|
||||
- rabbitmq
|
||||
|
||||
celery-beat:
|
||||
build: ./backend
|
||||
command: celery -A clicker beat -l info
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
env_file:
|
||||
- .env/prod/back
|
||||
- .env/prod/rmq
|
||||
- .env/prod/pg
|
||||
- .env/prod/bot
|
||||
depends_on:
|
||||
- backend
|
||||
- rabbitmq
|
||||
|
||||
rabbitmq:
|
||||
container_name: 'rabbitmq'
|
||||
image: 'rabbitmq:3-management-alpine'
|
||||
env_file:
|
||||
- .env/prod/rmq
|
||||
ports:
|
||||
- '15672:15672'
|
||||
|
||||
redis:
|
||||
env_file:
|
||||
- .env/prod/redis
|
||||
image: redis
|
||||
command: bash -c "redis-server --appendonly yes --requirepass $${REDIS_PASSWORD}"
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
|
||||
batcher:
|
||||
build:
|
||||
context: ./batcher
|
||||
depends_on:
|
||||
- redis
|
||||
- batcher-postgres
|
||||
- rabbitmq
|
||||
env_file:
|
||||
- .env/prod/rmq
|
||||
- .env/prod/redis
|
||||
- .env/prod/batcher-pg
|
||||
- .env/prod/batcher
|
||||
- .env/prod/bot
|
||||
|
||||
batcher-postgres:
|
||||
image: postgres:14.5-alpine
|
||||
volumes:
|
||||
- batcher_db_data:/var/lib/postgresql/data
|
||||
env_file:
|
||||
- .env/prod/batcher-pg
|
|
@ -1,110 +0,0 @@
|
|||
version: '3.9'
|
||||
|
||||
volumes:
|
||||
db_data: {}
|
||||
batcher_db_data: {}
|
||||
redis_data: {}
|
||||
|
||||
services:
|
||||
backend:
|
||||
build:
|
||||
context: ./backend
|
||||
depends_on:
|
||||
- postgres
|
||||
- rabbitmq
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
command: /start.sh
|
||||
entrypoint: /entrypoint.sh
|
||||
restart: on-failure
|
||||
env_file:
|
||||
- .env/dev/pg
|
||||
- .env/dev/back
|
||||
- .env/dev/rmq
|
||||
- .env/dev/bot
|
||||
- .env/dev/web
|
||||
ports:
|
||||
- '8000:8000'
|
||||
|
||||
postgres:
|
||||
image: postgres:14.5-alpine
|
||||
volumes:
|
||||
- db_data:/var/lib/postgresql/data
|
||||
env_file:
|
||||
- .env/dev/pg
|
||||
ports:
|
||||
- '5432:5432'
|
||||
|
||||
celery:
|
||||
build: ./backend
|
||||
command: celery -A clicker worker -l info
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
env_file:
|
||||
- .env/dev/back
|
||||
- .env/dev/rmq
|
||||
- .env/dev/pg
|
||||
- .env/dev/bot
|
||||
- .env/dev/web
|
||||
depends_on:
|
||||
- backend
|
||||
- rabbitmq
|
||||
|
||||
celery-beat:
|
||||
build: ./backend
|
||||
command: celery -A clicker beat -l info
|
||||
volumes:
|
||||
- ./backend:/app
|
||||
env_file:
|
||||
- .env/dev/back
|
||||
- .env/dev/rmq
|
||||
- .env/dev/pg
|
||||
- .env/dev/bot
|
||||
- .env/dev/web
|
||||
depends_on:
|
||||
- backend
|
||||
- rabbitmq
|
||||
|
||||
rabbitmq:
|
||||
container_name: 'rabbitmq'
|
||||
image: 'rabbitmq:3-management-alpine'
|
||||
env_file:
|
||||
- .env/dev/rmq
|
||||
ports:
|
||||
- '5672:5672'
|
||||
- '15672:15672'
|
||||
|
||||
redis:
|
||||
env_file:
|
||||
- .env/dev/redis
|
||||
image: redis
|
||||
command: bash -c "redis-server --appendonly yes --requirepass $${REDIS_PASSWORD}"
|
||||
ports:
|
||||
- '6379:6379'
|
||||
volumes:
|
||||
- redis_data:/data
|
||||
|
||||
batcher:
|
||||
build:
|
||||
context: ./batcher
|
||||
depends_on:
|
||||
- redis
|
||||
- batcher-postgres
|
||||
env_file:
|
||||
- .env/dev/rmq
|
||||
- .env/dev/redis
|
||||
- .env/dev/batcher-pg
|
||||
- .env/dev/batcher
|
||||
- .env/dev/bot
|
||||
- .env/dev/web
|
||||
ports:
|
||||
- '8080:8080'
|
||||
|
||||
batcher-postgres:
|
||||
image: postgres:14.5-alpine
|
||||
volumes:
|
||||
- batcher_db_data:/var/lib/postgresql/data
|
||||
env_file:
|
||||
- .env/dev/batcher-pg
|
||||
ports:
|
||||
- '5433:5432'
|
|
@ -1,20 +0,0 @@
|
|||
# stage 1 - build frontend
|
||||
FROM node:16.20.0 as build-deps
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./frontend/package.json /app/
|
||||
COPY ./frontend/package-lock.json /app/
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY ./frontend /app/
|
||||
RUN npm run build
|
||||
|
||||
# stage 2 - nginx
|
||||
FROM nginx:stable
|
||||
COPY nginx/nginx.conf /etc/nginx/nginx.conf
|
||||
COPY --from=build-deps /app/dist/ /dist/
|
||||
|
||||
CMD ["nginx", "-g", "daemon off;"]
|
||||
|
nginx/nginx.conf · 158 lines
|
@ -1,158 +0,0 @@
|
|||
user nginx;
|
||||
worker_processes 1;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
log_format upstreamlog '[$time_local] $remote_addr - $remote_user - $server_name $host to: $upstream_addr: $request $status upstream_response_time $upstream_response_time msec $msec request_time $request_time';
|
||||
include /etc/nginx/mime.types;
|
||||
client_max_body_size 100m;
|
||||
|
||||
proxy_read_timeout 300;
|
||||
proxy_connect_timeout 300;
|
||||
proxy_send_timeout 300;
|
||||
proxy_buffer_size 8m;
|
||||
proxy_busy_buffers_size 12m;
|
||||
proxy_buffers 16 1m;
|
||||
uwsgi_buffers 16 1m;
|
||||
uwsgi_buffer_size 8m;
|
||||
uwsgi_busy_buffers_size 12m;
|
||||
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
upstream backend {
|
||||
server backend:8000;
|
||||
}
|
||||
upstream batcher {
|
||||
server batcher:8080;
|
||||
}
|
||||
upstream rabbitmq {
|
||||
server rabbitmq:15672;
|
||||
}
|
||||
upstream bot {
|
||||
server bot:7313;
|
||||
}
|
||||
|
||||
server {
|
||||
access_log /var/log/nginx/access.log upstreamlog;
|
||||
error_log /var/log/nginx/error.log;
|
||||
listen 80;
|
||||
listen 443 ssl http2;
|
||||
charset utf-8;
|
||||
server_name crowngame.ru www.crowngame.ru;
|
||||
|
||||
root /dist/;
|
||||
index index.html;
|
||||
|
||||
ssl_certificate /etc/letsencrypt/live/crowngame.ru/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/crowngame.ru/privkey.pem;
|
||||
|
||||
include /etc/letsencrypt/options-ssl-nginx.conf;
|
||||
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
|
||||
|
||||
if ($server_port = 80) {
|
||||
set $https_redirect 1;
|
||||
}
|
||||
if ($host ~ '^www\.') {
|
||||
set $https_redirect 1;
|
||||
}
|
||||
if ($https_redirect = 1) {
|
||||
return 301 https://crowngame.ru$request_uri;
|
||||
}
|
||||
|
||||
location /.well-known/acme-challenge/ {
|
||||
root /var/www/certbot;
|
||||
}
|
||||
|
||||
# frontend
|
||||
location / {
|
||||
try_files $uri $uri/ @rewrites;
|
||||
}
|
||||
|
||||
location @rewrites {
|
||||
rewrite ^(.+)$ /index.html last;
|
||||
}
|
||||
|
||||
# batcher
|
||||
location ~ ^/api/v1/(batch\-click|click|energy|coefficient)(/(.*))? {
|
||||
proxy_pass http://batcher;
|
||||
proxy_pass_header Authorization;
|
||||
}
|
||||
|
||||
location ^~ /api/internal {
|
||||
deny all;
|
||||
}
|
||||
|
||||
# backend
|
||||
location ~ ^/(admin|api) {
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://backend;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
if ($uri ~* \.(?:ico|js|css|gif|jpe?g|png|webp)/?$) {
|
||||
expires max;
|
||||
add_header Pragma public;
|
||||
add_header Cache-Control "public, must-revalidate, proxy-revalidate";
|
||||
}
|
||||
}
|
||||
|
||||
# bot
|
||||
location ~ ^/bot {
|
||||
proxy_http_version 1.1;
|
||||
proxy_pass http://bot;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-Host $host;
|
||||
proxy_set_header X-Forwarded-Port $server_port;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Connection $connection_upgrade;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
# backend static
|
||||
location ~ ^/(static)/(.*)$ {
|
||||
alias /$1/$2;
|
||||
}
|
||||
|
||||
|
||||
# Some basic cache-control for static files to be sent to the browser
|
||||
location ~* \.(?:ico|css|js|gif|jpe?g|png|webp)$ {
|
||||
expires max;
|
||||
add_header Pragma public;
|
||||
add_header Cache-Control "public, must-revalidate, proxy-revalidate";
|
||||
}
|
||||
|
||||
location ~ ^/rabbitmq/api/(.*?)/(.*) {
|
||||
proxy_pass http://rabbitmq/api/$1/%2F/$2?$query_string;
|
||||
proxy_buffering off;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location ~ ^/rabbitmq/(.*) {
|
||||
rewrite ^/rabbitmq/(.*)$ /$1 break;
|
||||
proxy_pass http://rabbitmq;
|
||||
proxy_buffering off;
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
}
|
||||
}
|