Compare commits

...

2 Commits

Author SHA1 Message Date
3e417e578c refactored docker-compose 2023-11-28 16:29:22 +03:00
2bf35852cd refactored logging with json_logs func 2023-11-27 02:09:53 +03:00
8 changed files with 108 additions and 13 deletions

17
Dockerfile Normal file
View File

@@ -0,0 +1,17 @@
FROM python:3.11.4
WORKDIR /app
COPY poetry.lock pyproject.toml /app/
RUN apt-get -y update
RUN apt-get -y upgrade
RUN apt-get install -y ffmpeg
RUN pip install poetry
RUN poetry install --no-root
COPY .. /app
CMD poetry run python main.py

View File

@@ -1,19 +1,60 @@
 version: "2.1"
+networks:
+  network:
 services:
+  web_service:
+    container_name: web_service
+    build:
+      context: .
+      dockerfile: web.Dockerfile
+    ports:
+      - "8000:8000"
+    depends_on:
+      redis:
+        condition: service_started
+      rabbitmq:
+        condition: service_healthy
+    restart: always
+    networks:
+      - network
+  download_service:
+    container_name: download_service
+    build:
+      context: .
+      dockerfile: Dockerfile
+    depends_on:
+      redis:
+        condition: service_started
+      rabbitmq:
+        condition: service_healthy
+    restart: always
+    networks:
+      - network
   rabbitmq:
+    container_name: rabbitmq
     image: rabbitmq:3.10.7-management
     hostname: rabbitmq
     restart: always
+    healthcheck:
+      test: rabbitmq-diagnostics -q ping
+      interval: 30s
+      timeout: 30s
+      retries: 3
     environment:
       - RABBITMQ_DEFAULT_USER=guest
       - RABBITMQ_DEFAULT_PASS=guest
     volumes:
       - ./rabbitmq:/var/lib/rabbitmq
     ports:
-      - 15672:15672
-      - 5672:5672
+      - "15672:15672"
+      - "5672:5672"
+    networks:
+      - network
   redis:
     container_name: redis_video_downloader
     image: redis:latest
     ports:
       - "6379:6379"
+    networks:
+      - network

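Note: with every service attached to the shared network, containers resolve one another by service/container name instead of localhost, which is why the connection strings in the Python diffs below switch to "rabbitmq" and "redis_video_downloader". A minimal connectivity sketch under that assumption (hypothetical helper, using aio_pika and the asyncio redis client that the repo's code appears to use):

import asyncio

import aio_pika
import redis.asyncio as redis  # assumption: redis-py's asyncio client, as RedisClient suggests


async def check_backends() -> None:
    # Hostnames are the compose service/container names, valid only inside the compose network.
    rabbit = await aio_pika.connect("amqp://guest:guest@rabbitmq/")
    await rabbit.close()

    cache = redis.Redis(host="redis_video_downloader", port=6379, db=0)
    print(await cache.ping())  # True if Redis is reachable
    await cache.close()


if __name__ == "__main__":
    asyncio.run(check_backends())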
22
main.py
View File

@@ -1,8 +1,30 @@
 import asyncio
+import json
+from typing import Any
 from multiprocessing import freeze_support
 from src.core.master_service import MasterService
+from loguru import logger
+
+
+def json_logs(message: Any) -> None:
+    record = message.record
+    data = {
+        "timestamp": record["time"].strftime("%d.%m.%y %H.%M.%S %Z%z"),
+        "level": record["level"].name,
+        "message": record["message"],
+        "path": record["file"].path,
+        "function": record["function"],
+        "line": record["line"],
+    }
+    print(json.dumps(data))
+
+
+logger.remove(0)
+logger.add(json_logs)
+
 if __name__ == '__main__':
     freeze_support()
     loop = asyncio.new_event_loop()

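For context, loguru lets any callable act as a sink: logger.add(json_logs) registers the function above, and loguru then calls it with a Message object whose .record dict carries the time, level, file, function, and line metadata serialized here; logger.remove(0) drops the default stderr handler so records are not printed twice. A tiny standalone sketch of the same pattern (hypothetical, not part of this repository):

import json

from loguru import logger


def tiny_json_sink(message) -> None:
    record = message.record
    print(json.dumps({"level": record["level"].name, "message": record["message"], "line": record["line"]}))


logger.remove(0)            # drop the default stderr handler (its id is 0)
logger.add(tiny_json_sink)  # every subsequent log call goes through the JSON sink
logger.warning("disk almost full")
# prints roughly: {"level": "WARNING", "message": "disk almost full", "line": 14}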
View File

@@ -1,6 +1,7 @@
 import asyncio
 import json
+from loguru import logger
 from playwright.async_api import async_playwright
 from playwright.async_api import Playwright
 from aio_pika import Message, connect, DeliveryMode
@@ -38,12 +39,13 @@ async def run(playwright: Playwright):
             routing_key='hello',
         )
-        print(f" [x] Sent '{body}'")
+        logger.info(f" [x] Sent '{body}'")
         await page.keyboard.press("ArrowDown")
         while title == await page.title():
             await page.title()


 async def main():
     async with async_playwright() as playwright:
         await run(playwright)

View File

@@ -4,16 +4,17 @@ from functools import partial
 from aio_pika import connect, Message, DeliveryMode
 from aio_pika.abc import AbstractIncomingMessage
+from loguru import logger


 async def on_message(message: AbstractIncomingMessage, queue) -> None:
     async with message.process():
         await queue.put(json.loads(message.body))
-        print(f" Message body is: {message.body!r}")
+        logger.info(f" Message body is: {message.body!r}")


 async def get_messages(inner_queue) -> None:
-    async with await connect("amqp://guest:guest@localhost/") as connection:
+    async with await connect("amqp://guest:guest@rabbitmq/") as connection:
         channel = await connection.channel()
         await channel.set_qos(prefetch_count=1)
@@ -23,14 +24,13 @@ async def get_messages(inner_queue) -> None:
         )
         await queue.consume(partial(on_message, queue=inner_queue))

-        print(" [*] Waiting for messages. To exit press CTRL+C")
+        logger.info("[*] Waiting for messages. To exit press CTRL+C")
         await asyncio.Future()


 async def publish_message_with_task_done(task: dict | list) -> None:
     queue_name = "tasks_done"
-    async with await connect("amqp://guest:guest@localhost/") as connection:
+    async with await connect("amqp://guest:guest@rabbitmq/") as connection:
         # Creating channel
         channel = await connection.channel()

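One detail worth noting in the consumer above: queue.consume(partial(on_message, queue=inner_queue)) works because functools.partial pre-binds the extra queue argument, so aio_pika can keep invoking the callback with only the incoming message. A small illustrative sketch (hypothetical names, no broker involved):

import asyncio
from functools import partial


async def on_message(message: str, queue: asyncio.Queue) -> None:
    # The caller only passes `message`; `queue` is already bound by partial.
    await queue.put(message)


async def demo() -> None:
    inner_queue: asyncio.Queue = asyncio.Queue()
    handler = partial(on_message, queue=inner_queue)
    await handler("hello")          # same as on_message("hello", queue=inner_queue)
    print(await inner_queue.get())  # -> hello


asyncio.run(demo())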
View File

@@ -9,7 +9,7 @@ class RedisClient:
     TASKS_DONE_NAME = "tasks_done"

     def __init__(self):
-        self.connection = redis.Redis(host="localhost", port=6379, db=0)
+        self.connection = redis.Redis(host="redis_video_downloader", port=6379, db=0)

     async def _set_task(self, queue_name: str, link: str, task: dict | list, ) -> int:
         async with self.connection as connection:

View File

@@ -1,4 +1,5 @@
 import json
+import os

 import uvicorn
 import logging
@@ -15,8 +16,7 @@ from src.web.schemes.submit import SubmitIn, CheckIn, DeleteFromS3, CopyToAnothe
 app = FastAPI(
     title="video_downloader", openapi_url=f"/api/v1/openapi.json"
 )
-
-templates = Jinja2Templates(directory="templates")
+templates = Jinja2Templates(directory=f"{os.path.dirname(os.path.dirname(os.path.abspath(__file__)))}/web/templates")

 app.add_middleware(
     CORSMiddleware,
@@ -102,7 +102,7 @@ async def get_url_for_download_video(request: Request, data: SubmitIn = Depends(
     return JSONResponse({"result": links_to_download_video})

     # TODO: account for requests made via curl/urllib3/etc. - the body may contain several links -> several tasks should be created
-    async with await connect("amqp://guest:guest@localhost/") as connection:
+    async with await connect("amqp://guest:guest@rabbitmq/") as connection:
         # Creating a channel
         channel = await connection.channel()
         body = [
@@ -204,4 +204,4 @@ async def delete_video_from_s3(data: CopyToAnotherBucketS3):
     )

-    uvicorn.run("src.web.main:app", host="0.0.0.0", log_level="info")
+    uvicorn.run("src.web.main:app", host="0.0.0.0", port=8000, log_level="info")

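The new Jinja2Templates directory is built from __file__ so the web app finds its templates regardless of the working directory inside the container. A quick illustration of how that expression resolves (the /app/src/web/main.py location is an assumption based on WORKDIR /app in the Dockerfiles):

import os

# Hypothetical location of the module inside the container.
file_path = "/app/src/web/main.py"

base = os.path.dirname(os.path.dirname(os.path.abspath(file_path)))  # -> /app/src
templates_dir = f"{base}/web/templates"                              # -> /app/src/web/templates
print(templates_dir)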
13
web.Dockerfile Normal file
View File

@@ -0,0 +1,13 @@
FROM python:3.11.4
WORKDIR /app
COPY poetry.lock pyproject.toml /app/
RUN pip install poetry
RUN poetry install --no-root
COPY .. /app
CMD poetry run python main_web.py