Files
habr-article-API/src/router.py
n0body 6da6ace82f Readability and functionality improvements
- Refactored all DBwork functions so they no longer open and close the
connection inside their bodies; they now take a connection as a parameter
- Added single file to configure a FastAPI app
- Implemented FastAPI's lifespan function that calls certain functions
on app startup and shutdown
- Added error logging for scraping functions
- Fixed /articles/get/html and /articles/get/md endpoints
- All POST methods now return base64 encoded html/md strings to avoid
weird json formatting
2025-09-04 23:05:12 +03:00

101 lines
2.9 KiB
Python

import DBwork
import scraper
from DBwork import connection as conn
from fastapi import Response, status, APIRouter
from pydantic import BaseModel
import psycopg2
from json import dumps
import base64
# All endpoints in this module are mounted under the /api prefix.
router = APIRouter(prefix='/api')
class Entry(BaseModel):
    # Request body for the rating endpoints: the article URL plus an
    # optional numeric rating (rating may be omitted / null).
    url: str
    rating: int | None = None
class Article(BaseModel):
    # Request body for the single-article scraping endpoints.
    url: str
class Amount(BaseModel):
    # Request body for the feed endpoints: how many articles to fetch.
    amount: int
@router.get('/ping')
async def ping():
    """Liveness probe — always answers with a 'pong' message."""
    return dict(message='pong')
@router.get('/rates')
async def get_rates():
    """Return every stored rating entry as a JSON document.

    Note: returning the `dumps(...)` string directly from a FastAPI
    handler would JSON-encode it a *second* time, so clients would
    receive a quoted string containing JSON instead of the document
    itself. Sending the pre-serialized payload through `Response`
    with an explicit media type avoids the double encoding.
    """
    payload = dumps(DBwork.get_all_entries(conn))
    return Response(content=payload, media_type='application/json')
@router.post('/article/rate')
async def save_rating(entry: Entry, response: Response):
    """Persist a rating for an article URL.

    Echoes the url/rating back together with a status message; on a
    database error responds 500 with an 'internal server error' message.
    """
    try:
        DBwork.add_entry(article_url=entry.url,
                         rating=entry.rating,
                         connection=conn)
    except psycopg2.Error:
        # DB failure: degrade to a 500 but still return a JSON body.
        response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
        message = 'internal server error'
    else:
        message = 'success'
    # FIX: the original returned from a `finally` block, which silently
    # swallows any unexpected exception and raises NameError there
    # (`message` is unbound when a non-psycopg2 error escapes the try).
    return {'message': message,
            'url': entry.url,
            'rating': entry.rating
            }
@router.post('/article/remove_rate')
async def remove_rating(entry: Entry, response: Response):
    """Delete the stored rating entry for the given article URL.

    Responds 500 with an 'internal server error' message on DB failure.
    """
    try:
        DBwork.delete_entry(entry.url, conn)
    except psycopg2.Error:
        # DB failure: degrade to a 500 but still return a JSON body.
        response.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
        message = 'internal server error'
    else:
        message = 'success'
    # FIX: the original returned from a `finally` block, which silently
    # swallows any unexpected exception and raises NameError there
    # (`message` is unbound when a non-psycopg2 error escapes the try).
    return {'message': message}
@router.post('/article/get/html')
async def get_article_html(article: Article, response: Response = None):
    """Scrape the article at `article.url` and return its HTML,
    base64-encoded, as a plain-text response body."""
    raw_html = await scraper.get_article_html(article.url)
    encoded = base64.b64encode(raw_html.encode('utf-8'))
    return Response(content=encoded.decode('utf-8'), media_type='text/plain')
@router.post('/article/get/md')
async def get_article_md(article: Article, response: Response = None):
    """Scrape the article at `article.url`, convert it to Markdown, and
    return the result base64-encoded as a plain-text response body."""
    raw_md = await scraper.get_article_html(article.url, md=True)
    encoded = base64.b64encode(raw_md.encode('utf-8'))
    return Response(content=encoded.decode('utf-8'), media_type='text/plain')
@router.post('/articles/get/html')
async def get_n_articles_html(amount: Amount, response: Response = None):
    """Scrape the newest `amount.amount` feed articles and return a
    mapping of absolute article URL -> base64-encoded HTML."""
    results = {}
    for path in await scraper.get_articles_from_feed(amount.amount):
        # Feed paths are relative; build the absolute URL once and reuse it.
        full_url = f'https://habr.com{path}'
        page = await scraper.get_article_html(full_url)
        results[full_url] = base64.b64encode(page.encode('utf-8')).decode('utf-8')
    return results
@router.post('/articles/get/md')
async def get_n_articles_md(amount: Amount, response: Response = None):
    """Scrape the newest `amount.amount` feed articles, convert each to
    Markdown, and return a mapping of absolute URL -> base64 Markdown."""
    results = {}
    paths = await scraper.get_articles_from_feed(amount.amount)
    for path in paths:
        # Feed paths are relative; build the absolute URL once and reuse it.
        full_url = f'https://habr.com{path}'
        markdown = await scraper.get_article_html(full_url, md=True)
        results[full_url] = base64.b64encode(markdown.encode('utf-8')).decode('utf-8')
    return results