Merge pull request 'asynchronous-porridger' (#49) from asynchronous-porridger into main
Reviewed-on: dm1sh/porridger#49
Commit 7ecfe6faa4
@@ -29,4 +29,7 @@ dist-ssr
 uploads/
 .env
 
+poems.txt
+poem_pic/
+
 __pycache__
.gitignore (vendored, 2 changes)
@@ -30,4 +30,6 @@ uploads/
 .env
 poem_pic/
 
+poem_pic/
+
 __pycache__
@@ -35,5 +35,5 @@ Only docker/podman are required
 ```sh
 docker build . -t porridger:build
 
-docker run --name porridger -p 8000:8000 -v ./sql_app.db:/srv/sql_app.db -v uploads:/srv/uploads -v poem_pic:/srv/poem_pic porridger:build
+docker run --name porridger -p 8000:8000 -v ./sql_app.db:/srv/sql_app.db -v ./poems.txt:/srv/poems.txt -v ./poem_pic:/srv/poem_pic -v uploads:/srv/uploads porridger:build
 ```
alembic.ini (18 changes)
@@ -4,8 +4,9 @@
 # path to migration scripts
 script_location = migrations
 
-# template used to generate migration files
-# file_template = %%(rev)s_%%(slug)s
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
 
 # sys.path path, will be prepended to sys.path if present.
 # defaults to the current working directory.
@@ -48,11 +49,16 @@ prepend_sys_path = .
 # version_path_separator = space
 version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
 
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
 # the output encoding used when revision files
 # are written from script.py.mako
 # output_encoding = utf-8
 
-# sqlalchemy.url = driver://user:pass@localhost/dbname
+; sqlalchemy.url = driver://user:pass@localhost/dbname
 
 
 [post_write_hooks]
@@ -66,6 +72,12 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne
 # black.entrypoint = black
 # black.options = -l 79 REVISION_SCRIPT_FILENAME
 
+# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = %(here)s/.venv/bin/ruff
+# ruff.options = --fix REVISION_SCRIPT_FILENAME
+
 # Logging configuration
 [loggers]
 keys = root,sqlalchemy,alembic
@@ -1,18 +1,17 @@
 from sqlalchemy.orm import Session
+from sqlalchemy.sql import text, literal_column
+from sqlalchemy.ext.asyncio import AsyncSession
 from typing import Annotated
 from fastapi import Depends
-from . import auth_utils, orm_models, pydantic_schemas
+from sqlalchemy import select, or_, and_
 import datetime
 
-# Переменные для получения данных о мусорках с внешнего API
-# url API
-BASE_URL='https://geointelect2.gate.petersburg.ru'#адрес сайта и мой токин
-# токен для получения данных
-my_token='eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJhU1RaZm42bHpTdURYcUttRkg1SzN5UDFhT0FxUkhTNm9OendMUExaTXhFIn0.eyJleHAiOjE3ODM3ODk4NjgsImlhdCI6MTY4OTA5NTQ2OCwianRpIjoiNDUzNjQzZTgtYTkyMi00NTI4LWIzYmMtYWJiYTNmYjkyNTkxIiwiaXNzIjoiaHR0cHM6Ly9rYy5wZXRlcnNidXJnLnJ1L3JlYWxtcy9lZ3MtYXBpIiwiYXVkIjoiYWNjb3VudCIsInN1YiI6ImJjYjQ2NzljLTU3ZGItNDU5ZC1iNWUxLWRlOGI4Yzg5MTMwMyIsInR5cCI6IkJlYXJlciIsImF6cCI6ImFkbWluLXJlc3QtY2xpZW50Iiwic2Vzc2lvbl9zdGF0ZSI6ImM2ZDJiOTZhLWMxNjMtNDAxZS05ZjMzLTI0MmE0NDcxMDY5OCIsImFjciI6IjEiLCJhbGxvd2VkLW9yaWdpbnMiOlsiLyoiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbImRlZmF1bHQtcm9sZXMtZWdzLWFwaSIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJhY2NvdW50Ijp7InJvbGVzIjpbIm1hbmFnZS1hY2NvdW50IiwibWFuYWdlLWFjY291bnQtbGlua3MiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6ImVtYWlsIHByb2ZpbGUiLCJzaWQiOiJjNmQyYjk2YS1jMTYzLTQwMWUtOWYzMy0yNDJhNDQ3MTA2OTgiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsIm5hbWUiOiLQktC70LDQtNC40LzQuNGAINCv0LrQvtCy0LvQtdCyIiwicHJlZmVycmVkX3VzZXJuYW1lIjoiZTBmYzc2OGRhOTA4MjNiODgwZGQzOGVhMDJjMmQ5NTciLCJnaXZlbl9uYW1lIjoi0JLQu9Cw0LTQuNC80LjRgCIsImZhbWlseV9uYW1lIjoi0K_QutC-0LLQu9C10LIifQ.E2bW0B-c6W5Lj63eP_G8eI453NlDMnW05l11TZT0GSsAtGayXGaolHtWrmI90D5Yxz7v9FGkkCmcUZYy1ywAdO9dDt_XrtFEJWFpG-3csavuMjXmqfQQ9SmPwDw-3toO64NuZVv6qVqoUlPPj57sLx4bLtVbB4pdqgyJYcrDHg7sgwz4d1Z3tAeUfSpum9s5ZfELequfpLoZMXn6CaYZhePaoK-CxeU3KPBPTPOVPKZZ19s7QY10VdkxLULknqf9opdvLs4j8NMimtwoIiHNBFlgQz10Cr7bhDKWugfvSRsICouniIiBJo76wrj5T92s-ztf1FShJuqnQcKE_QLd2A'
+from . import auth_utils, orm_models, pydantic_schemas
 
 # Загружаем стихи
-def add_poems_to_db(db: Session):
-    f1 = open('text121.txt', encoding='utf-8', mode='r')#открыть фаил для чтения на русском
+async def add_poems_to_db(async_db: AsyncSession):
+    f1 = open('poems.txt', encoding='utf-8', mode='r')#открыть фаил для чтения на русском
     for a in range(1, 110):
         f1.seek(0)#перейти к началу
         i=0
@@ -36,40 +35,78 @@ def add_poems_to_db(db: Session):
             author += str1
         poem = orm_models.Poems(title=name, text=stixi, author=author)
         # В конце каждой итерации добавляем в базу данных
-        db.add(poem)
-        db.commit()
-        # db.refresh(poem)
+        async_db.add(poem)
+        async_db.commit()
     # close the file
     f1.close()
 
-def filter_ann(schema: pydantic_schemas.SortAnnouncements, db: Annotated[Session, Depends(auth_utils.get_db)]):
+async def filter_ann(schema: pydantic_schemas.SortAnnouncements, db: AsyncSession):
     """Функция для последовательного применения различных фильтров (через схему SortAnnouncements)"""
-    res = db.query(orm_models.Announcement)
     fields = schema.__dict__ # параметры передоваемой схемы SortAnnouncements (ключи и значения)
     # проходим по названиям фильтров и их значениям
-    for name, filt in fields.items():
+    # выбираем все строки
+    query = await db.execute(select(orm_models.Announcement))
+    res = set(query.scalars().all())
+    for name, filt_val in fields.items():
+        # res = await db.execute(statement)
         # если фильтр задан
-        if filt is not None:
-            d = {name: filt}
-            # фильтруем
-            res = res.filter_by(**d)
+        if filt_val is not None:
+            if name == "obsolete":
+                filt_val = bool(filt_val)
+            filter_query = await db.execute(select(orm_models.Announcement).where(literal_column(f"announcements.{name}") == filt_val))
+            filtered = set(filter_query.scalars().all())
+            res = res.intersection(filtered)
+    # # отфильтровываем подходящие объявления
+    # res = await db.execute(
+    # select(orm_models.Announcement).where(
+    # ((schema.obsolete == None) | ((schema.obsolete != None) & (orm_models.Announcement.obsolete == schema.obsolete)))
+    # & ((schema.user_id == None) | ((schema.user_id != None) & (orm_models.Announcement.user_id == schema.user_id)))
+    # & ((schema.metro == None) | ((schema.metro != None) & (orm_models.Announcement.metro == schema.metro)))
+    # & ((schema.category == None) | ((schema.category != None) & (orm_models.Announcement.category == schema.category)))
+    # )
+    # )
+
+    # .where(schema.user_id != None and orm_models.Announcement.user_id == schema.user_id)
+    # .where(schema.metro != None and orm_models.Announcement.metro == schema.metro)
+    # .where(schema.category != None and orm_models.Announcement.category == schema.category)
+    # statement = text("SELECT * FROM announcements "
+    # "WHERE announcements.obsolete = :obsolete "
+    # "INTERSECT"
+    # "SELECT * FROM announcements "
+    # "WHERE announcements.user_id == :user_id "
+    # "INTERSECT"
+    # "SELECT * FROM announcements "
+    # "WHERE announcements.metro == :metro "
+    # "INTERSECT"
+    # "SELECT * FROM announcements "
+    # "WHERE announcements.category == :category")
+
+    # res = await db.execute(statement,
+    # {"obsolete": schema.obsolete,
+    # "user_id": schema.user_id,
+    # "metro": schema.metro,
+    # "category": schema.category}
+    # )
+
     # возвращаем все подходящие объявления
-    return res.all()
+    return res
 
 
-def check_obsolete(db: Annotated[Session, Depends(auth_utils.get_db)], current_date: datetime.date):
+async def check_obsolete(db: AsyncSession, current_date: datetime.date):
     """
     Функция участвует в процессе обновления поля obsolete у всех объявлений раз в сутки
     """
     # обращаемся ко всем объявлениям бд
-    announcements = db.query(orm_models.Announcement).all()
+    query_announcements = await db.execute(select(orm_models.Announcement))
+    announcements = query_announcements.scalars().all()
     # для каждого объявления
     for ann in announcements:
         # если просрочено
        if ann.best_by < current_date:
            ann.obsolete = True
-            db.commit()
-            db.refresh(ann) # обновляем состояние объекта
+            await db.commit()
+            await db.refresh(ann) # обновляем состояние объекта
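The new filter_ann loads every announcement and then intersects per-filter result sets in Python. A more direct pattern, not part of this PR and shown only as a sketch reusing the same orm_models and pydantic_schemas names, would be to AND the active filters into a single SELECT and let the database do the filtering:

```python
# Hypothetical alternative, not from the PR: one query instead of per-filter set intersections.
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from . import orm_models, pydantic_schemas


async def filter_ann_single_query(schema: pydantic_schemas.SortAnnouncements, db: AsyncSession):
    stmt = select(orm_models.Announcement)
    for name, value in schema.__dict__.items():
        if value is not None:
            # each active filter becomes a WHERE clause on the column of the same name
            stmt = stmt.where(getattr(orm_models.Announcement, name) == value)
    result = await db.execute(stmt)
    return result.scalars().all()
```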
back/api.py (174 changes)
@@ -10,6 +10,7 @@ from fastapi.requests import Request
 from typing import Any, Annotated, List, Union
 from starlette.staticfiles import StaticFiles
 from sqlalchemy.orm import Session
+from sqlalchemy import select
 
 import requests
 from uuid import uuid4
@@ -22,15 +23,18 @@ import pathlib
 import shutil
 import os
 
-from .db import database
 from . import add_poems_and_filters, auth_utils, orm_models, pydantic_schemas
 
+from .config import TRASHBOXES_BASE_URL, TRASHBOXES_TOKEN
+
 # создаем приложение Fastapi
 app = FastAPI()
 
 # Jinja2 - шаблоны
 templates = Jinja2Templates(directory="./front/dist")
 
+# хранение картинок для стихов
+app.mount("/poem_pic", StaticFiles(directory = "./poem_pic"))
 # создаем эндпоинт для хранения статических файлов
 app.mount("/static", StaticFiles(directory = "./front/dist"))
 # проверяем, что папка uploads еще не создана
@@ -39,38 +43,42 @@ if not os.path.exists("./uploads"):
     # создаем эндпоинт для хранения файлов пользователя
     app.mount("/uploads", StaticFiles(directory = "./uploads"))
 
+# эндпоинт для возвращения согласия в pdf
+@app.get("/privacy_policy.pdf")
+async def privacy_policy():
+    return FileResponse("./privacy_policy.pdf")
+
 # получение списка объявлений
 @app.get("/api/announcements", response_model=List[pydantic_schemas.Announcement])#адрес объявлений
-def announcements_list(db: Annotated[Session, Depends(auth_utils.get_db)], obsolete: Union[bool, None] = False, user_id: Union[int, None] = None,
+async def announcements_list(db: Annotated[Session, Depends(auth_utils.get_session)], obsolete: Union[bool, None] = False, user_id: Union[int, None] = None,
                         metro: Union[str, None] = None,category: Union[str, None] = None):
     # параметры для сортировки (схема pydantic schemas.SortAnnouncements)
     params_to_sort = pydantic_schemas.SortAnnouncements(obsolete=obsolete, user_id=user_id, metro=metro, category=category)
     # получаем результат
-    result = add_poems_and_filters.filter_ann(db=db, schema=params_to_sort)
+    result = await add_poems_and_filters.filter_ann(db=db, schema=params_to_sort)
 
     return result
 
 
 # получаем данные одного объявления
 @app.get("/api/announcement", response_model=pydantic_schemas.AnnResponce)
-def single_announcement(ann_id:int, db: Annotated[Session, Depends(auth_utils.get_db)]): # передаем индекс обявления
+async def single_announcement(ann_id:int, db: Annotated[Session, Depends(auth_utils.get_session)]): # передаем индекс обявления
     # Считываем данные из Body и отображаем их на странице.
     # В последствии будем вставлять данные в html-форму
-    try:
-        announcement = db.get(orm_models.Announcement, ann_id)
+    announcement = await db.get(orm_models.Announcement, ann_id)
+    #announcement = await db.execute(select(orm_models.Announcement)).scalars().all()
+    if not announcement:
+        raise HTTPException(status_code=404, detail="Item not found")
     return announcement
-    except:
-        return {"Answer" : False} #если неуданый доступ, то сообщаем об этом
 
 
 # Занести объявление в базу данных
 @app.put("/api/announcement")
-def put_in_db(name: Annotated[str, Form()], category: Annotated[str, Form()], bestBy: Annotated[datetime.date, Form()],
+async def put_in_db(name: Annotated[str, Form()], category: Annotated[str, Form()], bestBy: Annotated[datetime.date, Form()],
               address: Annotated[str, Form()], longtitude: Annotated[float, Form()], latitude: Annotated[float, Form()],
               description: Annotated[str, Form()], metro: Annotated[str, Form()], current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_active_user)],
-              db: Annotated[Session, Depends(auth_utils.get_db)], src: Union[UploadFile, None] = None, trashId: Annotated[int, Form()] = None):
-    try:
+              db: Annotated[Session, Depends(auth_utils.get_session)], src: Union[UploadFile, None] = None, trashId: Annotated[int, Form()] = None):
     # имя загруженного файла по умолчанию - пустая строка
     uploaded_name = ""
     # если пользователь загрузил картинку
@@ -91,33 +99,42 @@ def put_in_db(name: Annotated[str, Form()], category: Annotated[str, Form()], be
     temp_ancmt = orm_models.Announcement(user_id=current_user.id, name=name, category=category, best_by=bestBy,
                 address=address, longtitude=longtitude, latitude=latitude, description=description, metro=metro,
                 trashId=trashId, src=uploaded_name, booked_by=0)
+    try:
         db.add(temp_ancmt) # добавляем в бд
-        db.commit() # сохраняем изменения
-        db.refresh(temp_ancmt) # обновляем состояние объекта
-        return {"Answer" : True}
+        await db.commit() # сохраняем изменения
+        await db.refresh(temp_ancmt) # обновляем состояние объекта
+        return {"Success": True}
     except:
-        return {"Answer" : False}
+        raise HTTPException(status_code=500, detail="problem with adding object to db")
 
 
 # Удалить объявления из базы
 @app.delete("/api/announcement") #адрес объявления
-def delete_from_db(announcement: pydantic_schemas.DelAnnouncement, db: Annotated[Session, Depends(auth_utils.get_db)]): # функция удаления объекта из БД
-    try:
+async def delete_from_db(announcement: pydantic_schemas.DelAnnouncement, db: Annotated[Session, Depends(auth_utils.get_session)]): # функция удаления объекта из БД
     # находим объект с заданным id в бд
-    to_delete = db.query(orm_models.Announcement).filter(orm_models.Announcement.id==announcement.id).first()
-    db.delete(to_delete) # удаление из БД
-    db.commit() # сохраняем изменения
-    return {"Answer" : True}
+    #to_delete = db.query(orm_models.Announcement).filter(orm_models.Announcement.id==announcement.id).first()
+    query = await db.execute(select(orm_models.Announcement).where(orm_models.Announcement.id==announcement.id))
+    to_delete = query.scalars().first()
+    if not to_delete:
+        raise HTTPException(status_code=404, detail="Item not found. Can't delete")
+    try:
+        await db.delete(to_delete) # удаление из БД
+        await db.commit() # сохраняем изменения
+
+        return {"Success": True}
     except:
-        return {"Answer" : False}
+        raise HTTPException(status_code=500, detail="Problem with adding to database")
 
 
 # Забронировать объявление
 @app.post("/api/book")
-def change_book_status(data: pydantic_schemas.Book, current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)],
-                       db: Annotated[Session, Depends(auth_utils.get_db)]):
+async def change_book_status(data: pydantic_schemas.Book, current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)],
+                       db: Annotated[Session, Depends(auth_utils.get_session)]):
     # Находим объявление по данному id
-    announcement_to_change = db.query(orm_models.Announcement).filter(orm_models.Announcement.id == data.id).first()
+    #announcement_to_change = db.query(orm_models.Announcement).filter(orm_models.Announcement.id == data.id).first()
+    query = await db.execute(select(orm_models.Announcement).where(orm_models.Announcement.id == data.id))
+    announcement_to_change = query.scalars().first()
     # Проверяем, что объявление с данным id существует
     if not announcement_to_change:
         raise HTTPException(status_code=404, detail="Item not found")
@@ -128,25 +145,28 @@ def change_book_status(data: pydantic_schemas.Book, current_user: Annotated[pyda
     # Инкрементируем поле booked_by на 1
     announcement_to_change.booked_by += 1
     # фиксируем изменения в бд
-    db.commit()
-    db.refresh(announcement_to_change)
+    await db.commit()
+    await db.refresh(announcement_to_change)
     return {"Success": True}
 
 
 # reginstration
 @app.post("/api/signup")
-def create_user(nickname: Annotated[str, Form()], password: Annotated[str, Form()], db: Annotated[Session, Depends(auth_utils.get_db)],
+async def create_user(nickname: Annotated[str, Form()], password: Annotated[str, Form()], db: Annotated[Session, Depends(auth_utils.get_session)],
                 name: Annotated[str, Form()]=None, surname: Annotated[str, Form()]=None, avatar: Annotated[UploadFile, Form()]=None):
 
     # проверяем, что юзера с введенным никнеймом не существует в бд
-    if db.query(orm_models.User).filter(orm_models.User.nickname == nickname).first() == None:
+    #if db.query(orm_models.User).filter(orm_models.User.nickname == nickname).first() == None:
+    query_user = await db.execute(select(orm_models.User).where(orm_models.User.nickname == nickname))
+    user_with_entered_nick = query_user.scalars().first()
+    if user_with_entered_nick == None:
         # создаем нового юзера
         new_user = orm_models.User(nickname=nickname, hashed_password=auth_utils.get_password_hash(password),
                                    name=name, surname=surname, reg_date=datetime.date.today())
         # добавляем в бд
         db.add(new_user)
-        db.commit()
-        db.refresh(new_user) # обновляем состояние объекта
+        await db.commit()
+        await db.refresh(new_user) # обновляем состояние объекта
         return {"Success": True}
     return {"Success": False, "Message": "Пользователь с таким email уже зарегестрирован"}
 
@@ -154,10 +174,10 @@ def create_user(nickname: Annotated[str, Form()], password: Annotated[str, Form(
 # функция для генерации токена после успешного входа пользователя
 @app.post("/api/token", response_model=pydantic_schemas.Token)
 async def login_for_access_token(
-    form_data: Annotated[OAuth2PasswordRequestForm, Depends()], db: Annotated[Session, Depends(auth_utils.get_db)]
+    form_data: Annotated[OAuth2PasswordRequestForm, Depends()], db: Annotated[Session, Depends(auth_utils.get_session)]
 ):
     # пробуем найти юзера в бд по введенным паролю и никнейму
-    user = auth_utils.authenticate_user(db, form_data.username, form_data.password)
+    user = await auth_utils.authenticate_user(db, form_data.username, form_data.password)
     # если не нашли - кидаем ошибку
     if not user:
         raise HTTPException(
@@ -176,53 +196,70 @@ async def login_for_access_token(
 
 # получаем данные успешно вошедшего пользователя
 @app.get("/api/users/me", response_model=pydantic_schemas.User) #
-async def read_users_me(current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_active_user)]):
+def read_users_me(current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_active_user)]):
     return current_user
 
 
 # изменяем рейтинг пользователя
 @app.post("/api/user/rating")
-def add_points(data: pydantic_schemas.AddRating, current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)], db: Annotated[Session, Depends(auth_utils.get_db)]):
+async def add_points(data: pydantic_schemas.AddRating, current_user: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)], db: Annotated[Session, Depends(auth_utils.get_session)]):
     # проверяем,
     if current_user.id != data.user_id:
-        user = auth_utils.get_user_by_id(db, data.user_id)
+        user = await auth_utils.get_user_by_id(db, data.user_id)
         if not user:
             raise HTTPException(status_code=404, detail="Item not found")
         user.rating = (user.rating*user.num_of_ratings + data.rate)/(user.num_of_ratings + 1)
         user.num_of_ratings += 1
-        db.commit()
-        db.refresh(user) # обновляем состояние объекта
+        await db.commit()
+        await db.refresh(user) # обновляем состояние объекта
     return {"Success": True}
 
 
 # получаем рейтинг пользователя
 @app.get("/api/user/rating")
-def add_points(user_id: int, db: Annotated[Session, Depends(auth_utils.get_db)]):
-    user = auth_utils.get_user_by_id(db, user_id=user_id)
+async def add_points(user_id: int, db: Annotated[Session, Depends(auth_utils.get_session)]):
+    user = await auth_utils.get_user_by_id(db, user_id=user_id)
     if not user:
         raise HTTPException(status_code=404, detail="Item not found")
     return {"rating": user.rating}
 
 
 # Отправляем стихи
-@app.get("/api/user/poem", response_model=pydantic_schemas.Poem) # пока не работает
-def poems_to_front(db: Annotated[Session, Depends(auth_utils.get_db)]): # db: Annotated[Session, Depends(utils.get_db)]
-    num_of_poems = db.query(orm_models.Poems).count() # определяем кол-во стихов в бд
+@app.get("/api/user/poem", response_model=pydantic_schemas.Poem)
+async def poems_to_front(db: Annotated[Session, Depends(auth_utils.get_session)]):
+    #num_of_poems = db.query(orm_models.Poems).count() # определяем кол-во стихов в бд
+    query = await db.execute(select(orm_models.Poems)) # определяем кол-во стихов в бд
+    num_of_poems = len(query.scalars().all())
+    # если стихов в бд нет
     if num_of_poems < 1:
-        add_poems_and_filters.add_poems_to_db(database) # добавляем поэмы в базу данных
+        await add_poems_and_filters.add_poems_to_db(db) # добавляем поэмы в базу данных
+        # после добавления стихов снова определяем кол-во стихов в бд
+        query = await db.execute(select(orm_models.Poems))
+        num_of_poems = len(query.scalars().all())
     rand_id = random.randint(1, num_of_poems) # генерируем номер стихотворения
-    poem = db.query(orm_models.Poems).filter(orm_models.Poems.id == rand_id).first() # находим стих в бд
+    #poem = db.query(orm_models.Poems).filter(orm_models.Poems.id == rand_id).first() # находим стих в бд
+    query_poem = await db.execute(select(orm_models.Poems).where(orm_models.Poems.id == rand_id)) # находим стих в бд
+    poem = query_poem.scalars().first()
     if not poem:
         raise HTTPException(status_code=404, detail="Poem not found")
     return poem
 
+trashboxes_category = {
+    "PORRIDGE": ["Опасные отходы", "Иное"],
+    "conspects": ["Бумага"],
+    "milk": ["Стекло", "Тетра Пак", "Иное"],
+    "bred": ["Пластик", "Иное"],
+    "wathing": ["Пластик", "Опасные отходы", "Иное"],
+    "cloth": ["Одежда"],
+    "fruits_vegatables": ["Иное"],
+    "other_things": ["Металл", "Бумага", "Стекло", "Иное", "Тетра Пак", "Батарейки", "Крышечки", "Шины",
+                     "Опасные отходы", "Лампочки", "Пластик"]
+}
 
 @app.get("/api/trashbox", response_model=List[pydantic_schemas.TrashboxResponse])
-def get_trashboxes(data: pydantic_schemas.TrashboxRequest = Depends()):#крутая функция для работы с api
+async def get_trashboxes(data: pydantic_schemas.TrashboxRequest = Depends()): #крутая функция для работы с api
     # json, передаваемый стороннему API
-    BASE_URL= "https://geointelect2.gate.petersburg.ru"
-    my_token="eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJhU1RaZm42bHpTdURYcUttRkg1SzN5UDFhT0FxUkhTNm9OendMUExaTXhFIn0.eyJleHAiOjE3ODYyMjUzMzMsImlhdCI6MTY5MTUzMDkzMywianRpIjoiYjU0MmU3MTQtYzJkMS00NTY2LWJkY2MtYmQ5NzA0ODY1ZjgzIiwiaXNzIjoiaHR0cHM6Ly9rYy5wZXRlcnNidXJnLnJ1L3JlYWxtcy9lZ3MtYXBpIiwiYXVkIjoiYWNjb3VudCIsInN1YiI6ImJjYjQ2NzljLTU3ZGItNDU5ZC1iNWUxLWRlOGI4Yzg5MTMwMyIsInR5cCI6IkJlYXJlciIsImF6cCI6ImFkbWluLXJlc3QtY2xpZW50Iiwic2Vzc2lvbl9zdGF0ZSI6IjJhOTgwMzUyLTY1M2QtNGZlZC1iMDI1LWQ1N2U0NDRjZmM3NiIsImFjciI6IjEiLCJhbGxvd2VkLW9yaWdpbnMiOlsiLyoiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbImRlZmF1bHQtcm9sZXMtZWdzLWFwaSIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJhY2NvdW50Ijp7InJvbGVzIjpbIm1hbmFnZS1hY2NvdW50IiwibWFuYWdlLWFjY291bnQtbGlua3MiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6ImVtYWlsIHByb2ZpbGUiLCJzaWQiOiIyYTk4MDM1Mi02NTNkLTRmZWQtYjAyNS1kNTdlNDQ0Y2ZjNzYiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsIm5hbWUiOiLQktC70LDQtNC40LzQuNGAINCv0LrQvtCy0LvQtdCyIiwicHJlZmVycmVkX3VzZXJuYW1lIjoiZTBmYzc2OGRhOTA4MjNiODgwZGQzOGVhMDJjMmQ5NTciLCJnaXZlbl9uYW1lIjoi0JLQu9Cw0LTQuNC80LjRgCIsImZhbWlseV9uYW1lIjoi0K_QutC-0LLQu9C10LIifQ.FTKiC1hpWcOkmSW9QZpC-RY7Ko50jw1mDMfXIWYxlQ-zehLm2CLmOnHvYoOoI39k2OzeCIAB9ZdRrrGZc6G9Z1eFELUjNGEqKxSC1Phj9ATemKgbOKEttk-OGc-rFr9VPA8_SnfvLts6wTI2YK33YBIxCF5nCbnr4Qj3LeEQ0d6Hy8PO4ATrBF5EOeuAZRprvIEjXe_f8N9ONKckCPB-xFB4P2pZlVXGoCNoewGEcY3zXH4khezN6zcVr6tpc6G8dBv9EqT_v92IDSg-aXQk6ysA0cO0-6x5w1-_qU0iHGIAPsLNV9IKBoFbjc0JH6cWabldPRH12NP1trvYfqKDGQ"
-    head = {'Authorization': 'Bearer {}'.format(my_token)}
+    head = {'Authorization': 'Bearer ' + TRASHBOXES_TOKEN}
     # Данные пользователя (местоположение, количество мусорок, которое пользователь хочет видеть)
     my_data={
         'x' : f"{data.Lng}",
@@ -230,27 +267,12 @@ def get_trashboxes(data: pydantic_schemas.TrashboxRequest = Depends()):#крут
         'limit' : '1'
     }
     # Перевод категории с фронта на категорию с сайта
-    match data.Category:
-        case "PORRIDGE":
-            list_of_category = ["Опасные отходы", "Иное"]
-        case "conspects":
-            list_of_category = ["Бумага"]
-        case "milk":
-            list_of_category = ["Стекло", "Тетра Пак", "Иное"]
-        case "bred":
-            list_of_category = ["Пластик", "Иное"]
-        case "wathing":
-            list_of_category = ["Пластик", "Опасные отходы", "Иное"]
-        case "cloth":
-            list_of_category = ["Одежда"]
-        case "fruits_vegatables":
-            list_of_category = ["Иное"]
-        case "other_things":
-            list_of_category = ["Металл", "Бумага", "Стекло", "Иное", "Тетра Пак", "Батарейки", "Крышечки", "Шины",
-                                "Опасные отходы", "Лампочки", "Пластик"]
+    list_of_category = trashboxes_category[data.Category]
     # Получение ответа от стороннего апи
-    response = requests.post(f"{BASE_URL}/nearest_recycling/get", headers=head, data=my_data, timeout=10)
+    response = requests.post(TRASHBOXES_BASE_URL + "/nearest_recycling/get", headers=head, data=my_data, timeout=10)
     infos = response.json()
 
     # Чтение ответа
     trashboxes = []
     for trashbox in infos["results"]:
@@ -277,10 +299,10 @@ async def react_app(req: Request, rest_of_path: str):
 
 
 @app.post("/api/announcement/dispose")
-def dispose(data: pydantic_schemas.DisposeRequest, current_user_schema: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)],
-            db: Annotated[Session, Depends(auth_utils.get_db)]):
+async def dispose(data: pydantic_schemas.DisposeRequest, current_user_schema: Annotated[pydantic_schemas.User, Depends(auth_utils.get_current_user)],
+            db: Annotated[Session, Depends(auth_utils.get_session)]):
     # Находим в бд текущего юзера
-    current_user = auth_utils.get_user_by_id(db, current_user_schema.id)
+    current_user = await auth_utils.get_user_by_id(db, current_user_schema.id)
     # Начисляем баллы пользователю за утилизацию
     current_user.points += 60
     # В полученном json переходим к данным мусорки
@@ -292,12 +314,14 @@ def dispose(data: pydantic_schemas.DisposeRequest, current_user_schema: Annotate
     db.add(new_trashox)
     # в соответствии с логикой api, после утилизации объявление пользователя удаляется
     # находим объявление с айди data.ann_id
-    ann_to_del = db.query(orm_models.Announcement).filter(orm_models.Announcement.id == data.ann_id).first() # находим стих в бд
+    #ann_to_del = db.query(orm_models.Announcement).filter(orm_models.Announcement.id == data.ann_id).first() #
+    query_ann = await db.execute(select(orm_models.Announcement).where(orm_models.Announcement.id == data.ann_id)) # находим объявление в бд
+    ann_to_del = query_ann.scalars().first()
     if not ann_to_del:
         raise HTTPException(status_code=404, detail="Announcement not found")
     # удаляем объявление из бд
-    db.delete(ann_to_del)
-    db.commit()
-    db.refresh(new_trashox) # обновляем состояние объекта
+    await db.delete(ann_to_del)
+    await db.commit()
+    await db.refresh(new_trashox) # обновляем состояние объекта
     return {"Success": True}
 
@@ -5,25 +5,22 @@ from fastapi import Depends, HTTPException, status
 from fastapi.security import OAuth2PasswordBearer
 from jose import JWTError, jwt
 from passlib.context import CryptContext
+from sqlalchemy import select
 from sqlalchemy.orm import Session
+from sqlalchemy.ext.asyncio import AsyncSession
 
-from .db import database
+from .db import SessionLocal
 from . import orm_models, pydantic_schemas
+from .config import SECRET_KEY, ALGORITHM, ACCESS_TOKEN_EXPIRE_MINUTES
 
 
-SECRET_KEY = "651a52941cf5de14d48ef5d7af115709"
-ALGORITHM = "HS256"
-ACCESS_TOKEN_EXPIRE_MINUTES = 1440
 pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
 oauth2_scheme = OAuth2PasswordBearer(tokenUrl="/api/token")
 
 
-def get_db():
-    db = database
-    try:
-        yield db
-    finally:
-        db.close()
+async def get_session() -> AsyncSession:
+    async with SessionLocal() as session:
+        yield session
 
 
 def verify_password(plain_password, hashed_password):
@@ -34,22 +31,24 @@ def get_password_hash(password):
     return pwd_context.hash(password)
 
 
-def get_user_by_nickname(db: Annotated[Session, Depends(get_db)], nickname: str):
-    user_with_required_id = db.query(orm_models.User).filter(orm_models.User.nickname == nickname).first()
+async def get_user_by_nickname(db: Annotated[AsyncSession, Depends(get_session)], nickname: str):
+    query = await db.execute(select(orm_models.User).where(orm_models.User.nickname == nickname))
+    user_with_required_nickname = query.scalars().first()
+    if user_with_required_nickname:
+        return user_with_required_nickname
+    return None
+
+
+async def get_user_by_id(db: Annotated[AsyncSession, Depends(get_session)], user_id: int):
+    query = await db.execute(select(orm_models.User).where(orm_models.User.id == user_id))
+    user_with_required_id = query.scalars().first()
     if user_with_required_id:
         return user_with_required_id
     return None
 
 
-def get_user_by_id(db: Annotated[Session, Depends(get_db)], user_id: int):
-    user_with_required_id = db.query(orm_models.User).filter(orm_models.User.id == user_id).first()
-    if user_with_required_id:
-        return user_with_required_id
-    return None
-
-
-def authenticate_user(db: Annotated[Session, Depends(get_db)], nickname: str, password: str):
-    user = get_user_by_nickname(db=db, nickname=nickname)
+async def authenticate_user(db: Annotated[AsyncSession, Depends(get_session)], nickname: str, password: str):
+    user = await get_user_by_nickname(db=db, nickname=nickname)
     if not user:
         return False
     if not verify_password(password, user.hashed_password):
@@ -68,7 +67,7 @@ def create_access_token(data: dict, expires_delta: Union[timedelta, None] = None
     return encoded_jwt
 
 
-async def get_current_user(db: Annotated[Session, Depends(get_db)], token: Annotated[str, Depends(oauth2_scheme)]):
+async def get_current_user(db: Annotated[AsyncSession, Depends(get_session)], token: Annotated[str, Depends(oauth2_scheme)]):
     credentials_exception = HTTPException(
         status_code=status.HTTP_401_UNAUTHORIZED,
         detail="Could not validate credentials",
@@ -82,14 +81,14 @@ async def get_current_user(db: Annotated[Session, Depends(get_db)], token: Annot
         token_data = pydantic_schemas.TokenData(user_id=user_id)
     except JWTError:
         raise credentials_exception
-    user = get_user_by_id(db, user_id=token_data.user_id)
+    user = await get_user_by_id(db, user_id=token_data.user_id)
     if user is None:
         raise credentials_exception
     return pydantic_schemas.User(id=user.id, nickname=user.nickname, name=user.name, surname=user.surname,
                                  disabled=user.disabled, items=user.announcements, reg_date=user.reg_date, points=user.points)
 
 
-async def get_current_active_user(
+def get_current_active_user(
     current_user: Annotated[pydantic_schemas.User, Depends(get_current_user)]
 ):
     if current_user.disabled:
back/config.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+import os
+from dotenv import load_dotenv
+
+load_dotenv('.env')
+
+TRASHBOXES_TOKEN = os.environ.get("TRASHBOXES_TOKEN")
+TRASHBOXES_BASE_URL = os.environ.get("TRASHBOXES_BASE_URL")
+
+SECRET_KEY = os.environ.get("SECRET_KEY")
+ALGORITHM = os.environ.get("ALGORITHM")
+ACCESS_TOKEN_EXPIRE_MINUTES = int(os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES"))
+
+SQLALCHEMY_DATABASE_URL = os.environ.get("SQLALCHEMY_DATABASE_URL")
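With the secrets removed from the code, the application now expects a local .env file. A hypothetical example of its contents follows (placeholder values only; the variable names come from back/config.py above, and the database URL assumes the aiosqlite driver so that create_async_engine can use it):

```python
# Hypothetical .env contents for back/config.py; every value below is a placeholder.
ENV_EXAMPLE = """
TRASHBOXES_BASE_URL=https://geointelect2.gate.petersburg.ru
TRASHBOXES_TOKEN=<bearer token for the trashbox API>
SECRET_KEY=<random hex string used to sign JWTs>
ALGORITHM=HS256
ACCESS_TOKEN_EXPIRE_MINUTES=1440
SQLALCHEMY_DATABASE_URL=sqlite+aiosqlite:///./sql_app.db
"""
```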
back/db.py (36 changes)
@@ -1,32 +1,20 @@
-from typing import AsyncGenerator
+from asyncio import current_task
 
-from sqlalchemy import create_engine, MetaData
-# from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
-from sqlalchemy.orm import sessionmaker, DeclarativeBase
+from sqlalchemy.ext.asyncio import AsyncSession, async_scoped_session, create_async_engine
+from sqlalchemy.orm import sessionmaker
 from sqlalchemy.ext.declarative import declarative_base
 
+from .config import SQLALCHEMY_DATABASE_URL
+
-SQLALCHEMY_DATABASE_URL = "sqlite:///./sql_app.db"
-
-engine = create_engine(
-    SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False}
-)
+engine = create_async_engine(SQLALCHEMY_DATABASE_URL)
+SessionLocal = sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False)
 
-SessionLocal = sessionmaker(bind=engine, autoflush=True, autocommit=False)
-database = SessionLocal()
+async_session = SessionLocal()
+# async_session = async_scoped_session(SessionLocal, scopefunc=current_task)
 Base = declarative_base()
 
 
-# # add your model's MetaData object here
-# # for 'autogenerate' support
-# # in your application's model:
-# class Base(DeclarativeBase):
-#     metadata = MetaData(naming_convention={
-#         "ix": "ix_%(column_0_label)s",
-#         "uq": "uq_%(table_name)s_%(column_0_name)s",
-#         "ck": "ck_%(table_name)s_`%(constraint_name)s`",
-#         "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
-#         "pk": "pk_%(table_name)s"
-#     })
+# Создаем таблицы
+async def init_models():
+    async with engine.begin() as conn:
+        await conn.run_sync(Base.metadata.create_all)
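A minimal usage sketch of the new async setup (assuming the module paths used in this PR and a configured .env): create the tables once, then open sessions from the async sessionmaker and await the queries.

```python
import asyncio

from sqlalchemy import select

from back.db import SessionLocal, init_models
from back import orm_models


async def demo():
    await init_models()  # what the application entry point now does before serving
    async with SessionLocal() as session:
        result = await session.execute(select(orm_models.Announcement))
        print(result.scalars().all())

asyncio.run(demo())
```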
back/delete_db.py (new file, 6 lines)
@@ -0,0 +1,6 @@
+from sqlalchemy import Table, MetaData, text
+from .db import engine, Base
+
+tbl = Table('Poems', MetaData(), autoload_with=engine)
+tbl.drop(engine, checkfirst=False)
+a = input()
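One hedged observation: back/db.py now exposes an AsyncEngine, while table reflection (autoload_with=...) and Table.drop() expect a synchronous connection, so this helper would likely need to go through run_sync. A sketch of the same drop under that assumption, keeping the 'Poems' table name used above:

```python
import asyncio

from sqlalchemy import MetaData, Table

from back.db import engine


def _drop_poems(sync_conn):
    # reflection and DDL run on the synchronous connection provided by run_sync
    tbl = Table('Poems', MetaData(), autoload_with=sync_conn)
    tbl.drop(sync_conn, checkfirst=False)


async def main():
    async with engine.begin() as conn:
        await conn.run_sync(_drop_poems)

asyncio.run(main())
```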
@@ -3,6 +3,8 @@ import uvicorn
 
 from .api import app as app_fastapi
 from .scheduler import app as app_rocketry
+from .db import init_models
+
 
 class Server(uvicorn.Server):
     """Customized uvicorn.Server
@@ -16,6 +18,9 @@ class Server(uvicorn.Server):
 
 async def main():
     "Run scheduler and the API"
+
+    await init_models()
+
     server = Server(config=uvicorn.Config(app_fastapi, workers=1, loop="asyncio", host="0.0.0.0"))
 
     api = asyncio.create_task(server.serve())
@@ -1,7 +1,8 @@
 from sqlalchemy import Column, Integer, String, Boolean, Float, Date, ForeignKey
-from .db import Base, engine
 from sqlalchemy.orm import relationship
 
+from .db import Base, engine
+
 
 class User(Base):#класс пользователя
     __tablename__ = "users"
@@ -17,8 +18,8 @@ class User(Base):#класс пользователя
     num_of_ratings = Column(Integer, default=0) # количество оценок (т.е. то, сколько раз другие пользователи оценили текущего)
     reg_date = Column(Date) # дата регистрации
 
-    announcements = relationship("Announcement", back_populates="user")
-    trashboxes_chosen = relationship("Trashbox", back_populates="user")
+    announcements = relationship("Announcement", back_populates="user", lazy='selectin')
+    trashboxes_chosen = relationship("Trashbox", back_populates="user", lazy='selectin')
 
 class Announcement(Base): #класс объявления
     __tablename__ = "announcements"
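Switching the relationships to lazy='selectin' fits the move to AsyncSession: the collections are eager-loaded together with the parent rows, so touching them later does not trigger an implicit lazy load, which would need I/O outside of an await and typically fails under asyncio. A small sketch, assuming the models above:

```python
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from back import orm_models


async def first_user_announcements(session: AsyncSession):
    user = (await session.execute(select(orm_models.User))).scalars().first()
    # already loaded by the selectin strategy, no extra await or lazy load needed
    return user.announcements if user else []
```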
@@ -66,24 +67,3 @@ class Poems(Base):#класс поэзии
     author = Column(String) # автор стихотворения
 
 
-# Создаем описанные выше таблицы
-Base.metadata.create_all(bind=engine)
-
-
-
-# from typing import AsyncGenerator
-# from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
-# from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase
-# # This function can be called during the initialization of the FastAPI app.
-# async def create_db_and_tables():
-#     async with engine.begin() as conn:
-#         await conn.run_sync(Base.metadata.create_all)
-
-
-# async def get_async_session() -> AsyncGenerator[AsyncSession, None]:
-#     async with async_session_maker() as session:
-#         yield session
-
-
-# async def get_user_db(session: AsyncSession = Depends(get_async_session)):
-#     yield SQLAlchemyUserDatabase(session, User)
@@ -2,7 +2,7 @@ from . import add_poems_and_filters
 from rocketry import Rocketry
 from rocketry.conds import daily
 import datetime
-from .db import database
+from .db import async_session
 
 app = Rocketry(execution="async")
 
@@ -10,5 +10,5 @@ app = Rocketry(execution="async")
 @app.task('daily')
 async def daily_check():
     # Фильтруем по сроку годности
-    add_poems_and_filters.check_obsolete(database, current_date=datetime.date.today())
+    await add_poems_and_filters.check_obsolete(async_session, current_date=datetime.date.today())
back/text121.txt (2706 changes): file diff suppressed because it is too large
@@ -1,2 +0,0 @@
-TOKEN = "eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJhU1RaZm42bHpTdURYcUttRkg1SzN5UDFhT0FxUkhTNm9OendMUExaTXhFIn0.eyJleHAiOjE3ODYyMjUzMzMsImlhdCI6MTY5MTUzMDkzMywianRpIjoiYjU0MmU3MTQtYzJkMS00NTY2LWJkY2MtYmQ5NzA0ODY1ZjgzIiwiaXNzIjoiaHR0cHM6Ly9rYy5wZXRlcnNidXJnLnJ1L3JlYWxtcy9lZ3MtYXBpIiwiYXVkIjoiYWNjb3VudCIsInN1YiI6ImJjYjQ2NzljLTU3ZGItNDU5ZC1iNWUxLWRlOGI4Yzg5MTMwMyIsInR5cCI6IkJlYXJlciIsImF6cCI6ImFkbWluLXJlc3QtY2xpZW50Iiwic2Vzc2lvbl9zdGF0ZSI6IjJhOTgwMzUyLTY1M2QtNGZlZC1iMDI1LWQ1N2U0NDRjZmM3NiIsImFjciI6IjEiLCJhbGxvd2VkLW9yaWdpbnMiOlsiLyoiXSwicmVhbG1fYWNjZXNzIjp7InJvbGVzIjpbImRlZmF1bHQtcm9sZXMtZWdzLWFwaSIsIm9mZmxpbmVfYWNjZXNzIiwidW1hX2F1dGhvcml6YXRpb24iXX0sInJlc291cmNlX2FjY2VzcyI6eyJhY2NvdW50Ijp7InJvbGVzIjpbIm1hbmFnZS1hY2NvdW50IiwibWFuYWdlLWFjY291bnQtbGlua3MiLCJ2aWV3LXByb2ZpbGUiXX19LCJzY29wZSI6ImVtYWlsIHByb2ZpbGUiLCJzaWQiOiIyYTk4MDM1Mi02NTNkLTRmZWQtYjAyNS1kNTdlNDQ0Y2ZjNzYiLCJlbWFpbF92ZXJpZmllZCI6ZmFsc2UsIm5hbWUiOiLQktC70LDQtNC40LzQuNGAINCv0LrQvtCy0LvQtdCyIiwicHJlZmVycmVkX3VzZXJuYW1lIjoiZTBmYzc2OGRhOTA4MjNiODgwZGQzOGVhMDJjMmQ5NTciLCJnaXZlbl9uYW1lIjoi0JLQu9Cw0LTQuNC80LjRgCIsImZhbWlseV9uYW1lIjoi0K_QutC-0LLQu9C10LIifQ.FTKiC1hpWcOkmSW9QZpC-RY7Ko50jw1mDMfXIWYxlQ-zehLm2CLmOnHvYoOoI39k2OzeCIAB9ZdRrrGZc6G9Z1eFELUjNGEqKxSC1Phj9ATemKgbOKEttk-OGc-rFr9VPA8_SnfvLts6wTI2YK33YBIxCF5nCbnr4Qj3LeEQ0d6Hy8PO4ATrBF5EOeuAZRprvIEjXe_f8N9ONKckCPB-xFB4P2pZlVXGoCNoewGEcY3zXH4khezN6zcVr6tpc6G8dBv9EqT_v92IDSg-aXQk6ysA0cO0-6x5w1-_qU0iHGIAPsLNV9IKBoFbjc0JH6cWabldPRH12NP1trvYfqKDGQ"
-DOMAIN = "https://geointelect2.gate.petersburg.ru"
@@ -6,7 +6,7 @@ const composePutAnnouncementURL = () => (
 )
 
 const processPutAnnouncement = (data: PutAnnouncementResponse): PutAnnouncement => {
-  return data.Answer
+  return data.Success
 }
 
 export { composePutAnnouncementURL, processPutAnnouncement }
@@ -1,12 +1,12 @@
 import { isObject } from '../../utils/types'
 
 type PutAnnouncementResponse = {
-  Answer: boolean,
+  Success: boolean,
 }
 
 const isPutAnnouncementResponse = (obj: unknown): obj is PutAnnouncementResponse => (
   isObject(obj, {
-    'Answer': 'boolean',
+    'Success': 'boolean',
   })
 )
 
@@ -6,11 +6,11 @@ const composeRemoveAnnouncementURL = () => (
 )
 
 function processRemoveAnnouncement(data: RemoveAnnouncementResponse): RemoveAnnouncement {
-  if (!data.Answer) {
+  if (!data.Success) {
     throw new Error('Не удалось закрыть объявление')
   }
 
-  return data.Answer
+  return data.Success
 }
 
 export { composeRemoveAnnouncementURL, processRemoveAnnouncement }
@@ -1,12 +1,12 @@
 import { isObject } from '../../utils/types'
 
 type RemoveAnnouncementResponse = {
-  Answer: boolean,
+  Success: boolean,
 }
 
 const isRemoveAnnouncementResponse = (obj: unknown): obj is RemoveAnnouncementResponse => (
   isObject(obj, {
-    'Answer': 'boolean',
+    'Success': 'boolean',
   })
 )
 
@@ -14,8 +14,8 @@ function useAddAnnouncement() {
     processPutAnnouncement
   )
 
-  function handleAdd(formData: FormData) {
-    void doSend({}, {
+  async function handleAdd(formData: FormData) {
+    await doSend({}, {
       body: formData,
     })
   }
@@ -15,8 +15,8 @@ function useBook() {
     processBook,
   )
 
-  const handleBook = useCallback((id: number) => {
-    void doSend({}, {
+  const handleBook = useCallback(async (id: number) => {
+    await doSend({}, {
       body: JSON.stringify({
         id,
       }),
@@ -12,7 +12,7 @@ function useSendButtonCaption(
   const [title, setTitle] = useState(initial)
 
   const update = useCallback(<T extends NonNullable<unknown>>(data: T | null | undefined) => {
-    if (data !== undefined) { // not loading
+    if (data !== undefined && data !== null) { // not loading or error
       setCaption(result)
       setTitle('Отправить ещё раз')
 
@@ -1 +1 @@
-Generic single-database configuration.
+Generic single-database configuration with an async dbapi.
@@ -1,11 +1,12 @@
+import asyncio
 from logging.config import fileConfig
 
-from sqlalchemy import engine_from_config
 from sqlalchemy import pool
+from sqlalchemy.engine import Connection
+from sqlalchemy.ext.asyncio import async_engine_from_config
 
 from alembic import context
 
 
 from back import auxiliary_for_alembic, db
 
 # this is the Alembic Config object, which provides
@@ -17,6 +18,10 @@ config = context.config
 if config.config_file_name is not None:
     fileConfig(config.config_file_name)
 
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
 target_metadata = auxiliary_for_alembic.Base.metadata
 
 # other values from the config, defined by the needs of env.py,
@@ -25,7 +30,7 @@ target_metadata = auxiliary_for_alembic.Base.metadata
 # ... etc.
 
 
-def run_migrations_offline():
+def run_migrations_offline() -> None:
     """Run migrations in 'offline' mode.
 
     This configures the context with just a URL
@@ -37,44 +42,49 @@ def run_migrations_offline():
     script output.
 
     """
-    # url = config.get_main_option("sqlalchemy.url")
     url = config.get_main_option(db.SQLALCHEMY_DATABASE_URL)
     context.configure(
         url=url,
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
-        render_as_batch=True
     )
 
     with context.begin_transaction():
         context.run_migrations()
 
 
-def run_migrations_online():
-    """Run migrations in 'online' mode.
+def do_run_migrations(connection: Connection) -> None:
+    context.configure(connection=connection, target_metadata=target_metadata)
 
-    In this scenario we need to create an Engine
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_async_migrations() -> None:
+    """In this scenario we need to create an Engine
     and associate a connection with the context.
 
     """
 
     configuration = config.get_section(config.config_ini_section)
     configuration['sqlalchemy.url'] = db.SQLALCHEMY_DATABASE_URL
-    connectable = engine_from_config(
+    connectable = async_engine_from_config(
         configuration,
         prefix="sqlalchemy.",
         poolclass=pool.NullPool,
     )
 
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection,
-            target_metadata=target_metadata,
-            render_as_batch=True
-        )
-
-        with context.begin_transaction():
-            context.run_migrations()
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)
+
+    await connectable.dispose()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode."""
+
+    asyncio.run(run_async_migrations())
 
 
 if context.is_offline_mode():
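For reference, `async_engine_from_config` only works when the configured URL names an async driver. Below is a minimal, hedged sketch of checking such a URL; the concrete value of `db.SQLALCHEMY_DATABASE_URL` is not shown in this diff, so the URLs used here are assumptions.

```py
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine


async def check_connection(url: str) -> None:
    # connecting is enough to confirm the async driver is installed and the URL is valid
    engine = create_async_engine(url, echo=True)
    async with engine.connect():
        pass
    await engine.dispose()


if __name__ == "__main__":
    # assumed URL, matching the sql_app.db volume from the README; a PostgreSQL setup
    # would instead look like "postgresql+asyncpg://user:password@host/dbname"
    asyncio.run(check_connection("sqlite+aiosqlite:///./sql_app.db"))
```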
@@ -5,20 +5,22 @@ Revises: ${down_revision | comma,n}
 Create Date: ${create_date}
 
 """
+from typing import Sequence, Union
+
 from alembic import op
 import sqlalchemy as sa
 ${imports if imports else ""}
 
 # revision identifiers, used by Alembic.
-revision = ${repr(up_revision)}
-down_revision = ${repr(down_revision)}
-branch_labels = ${repr(branch_labels)}
-depends_on = ${repr(depends_on)}
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
 
 
-def upgrade():
+def upgrade() -> None:
     ${upgrades if upgrades else "pass"}
 
 
-def downgrade():
+def downgrade() -> None:
     ${downgrades if downgrades else "pass"}
@@ -0,0 +1,30 @@
+"""lazy=selectin added to user table relationships
+
+Revision ID: 8e631a2fe6b8
+Revises: 
+Create Date: 2023-09-02 23:45:08.799366
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = '8e631a2fe6b8'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    pass
+    # ### end Alembic commands ###
BIN privacy_policy.pdf (new file, binary file not shown)
@@ -1,5 +1,7 @@
+aiosqlite==0.19.0
 annotated-types==0.5.0
 anyio==3.7.1
+asyncpg==0.28.0
 certifi==2023.7.22
 charset-normalizer==3.2.0
 click==8.1.6
@@ -15,6 +17,7 @@ pyasn1==0.5.0
 pydantic==1.10.10
 pydantic_core==2.4.0
 python-dateutil==2.8.2
+python-dotenv==1.0.0
 python-jose==3.3.0
 python-multipart==0.0.6
 redbird==0.7.1
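The new pins line up with the async switch: `aiosqlite` and `asyncpg` are async database drivers, and `python-dotenv` loads settings from the `.env` file already excluded by .gitignore. A hedged sketch of wiring them together follows; the `DATABASE_URL` variable name and its default value are assumptions, not taken from this diff.

```py
import asyncio
import os

from dotenv import load_dotenv  # python-dotenv
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker

load_dotenv()  # reads .env from the working directory

# assumed variable name; aiosqlite serves a local file, asyncpg would serve PostgreSQL
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite+aiosqlite:///./sql_app.db")

engine = create_async_engine(DATABASE_URL)
SessionLocal = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)


async def main() -> None:
    async with SessionLocal() as session:
        result = await session.execute(text("select 1"))
        print(result.scalar_one())


if __name__ == "__main__":
    asyncio.run(main())
```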