commit 45e14ae3b8

    init
@@ -0,0 +1,6 @@
.idea
.venv
__pycache__
*.pyc
.env
*.sqlite3
@@ -0,0 +1,114 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts.
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
hooks = black
black.type = console_scripts
black.entrypoint = black
black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
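
This configuration is normally driven through the Alembic command API or CLI. A minimal sketch of running the migrations programmatically, assuming it is executed from the project root so that env.py can import core.config and core.models:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.upgrade(cfg, "head")                  # apply all revisions ("online" mode)
command.upgrade(cfg, "head", sql=True)        # emit the SQL to stdout instead ("offline" mode)
command.revision(cfg, message="add something", autogenerate=True)  # generate a new revision
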
@@ -0,0 +1 @@
Generic single-database configuration with an async dbapi.
@@ -0,0 +1,94 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from core.models import Base

target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
from core.config import settings

config.set_main_option("sqlalchemy.url", settings.db.url)


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
    context.configure(connection=connection, target_metadata=target_metadata)

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    connectable = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,26 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,38 @@
"""create products table

Revision ID: 8c13d075d6fb
Revises:
Create Date: 2024-08-04 14:18:16.395756

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "8c13d075d6fb"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "products",
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("description", sa.String(), nullable=False),
        sa.Column("price", sa.Integer(), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("products")
    # ### end Alembic commands ###

@@ -0,0 +1,37 @@
"""create users table

Revision ID: 387940fdf447
Revises: 8c13d075d6fb
Create Date: 2024-08-04 16:27:47.847232

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "387940fdf447"
down_revision: Union[str, None] = "8c13d075d6fb"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "users",
        sa.Column("username", sa.String(length=30), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("users")
    # ### end Alembic commands ###

@@ -0,0 +1,42 @@
"""create posts table

Revision ID: 7b8cc105f577
Revises: 387940fdf447
Create Date: 2024-08-04 16:40:00.680607

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "7b8cc105f577"
down_revision: Union[str, None] = "387940fdf447"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "posts",
        sa.Column("title", sa.String(length=100), nullable=False),
        sa.Column("body", sa.Text(), server_default="", nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("posts")
    # ### end Alembic commands ###

@@ -0,0 +1,44 @@
"""create profile table

Revision ID: 83a5d3eb87f5
Revises: 7b8cc105f577
Create Date: 2024-08-04 19:01:41.691939

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "83a5d3eb87f5"
down_revision: Union[str, None] = "7b8cc105f577"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "profiles",
        sa.Column("first_name", sa.String(length=30), nullable=True),
        sa.Column("last_name", sa.String(length=30), nullable=True),
        sa.Column("bio", sa.String(), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["users.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("user_id"),
    )
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("profiles")
    # ### end Alembic commands ###
@@ -0,0 +1,9 @@
from fastapi import APIRouter

from .products.views import router as products_router
from .auth.views import router as auth_router

router = APIRouter()

router.include_router(products_router, prefix="/products")
router.include_router(auth_router)
@@ -0,0 +1,55 @@
from typing import Annotated
import secrets
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.security import HTTPBasic, HTTPBasicCredentials

router = APIRouter(prefix="/auth", tags=["auth"])

security = HTTPBasic()


@router.get("/basic-auth")
def basic_auth_credentials(
    credentials: Annotated[HTTPBasicCredentials, Depends(security)],
):
    return {
        "message": "Hello",
        "username": credentials.username,
        "password": credentials.password,
    }


username_to_password = {
    "admin": "admin",
    "user": "user",
}


def get_auth_user_username(
    credentials: Annotated[HTTPBasicCredentials, Depends(security)],
):
    unauth_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Incorrect username or password",
        headers={"WWW-Authenticate": "Basic"},
    )
    correct_password = username_to_password.get(credentials.username)
    if correct_password is None:
        raise unauth_exception

    if not secrets.compare_digest(
        credentials.password.encode("utf-8"),
        correct_password.encode("utf-8"),
    ):
        raise unauth_exception
    return credentials.username


@router.get("/basic-auth-username")
def basic_auth_username(
    auth_username: str = Depends(get_auth_user_username),
):
    return {
        "message": f"Hello {auth_username}",
        "username": auth_username,
    }
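
A quick way to exercise these endpoints is FastAPI's test client. A minimal sketch, assuming it runs next to main.py below and that httpx is installed (TestClient depends on it); with the routers as wired in this commit, the endpoint lives under /api/v1/auth:

from fastapi.testclient import TestClient

from main import app

client = TestClient(app)

# valid credentials from username_to_password
resp = client.get("/api/v1/auth/basic-auth-username", auth=("admin", "admin"))
assert resp.status_code == 200 and resp.json()["username"] == "admin"

# wrong password: 401 with a WWW-Authenticate: Basic challenge
resp = client.get("/api/v1/auth/basic-auth-username", auth=("admin", "wrong"))
assert resp.status_code == 401
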
@@ -0,0 +1,42 @@
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from core.models import Product
from sqlalchemy.engine import Result

from .schemas import ProductCreate, ProductUpdate, ProductUpdatePartial


async def get_all_products(session: AsyncSession) -> list[Product]:
    stmt = select(Product).order_by(Product.id)
    result: Result = await session.execute(stmt)
    products = result.scalars().all()
    return list(products)


async def get_products_by_id(session: AsyncSession, product_id: int) -> Product | None:
    return await session.get(Product, product_id)


async def create_product(session: AsyncSession, product_in: ProductCreate) -> Product:
    new_product = Product(**product_in.model_dump())
    session.add(new_product)
    await session.commit()
    await session.refresh(new_product)
    return new_product


async def update_product(
    session: AsyncSession,
    product: Product,
    product_update: ProductUpdate | ProductUpdatePartial,
    partial: bool = False,
) -> Product:
    for name, value in product_update.model_dump(exclude_unset=partial).items():
        setattr(product, name, value)
    await session.commit()
    return product


async def delete_product(session: AsyncSession, product: Product) -> None:
    await session.delete(product)
    await session.commit()
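
update_product relies on Pydantic's exclude_unset to turn a PATCH body into a sparse update. A small sketch of that behaviour, assuming the ProductUpdatePartial schema defined further down in this commit:

from api_v1.products.schemas import ProductUpdatePartial

patch = ProductUpdatePartial(price=199)
print(patch.model_dump(exclude_unset=True))   # {'price': 199}, only the field the client sent
print(patch.model_dump(exclude_unset=False))  # name/description come back as None and would overwrite data
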
@@ -0,0 +1,19 @@
from fastapi import Depends, HTTPException, status
from sqlalchemy.ext.asyncio import AsyncSession

from core.models import db_helper

from . import crud


async def get_product_by_id(
    product_id: int,
    session: AsyncSession = Depends(db_helper.session_dependency),
):
    product = await crud.get_products_by_id(session=session, product_id=product_id)
    if product is not None:
        return product
    raise HTTPException(
        status_code=status.HTTP_404_NOT_FOUND,
        detail=f"Product {product_id} not found",
    )
@@ -0,0 +1,26 @@
from pydantic import BaseModel, ConfigDict


class ProductBase(BaseModel):
    name: str
    description: str
    price: int


class ProductCreate(ProductBase):
    pass


class ProductUpdate(ProductBase):
    pass


class ProductUpdatePartial(ProductBase):
    name: str | None = None
    description: str | None = None
    price: int | None = None


class Product(ProductBase):
    model_config = ConfigDict(from_attributes=True)
    id: int
@@ -0,0 +1,68 @@
from fastapi import APIRouter, HTTPException, status, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from . import crud
from .schemas import Product, ProductCreate, ProductUpdate, ProductUpdatePartial

from core.models import db_helper

from .dependencies import get_product_by_id

router = APIRouter(prefix="")


@router.get("", response_model=list[Product])
async def get_all_products(
    session: AsyncSession = Depends(db_helper.session_dependency),
):
    return await crud.get_all_products(session=session)


@router.post("", response_model=Product, status_code=status.HTTP_201_CREATED)
async def create_product(
    product_in: ProductCreate,
    session: AsyncSession = Depends(db_helper.session_dependency),
):
    return await crud.create_product(session=session, product_in=product_in)


@router.get("/{product_id}", response_model=Product)
async def get_product(
    product=Depends(get_product_by_id),
):
    return product


@router.put("/{product_id}")
async def update_product(
    product_update: ProductUpdate,
    product=Depends(get_product_by_id),
    session: AsyncSession = Depends(db_helper.session_dependency),
):
    return await crud.update_product(
        session=session,
        product=product,
        product_update=product_update,
    )


@router.patch("/{product_id}")
async def update_product_partial(
    product_update: ProductUpdatePartial,
    product=Depends(get_product_by_id),
    session: AsyncSession = Depends(db_helper.session_dependency),
):
    return await crud.update_product(
        session=session,
        product=product,
        product_update=product_update,
        partial=True,
    )


@router.delete("/{product_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_product(
    product=Depends(get_product_by_id),
    session: AsyncSession = Depends(db_helper.session_dependency),
) -> None:
    await crud.delete_product(session=session, product=product)
@@ -0,0 +1,34 @@
from pydantic_settings import BaseSettings
from pydantic import BaseModel
from pathlib import Path

import logging

BASE_DIR = Path(__file__).parent.parent

DB_PATH = BASE_DIR / "db.sqlite3"


class DbSettings(BaseModel):
    url: str = f"sqlite+aiosqlite:///{DB_PATH}"
    echo: bool = False


class Settings(BaseSettings):
    api_v1_prefix: str = "/api/v1"

    db: DbSettings = DbSettings()


settings = Settings()


def config_logging(level=logging.INFO):
    logging.basicConfig(
        level=level,
        datefmt="%Y-%m-%d %H:%M:%S",
        format="[%(asctime)s.%(msecs)03d] %(module)-15s:%(lineno)4d | %(funcName)-20s| %(levelname)-8s | %(message)s",
    )


config_logging(level=logging.INFO)
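
Settings subclasses BaseSettings, so values can also come from the environment. A sketch of opting in to nested environment variables with pydantic-settings; the EnvSettings class and the APP_ prefix are assumptions and are not configured in the class above:

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict


class DbSettings(BaseModel):
    url: str = "sqlite+aiosqlite:///./db.sqlite3"
    echo: bool = False


class EnvSettings(BaseSettings):
    # env_nested_delimiter lets APP_DB__URL / APP_DB__ECHO reach the nested model
    model_config = SettingsConfigDict(env_prefix="APP_", env_nested_delimiter="__")

    api_v1_prefix: str = "/api/v1"
    db: DbSettings = DbSettings()


# e.g. APP_DB__ECHO=true APP_DB__URL=sqlite+aiosqlite:///./other.sqlite3
settings = EnvSettings()
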
@@ -0,0 +1,16 @@
__all__ = (
    "Base",
    "Product",
    "db_helper",
    "DatabaseHelper",
    "User",
    "Post",
    "Profile",
)

from .base import Base
from .db_helper import DatabaseHelper, db_helper
from .product import Product
from .user import User
from .post import Post
from .profile import Profile
@@ -0,0 +1,15 @@
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, declared_attr


class Base(DeclarativeBase):
    """
    Base class for declaring models.
    """

    __abstract__ = True  # so that no table is created for this class itself

    @declared_attr.directive
    def __tablename__(cls) -> str:
        return f"{cls.__name__.lower()}s"

    id: Mapped[int] = mapped_column(primary_key=True)
@@ -0,0 +1,43 @@
from asyncio import current_task

from sqlalchemy.ext.asyncio import (
    AsyncSession,
    create_async_engine,
    async_sessionmaker,
    async_scoped_session,
)

from core.config import settings


class DatabaseHelper:
    def __init__(self, url: str, echo: bool = False):
        self.engine = create_async_engine(
            url=url,
            echo=echo,
        )
        self.session_factory = async_sessionmaker(
            bind=self.engine,
            autoflush=False,
            autocommit=False,
            expire_on_commit=False,
        )

    def get_scoped_session(self):
        # one session per asyncio task, keyed by current_task
        session = async_scoped_session(
            session_factory=self.session_factory,
            scopefunc=current_task,
        )
        return session

    async def session_dependency(self) -> AsyncSession:
        # FastAPI dependency: yields a session and closes it when the request is done
        async with self.session_factory() as session:
            yield session
            await session.close()


db_helper = DatabaseHelper(
    url=settings.db.url,
    echo=settings.db.echo,
)
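
get_scoped_session is defined but not yet wired into a dependency here. A sketch of how it could back an alternative dependency; the method below is hypothetical and is not defined on DatabaseHelper above:

    # Hypothetical addition to DatabaseHelper: hand out the task-scoped session
    # and drop it from the registry once the request is finished.
    async def scoped_session_dependency(self) -> AsyncSession:
        session = self.get_scoped_session()
        yield session
        await session.remove()
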
@@ -0,0 +1,28 @@
from sqlalchemy.orm import declared_attr, Mapped, mapped_column, relationship
from sqlalchemy import ForeignKey

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from core.models.user import User


class UserRelationMixin:
    _user_id_nullable: bool = False
    _user_id_unique: bool = False
    _user_back_populates: str | None = None

    @declared_attr
    def user_id(cls) -> Mapped[int]:
        return mapped_column(
            ForeignKey("users.id"),
            unique=cls._user_id_unique,
            nullable=cls._user_id_nullable,
        )

    @declared_attr
    def user(cls) -> Mapped["User"]:
        return relationship(
            "User",
            back_populates=cls._user_back_populates,
        )
@@ -0,0 +1,23 @@
from sqlalchemy import String, Text
from sqlalchemy.orm import Mapped, mapped_column

from core.models.base import Base

from core.models.mixins import UserRelationMixin


class Post(UserRelationMixin, Base):
    _user_back_populates = "posts"

    title: Mapped[str] = mapped_column(String(100), unique=False)
    body: Mapped[str] = mapped_column(
        Text,
        default="",
        server_default="",
    )

    def __str__(self):
        return f"{self.__class__.__name__}(id={self.id}, title={self.title!r}, user_id={self.user_id})"

    def __repr__(self):
        return str(self)
@@ -0,0 +1,9 @@
from sqlalchemy.orm import Mapped

from core.models.base import Base


class Product(Base):
    name: Mapped[str]
    description: Mapped[str]
    price: Mapped[int]
@@ -0,0 +1,14 @@
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from core.models.base import Base

from core.models.mixins import UserRelationMixin


class Profile(UserRelationMixin, Base):
    _user_id_unique = True
    _user_back_populates = "profile"

    first_name: Mapped[str | None] = mapped_column(String(30))
    last_name: Mapped[str | None] = mapped_column(String(30))
    bio: Mapped[str | None]
@@ -0,0 +1,22 @@
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column, relationship

from core.models.base import Base

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from .post import Post
    from .profile import Profile


class User(Base):
    username: Mapped[str] = mapped_column(String(30), unique=True)
    posts: Mapped[list["Post"]] = relationship(back_populates="user")
    profile: Mapped["Profile"] = relationship(back_populates="user")

    def __str__(self):
        return f"{self.__class__.__name__}(id={self.id}, username={self.username!r})"

    def __repr__(self):
        return str(self)
@@ -0,0 +1,86 @@
import asyncio

from sqlalchemy import select, Result
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import joinedload
from core.models import db_helper, User, Profile, Post

import logging

log = logging.getLogger()


async def create_user(session: AsyncSession, username: str) -> User:
    user = User(username=username)
    session.add(user)
    await session.commit()
    log.info("user %r", user)
    return user


async def get_user_by_username(session: AsyncSession, username: str) -> User | None:
    stmt = select(User).where(User.username == username)
    # result: Result = await session.execute(stmt)
    # user: User | None = result.scalar_one_or_none()
    user: User | None = await session.scalar(stmt)
    log.info("found user %r %r", username, user)
    return user


async def create_user_profile(
    session: AsyncSession,
    user_id: int,
    first_name: str | None = None,
    last_name: str | None = None,
) -> Profile:
    profile = Profile(
        user_id=user_id,
        first_name=first_name,
        last_name=last_name,
    )
    session.add(profile)
    await session.commit()
    return profile


async def show_users_with_profiles(session: AsyncSession) -> None:
    stmt = select(User).options(joinedload(User.profile)).order_by(User.id)
    users = await session.scalars(stmt)
    for user in users:
        log.info("user: %r profile first name: %r", user, user.profile.first_name)


async def create_posts(
    session: AsyncSession,
    user_id: int,
    *posts_titles: str,
) -> list[Post]:
    posts = [Post(title=title, user_id=user_id) for title in posts_titles]
    session.add_all(posts)
    await session.commit()
    log.info("posts: %r", posts)
    return posts


async def main():
    async with db_helper.session_factory() as session:
        # await create_user(session=session, username="sergey")
        # await create_user(session=session, username="vasya")
        user_vasya = await get_user_by_username(session=session, username="vasya")
        # # await get_user_by_username(session=session, username="sefesf")
        # await create_user_profile(
        #     session=session,
        #     user_id=user_vasya.id,
        #     first_name="вася",
        # )
        # await show_users_with_profiles(session=session)
        await create_posts(
            session,
            user_vasya.id,
            "sqla 2.0",
            "sqla joins",
        )


if __name__ == "__main__":
    asyncio.run(main())
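
The demo above eager-loads the one-to-one profile with joinedload; for the one-to-many posts side, selectinload is the usual choice. A minimal sketch, assuming the same models and session factory; the helper name is made up:

from sqlalchemy import select
from sqlalchemy.orm import selectinload

from core.models import db_helper, User


async def show_users_with_posts() -> None:
    # selectinload issues a second SELECT ... WHERE user_id IN (...) instead of a JOIN,
    # so user rows are not duplicated once per post.
    async with db_helper.session_factory() as session:
        stmt = select(User).options(selectinload(User.posts)).order_by(User.id)
        users = await session.scalars(stmt)
        for user in users:
            print(user, [post.title for post in user.posts])
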
@@ -0,0 +1,32 @@
from typing import Annotated

from fastapi import Path, APIRouter


router = APIRouter(prefix="/items")


@router.get("")
def get_items():
    return [
        "item1",
        "item2",
        "item3",
    ]


@router.get("/latest")
def get_items_latest():
    return {
        "item": {
            "id": "0",
            "name": "latest",
        }
    }


@router.get("/{item_id}")
def get_item_by_id(item_id: Annotated[int, Path(ge=1, lt=1_000_000)]):
    return {
        "item": {"id": item_id},
    }
@@ -0,0 +1,41 @@
from contextlib import asynccontextmanager

import uvicorn
from fastapi import FastAPI
from pydantic import EmailStr, BaseModel

from items_views import router as items_router

from users.views import router as users_router

from api_v1 import router as api_v1_router


from core.config import settings


@asynccontextmanager
async def lifespan(app: FastAPI):
    # placeholder for startup/shutdown actions to run around the application's lifespan
    yield


app = FastAPI(lifespan=lifespan)
app.include_router(router=items_router, tags=["items"])
app.include_router(router=users_router, tags=["users"])
app.include_router(router=api_v1_router, tags=["api_v1"], prefix=settings.api_v1_prefix)


@app.get("/")
def hello_index():
    return {"message": "hello index"}


@app.get("/hello")
def hello(name: str = "World"):
    name = name.strip().title()
    return {"message": f"Hello {name}"}


if __name__ == "__main__":
    uvicorn.run("main:app", reload=True)
(File diff suppressed because it is too large.)
@@ -0,0 +1,25 @@
[tool.poetry]
name = "testfastapi"
version = "0.1.0"
description = ""
authors = ["sergey <sergey@sm8255082.ru>"]
license = "MIT"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.12"
fastapi = "^0.112.0"
uvicorn = {extras = ["standard"], version = "^0.30.5"}
pydantic = {extras = ["email"], version = "^2.8.2"}
sqlalchemy = {extras = ["asyncio"], version = "^2.0.31"}
aiosqlite = "^0.20.0"
pydantic-settings = "^2.4.0"
alembic = "^1.13.2"

[tool.poetry.group.dev.dependencies]
black = "^24.8.0"
pytest = "^8.3.2"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
@@ -0,0 +1,9 @@
from users.schemas import CreateUser


def create_user(user: CreateUser) -> dict:
    new_user = user.model_dump()
    return {
        "success": True,
        "user": new_user,
    }
@@ -0,0 +1,8 @@
from pydantic import BaseModel, EmailStr
from typing import Annotated
from annotated_types import MinLen, MaxLen


class CreateUser(BaseModel):
    username: Annotated[str, MinLen(3), MaxLen(40)]
    email: EmailStr
@@ -0,0 +1,12 @@
from fastapi import APIRouter

from users.schemas import CreateUser

from users import crud

router = APIRouter(prefix="/users")


@router.post("")
def create_user(user: CreateUser):
    return crud.create_user(user=user)