Started the actual FastAPI server backed by the database

This commit is contained in:
Jack Case
2025-10-19 15:33:34 +00:00
parent 4931a785a1
commit e90bc1cc72
3 changed files with 69 additions and 0 deletions

View File

@@ -1,4 +1,8 @@
from typing import Annotated
from sqlmodel import Field, SQLModel, create_engine, Relationship
from pydantic import AfterValidator, BaseModel
from urllib.parse import urlparse, ParseResult
NAMING_CONVENTION = {
"ix": "ix_%(column_0_label)s",
@@ -11,6 +15,10 @@ NAMING_CONVENTION = {
metadata = SQLModel.metadata
metadata.naming_convention = NAMING_CONVENTION
################################################
# Database Models
################################################
class Domain(SQLModel, table=True):
id: int | None = Field(default=None, primary_key=True)
domain_name: str = Field(index=True, unique=True)
@@ -31,3 +39,24 @@ class User(SQLModel, table=True):
salt: str
email_verified: bool = Field(default=False)
################################################
# API Models
################################################
def url_validator(urls: list[str]) -> list[ParseResult]:
    """Parse each URL string and verify it has a domain (netloc) component.

    Args:
        urls: Raw URL strings to validate.

    Returns:
        The parsed URLs as ``ParseResult`` objects, in input order.

    Raises:
        ValueError: If a URL cannot be parsed at all, or parses but has no
            domain component.
    """
    parsed_urls: list[ParseResult] = []
    for url in urls:
        try:
            parsed = urlparse(url)
        except ValueError as e:
            # Chain the original exception so the root cause isn't lost.
            raise ValueError(f"couldn't parse '{url}' as a URL") from e
        # NOTE: this check previously lived inside the try-block, so its
        # specific message was immediately swallowed by the generic
        # except-and-re-raise and could never reach the caller.
        if not parsed.netloc:
            raise ValueError(f"couldn't parse domain from '{url}'")
        parsed_urls.append(parsed)
    return parsed_urls
class SlopReport(BaseModel):
    """Accept reports of one or more slop page URLs"""
    # Each URL is validated by url_validator after pydantic's own validation.
    # NOTE(review): url_validator returns list[ParseResult], so the validated
    # field values are ParseResult objects, not str, despite the declared item
    # type — confirm this mismatch is intentional.
    slop_urls: Annotated[list[str], AfterValidator(url_validator)]