🐐
This commit is contained in:
149
backend/alembic.ini
Normal file
149
backend/alembic.ini
Normal file
@@ -0,0 +1,149 @@
|
||||
# alembic.ini — Alembic migration configuration, read by Python configparser.
# NOTE: under configparser interpolation a literal % must be written %%.

[alembic]
# Path to migration scripts, relative to %(here)s (the directory of this file).
script_location = %(here)s/alembic

# Template for migration file names; the default is %%(rev)s_%%(slug)s.
# Uncomment below to prepend date and time; for all available tokens see
# https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# Or organize into date-based subdirectories (requires recursive_version_locations = true)
# file_template = %%(year)d/%%(month).2d/%%(day).2d_%%(hour).2d%%(minute).2d_%%(second).2d_%%(rev)s_%%(slug)s

# Prepended to sys.path if present; defaults to the current working directory.
# For multiple paths, the separator is defined by "path_separator" below.
prepend_sys_path = .

# Timezone used when rendering dates in migration files and filenames.
# Requires the tzdata library (`alembic[tz]`); the string is passed to
# ZoneInfo(). Leave blank for localtime.
# timezone =

# Max length of characters applied to the "slug" field.
# truncate_slug_length = 40

# Set to 'true' to run the environment during the 'revision' command,
# regardless of autogenerate.
# revision_environment = false

# Set to 'true' to allow .pyc and .pyo files without a source .py file to be
# detected as revisions in the versions/ directory.
# sourceless = false

# Version location specification; defaults to <script_location>/versions.
# With multiple version directories, initial revisions must be specified with
# --version-path. Use the separator configured by "path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions

# path_separator: the character used to split lists of file paths
# (version_locations, prepend_sys_path) in this file. "os" uses os.pathsep
# for os-dependent splitting.
#
# For legacy alembic.ini compatibility this default does NOT apply when the
# key is absent; the fallback is then:
# 1. version_locations falls back to the legacy "version_path_separator" key,
#    and failing that splits on spaces and/or commas.
# 2. prepend_sys_path falls back to splitting on spaces, commas, or colons.
#
# Valid values:
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os

# Set to 'true' to search source files recursively in each
# "version_locations" directory (new in Alembic 1.10).
# recursive_version_locations = false

# Output encoding used when revision files are written from script.py.mako.
# output_encoding = utf-8

# Database URL, consumed only by the user-maintained env.py script; other
# means of configuring database URLs may be customized within env.py.
# SECURITY NOTE(review): credentials are stored here in plain text — consider
# sourcing the URL from the environment inside env.py instead.
sqlalchemy.url = postgresql://wikitcg:password@localhost/wikitcg


[post_write_hooks]
# Scripts or Python functions run on newly generated revision scripts.
# See the Alembic documentation for further detail and examples.

# Format using "black" - console_scripts runner, against the "black" entrypoint:
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Lint (with fixes) using "ruff" - module runner, against the "ruff" module:
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Alternatively, use the exec runner to execute a ruff binary found on PATH:
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

# Logging configuration. Also consumed only by the user-maintained env.py.
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
|
||||
1
backend/alembic/README
Normal file
1
backend/alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
||||
79
backend/alembic/env.py
Normal file
79
backend/alembic/env.py
Normal file
@@ -0,0 +1,79 @@
|
||||
from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context
from models import Base

# Alembic Config object: exposes the values from the .ini file in use.
config = context.config

# Configure Python logging from the ini file's logging sections.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata of the application's models; enables 'autogenerate' support.
target_metadata = Base.metadata

# Other values from the config, defined by the needs of env.py, can be
# acquired via config.get_main_option("some_option").


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and no Engine, so no DBAPI is
    required. Calls to context.execute() emit the given SQL to the script
    output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Creates an Engine from the config's [alembic] section and associates a
    live connection with the migration context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
28
backend/alembic/script.py.mako
Normal file
28
backend/alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
|
||||
29
backend/auth.py
Normal file
29
backend/auth.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import logging
from datetime import datetime, timedelta, timezone

from jose import JWTError, jwt
from passlib.context import CryptContext

logger = logging.getLogger("app")

# SECURITY NOTE(review): hard-coded signing secret — anyone with this value
# can forge tokens. Load it from the environment before deploying.
SECRET_KEY = "changethis"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24 * 30  # 1 month

pwd_context = CryptContext(schemes=["bcrypt"])


def hash_password(password: str) -> str:
    """Hash a plaintext password with bcrypt."""
    return pwd_context.hash(password)


def verify_password(plain: str, hashed: str) -> bool:
    """Check a plaintext password against its bcrypt hash."""
    return pwd_context.verify(plain, hashed)


def create_access_token(user_id: str) -> str:
    """Create a signed JWT whose "sub" claim is user_id.

    The "exp" claim uses an aware UTC datetime: python-jose converts the
    datetime to a POSIX timestamp, so a naive local-time value would shift
    the expiry by the server's UTC offset.
    """
    expire = datetime.now(timezone.utc) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
    return jwt.encode({"sub": user_id, "exp": expire}, SECRET_KEY, algorithm=ALGORITHM)


def decode_access_token(token: str) -> str | None:
    """Return the "sub" claim of a valid token, or None if the token is
    invalid or expired (jose raises JWTError for both)."""
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        return payload.get("sub")
    except JWTError:
        return None
|
||||
109
backend/card.py
109
backend/card.py
@@ -1,3 +1,4 @@
|
||||
import logging
|
||||
from math import sqrt, cbrt
|
||||
from enum import Enum
|
||||
from typing import NamedTuple
|
||||
@@ -5,6 +6,8 @@ from urllib.parse import quote
|
||||
from datetime import datetime, timedelta
|
||||
from time import sleep
|
||||
|
||||
logger = logging.getLogger("app")
|
||||
|
||||
class CardType(Enum):
|
||||
other = 0
|
||||
person = 1
|
||||
@@ -15,6 +18,7 @@ class CardType(Enum):
|
||||
group = 6
|
||||
science_thing = 7
|
||||
vehicle = 8
|
||||
business = 9
|
||||
|
||||
class CardRarity(Enum):
|
||||
common = 0
|
||||
@@ -94,6 +98,7 @@ WIKIDATA_INSTANCE_TYPE_MAP = {
|
||||
"Q482994": CardType.artwork, # album
|
||||
"Q134556": CardType.artwork, # single
|
||||
"Q169930": CardType.artwork, # EP
|
||||
"Q196600": CardType.artwork, # media franchise
|
||||
"Q202866": CardType.artwork, # animated film
|
||||
"Q734698": CardType.artwork, # collectible card game
|
||||
"Q506240": CardType.artwork, # television film
|
||||
@@ -108,9 +113,11 @@ WIKIDATA_INSTANCE_TYPE_MAP = {
|
||||
"Q47461344": CardType.artwork, # written work
|
||||
"Q71631512": CardType.artwork, # tabletop role-playing game supplement
|
||||
"Q21198342": CardType.artwork, # manga series
|
||||
"Q58483083": CardType.artwork, # dramatico-musical work
|
||||
"Q24634210": CardType.artwork, # podcast show
|
||||
"Q105543609": CardType.artwork, # musical work / composition
|
||||
"Q106499608": CardType.artwork, # literary reading
|
||||
"Q117467246": CardType.artwork, # animated television series
|
||||
|
||||
"Q515": CardType.location, # city
|
||||
"Q8502": CardType.location, # mountain
|
||||
@@ -137,31 +144,50 @@ WIKIDATA_INSTANCE_TYPE_MAP = {
|
||||
|
||||
"Q7278": CardType.group, # political party
|
||||
"Q476028": CardType.group, # association football club
|
||||
"Q732717": CardType.group, # law enforcement agency
|
||||
"Q215380": CardType.group, # musical group
|
||||
"Q176799": CardType.group, # military unit
|
||||
"Q178790": CardType.group, # labor union
|
||||
"Q2367225": CardType.group, # university and college sports club
|
||||
"Q4801149": CardType.group, # artillery brigade
|
||||
"Q9248092": CardType.group, # infantry division
|
||||
"Q7210356": CardType.group, # political organization
|
||||
"Q5419137": CardType.group, # veterans' organization
|
||||
"Q12973014": CardType.group, # sports team
|
||||
"Q11446438": CardType.group, # female idol group
|
||||
"Q135408445": CardType.group, # men's national association football team
|
||||
|
||||
"Q7187": CardType.science_thing, # gene
|
||||
"Q8054": CardType.science_thing, # protein
|
||||
"Q65943": CardType.science_thing, # theorem
|
||||
"Q12140": CardType.science_thing, # medication
|
||||
"Q11276": CardType.science_thing, # globular cluster
|
||||
"Q898273": CardType.science_thing, # protein domain
|
||||
"Q168845": CardType.science_thing, # star cluster
|
||||
"Q1840368": CardType.science_thing, # cloud type
|
||||
"Q113145171": CardType.science_thing, # type of chemical entity
|
||||
|
||||
"Q1420": CardType.vehicle, # car
|
||||
"Q11446": CardType.vehicle, # ship
|
||||
"Q43193": CardType.vehicle, # truck
|
||||
"Q25956": CardType.vehicle, # space station
|
||||
"Q39804": CardType.vehicle, # cruise ship
|
||||
"Q811704": CardType.vehicle, # rolling stock class
|
||||
"Q673687": CardType.vehicle, # racing automobile
|
||||
"Q174736": CardType.vehicle, # destroyer
|
||||
"Q484000": CardType.vehicle, # unmanned aerial vehicle
|
||||
"Q559026": CardType.vehicle, # ship class
|
||||
"Q830335": CardType.vehicle, # protected cruiser
|
||||
"Q928235": CardType.vehicle, # sloop-of-war
|
||||
"Q391022": CardType.vehicle, # research vessel
|
||||
"Q1185562": CardType.vehicle, # light aircraft carrier
|
||||
"Q7233751": CardType.vehicle, # post ship
|
||||
"Q3231690": CardType.vehicle, # automobile model
|
||||
"Q1428357": CardType.vehicle, # submarine class
|
||||
"Q1499623": CardType.vehicle, # destroyer escort
|
||||
"Q4818021": CardType.vehicle, # attack submarine
|
||||
|
||||
"Q4830453": CardType.business, # business
|
||||
}
|
||||
|
||||
import asyncio
|
||||
@@ -185,13 +211,15 @@ async def _get_random_summary_async(client: httpx.AsyncClient) -> dict:
|
||||
headers=HEADERS,
|
||||
follow_redirects=False,
|
||||
)
|
||||
except httpx.ReadTimeout:
|
||||
except:
|
||||
return {}
|
||||
|
||||
if not response.is_success:
|
||||
print("Error in request:")
|
||||
print(response.status_code)
|
||||
print(response.text)
|
||||
logger.error(
|
||||
"Error in request:" +
|
||||
str(response.status_code) +
|
||||
response.text
|
||||
)
|
||||
return {}
|
||||
|
||||
return response.json()
|
||||
@@ -212,22 +240,28 @@ async def _get_page_summary_async(client: httpx.AsyncClient, title: str) -> dict
|
||||
headers=HEADERS,
|
||||
follow_redirects=False,
|
||||
)
|
||||
except httpx.ReadTimeout:
|
||||
except:
|
||||
return {}
|
||||
|
||||
if not response.is_success:
|
||||
print("Error in request:")
|
||||
print(response.status_code)
|
||||
print(response.text)
|
||||
logger.error(
|
||||
"Error in request:" +
|
||||
str(response.status_code) +
|
||||
response.text
|
||||
)
|
||||
return {}
|
||||
|
||||
return response.json()
|
||||
|
||||
async def _infer_card_type_async(client: httpx.AsyncClient, entity_id: str) -> tuple[CardType, str, int]:
|
||||
response = await client.get(
|
||||
"https://www.wikidata.org/wiki/Special:EntityData/" + entity_id + ".json",
|
||||
headers=HEADERS
|
||||
)
|
||||
try:
|
||||
response = await client.get(
|
||||
"https://www.wikidata.org/wiki/Special:EntityData/" + entity_id + ".json",
|
||||
headers=HEADERS
|
||||
)
|
||||
except:
|
||||
return CardType.other, "", 0
|
||||
|
||||
if not response.is_success:
|
||||
return CardType.other, "", 0
|
||||
|
||||
@@ -253,10 +287,13 @@ async def _infer_card_type_async(client: httpx.AsyncClient, entity_id: str) -> t
|
||||
|
||||
async def _get_wikirank_score(client: httpx.AsyncClient, title: str) -> float | None:
|
||||
"""Returns a quality score from 0-100, or None if unavailable."""
|
||||
response = await client.get(
|
||||
f"https://api.wikirank.net/api.php?name={quote(title, safe="")}&lang=en",
|
||||
headers=HEADERS
|
||||
)
|
||||
try:
|
||||
response = await client.get(
|
||||
f"https://api.wikirank.net/api.php?name={quote(title, safe='')}&lang=en",
|
||||
headers=HEADERS
|
||||
)
|
||||
except:
|
||||
return None
|
||||
if not response.is_success:
|
||||
return None
|
||||
data = response.json()
|
||||
@@ -307,7 +344,7 @@ async def _get_monthly_pageviews(client: httpx.AsyncClient, title: str) -> int |
|
||||
return None
|
||||
items = response.json().get("items", [])
|
||||
return items[0]["views"] if items else None
|
||||
except httpx.ReadError:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def _pageviews_to_defense(views: int | None) -> int:
|
||||
@@ -340,8 +377,8 @@ async def _get_card_async(client: httpx.AsyncClient, page_title: str|None = None
|
||||
rarity = _score_to_rarity(score)
|
||||
multiplier = RARITY_MULTIPLIER[rarity]
|
||||
|
||||
attack = int(language_count*1.5*multiplier**2)
|
||||
defense = int(_pageviews_to_defense(views)*max(multiplier,(multiplier**2)/2))
|
||||
attack = min(2500,int(((language_count*1.5)**1.2)*multiplier**2))
|
||||
defense = min(2500,int(_pageviews_to_defense(views)*max(multiplier,(multiplier**2)/2)))
|
||||
|
||||
return Card(
|
||||
name=summary["title"],
|
||||
@@ -353,13 +390,15 @@ async def _get_card_async(client: httpx.AsyncClient, page_title: str|None = None
|
||||
text=text,
|
||||
attack=attack,
|
||||
defense=defense,
|
||||
cost=min(12,max(1,int(cbrt(attack+defense)/1.5)))
|
||||
cost=min(12,max(1,int(((attack**2+defense**2)**0.18)/1.5)))
|
||||
)
|
||||
|
||||
async def _get_cards_async(size: int) -> list[Card]:
|
||||
logger.debug(f"Generating {size} cards")
|
||||
async with httpx.AsyncClient(follow_redirects=True) as client:
|
||||
cards = await asyncio.gather(*[_get_card_async(client) for _ in range(size)])
|
||||
return [c for c in cards if c is not None]
|
||||
|
||||
return [c for c in cards if c is not None]
|
||||
|
||||
async def _get_specific_card_async(title: str) -> Card|None:
|
||||
async with httpx.AsyncClient(follow_redirects=True) as client:
|
||||
@@ -367,33 +406,29 @@ async def _get_specific_card_async(title: str) -> Card|None:
|
||||
|
||||
# Sync entrypoints
|
||||
def generate_cards(size: int) -> list[Card]:
|
||||
print(f"Generating {size} cards")
|
||||
batches = [10 for _ in range(size//10)] + ([size%10] if size%10 != 0 else [])
|
||||
n_batches = len(batches)
|
||||
cards = []
|
||||
for i in range(n_batches):
|
||||
b = batches[i]
|
||||
print(f"Generating batch of {b} cards (batch {i+1}/{n_batches})")
|
||||
if i != 0:
|
||||
sleep(5)
|
||||
cards += asyncio.run(_get_cards_async(b))
|
||||
|
||||
return cards
|
||||
return asyncio.run(_get_cards_async(size))
|
||||
|
||||
def generate_card(title: str) -> Card|None:
|
||||
return asyncio.run(_get_specific_card_async(title))
|
||||
|
||||
|
||||
# for card in generate_cards(5):
|
||||
# print(card)
|
||||
|
||||
# rarities = []
|
||||
# cards = []
|
||||
# for i in range(20):
|
||||
# print(i)
|
||||
# cards += generate_cards(10)
|
||||
# sleep(3)
|
||||
|
||||
# costs = []
|
||||
# from collections import Counter
|
||||
# for card in generate_cards(1000):
|
||||
# rarities.append(card.card_rarity)
|
||||
# for card in cards:
|
||||
# costs.append((card.card_rarity,card.cost))
|
||||
# if card.card_rarity == CardRarity.legendary:
|
||||
# print(card)
|
||||
|
||||
# print(Counter(rarities))
|
||||
# print(Counter(costs))
|
||||
|
||||
# for card in generate_cards(100):
|
||||
# if card.card_type == CardType.other:
|
||||
|
||||
17
backend/database.py
Normal file
17
backend/database.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from sqlalchemy import create_engine
from sqlalchemy.orm import DeclarativeBase, sessionmaker

# SECURITY NOTE(review): credentials in plain source — consider reading the
# URL from the environment. Same URL appears in alembic.ini; keep in sync.
DATABASE_URL = "postgresql://wikitcg:password@localhost/wikitcg"

engine = create_engine(DATABASE_URL)
SessionLocal = sessionmaker(bind=engine)


class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""
    pass


def get_db():
    """FastAPI dependency: yield a session, always closing it afterwards."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
|
||||
61
backend/database_functions.py
Normal file
61
backend/database_functions.py
Normal file
@@ -0,0 +1,61 @@
|
||||
import logging
import asyncio

from sqlalchemy.orm import Session

from card import _get_cards_async
from models import Card as CardModel
from database import SessionLocal

logger = logging.getLogger("app")

# Refill when fewer than POOL_MINIMUM unassigned cards remain, topping up to
# POOL_TARGET in batches of POOL_BATCH_SIZE, sleeping POOL_SLEEP seconds
# between batches (rate-limits the upstream wiki APIs).
POOL_MINIMUM = 500
POOL_TARGET = 1000
POOL_BATCH_SIZE = 10
POOL_SLEEP = 5.0

# Re-entrancy guard: True while a fill is running in this process.
pool_filling = False


async def fill_card_pool():
    """Top up the pool of unassigned cards (user_id IS NULL) to POOL_TARGET.

    No-op when a fill is already in progress or the pool holds at least
    POOL_MINIMUM unassigned cards. Commits after every batch so partial
    progress survives a crash.
    """
    global pool_filling
    if pool_filling:
        logger.info("Pool fill already in progress, skipping")
        return
    # Claim the guard before any await point, so two tasks cannot both pass
    # the check above; the original set it only after the count query.
    pool_filling = True

    # NOTE(review): this Session is synchronous — the count/commit calls block
    # the event loop briefly. Acceptable for a background task; confirm.
    db: Session = SessionLocal()
    try:
        # .is_(None) is SQLAlchemy's explicit IS NULL test (equivalent to
        # == None, but doesn't rely on overloaded equality).
        unassigned = db.query(CardModel).filter(CardModel.user_id.is_(None)).count()
        logger.info(f"Card pool has {unassigned} unassigned cards")
        if unassigned >= POOL_MINIMUM:
            logger.info("Pool sufficiently stocked, skipping fill")
            return

        needed = POOL_TARGET - unassigned
        logger.info(f"Filling pool with {needed} cards")

        fetched = 0
        while fetched < needed:
            batch_size = min(POOL_BATCH_SIZE, needed - fetched)
            cards = await _get_cards_async(batch_size)

            for card in cards:
                db.add(CardModel(
                    name=card.name,
                    image_link=card.image_link,
                    card_rarity=card.card_rarity.name,
                    card_type=card.card_type.name,
                    text=card.text,
                    attack=card.attack,
                    defense=card.defense,
                    cost=card.cost,
                    user_id=None,  # unassigned: stays in the pool
                ))
            db.commit()
            fetched += batch_size
            logger.info(f"Pool fill progress: {fetched}/{needed}")
            await asyncio.sleep(POOL_SLEEP)

    finally:
        # Always release the guard and the session, even on error.
        pool_filling = False
        db.close()
|
||||
49
backend/log_conf.yaml
Normal file
49
backend/log_conf.yaml
Normal file
@@ -0,0 +1,49 @@
|
||||
# Logging config for uvicorn (passed via --log-config). Root logger is DEBUG
# to stderr; noisy HTTP client loggers are capped at WARNING; the project-wide
# "app" logger emits at INFO; uvicorn access logs go to stdout.
version: 1
disable_existing_loggers: False
formatters:
  default:
    # "()": uvicorn.logging.DefaultFormatter
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
  access:
    # "()": uvicorn.logging.AccessFormatter
    format: '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
handlers:
  default:
    formatter: default
    class: logging.StreamHandler
    stream: ext://sys.stderr
  access:
    formatter: access
    class: logging.StreamHandler
    stream: ext://sys.stdout
loggers:
  uvicorn.error:
    level: INFO
    handlers:
      - default
    propagate: no
  uvicorn.access:
    level: INFO
    handlers:
      - access
    propagate: no
  httpcore:
    level: WARNING
    handlers:
      - default
    propagate: no
  httpx:
    level: WARNING
    handlers:
      - default
    propagate: no
  app:
    level: INFO
    handlers:
      - default
    propagate: no
root:
  level: DEBUG
  handlers:
    - default
|
||||
@@ -1,8 +1,46 @@
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from card import _get_cards_async
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
app = FastAPI()
|
||||
from sqlalchemy.orm import Session
|
||||
from fastapi import FastAPI, Depends, HTTPException, status
|
||||
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from pydantic import BaseModel
|
||||
|
||||
from database import SessionLocal, get_db
|
||||
from database_functions import fill_card_pool
|
||||
from models import Card as CardModel
|
||||
from models import User as UserModel
|
||||
from card import _get_cards_async
|
||||
from auth import hash_password, verify_password, create_access_token, decode_access_token
|
||||
|
||||
logger = logging.getLogger("app")
|
||||
|
||||
# Auth
|
||||
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login")
|
||||
|
||||
class RegisterRequest(BaseModel):
    """Request body for POST /register."""

    username: str
    # NOTE(review): plain str — no format validation; consider pydantic's
    # EmailStr if the email-validator dependency is acceptable.
    email: str
    password: str
|
||||
|
||||
def get_current_user(token: str = Depends(oauth2_scheme), db: Session = Depends(get_db)) -> UserModel:
    """FastAPI dependency: resolve the bearer token to a User row.

    Raises HTTP 401 when the token is invalid/expired, when its "sub" claim
    is not a UUID string, or when no matching user exists.
    """
    user_id = decode_access_token(token)
    if not user_id:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")
    try:
        # A malformed "sub" claim would otherwise raise ValueError and
        # surface as HTTP 500 instead of 401.
        uid = uuid.UUID(user_id)
    except (ValueError, TypeError):
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid token")
    user = db.query(UserModel).filter(UserModel.id == uid).first()
    if not user:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="User not found")
    return user
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: start a background card-pool refill at startup.

    A strong reference to the task is kept: the event loop holds only weak
    references to tasks, so an unreferenced task may be garbage-collected
    before it finishes.
    """
    fill_task = asyncio.create_task(fill_card_pool())
    try:
        yield
    finally:
        # Stop the refill on shutdown so the process can exit promptly.
        fill_task.cancel()
|
||||
|
||||
app = FastAPI(lifespan=lifespan)
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
@@ -12,11 +50,35 @@ app.add_middleware(
|
||||
)
|
||||
|
||||
@app.get("/pack/{size}")
|
||||
async def open_pack(size: int = 10):
|
||||
async def open_pack(size: int = 10, user: UserModel = Depends(get_current_user)):
|
||||
cards = await _get_cards_async(size)
|
||||
return [
|
||||
{**card._asdict(),
|
||||
"card_type": card.card_type.name,
|
||||
"card_rarity": card.card_rarity.name}
|
||||
for card in cards
|
||||
]
|
||||
]
|
||||
|
||||
@app.post("/register")
def register(req: RegisterRequest, db: Session = Depends(get_db)):
    """Create a new user; 400 if the username or email is already in use."""
    username_taken = (
        db.query(UserModel).filter(UserModel.username == req.username).first()
    )
    if username_taken:
        raise HTTPException(status_code=400, detail="Username already taken")

    email_taken = db.query(UserModel).filter(UserModel.email == req.email).first()
    if email_taken:
        raise HTTPException(status_code=400, detail="Email already registered")

    new_user = UserModel(
        id=uuid.uuid4(),
        username=req.username,
        email=req.email,
        # Only the bcrypt hash is persisted, never the plaintext.
        password_hash=hash_password(req.password),
    )
    db.add(new_user)
    db.commit()
    return {"message": "User created"}
|
||||
|
||||
@app.post("/login")
def login(form: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db)):
    """OAuth2 password flow: exchange username/password for a bearer token."""
    user = db.query(UserModel).filter(UserModel.username == form.username).first()
    # One generic error for both unknown user and wrong password, so the
    # endpoint does not reveal which usernames exist.
    if user is None or not verify_password(form.password, user.password_hash):
        raise HTTPException(status_code=400, detail="Invalid username or password")
    return {"access_token": create_access_token(str(user.id)), "token_type": "bearer"}
|
||||
|
||||
59
backend/models.py
Normal file
59
backend/models.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from sqlalchemy import String, Integer, ForeignKey, DateTime, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
from database import Base
|
||||
|
||||
class User(Base):
    """A registered account; owns collected cards and built decks."""

    __tablename__ = "users"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    username: Mapped[str] = mapped_column(String, unique=True, nullable=False)
    email: Mapped[str] = mapped_column(String, unique=True, nullable=False)
    # bcrypt hash produced by auth.hash_password — never the plaintext.
    password_hash: Mapped[str] = mapped_column(String, nullable=False)
    # NOTE(review): datetime.now is naive local time — consider a UTC default.
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.now)

    cards: Mapped[list["Card"]] = relationship(back_populates="user")
    decks: Mapped[list["Deck"]] = relationship(back_populates="user")
|
||||
|
||||
|
||||
class Card(Base):
    """A generated trading card.

    user_id is NULL while the card sits in the unassigned pool (see
    database_functions.fill_card_pool) and is set once a player claims it.
    """

    __tablename__ = "cards"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    name: Mapped[str] = mapped_column(String, nullable=False)
    image_link: Mapped[str] = mapped_column(String, nullable=True)
    # Enum names stored as plain strings (e.g. "common", "person").
    card_rarity: Mapped[str] = mapped_column(String, nullable=False)
    card_type: Mapped[str] = mapped_column(String, nullable=False)
    text: Mapped[str] = mapped_column(Text, nullable=True)
    attack: Mapped[int] = mapped_column(Integer, nullable=False)
    defense: Mapped[int] = mapped_column(Integer, nullable=False)
    cost: Mapped[int] = mapped_column(Integer, nullable=False)
    # NOTE(review): naive local-time default, same caveat as User.created_at.
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.now)

    user: Mapped["User | None"] = relationship(back_populates="cards")
    deck_cards: Mapped[list["DeckCard"]] = relationship(back_populates="card")
|
||||
|
||||
|
||||
class Deck(Base):
    """A named deck belonging to a user; contents live in deck_cards."""

    __tablename__ = "decks"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    user_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=False)
    name: Mapped[str] = mapped_column(String, nullable=False)
    # NOTE(review): naive local-time default, same caveat as User.created_at.
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.now)

    user: Mapped["User"] = relationship(back_populates="decks")
    deck_cards: Mapped[list["DeckCard"]] = relationship(back_populates="deck")
|
||||
|
||||
|
||||
class DeckCard(Base):
    """Association row linking a deck to a card (composite primary key).

    The (deck_id, card_id) PK means a given card can appear at most once per
    deck.
    """

    __tablename__ = "deck_cards"

    deck_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("decks.id"), primary_key=True)
    card_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), ForeignKey("cards.id"), primary_key=True)

    deck: Mapped["Deck"] = relationship(back_populates="deck_cards")
    card: Mapped["Card"] = relationship(back_populates="deck_cards")
|
||||
1
backend/run
Executable file
1
backend/run
Executable file
@@ -0,0 +1 @@
|
||||
#!/usr/bin/env sh
# Dev server launcher: uvicorn with auto-reload and the project log config.
# (File is marked executable; without a shebang, execution depends on the
# invoking shell's fallback behavior.)
exec uvicorn main:app --reload --log-config=log_conf.yaml
|
||||
Reference in New Issue
Block a user