diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..88328ed
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,978 @@
+# flyctl launch added from .gitignore
+# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
+# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
+
+### Data ###
+**/*.csv
+**/*.dat
+**/*.efx
+**/*.gbr
+**/*.key
+**/*.pps
+**/*.ppt
+**/*.pptx
+**/*.sdf
+**/*.tax2010
+**/*.vcf
+**/*.xml
+
+### Emacs ###
+# -*- mode: gitignore; -*-
+**/*~
+**/\#*\#
+.emacs.desktop
+.emacs.desktop.lock
+**/*.elc
+**/auto-save-list
+**/tramp
+**/.\#*
+
+# Org-mode
+**/.org-id-locations
+**/*_archive
+
+# flymake-mode
+**/*_flymake.*
+
+# eshell files
+eshell/history
+eshell/lastdir
+
+# elpa packages
+elpa
+
+# reftex files
+**/*.rel
+
+# AUCTeX auto folder
+auto
+
+# cask packages
+**/.cask
+**/dist
+
+# Flycheck
+**/flycheck_*.el
+
+# server auth directory
+server
+
+# projectile files
+**/.projectile
+
+# directory configuration
+**/.dir-locals.el
+
+# network security
+network-security.data
+
+
+### Executable ###
+**/*.app
+**/*.bat
+**/*.cgi
+**/*.com
+**/*.exe
+**/*.gadget
+**/*.jar
+**/*.pif
+**/*.vb
+**/*.wsf
+
+### Node ###
+# Logs
+**/logs
+**/*.log
+**/npm-debug.log*
+**/yarn-debug.log*
+**/yarn-error.log*
+**/lerna-debug.log*
+**/.pnpm-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+**/pids
+**/*.pid
+**/*.seed
+**/*.pid.lock
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+**/lib-cov
+
+# Coverage directory used by tools like istanbul
+**/coverage
+**/*.lcov
+
+# nyc test coverage
+**/.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+**/.grunt
+
+# Bower dependency directory (https://bower.io/)
+**/bower_components
+
+# node-waf configuration
+**/.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+**/build/Release
+
+# Dependency directories
+**/node_modules
+**/jspm_packages
+
+# Snowpack dependency directory (https://snowpack.dev/)
+**/web_modules
+
+# TypeScript cache
+**/*.tsbuildinfo
+
+# Optional npm cache directory
+**/.npm
+
+# Optional eslint cache
+**/.eslintcache
+
+# Optional stylelint cache
+**/.stylelintcache
+
+# Microbundle cache
+**/.rpt2_cache
+**/.rts2_cache_cjs
+**/.rts2_cache_es
+**/.rts2_cache_umd
+
+# Optional REPL history
+**/.node_repl_history
+
+# Output of 'npm pack'
+**/*.tgz
+
+# Yarn Integrity file
+**/.yarn-integrity
+
+# dotenv environment variable files
+**/.env
+**/.env.development.local
+**/.env.test.local
+**/.env.production.local
+**/.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+**/.cache
+**/.parcel-cache
+
+# Next.js build output
+**/.next
+**/out
+
+# Nuxt.js build / generate output
+**/.nuxt
+**/dist
+
+# Gatsby files
+**/.cache
+# Comment in the public line if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+**/.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+**/.temp
+
+# Docusaurus cache and generated files
+**/.docusaurus
+
+# Serverless directories
+**/.serverless
+
+# FuseBox cache
+**/.fusebox
+
+# DynamoDB Local files
+**/.dynamodb
+
+# TernJS port file
+**/.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+**/.vscode-test
+
+# yarn v2
+**/.yarn/cache
+**/.yarn/unplugged
+**/.yarn/build-state.yml
+**/.yarn/install-state.gz
+**/.pnp.*
+
+### Node Patch ###
+# Serverless Webpack directories
+**/.webpack
+
+# Optional stylelint cache
+
+# SvelteKit build / generate output
+**/.svelte-kit
+
+### PyCharm ###
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+**/.idea/**/workspace.xml
+**/.idea/**/tasks.xml
+**/.idea/**/usage.statistics.xml
+**/.idea/**/dictionaries
+**/.idea/**/shelf
+
+# AWS User-specific
+**/.idea/**/aws.xml
+
+# Generated files
+**/.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+**/.idea/**/dataSources
+**/.idea/**/dataSources.ids
+**/.idea/**/dataSources.local.xml
+**/.idea/**/sqlDataSources.xml
+**/.idea/**/dynamic.xml
+**/.idea/**/uiDesigner.xml
+**/.idea/**/dbnavigator.xml
+
+# Gradle
+**/.idea/**/gradle.xml
+**/.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+**/cmake-build-*
+
+# Mongo Explorer plugin
+**/.idea/**/mongoSettings.xml
+
+# File-based project format
+**/*.iws
+
+# IntelliJ
+**/out
+
+# mpeltonen/sbt-idea plugin
+**/.idea_modules
+
+# JIRA plugin
+**/atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+**/.idea/replstate.xml
+
+# SonarLint plugin
+**/.idea/sonarlint
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+**/com_crashlytics_export_strings.xml
+**/crashlytics.properties
+**/crashlytics-build.properties
+**/fabric.properties
+
+# Editor-based Rest Client
+**/.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+**/.idea/caches/build_file_checksums.ser
+
+### PyCharm Patch ###
+# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
+
+# *.iml
+# modules.xml
+# .idea/misc.xml
+# *.ipr
+
+# Sonarlint plugin
+# https://plugins.jetbrains.com/plugin/7973-sonarlint
+**/.idea/**/sonarlint
+
+# SonarQube Plugin
+# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
+**/.idea/**/sonarIssues.xml
+
+# Markdown Navigator plugin
+# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
+**/.idea/**/markdown-navigator.xml
+**/.idea/**/markdown-navigator-enh.xml
+**/.idea/**/markdown-navigator
+
+# Cache file creation bug
+# See https://youtrack.jetbrains.com/issue/JBR-2257
+**/.idea/$CACHE_FILE$
+
+# CodeStream plugin
+# https://plugins.jetbrains.com/plugin/12206-codestream
+**/.idea/codestream.xml
+
+# Azure Toolkit for IntelliJ plugin
+# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
+**/.idea/**/azureSettings.xml
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+**/__pycache__
+**/*.py[cod]
+**/*$py.class
+
+# C extensions
+**/*.so
+
+# Distribution / packaging
+**/.Python
+**/build
+**/develop-eggs
+**/downloads
+**/eggs
+**/.eggs
+**/lib
+**/lib64
+**/parts
+**/sdist
+**/var
+**/wheels
+**/share/python-wheels
+**/*.egg-info
+**/.installed.cfg
+**/*.egg
+**/MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+**/*.manifest
+**/*.spec
+
+# Installer logs
+**/pip-log.txt
+**/pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+**/htmlcov
+**/.tox
+**/.nox
+**/.coverage
+**/.coverage.*
+**/nosetests.xml
+**/coverage.xml
+**/*.cover
+**/*.py,cover
+**/.hypothesis
+**/.pytest_cache
+**/cover
+
+# Translations
+**/*.mo
+**/*.pot
+
+# Django stuff:
+**/local_settings.py
+**/db.sqlite3
+**/db.sqlite3-journal
+
+# Flask stuff:
+**/instance
+**/.webassets-cache
+
+# Scrapy stuff:
+**/.scrapy
+
+# Sphinx documentation
+**/docs/_build
+
+# PyBuilder
+**/.pybuilder
+**/target
+
+# Jupyter Notebook
+**/.ipynb_checkpoints
+
+# IPython
+**/profile_default
+**/ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+**/.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+**/__pypackages__
+
+# Celery stuff
+**/celerybeat-schedule
+**/celerybeat.pid
+
+# SageMath parsed files
+**/*.sage.py
+
+# Environments
+**/.venv
+**/env
+**/venv
+**/ENV
+**/env.bak
+**/venv.bak
+
+# Spyder project settings
+**/.spyderproject
+**/.spyproject
+
+# Rope project settings
+**/.ropeproject
+
+# mkdocs documentation
+site
+
+# mypy
+**/.mypy_cache
+**/.dmypy.json
+**/dmypy.json
+
+# Pyre type checker
+**/.pyre
+
+# pytype static type analyzer
+**/.pytype
+
+# Cython debug symbols
+**/cython_debug
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+**/poetry.toml
+
+# ruff
+**/.ruff_cache
+
+# LSP config files
+**/pyrightconfig.json
+
+### SublimeText ###
+# Cache files for Sublime Text
+**/*.tmlanguage.cache
+**/*.tmPreferences.cache
+**/*.stTheme.cache
+
+# Workspace files are user-specific
+**/*.sublime-workspace
+
+# Project files should be checked into the repository, unless a significant
+# proportion of contributors will probably not be using Sublime Text
+# *.sublime-project
+
+# SFTP configuration file
+**/sftp-config.json
+**/sftp-config-alt*.json
+
+# Package control specific files
+**/Package Control.last-run
+**/Package Control.ca-list
+**/Package Control.ca-bundle
+**/Package Control.system-ca-bundle
+**/Package Control.cache
+**/Package Control.ca-certs
+**/Package Control.merged-ca-bundle
+**/Package Control.user-ca-bundle
+**/oscrypto-ca-bundle.crt
+**/bh_unicode_properties.cache
+
+# Sublime-github package stores a github token in this file
+# https://packagecontrol.io/packages/sublime-github
+**/GitHub.sublime-settings
+
+### Vim ###
+# Swap
+**/[._]*.s[a-v][a-z]
+!**/*.svg
+**/[._]*.sw[a-p]
+**/[._]s[a-rt-v][a-z]
+**/[._]ss[a-gi-z]
+**/[._]sw[a-p]
+
+# Session
+**/Session.vim
+**/Sessionx.vim
+
+# Temporary
+**/.netrwhist
+# Auto-generated tag files
+**/tags
+# Persistent undo
+**/[._]*.un~
+
+### VisualStudioCode ###
+**/.vscode/*
+!**/.vscode/settings.json
+!**/.vscode/tasks.json
+!**/.vscode/launch.json
+!**/.vscode/extensions.json
+!**/.vscode/*.code-snippets
+
+# Local History for Visual Studio Code
+**/.history
+
+# Built Visual Studio Code Extensions
+**/*.vsix
+
+### VisualStudioCode Patch ###
+# Ignore all local history of files
+**/.history
+**/.ionide
+
+### VisualStudio ###
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
+
+# User-specific files
+**/*.rsuser
+**/*.suo
+**/*.user
+**/*.userosscache
+**/*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+**/*.userprefs
+
+# Mono auto generated files
+**/mono_crash.*
+
+# Build results
+**/[Dd]ebug
+**/[Dd]ebugPublic
+**/[Rr]elease
+**/[Rr]eleases
+**/x64
+**/x86
+**/[Ww][Ii][Nn]32
+**/[Aa][Rr][Mm]
+**/[Aa][Rr][Mm]64
+**/bld
+**/[Bb]in
+**/[Oo]bj
+**/[Ll]og
+**/[Ll]ogs
+
+# Visual Studio 2015/2017 cache/options directory
+**/.vs
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+**/Generated\ Files
+
+# MSTest test Results
+**/[Tt]est[Rr]esult*
+**/[Bb]uild[Ll]og.*
+
+# NUnit
+**/*.VisualState.xml
+**/TestResult.xml
+**/nunit-*.xml
+
+# Build Results of an ATL Project
+**/[Dd]ebugPS
+**/[Rr]eleasePS
+**/dlldata.c
+
+# Benchmark Results
+**/BenchmarkDotNet.Artifacts
+
+# .NET Core
+**/project.lock.json
+**/project.fragment.lock.json
+**/artifacts
+
+# ASP.NET Scaffolding
+**/ScaffoldingReadMe.txt
+
+# StyleCop
+**/StyleCopReport.xml
+
+# Files built by Visual Studio
+**/*_i.c
+**/*_p.c
+**/*_h.h
+**/*.ilk
+**/*.meta
+**/*.obj
+**/*.iobj
+**/*.pch
+**/*.pdb
+**/*.ipdb
+**/*.pgc
+**/*.pgd
+**/*.rsp
+**/*.sbr
+**/*.tlb
+**/*.tli
+**/*.tlh
+**/*.tmp
+**/*.tmp_proj
+**/*_wpftmp.csproj
+**/*.tlog
+**/*.vspscc
+**/*.vssscc
+**/.builds
+**/*.pidb
+**/*.svclog
+**/*.scc
+
+# Chutzpah Test files
+**/_Chutzpah*
+
+# Visual C++ cache files
+**/ipch
+**/*.aps
+**/*.ncb
+**/*.opendb
+**/*.opensdf
+**/*.cachefile
+**/*.VC.db
+**/*.VC.VC.opendb
+
+# Visual Studio profiler
+**/*.psess
+**/*.vsp
+**/*.vspx
+**/*.sap
+
+# Visual Studio Trace Files
+**/*.e2e
+
+# TFS 2012 Local Workspace
+**/$tf
+
+# Guidance Automation Toolkit
+**/*.gpState
+
+# ReSharper is a .NET coding add-in
+**/_ReSharper*
+**/*.[Rr]e[Ss]harper
+**/*.DotSettings.user
+
+# TeamCity is a build add-in
+**/_TeamCity*
+
+# DotCover is a Code Coverage Tool
+**/*.dotCover
+
+# AxoCover is a Code Coverage Tool
+**/.axoCover/*
+!**/.axoCover/settings.json
+
+# Coverlet is a free, cross platform Code Coverage Tool
+**/coverage*.json
+**/coverage*.xml
+**/coverage*.info
+
+# Visual Studio code coverage results
+**/*.coverage
+**/*.coveragexml
+
+# NCrunch
+**/_NCrunch_*
+**/.*crunch*.local.xml
+**/nCrunchTemp_*
+
+# MightyMoose
+**/*.mm.*
+**/AutoTest.Net
+
+# Web workbench (sass)
+**/.sass-cache
+
+# Installshield output folder
+**/[Ee]xpress
+
+# DocProject is a documentation generator add-in
+**/DocProject/buildhelp
+**/DocProject/Help/*.HxT
+**/DocProject/Help/*.HxC
+**/DocProject/Help/*.hhc
+**/DocProject/Help/*.hhk
+**/DocProject/Help/*.hhp
+**/DocProject/Help/Html2
+**/DocProject/Help/html
+
+# Click-Once directory
+**/publish
+
+# Publish Web Output
+**/*.[Pp]ublish.xml
+**/*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+**/*.pubxml
+**/*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+**/PublishScripts
+
+# NuGet Packages
+**/*.nupkg
+# NuGet Symbol Packages
+**/*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/**/[Pp]ackages/build
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+**/*.nuget.props
+**/*.nuget.targets
+
+# Microsoft Azure Build Output
+**/csx
+**/*.build.csdef
+
+# Microsoft Azure Emulator
+**/ecf
+**/rcf
+
+# Windows Store app package directories and files
+**/AppPackages
+**/BundleArtifacts
+**/Package.StoreAssociation.xml
+**/_pkginfo.txt
+**/*.appx
+**/*.appxbundle
+**/*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+**/*.[Cc]ache
+# but keep track of directories ending in .cache
+!**/?*.[Cc]ache
+
+# Others
+**/ClientBin
+**/~$*
+**/*.dbmdl
+**/*.dbproj.schemaview
+**/*.jfm
+**/*.pfx
+**/*.publishsettings
+**/orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+**/Generated_Code
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+**/_UpgradeReport_Files
+**/Backup*
+**/UpgradeLog*.XML
+**/UpgradeLog*.htm
+**/ServiceFabricBackup
+**/*.rptproj.bak
+
+# SQL Server files
+**/*.mdf
+**/*.ldf
+**/*.ndf
+
+# Business Intelligence projects
+**/*.rdl.data
+**/*.bim.layout
+**/*.bim_*.settings
+**/*.rptproj.rsuser
+**/*- [Bb]ackup.rdl
+**/*- [Bb]ackup ([0-9]).rdl
+**/*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+**/FakesAssemblies
+
+# GhostDoc plugin setting file
+**/*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+**/.ntvs_analysis.dat
+
+# Visual Studio 6 build log
+**/*.plg
+
+# Visual Studio 6 workspace options file
+**/*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+**/*.vbw
+
+# Visual Studio 6 auto-generated project file (contains which files were open etc.)
+**/*.vbp
+
+# Visual Studio 6 workspace and project file (working project files containing files to include in project)
+**/*.dsw
+**/*.dsp
+
+# Visual Studio 6 technical files
+
+# Visual Studio LightSwitch build output
+**/**/*.HTMLClient/GeneratedArtifacts
+**/**/*.DesktopClient/GeneratedArtifacts
+**/**/*.DesktopClient/ModelManifest.xml
+**/**/*.Server/GeneratedArtifacts
+**/**/*.Server/ModelManifest.xml
+**/_Pvt_Extensions
+
+# Paket dependency manager
+**/.paket/paket.exe
+**/paket-files
+
+# FAKE - F# Make
+**/.fake
+
+# CodeRush personal settings
+**/.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+**/*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+**/*.tss
+
+# Telerik's JustMock configuration file
+**/*.jmconfig
+
+# BizTalk build output
+**/*.btp.cs
+**/*.btm.cs
+**/*.odx.cs
+**/*.xsd.cs
+
+# OpenCover UI analysis results
+**/OpenCover
+
+# Azure Stream Analytics local run output
+**/ASALocalRun
+
+# MSBuild Binary and Structured Log
+**/*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+**/*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+**/.mfractor
+
+# Local History for Visual Studio
+**/.localhistory
+
+# Visual Studio History (VSHistory) files
+**/.vshistory
+
+# BeatPulse healthcheck temp database
+**/healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+**/MigrationBackup
+
+# Ionide (cross platform F# VS Code tools) working folder
+**/.ionide
+
+# Fody - auto-generated XML schema
+**/FodyWeavers.xsd
+
+# VS Code files for those working on multiple tools
+**/*.code-workspace
+
+# Local History for Visual Studio Code
+
+# Windows Installer files from build outputs
+**/*.cab
+**/*.msi
+**/*.msix
+**/*.msm
+**/*.msp
+
+# JetBrains Rider
+**/*.sln.iml
+
+### VisualStudio Patch ###
+# Additional files built by Visual Studio
+
+# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
+**/database.db
+**/database.db
+**/database.db
+**/.markata.cache
+**/database.sqlite
+
+# flyctl launch added from .pytest_cache/.gitignore
+# Created by pytest automatically.
+.pytest_cache/**/*
+
+# flyctl launch added from .ruff_cache/.gitignore
+.ruff_cache/**/*
+fly.toml
diff --git a/.gitignore b/.gitignore
index 9c4e139..1683c90 100644
--- a/.gitignore
+++ b/.gitignore
@@ -967,3 +967,5 @@ database.db
database.db
.markata.cache
database.sqlite
+.env.dev
+.env.dev.docker
diff --git a/Dockerfile b/Dockerfile
index 6379386..56a258e 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,9 +4,9 @@ WORKDIR /app
Copy pyproject.toml /app
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
COPY README.md /app
-RUN pip3 install '.[api]'
+RUN pip3 install '.[all]'
COPY . /app
-RUN pip3 install '.[api]'
+RUN pip3 install '.[all]'
EXPOSE 5000
diff --git a/README.md b/README.md
index 6792fe8..ad0cc9f 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,6 @@
+> [!IMPORTANT]
+> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
+
# Learn SQL Model
learning sql model
diff --git a/creeper.png b/creeper.png
new file mode 100644
index 0000000..79a2499
Binary files /dev/null and b/creeper.png differ
diff --git a/d3.py b/d3.py
new file mode 100644
index 0000000..3b41786
--- /dev/null
+++ b/d3.py
@@ -0,0 +1,70 @@
+import sqlite3
+
+from jinja2 import Environment, FileSystemLoader
+
+
+def get_tables_and_columns(conn):
+ cursor = conn.cursor()
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+ tables = [
+ {
+ "name": table[0],
+ "columns": get_columns(conn, table[0]),
+ "foreign_keys": get_foreign_keys(conn, table[0]),
+ }
+ for table in cursor.fetchall()
+ ]
+ return tables
+
+
+def get_columns(conn, table_name):
+ cursor = conn.cursor()
+ cursor.execute(f"PRAGMA table_info({table_name});")
+ columns = [row[1] for row in cursor.fetchall()]
+ return columns
+
+
+def get_foreign_keys(conn, table_name):
+ cursor = conn.cursor()
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+ foreign_keys = [
+ {"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]}
+ for row in cursor.fetchall()
+ ]
+ return foreign_keys
+
+
+def generate_links(tables):
+ links = []
+ for t_index, table in enumerate(tables):
+ for fk in table["foreign_keys"]:
+ target_index = next(
+ i for i, target in enumerate(tables) if target["name"] == fk["to_table"]
+ )
+ source_y = 40 + table["columns"].index(fk["from"]) * 20
+ target_y = 40 + tables[target_index]["columns"].index(fk["to"]) * 20
+ links.append(
+ {
+ "source": {"x": 50 + t_index * 150 + 120, "y": 50 + source_y},
+ "target": {"x": 50 + target_index * 150, "y": 50 + target_y},
+ }
+ )
+ return links
+
+
+def generate_er_diagram(database_path):
+ conn = sqlite3.connect(database_path)
+ tables = get_tables_and_columns(conn)
+ links = [] # Currently, we won't extract relationships
+ links = generate_links(tables)
+
+ env = Environment(loader=FileSystemLoader("templates"))
+ template = env.get_template("er_diagram.html")
+
+ with open("index.html", "w") as f:
+ f.write(template.render(tables=tables, links=links))
+
+
+if __name__ == "__main__":
+ db_path = "database.db"
+ generate_er_diagram(db_path)
diff --git a/database.md b/database.md
new file mode 100644
index 0000000..481c4a0
--- /dev/null
+++ b/database.md
@@ -0,0 +1,72 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| f48730a783a5 |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | |
+| birthday | DATETIME | | |
+| id | INTEGER | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
+|------|-------------|---|---|------|-----|-----------|--------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | |
+| secret_name | VARCHAR | | |
+| x | INTEGER | | |
+| y | INTEGER | | |
+| size | INTEGER | | |
+| age | INTEGER | | |
+| shoe_size | INTEGER | | |
+| pet_id | INTEGER | pet.id | |
+| id | INTEGER | | |
+
+### Records Count
+
+The table hero contains 0 records.
+
+---
+
diff --git a/er_diagram.png b/er_diagram.png
new file mode 100644
index 0000000..afcf6e0
Binary files /dev/null and b/er_diagram.png differ
diff --git a/im.png b/im.png
new file mode 100644
index 0000000..9196f21
Binary files /dev/null and b/im.png differ
diff --git a/index.html b/index.html
new file mode 100644
index 0000000..bb40a04
--- /dev/null
+++ b/index.html
@@ -0,0 +1,129 @@
+
+
+
+
+
+ ER Diagram
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/learn_sql_model/api/hero.py b/learn_sql_model/api/hero.py
index f4c4fa6..4ae0c8c 100644
--- a/learn_sql_model/api/hero.py
+++ b/learn_sql_model/api/hero.py
@@ -1,7 +1,6 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
-from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
@@ -9,7 +8,7 @@ hero_router = APIRouter()
@hero_router.on_event("startup")
-def on_startup() -> None:
+async def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine)
...
@@ -38,7 +37,7 @@ async def post_hero(
session.add(db_hero)
session.commit()
session.refresh(db_hero)
- await manager.broadcast({hero.json()}, id=1)
+ # await manager.broadcast({hero.json()}, id=1)
return db_hero
@@ -57,7 +56,7 @@ async def patch_hero(
session.add(db_hero)
session.commit()
session.refresh(db_hero)
- await manager.broadcast({hero.json()}, id=1)
+ # await manager.broadcast({hero.json()}, id=1)
return db_hero
@@ -73,7 +72,7 @@ async def delete_hero(
raise HTTPException(status_code=404, detail="Hero not found")
session.delete(hero)
session.commit()
- await manager.broadcast(f"deleted hero {hero_id}", id=1)
+ # await manager.broadcast(f"deleted hero {hero_id}", id=1)
return {"ok": True}
diff --git a/learn_sql_model/api/websocket.py b/learn_sql_model/api/websocket.py
index 962399c..ddfcb0d 100644
--- a/learn_sql_model/api/websocket.py
+++ b/learn_sql_model/api/websocket.py
@@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from rich.console import Console
-from sqlmodel import Session
+from sqlmodel import Session, select
from websockets.exceptions import ConnectionClosed
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.console import console
-from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
+from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros
web_socket_router = APIRouter()
@@ -46,7 +46,9 @@ async def websocket_endpoint_connect(
):
Console().log(f"Client #{id} connecting")
await manager.connect(websocket, channel)
- heros = Heros.list(session=session)
+ statement = select(Hero)
+ heros = session.exec(statement).all()
+ heros = Heros(__root__=heros)
await websocket.send_text(heros.json())
try:
@@ -83,11 +85,18 @@ async def websocket_endpoint_hero_echo(
while True:
data = await websocket.receive_text()
hero = HeroUpdate.parse_raw(data)
- heros = Heros.list(session=session)
+ statement = select(Hero)
+ heros = session.exec(statement).all()
+ heros = Heros(__root__=heros)
if heros != last_heros:
await manager.broadcast(heros.json(), "heros")
last_heros = heros
- hero.update(session=session)
+ db_hero = session.get(Hero, hero.id)
+ for key, value in hero.dict(exclude_unset=True).items():
+ setattr(db_hero, key, value)
+ session.add(db_hero)
+ session.commit()
+ session.refresh(db_hero)
console.print(heros)
await websocket.send_text(heros.json())
@@ -96,7 +105,9 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
- heros = Heros.list(session=session)
+ statement = select(Hero)
+ heros = session.exec(statement).all()
+ heros = Heros(__root__=heros)
await manager.broadcast(heros.json(), "heros")
print("disconnected")
except ConnectionClosed:
@@ -104,6 +115,8 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
- heros = Heros.list(session=session)
+ statement = select(Hero)
+ heros = session.exec(statement).all()
+ heros = Heros(__root__=heros)
await manager.broadcast(heros.json(), "heros")
print("connection closed")
diff --git a/learn_sql_model/cli/hero.py b/learn_sql_model/cli/hero.py
index 00047f8..40366ab 100644
--- a/learn_sql_model/cli/hero.py
+++ b/learn_sql_model/cli/hero.py
@@ -34,7 +34,7 @@ def hero():
@hero_app.command()
def get(
- hero_id: Optional[int] = typer.Argument(default=None),
+ hero_id: Optional[int] = typer.Argument(),
) -> Union[Hero, List[Hero]]:
"get one hero"
hero = HeroRead.get(id=hero_id)
@@ -54,9 +54,8 @@ def list() -> Union[Hero, List[Hero]]:
def clear() -> Union[Hero, List[Hero]]:
"list many heros"
heros = Heros.list()
- for hero in heros.heros:
- HeroDelete(id=hero.id).delete()
-
+ for hero in heros.__root__:
+ HeroDelete.delete(id=hero.id)
return hero
@@ -81,10 +80,12 @@ def update(
@hero_app.command()
@engorgio(typer=True)
def delete(
- hero: HeroDelete,
+ hero_id: Optional[int] = typer.Argument(),
) -> Hero:
"delete a hero by id"
- hero.delete()
+ hero = HeroDelete.delete(id=hero_id)
+ Console().print(hero)
+ return hero
@hero_app.command()
diff --git a/learn_sql_model/cli/model.py b/learn_sql_model/cli/model.py
index eb5ea67..786daff 100644
--- a/learn_sql_model/cli/model.py
+++ b/learn_sql_model/cli/model.py
@@ -1,11 +1,15 @@
from pathlib import Path
+from typing import Annotated
-import alembic
-from alembic.config import Config
-import copier
+# import copier
import typer
from learn_sql_model.cli.common import verbose_callback
+from learn_sql_model.config import get_config
+from learn_sql_model.optional import _optional_import_
+
+alembic = _optional_import_("alembic", group="manage")
+Config = _optional_import_("alembic.config", "Config", group="manage")
model_app = typer.Typer()
@@ -40,11 +44,18 @@ def create_revision(
callback=verbose_callback,
help="show the log messages",
),
- message: str = typer.Option(
- prompt=True,
- ),
+ message: Annotated[
+ str,
+ typer.Option(
+ "--message",
+ "-m",
+ prompt=True,
+ ),
+ ] = None,
):
alembic_cfg = Config("alembic.ini")
+ config = get_config()
+ alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.revision(
config=alembic_cfg,
message=message,
@@ -63,7 +74,17 @@ def checkout(
revision: str = typer.Option("head"),
):
alembic_cfg = Config("alembic.ini")
- alembic.command.upgrade(config=alembic_cfg, revision="head")
+ config = get_config()
+ alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
+ alembic.command.upgrade(config=alembic_cfg, revision=revision)
+
+
+@model_app.command()
+def status():
+ alembic_cfg = Config("alembic.ini")
+ config = get_config()
+ alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
+ alembic.command.current(config=alembic_cfg)
@model_app.command()
@@ -73,5 +94,4 @@ def populate(
callback=verbose_callback,
help="show the log messages",
),
-):
- ...
+): ...
diff --git a/learn_sql_model/config.py b/learn_sql_model/config.py
index 4e9415a..9619314 100644
--- a/learn_sql_model/config.py
+++ b/learn_sql_model/config.py
@@ -4,6 +4,7 @@ from typing import TYPE_CHECKING
from fastapi import Depends
from pydantic import BaseModel, BaseSettings, validator
from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
from sqlmodel import Session
from learn_sql_model.standard_config import load
@@ -18,6 +19,7 @@ class ApiServer(BaseModel):
reload: bool = True
log_level: str = "info"
host: str = "0.0.0.0"
+ workers: int = 1
class ApiClient(BaseModel):
@@ -25,7 +27,6 @@ class ApiClient(BaseModel):
protocol: str = "https"
url: str = f"{protocol}://{host}"
-
class Database:
def __init__(self, config: "Config" = None) -> None:
if config is None:
@@ -39,10 +40,22 @@ class Database:
"transactions": None,
}
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
+
+ self.db_conf = {}
+ if 'sqlite' in self.config.database_url:
+ self.db_conf = {
+ 'connect_args': {"check_same_thread": False},
+ 'pool_recycle': 3600,
+ 'pool_pre_ping': True,
+ }
+ self._engine = create_engine(
+ self.config.database_url,
+ **self.db_conf
+ )
@property
def engine(self) -> "Engine":
- return create_engine(self.config.database_url)
+ return self._engine
@property
def session(self) -> "Session":
@@ -87,10 +100,14 @@ def get_config(overrides: dict = {}) -> Config:
return config
+config = get_config()
+database = get_database()
+
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
+
+
def get_session() -> "Session":
- config = get_config()
- engine = create_engine(config.database_url)
- with Session(engine) as session:
+ with Session(database.engine) as session:
yield session
diff --git a/learn_sql_model/er_diagram.py b/learn_sql_model/er_diagram.py
new file mode 100644
index 0000000..4a48fa5
--- /dev/null
+++ b/learn_sql_model/er_diagram.py
@@ -0,0 +1,151 @@
+import sqlite3
+
+from graphviz import Digraph
+
+from learn_sql_model.config import get_config
+
+config = get_config()
+
+
+def generate_er_diagram(output_path):
+ # Connect to the SQLite database
+ database_path = config.database_url.replace("sqlite:///", "")
+ conn = sqlite3.connect(database_path)
+ cursor = conn.cursor()
+
+ # Get the table names from the database
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+ tables = cursor.fetchall()
+
+ # Create a new Digraph
+ dot = Digraph(format="png")
+ dot.attr(rankdir="TD")
+
+ # Iterate over the tables
+ for table in tables:
+ table_name = table[0]
+ dot.node(table_name, shape="box")
+ cursor.execute(f"PRAGMA table_info({table_name});")
+ columns = cursor.fetchall()
+
+ # Add the columns to the table node
+ for column in columns:
+ column_name = column[1]
+ dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
+ dot.edge(table_name, f"{table_name}.{column_name}")
+
+ # Check for foreign key relationships
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+ foreign_keys = cursor.fetchall()
+
+ # Add dotted lines for foreign key relationships
+ for foreign_key in foreign_keys:
+ from_column = foreign_key[3]
+ to_table = foreign_key[2]
+ to_column = foreign_key[4]
+ dot.node(f"{to_table}.{to_column}", shape="oval")
+ dot.edge(
+ f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
+ )
+
+ # Render and save the diagram
+ dot.render(output_path.replace(".png", ""), cleanup=True)
+
+ # Close the database connection
+ cursor.close()
+ conn.close()
+
+
+def generate_er_markdown(output_path, er_diagram_path):
+ # Connect to the SQLite database
+ database_path = config.database_url.replace("sqlite:///", "")
+ conn = sqlite3.connect(database_path)
+ cursor = conn.cursor()
+
+ # Get the table names from the database
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+ tables = cursor.fetchall()
+
+ with open(output_path, "w") as f:
+ # Write the ER Diagram image
+        f.write(f"![ER Diagram]({er_diagram_path})\n\n---\n\n")
+
+ # Iterate over the tables
+ for table in tables:
+ table_name = table[0]
+
+ f.write(f"## Table: {table_name}\n\n")
+
+ # Get the table columns
+ cursor.execute(f"PRAGMA table_info({table_name});")
+ columns = cursor.fetchall()
+
+ f.write("### First 5 rows\n\n")
+ cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
+ rows = cursor.fetchall()
+ f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
+ f.write("|")
+ for column in columns:
+ # ---
+ f.write(f'{"-"*(len(column[1]) + 2)}|')
+ f.write("\n")
+ for row in rows:
+ f.write(f'| {" | ".join([str(r) for r in row])} |\n')
+ f.write("\n")
+
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+ foreign_keys = cursor.fetchall()
+
+ # Add dotted lines for foreign key relationships
+ fkeys = {}
+ for foreign_key in foreign_keys:
+ from_column = foreign_key[3]
+ to_table = foreign_key[2]
+ to_column = foreign_key[4]
+ fkeys[from_column] = f"{to_table}.{to_column}"
+
+ # Replace 'description' with the actual column name in the table that contains the description, if applicable
+ try:
+ cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
+ description = cursor.fetchone()
+ if description:
+ f.write(f"### Description\n\n{description[0]}\n\n")
+        except Exception:
+            ...
+
+ # Write the table columns
+ f.write("### Columns\n\n")
+ f.write("| Column Name | Type | Foreign Key | Example Value |\n")
+ f.write("|-------------|------|-------------|---------------|\n")
+
+ for column in columns:
+
+ column_name = column[1]
+ column_type = column[2]
+ fkey = ""
+ if column_name in fkeys:
+ fkey = fkeys[column_name]
+ f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
+
+ f.write("\n")
+
+ # Get the count of records
+ cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
+ records_count = cursor.fetchone()[0]
+ f.write(
+ f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
+ )
+
+ # Close the database connection
+ cursor.close()
+ conn.close()
+
+
+if __name__ == "__main__":
+ # Usage example
+ database_path = "database.db"
+ md_output_path = "database.md"
+ er_output_path = "er_diagram.png"
+
+    generate_er_diagram(er_output_path)
+    generate_er_markdown(md_output_path, er_output_path)
diff --git a/learn_sql_model/factories/hero.py b/learn_sql_model/factories/hero.py
index a167c84..2c227d9 100644
--- a/learn_sql_model/factories/hero.py
+++ b/learn_sql_model/factories/hero.py
@@ -10,7 +10,6 @@ class HeroFactory(ModelFactory[Hero]):
__model__ = Hero
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
- id = None
pet_id = None
@classmethod
diff --git a/learn_sql_model/game/debug.py b/learn_sql_model/game/debug.py
new file mode 100644
index 0000000..8e01bf3
--- /dev/null
+++ b/learn_sql_model/game/debug.py
@@ -0,0 +1,25 @@
+import pygame
+
+
+class Debug:
+ def __init__(self, game):
+ self.game = game
+ self.is_open = False
+ self.debounce = False
+
+ def handle_events(self, events):
+ for event in events:
+ if event.type == pygame.KEYDOWN:
+ if event.key == pygame.K_F3 and not self.debounce:
+ self.is_open = not self.is_open
+ self.debounce = True
+ if event.type == pygame.KEYUP:
+ if event.key == pygame.K_F3:
+ self.debounce = False
+
+ def render(self):
+ if self.is_open:
+ text = self.game.font.render(
+ str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
+ )
+ self.game.screen.blit(text, (20, 20))
diff --git a/learn_sql_model/game/game.py b/learn_sql_model/game/game.py
index aea1c26..742d8f5 100644
--- a/learn_sql_model/game/game.py
+++ b/learn_sql_model/game/game.py
@@ -5,10 +5,11 @@ from websocket import create_connection
from learn_sql_model.config import get_config
from learn_sql_model.console import console
+from learn_sql_model.game.debug import Debug
+from learn_sql_model.game.light import Light
from learn_sql_model.game.map import Map
from learn_sql_model.game.menu import Menu
from learn_sql_model.game.player import Player
-from learn_sql_model.game.light import Light
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
@@ -20,7 +21,8 @@ config = get_config()
class Client:
def __init__(self):
- self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
+ # self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
+ self.screen = pygame.display.set_mode((1280, 720))
pygame.display.set_caption("Learn SQL Model")
self.clock = pygame.time.Clock()
self.running = True
@@ -35,17 +37,27 @@ class Client:
self.menu = Menu(self)
self.map = Map(self)
self.light = Light(self)
- self.font = pygame.font.SysFont("", 50)
+ self.font = pygame.font.SysFont("", 25)
self.joysticks = {}
+ self.darkness = pygame.Surface(
+ (self.screen.get_width(), self.screen.get_height()),
+ pygame.SRCALPHA,
+ 32,
+ )
+ self.debug = Debug(self)
atexit.register(self.quit)
@property
def ws(self):
def connect():
- self._ws = create_connection(
- f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
- )
+ if "https" in config.api_client.url:
+ url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
+ elif "http" in config.api_client.url:
+ url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
+ else:
+ url = f"ws://{config.api_client.url}/wsecho"
+ self._ws = create_connection(url)
if not hasattr(self, "_ws"):
connect()
@@ -54,6 +66,13 @@ class Client:
return self._ws
def run(self):
+ try:
+ from pyinstrument import Profiler
+
+ profiler = Profiler()
+ profiler.start()
+ except ImportError:
+ profiler = None
while self.running:
console.print("running")
console.print("handle_events")
@@ -61,12 +80,16 @@ class Client:
console.print("update")
self.update()
console.print("render")
+
self.render()
time = self.clock.tick(60)
self.elapsed = time / 100
self.ticks += 1
console.print(f"time: {time}")
console.print(f"ticks: {self.ticks}")
+ if profiler:
+ profiler.stop()
+ print(profiler.output_text())
self.quit()
def quit(self):
@@ -80,15 +103,26 @@ class Client:
self.screen.fill((0, 0, 0))
self.map.render()
self.player.render()
- self.light.render()
+
+ if self.ticks % 1 == 0 or self.ticks == 0:
+ light_level = 0
+ self.darkness.fill((light_level, light_level, light_level))
+ self.light.render()
+ self.screen.blit(
+ self.darkness,
+ (0, 0),
+ special_flags=pygame.BLEND_MULT,
+ )
# update the screen
self.menu.render()
+ self.debug.render()
pygame.display.flip()
def handle_events(self):
self.events = pygame.event.get()
self.menu.handle_events(self.events)
+ self.debug.handle_events(self.events)
self.player.handle_events()
for event in self.events:
if event.type == pygame.QUIT:
diff --git a/learn_sql_model/game/light.py b/learn_sql_model/game/light.py
index f562361..abaa861 100644
--- a/learn_sql_model/game/light.py
+++ b/learn_sql_model/game/light.py
@@ -1,25 +1,219 @@
+import bisect
+
+from PIL import Image, ImageFilter
+
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
+def rot_center(image, angle):
+ """rotate an image while keeping its center and size"""
+ orig_rect = image.get_rect()
+ rot_image = pygame.transform.rotate(image, angle)
+ rot_rect = orig_rect.copy()
+ rot_rect.center = rot_image.get_rect().center
+ rot_image = rot_image.subsurface(rot_rect).copy()
+ return rot_image
+
+
class Light:
def __init__(self, game):
self.game = game
+ self.surf = pygame.Surface(
+ (self.game.screen.get_width(), self.game.screen.get_height()),
+ pygame.SRCALPHA,
+ 32,
+ )
+ self.surf.set_colorkey((0, 0, 0))
+ self.pre_render()
+
+ def pre_render(self):
+
+ # self.lights = {}
+ # for deg in range(-360, 360, 20):
+ # print("loading light", deg)
+ # self.lights[deg] = pygame.image.load(
+ # f"lights/light-{deg}.png"
+ # ).convert_alpha()
+ # return
+
+ light_surf = pygame.Surface(
+ (
+ self.game.player.hero.flashlight_strength * 3,
+ self.game.player.hero.flashlight_strength * 3,
+ ),
+ pygame.SRCALPHA,
+ 32,
+ )
+
+ v = pygame.math.Vector2(0, 1)
+ v.scale_to_length(self.game.player.hero.flashlight_strength)
+ for r in range(-90 - 25, -90 + 25):
+ _v = v.rotate(r)
+ pygame.draw.line(
+ light_surf,
+ (255, 250, 205),
+ (light_surf.get_width() / 2, light_surf.get_height() / 2),
+ (
+ light_surf.get_width() / 2 + _v.x,
+ light_surf.get_height() / 2 + _v.y,
+ ),
+ 50,
+ )
+ pygame.draw.circle(
+ light_surf,
+ (255, 250, 205),
+ (light_surf.get_width() / 2, light_surf.get_height() / 2),
+ self.game.player.hero.lanturn_strength,
+ )
+
+ light_surf_pil = Image.frombytes(
+ "RGBA",
+ (light_surf.get_width(), light_surf.get_height()),
+ pygame.image.tostring(light_surf, "RGBA", False),
+ )
+ light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
+ light_surf = pygame.image.fromstring(
+ light_surf_blur.tobytes(),
+ (light_surf.get_width(), light_surf.get_height()),
+ "RGBA",
+ ).convert_alpha()
+
+ pygame.draw.circle(
+ light_surf,
+ (255, 250, 205),
+ (light_surf.get_width() / 2, light_surf.get_height() / 2),
+ self.game.player.hero.lanturn_strength,
+ )
+
+ light_surf_pil = Image.frombytes(
+ "RGBA",
+ (light_surf.get_width(), light_surf.get_height()),
+ pygame.image.tostring(light_surf, "RGBA", False),
+ )
+ light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
+ light_surf = pygame.image.fromstring(
+ light_surf_blur.tobytes(),
+ (light_surf.get_width(), light_surf.get_height()),
+ "RGBA",
+ ).convert_alpha()
+
+ pygame.draw.circle(
+ light_surf,
+ (255, 250, 205),
+ (light_surf.get_width() / 2, light_surf.get_height() / 2),
+ self.game.player.hero.lanturn_strength,
+ )
+
+ light_surf_pil = Image.frombytes(
+ "RGBA",
+ (light_surf.get_width(), light_surf.get_height()),
+ pygame.image.tostring(light_surf, "RGBA", False),
+ )
+ light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
+ light_surf = pygame.image.fromstring(
+ light_surf_blur.tobytes(),
+ (light_surf.get_width(), light_surf.get_height()),
+ "RGBA",
+ ).convert_alpha()
+
+ self.light_surf = light_surf
+ self.light_surf.set_colorkey((0, 0, 0))
+
+ self.lights = {
+ deg: pygame.transform.rotate(self.light_surf, deg - 90)
+ for deg in range(-360, 360, 20)
+ }
+
+ for deg, light in self.lights.items():
+ pygame.image.save(light, f"lights/light-{deg}.png")
def render(self):
+ self.surf.fill((0, 0, 0))
mx, my = pygame.mouse.get_pos()
v = pygame.math.Vector2(
mx - self.game.player.hero.x, my - self.game.player.hero.y
)
- v.scale_to_length(1000)
+ v.scale_to_length(self.game.player.hero.flashlight_strength)
+ self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
- for r in range(0, 360):
- _v = v.rotate(r)
- pygame.draw.line(
- self.game.screen,
- (255, 250, 205),
- (self.game.player.hero.x, self.game.player.hero.y),
- (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
- 50,
+ for other in self.game.player.others.__root__:
+ if other.id == self.game.player.hero.id:
+ continue
+
+ light_index = list(self.lights.keys())[
+ bisect.bisect_left(
+ list(self.lights.keys()),
+ other.flashlight_angle + 90,
+ )
+ ]
+
+ my_light = self.lights[light_index]
+ self.surf.blit(
+ my_light,
+ (
+ other.x - my_light.get_width() / 2,
+ other.y - my_light.get_height() / 2,
+ ),
)
+
+ light_index = list(self.lights.keys())[
+ bisect.bisect_left(
+ list(self.lights.keys()),
+ self.game.player.hero.flashlight_angle + 90,
+ )
+ ]
+
+ my_light = self.lights[light_index]
+ self.surf.blit(
+ my_light,
+ (
+ self.game.player.hero.x - my_light.get_width() / 2,
+ self.game.player.hero.y - my_light.get_height() / 2,
+ ),
+ )
+
+ # for r in range(-25, 25):
+ # _v = v.rotate(r)
+ # pygame.draw.line(
+ # self.surf,
+ # (255, 250, 205),
+ # (self.game.player.hero.x, self.game.player.hero.y),
+ # (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
+ # 50,
+ # )
+ # # draw a circle
+ # pygame.draw.circle(
+ # self.surf,
+ # (255, 250, 205),
+ # (self.game.player.hero.x, self.game.player.hero.y),
+ # self.game.player.hero.lanturn_strength,
+ # )
+
+ # for other in self.game.player.others.__root__:
+ # if other.id == self.game.player.hero.id:
+ # continue
+ # v = pygame.math.Vector2(0, 1)
+ # v = v.rotate(-other.flashlight_angle)
+ # v.scale_to_length(other.flashlight_strength)
+ # for r in range(-25, 25):
+ # _v = v.rotate(r)
+ # pygame.draw.line(
+ # self.surf,
+ # (255, 250, 205),
+ # (other.x, other.y),
+ # (other.x + _v.x, other.y + _v.y),
+ # 50,
+ # )
+ # pygame.draw.circle(
+ # self.surf,
+ # (255, 250, 205),
+ # (other.x, other.y),
+ # other.lanturn_strength,
+ # )
+
+ self.game.darkness.blit(
+ self.surf,
+ (0, 0),
+ )
diff --git a/learn_sql_model/game/map.py b/learn_sql_model/game/map.py
index 616c75b..e792056 100644
--- a/learn_sql_model/game/map.py
+++ b/learn_sql_model/game/map.py
@@ -1,7 +1,8 @@
-from learn_sql_model.optional import _optional_import_
import pydantic
from rich.console import Console
+from learn_sql_model.optional import _optional_import_
+
snoise2 = _optional_import_("noise", "snoise2", group="game")
pygame = _optional_import_("pygame", group="game")
@@ -35,9 +36,41 @@ class Map:
self.persistence = 0.05 # Amplitude of each octave
self.lacunarity = 1.0 # Frequency of each octave
self.thresh = 125
- self.pre_draw()
+ # try to load the map from map.png
+ try:
+ self.surf = pygame.image.load("map.png").convert_alpha()
+
+ # self.surf_pil = Image.frombytes(
+ # "RGBA",
+ # (self.surf.get_width(), self.surf.get_height()),
+ # pygame.image.tostring(self.surf, "RGBA", False),
+ # )
+ # self.surf_blur = (
+ # self.surf_pil.filter(
+ # ImageFilter.SMOOTH_MORE(),
+ # )
+ # .filter(ImageFilter.SMOOTH_MORE())
+ # .filter(ImageFilter.SMOOTH_MORE())
+ # .filter(ImageFilter.SMOOTH_MORE())
+ # .filter(ImageFilter.SMOOTH_MORE())
+ # .filter(ImageFilter.SMOOTH_MORE())
+ # # sharpen
+ # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
+ # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
+ # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
+ # )
+
+ # self.surf = pygame.image.fromstring(
+ # self.surf_blur.tobytes(),
+ # (self.surf.get_width(), self.surf.get_height()),
+ # "RGBA",
+ # ).convert_alpha()
+
+ except FileNotFoundError:
+ self.pre_draw()
def refresh_surf(self):
+
self.surf = pygame.Surface((self.screen_width, self.screen_height))
def get_noise(self, x, y):
@@ -53,7 +86,7 @@ class Map:
def render(self):
self.game.screen.blit(
- pygame.transform.scale(self.surf, (self.screen_width, self.screen_height)),
+ self.surf,
(0, 0),
)
@@ -64,6 +97,7 @@ class Map:
def pre_draw(self):
self.refresh_surf()
+
for x in range(int(self.screen_width)):
for y in range(int(self.screen_height)):
if not self.point_check_collision(x, y):
@@ -78,6 +112,7 @@ class Map:
),
)
pygame.image.save(self.surf, "map.png")
+
# av1 = (
# Image.open("rock.jpg")
# .convert("RGB")
diff --git a/learn_sql_model/game/menu.py b/learn_sql_model/game/menu.py
index 565c305..c3c67f5 100644
--- a/learn_sql_model/game/menu.py
+++ b/learn_sql_model/game/menu.py
@@ -1,6 +1,7 @@
from typing import Callable, Tuple
from pydantic import BaseModel
+
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
@@ -113,7 +114,7 @@ class Menu:
def handle_events(self, events):
self.hamburger.handle_events(self, events)
for event in events:
- if event.type == pygame.MOUSEBUTTONDOWN:
+ if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open:
if event.button == 1: # Left mouse button
self.handle_click()
diff --git a/learn_sql_model/game/player.py b/learn_sql_model/game/player.py
index a4b84d7..c7e1a79 100644
--- a/learn_sql_model/game/player.py
+++ b/learn_sql_model/game/player.py
@@ -12,19 +12,34 @@ HeroFactory = _optional_import_(
class Player:
def __init__(self, game):
- hero = HeroFactory().build(size=25, x=100, y=100)
+ hero = HeroFactory().build(
+ size=25,
+ x=100,
+ y=100,
+ flashlight_strength=1000,
+ lanturn_strength=100,
+ flashlight_angle=0,
+ )
self.hero = HeroCreate(**hero.dict()).post()
+ self.hero.size = 64
self.game = game
- self.others = Heros(heros=[])
+ self.others = [] # Heros(heros=[])
self.width = 16
self.height = 16
self.white = (255, 255, 255)
self.x = self.game.screen.get_width() / 2
self.y = self.game.screen.get_height() / 2
- self.speed = 5
- self.max_speed = 5
- self.image = pygame.image.load("player.png").convert_alpha()
+ self.speed = 10
+ self.max_speed = 10
+ self.image = pygame.image.load("creeper.png").convert_alpha()
+ self.pet_image = pygame.image.load("pet.png").convert_alpha()
+ self.image = pygame.transform.scale(
+ self.image, (self.hero.size, self.hero.size)
+ )
+ self.pet_image = pygame.transform.scale(
+ self.pet_image, (self.hero.size/1.5, self.hero.size/2)
+ )
self.x_last = self.x
self.y_last = self.y
self.hitbox_surface = pygame.Surface((self.width, self.height))
@@ -40,15 +55,23 @@ class Player:
self.joysticks = {}
def rename_hero(self):
- old_hero = self.hero
hero = HeroFactory().build(
- size=self.hero.size, x=self.hero.x, y=self.hero.y, id=old_hero.id
+ size=self.hero.size,
+ x=self.hero.x,
+ y=self.hero.y,
+ id=self.hero.id,
+ flashlight_strength=self.hero.flashlight_strength,
+ lanturn_strength=self.hero.lanturn_strength,
)
- self.hero = HeroCreate(**hero.dict()).post()
+ self.hero = HeroUpdate(**hero.dict()).update()
def quit(self):
try:
- HeroDelete(id=self.hero.id).delete()
+ # session = get_config().database.session
+ # hero = session.get(Hero, self.hero.id)
+ # session.delete(hero)
+ # session.commit()
+ HeroDelete.delete(id=self.hero.id)
except RuntimeError:
pass
@@ -158,6 +181,10 @@ class Player:
movement_vector = end_pos - start_pos
try:
movement_direction = movement_vector.normalize()
+ except ValueError:
+ end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
+ movement_vector = end_pos - start_pos
+ movement_direction = movement_vector.normalize()
except ZeroDivisionError:
end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
movement_vector = end_pos - start_pos
@@ -181,7 +208,7 @@ class Player:
self.x_last = self.hero.x
self.y_last = self.hero.y
- if self.game.ticks % 5 == 0 or self.game.ticks == 0:
+ if self.game.ticks % 60 == 0 or self.game.ticks == 0:
console.print("updating")
update = HeroUpdate(**self.hero.dict(exclude_unset=True))
console.print(update)
@@ -200,20 +227,35 @@ class Player:
)
def render(self):
- for other in self.others.heros:
+ for other in self.others.__root__:
if other.id != self.hero.id:
- pygame.draw.circle(
- self.game.screen, (255, 0, 0), (other.x, other.y), other.size
- )
+ # put self.image on the game.screen
self.game.screen.blit(
- self.game.font.render(other.name, False, (255, 255, 255), 1),
- (other.x, other.y),
+ self.image,
+ (other.x - other.size / 2, other.y - other.size / 2),
)
- pygame.draw.circle(
- self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
+ # pygame.draw.circle(
+ # self.game.screen, (255, 0, 0), (other.x, other.y), other.size
+ # )
+ self.game.screen.blit(
+ self.game.font.render(other.name, False, (255, 255, 255), 1),
+ (other.x - other.size / 2, other.y + other.size / 2),
+ )
+ self.game.screen.blit(
+ self.image,
+ (self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
)
self.game.screen.blit(
- self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
- (self.hero.x, self.hero.y),
+ self.pet_image,
+ (self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
+ )
+
+ # pygame.draw.circle(
+ # self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
+ # )
+
+ self.game.screen.blit(
+ self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
+ (self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2),
)
diff --git a/learn_sql_model/models/hero.py b/learn_sql_model/models/hero.py
index f96ca58..9b23250 100644
--- a/learn_sql_model/models/hero.py
+++ b/learn_sql_model/models/hero.py
@@ -1,11 +1,12 @@
-from typing import Optional
+from typing import Dict, Optional
import httpx
+import pydantic
from pydantic import BaseModel
-from sqlmodel import Field, Relationship, SQLModel
+from sqlmodel import Field, SQLModel
from learn_sql_model.config import config
-from learn_sql_model.models.pet import Pet
+from learn_sql_model.optional import optional
class HeroBase(SQLModel, table=False):
@@ -13,16 +14,27 @@ class HeroBase(SQLModel, table=False):
secret_name: str
x: int
y: int
- size: int
- age: Optional[int] = None
- shoe_size: Optional[int] = None
+ size: Optional[int]
+ flashlight_strength: Optional[int] = 1000
+ flashlight_angle: Optional[int] = 0
+ lanturn_strength: Optional[int] = 100
+ # age: Optional[int] = None
+ # shoe_size: Optional[int] = None
- pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- pet: Optional[Pet] = Relationship(back_populates="hero")
+ # pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ # pet: Optional[Pet] = Relationship(back_populates="hero")
+
+ @pydantic.validator("size", pre=True, always=True)
+ def validate_size(cls, v):
+ if v is None:
+ return 50
+ if v <= 0:
+ raise ValueError("size must be > 0")
+ return v
class Hero(HeroBase, table=True):
- id: Optional[int] = Field(default=None, primary_key=True)
+ id: int = Field(default=None, primary_key=True)
class HeroCreate(HeroBase):
@@ -66,36 +78,27 @@ class Heros(BaseModel):
return Heros.parse_obj({"__root__": r.json()})
-class HeroUpdate(SQLModel):
- # id is required to update the hero
+@optional
+class HeroUpdate(HeroBase):
id: int
- # all other fields, must match the model, but with Optional default None
- name: Optional[str] = None
- secret_name: Optional[str] = None
- age: Optional[int] = None
- shoe_size: Optional[int] = None
- x: Optional[int]
- y: Optional[int]
-
- pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- pet: Optional[Pet] = Relationship(back_populates="hero")
-
def update(self) -> Hero:
r = httpx.patch(
f"{config.api_client.url}/hero/",
- json=self.dict(),
+ json=self.dict(exclude_none=True),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return Hero.parse_obj(r.json())
class HeroDelete(BaseModel):
id: int
- def delete(self) -> Hero:
+ @classmethod
+    def delete(cls, id: int) -> Dict[str, bool]:
r = httpx.delete(
- f"{config.api_client.url}/hero/{self.id}",
+ f"{config.api_client.url}/hero/{id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
diff --git a/learn_sql_model/optional.py b/learn_sql_model/optional.py
index b43f240..83e6b60 100644
--- a/learn_sql_model/optional.py
+++ b/learn_sql_model/optional.py
@@ -1,5 +1,8 @@
+from typing import List, Optional
import textwrap
+import inspect
+from pydantic import BaseModel
def _optional_import_(
module: str,
@@ -61,3 +64,33 @@ def _optional_import_(
self._failed_import()
return _failed_import()
+
+
+# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
+# def decorator(cls):
+# def wrapper(*args, **kwargs):
+# if fields is None:
+# fields = cls.__fields__
+# if required is None:
+# required = []
+#
+# for field in fields:
+# if field not in required:
+# cls.__fields__[field].required = False
+# return _cls
+# return wrapper
+# return decorator
+#
+ #
+def optional(*fields):
+ def dec(_cls):
+ for field in fields:
+ _cls.__fields__[field].required = False
+ return _cls
+
+ if fields and inspect.isclass(fields[0]) and issubclass(fields[0], BaseModel):
+ cls = fields[0]
+ fields = cls.__fields__
+ return dec(cls)
+ return dec
+
diff --git a/lights/light--100.png b/lights/light--100.png
new file mode 100644
index 0000000..de46434
Binary files /dev/null and b/lights/light--100.png differ
diff --git a/lights/light--120.png b/lights/light--120.png
new file mode 100644
index 0000000..963152f
Binary files /dev/null and b/lights/light--120.png differ
diff --git a/lights/light--140.png b/lights/light--140.png
new file mode 100644
index 0000000..cda76ae
Binary files /dev/null and b/lights/light--140.png differ
diff --git a/lights/light--160.png b/lights/light--160.png
new file mode 100644
index 0000000..3643c9e
Binary files /dev/null and b/lights/light--160.png differ
diff --git a/lights/light--180.png b/lights/light--180.png
new file mode 100644
index 0000000..82f9e33
Binary files /dev/null and b/lights/light--180.png differ
diff --git a/lights/light--20.png b/lights/light--20.png
new file mode 100644
index 0000000..3e14e6b
Binary files /dev/null and b/lights/light--20.png differ
diff --git a/lights/light--200.png b/lights/light--200.png
new file mode 100644
index 0000000..6490af9
Binary files /dev/null and b/lights/light--200.png differ
diff --git a/lights/light--220.png b/lights/light--220.png
new file mode 100644
index 0000000..a59b646
Binary files /dev/null and b/lights/light--220.png differ
diff --git a/lights/light--240.png b/lights/light--240.png
new file mode 100644
index 0000000..0f036b7
Binary files /dev/null and b/lights/light--240.png differ
diff --git a/lights/light--260.png b/lights/light--260.png
new file mode 100644
index 0000000..7033ac0
Binary files /dev/null and b/lights/light--260.png differ
diff --git a/lights/light--280.png b/lights/light--280.png
new file mode 100644
index 0000000..42509bf
Binary files /dev/null and b/lights/light--280.png differ
diff --git a/lights/light--300.png b/lights/light--300.png
new file mode 100644
index 0000000..0c63757
Binary files /dev/null and b/lights/light--300.png differ
diff --git a/lights/light--320.png b/lights/light--320.png
new file mode 100644
index 0000000..4112187
Binary files /dev/null and b/lights/light--320.png differ
diff --git a/lights/light--340.png b/lights/light--340.png
new file mode 100644
index 0000000..9a9bb4d
Binary files /dev/null and b/lights/light--340.png differ
diff --git a/lights/light--360.png b/lights/light--360.png
new file mode 100644
index 0000000..553fc60
Binary files /dev/null and b/lights/light--360.png differ
diff --git a/lights/light--40.png b/lights/light--40.png
new file mode 100644
index 0000000..c142c08
Binary files /dev/null and b/lights/light--40.png differ
diff --git a/lights/light--60.png b/lights/light--60.png
new file mode 100644
index 0000000..8965054
Binary files /dev/null and b/lights/light--60.png differ
diff --git a/lights/light--80.png b/lights/light--80.png
new file mode 100644
index 0000000..9ed81e6
Binary files /dev/null and b/lights/light--80.png differ
diff --git a/lights/light-0.png b/lights/light-0.png
new file mode 100644
index 0000000..553fc60
Binary files /dev/null and b/lights/light-0.png differ
diff --git a/lights/light-100.png b/lights/light-100.png
new file mode 100644
index 0000000..7033ac0
Binary files /dev/null and b/lights/light-100.png differ
diff --git a/lights/light-120.png b/lights/light-120.png
new file mode 100644
index 0000000..0367763
Binary files /dev/null and b/lights/light-120.png differ
diff --git a/lights/light-140.png b/lights/light-140.png
new file mode 100644
index 0000000..a59b646
Binary files /dev/null and b/lights/light-140.png differ
diff --git a/lights/light-160.png b/lights/light-160.png
new file mode 100644
index 0000000..6490af9
Binary files /dev/null and b/lights/light-160.png differ
diff --git a/lights/light-180.png b/lights/light-180.png
new file mode 100644
index 0000000..82f9e33
Binary files /dev/null and b/lights/light-180.png differ
diff --git a/lights/light-20.png b/lights/light-20.png
new file mode 100644
index 0000000..9a9bb4d
Binary files /dev/null and b/lights/light-20.png differ
diff --git a/lights/light-200.png b/lights/light-200.png
new file mode 100644
index 0000000..3643c9e
Binary files /dev/null and b/lights/light-200.png differ
diff --git a/lights/light-220.png b/lights/light-220.png
new file mode 100644
index 0000000..cda76ae
Binary files /dev/null and b/lights/light-220.png differ
diff --git a/lights/light-240.png b/lights/light-240.png
new file mode 100644
index 0000000..7c35056
Binary files /dev/null and b/lights/light-240.png differ
diff --git a/lights/light-260.png b/lights/light-260.png
new file mode 100644
index 0000000..de46434
Binary files /dev/null and b/lights/light-260.png differ
diff --git a/lights/light-280.png b/lights/light-280.png
new file mode 100644
index 0000000..9ed81e6
Binary files /dev/null and b/lights/light-280.png differ
diff --git a/lights/light-300.png b/lights/light-300.png
new file mode 100644
index 0000000..4c35301
Binary files /dev/null and b/lights/light-300.png differ
diff --git a/lights/light-320.png b/lights/light-320.png
new file mode 100644
index 0000000..c142c08
Binary files /dev/null and b/lights/light-320.png differ
diff --git a/lights/light-340.png b/lights/light-340.png
new file mode 100644
index 0000000..3e14e6b
Binary files /dev/null and b/lights/light-340.png differ
diff --git a/lights/light-40.png b/lights/light-40.png
new file mode 100644
index 0000000..4112187
Binary files /dev/null and b/lights/light-40.png differ
diff --git a/lights/light-60.png b/lights/light-60.png
new file mode 100644
index 0000000..0c63757
Binary files /dev/null and b/lights/light-60.png differ
diff --git a/lights/light-80.png b/lights/light-80.png
new file mode 100644
index 0000000..42509bf
Binary files /dev/null and b/lights/light-80.png differ
diff --git a/load_test.py b/load_test.py
new file mode 100644
index 0000000..461db8f
--- /dev/null
+++ b/load_test.py
@@ -0,0 +1,18 @@
+from locust import HttpUser, between, task
+
+from learn_sql_model.factories.hero import HeroFactory
+from learn_sql_model.models.hero import HeroCreate
+
+
+class QuickstartUser(HttpUser):
+ wait_time = between(1, 2)
+
+ @task
+ def hello_world(self):
+ self.client.get("/hero/1")
+ self.client.get("/heros/")
+
+ @task(3)
+ def create_hero(self):
+ hero = HeroFactory().build()
+ HeroCreate(**hero.dict()).post()
diff --git a/locustfile.py b/locustfile.py
new file mode 100644
index 0000000..4f4f551
--- /dev/null
+++ b/locustfile.py
@@ -0,0 +1,58 @@
+import random
+
+from locust import FastHttpUser, task
+
+from learn_sql_model.config import get_config
+from learn_sql_model.factories.hero import HeroFactory
+from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
+
+config = get_config()
+
+
+class QuickstartUser(FastHttpUser):
+ # wait_time = between(1, 2)
+ host = "http://localhost:5000"
+ # host = "https://waylonwalker.com"
+
+ def on_start(self):
+ self.client.verify = False
+
+ @task(6)
+ def get_a_hero(self):
+ # heros = Heros.list()
+ id = 1
+ # id = random.choice(heros.__root__).id
+
+ self.client.get(f"/hero/{id}")
+
+ # @task(2)
+ # def get_all_hero(self):
+ # self.client.get("/heros/")
+
+ @task
+ def create_hero(self):
+ hero = HeroFactory().build()
+ hero_create = HeroCreate(**hero.dict()).post()
+
+ self.client.post(
+ f"{config.api_client.url}/hero/",
+ json=hero_create.dict(),
+ )
+
+ @task(3)
+ def update_hero(self):
+ hero = HeroFactory().build()
+ hero_update = HeroUpdate(id=1, name=hero.name)
+
+ self.client.patch(
+ "/hero/",
+ json=hero_update.dict(exclude_none=True),
+ )
+
+ @task
+ def delete_hero(self):
+ heros = Heros.list()
+ id = random.choice(heros.__root__).id
+ self.client.delete(
+ f"/hero/{id}",
+ )
diff --git a/map.pkl b/map.pkl
new file mode 100644
index 0000000..e69de29
diff --git a/map.png b/map.png
new file mode 100644
index 0000000..04809ee
Binary files /dev/null and b/map.png differ
diff --git a/micro b/micro
new file mode 100644
index 0000000..a06df4a
Binary files /dev/null and b/micro differ
diff --git a/migrations/env.py b/migrations/env.py
index 4863934..27494c8 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -79,7 +79,7 @@ def run_migrations_online() -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
- render_as_batch=False,
+ render_as_batch=True,
version_table=f'{config.get_main_option("project")}_alembic_version',
)
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
index 3124b62..567e915 100644
--- a/migrations/script.py.mako
+++ b/migrations/script.py.mako
@@ -8,6 +8,9 @@ Create Date: ${create_date}
from alembic import op
import sqlalchemy as sa
import sqlmodel
+from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
+from learn_sql_model.config import get_config
+
${imports if imports else ""}
# revision identifiers, used by Alembic.
@@ -19,6 +22,8 @@ depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
+ generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
diff --git a/migrations/versions/3555f61aaa79_add_x_and_y.py b/migrations/versions/3555f61aaa79_add_x_and_y.py
new file mode 100644
index 0000000..b6f112d
--- /dev/null
+++ b/migrations/versions/3555f61aaa79_add_x_and_y.py
@@ -0,0 +1,32 @@
+"""add x and y
+
+Revision ID: 3555f61aaa79
+Revises: 79972ec5f79d
+Create Date: 2023-06-22 15:03:27.338959
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = "3555f61aaa79"
+down_revision = "79972ec5f79d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
+ op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
+ # ### end Alembic commands ###
+ # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column("hero", "y")
+ op.drop_column("hero", "x")
+ # ### end Alembic commands ###
diff --git a/migrations/versions/3555f61aaa79_er_diagram.md b/migrations/versions/3555f61aaa79_er_diagram.md
new file mode 100644
index 0000000..caf6156
--- /dev/null
+++ b/migrations/versions/3555f61aaa79_er_diagram.md
@@ -0,0 +1,68 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| 79972ec5f79d |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id | x | y |
+|------|-------------|----|---|---|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+| x | INTEGER | | | |
+| y | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 0 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/3555f61aaa79_er_diagram.png b/migrations/versions/3555f61aaa79_er_diagram.png
new file mode 100644
index 0000000..7c6bdab
Binary files /dev/null and b/migrations/versions/3555f61aaa79_er_diagram.png differ
diff --git a/migrations/versions/79972ec5f79d_er_diagram.md b/migrations/versions/79972ec5f79d_er_diagram.md
new file mode 100644
index 0000000..9f3ece5
--- /dev/null
+++ b/migrations/versions/79972ec5f79d_er_diagram.md
@@ -0,0 +1,65 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 0 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id |
+|------|-------------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 0 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/79972ec5f79d_er_diagram.png b/migrations/versions/79972ec5f79d_er_diagram.png
new file mode 100644
index 0000000..cc4f37b
Binary files /dev/null and b/migrations/versions/79972ec5f79d_er_diagram.png differ
diff --git a/migrations/versions/79972ec5f79d_int.py b/migrations/versions/79972ec5f79d_int.py
new file mode 100644
index 0000000..1ccca25
--- /dev/null
+++ b/migrations/versions/79972ec5f79d_int.py
@@ -0,0 +1,45 @@
+"""int
+
+Revision ID: 79972ec5f79d
+Revises:
+Create Date: 2023-06-22 15:02:20.292322
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision = "79972ec5f79d"
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "hero",
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_table(
+ "pet",
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column("birthday", sa.DateTime(), nullable=True),
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ # ### end Alembic commands ###
+ # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table("pet")
+ op.drop_table("hero")
+ # ### end Alembic commands ###
diff --git a/migrations/versions/a1cd0a1947be_add_hero_lighting.py b/migrations/versions/a1cd0a1947be_add_hero_lighting.py
new file mode 100644
index 0000000..4e48a0c
--- /dev/null
+++ b/migrations/versions/a1cd0a1947be_add_hero_lighting.py
@@ -0,0 +1,36 @@
+"""add hero.lighting
+
+Revision ID: a1cd0a1947be
+Revises: c79214cdc7b3
+Create Date: 2023-06-28 19:43:47.108749
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
+from learn_sql_model.config import get_config
+
+
+
+# revision identifiers, used by Alembic.
+revision = 'a1cd0a1947be'
+down_revision = 'c79214cdc7b3'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
+ op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
+ # ### end Alembic commands ###
+ generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('hero', 'lanturn_strength')
+ op.drop_column('hero', 'flashlight_strength')
+ # ### end Alembic commands ###
diff --git a/migrations/versions/a1cd0a1947be_er_diagram.md b/migrations/versions/a1cd0a1947be_er_diagram.md
new file mode 100644
index 0000000..3fc470a
--- /dev/null
+++ b/migrations/versions/a1cd0a1947be_er_diagram.md
@@ -0,0 +1,75 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| c79214cdc7b3 |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
+|------|-------------|----|---|---|------|---------------------|------------------|
+| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
+| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
+| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
+| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+| x | INTEGER | | | |
+| y | INTEGER | | | |
+| size | INTEGER | | | |
+| flashlight_strength | INTEGER | | | |
+| lanturn_strength | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 4 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/a1cd0a1947be_er_diagram.png b/migrations/versions/a1cd0a1947be_er_diagram.png
new file mode 100644
index 0000000..8e26ac5
Binary files /dev/null and b/migrations/versions/a1cd0a1947be_er_diagram.png differ
diff --git a/migrations/versions/a9bb6625c57b_add_birthday.py b/migrations/versions/a9bb6625c57b_add_birthday.py
deleted file mode 100644
index 9e8feb4..0000000
--- a/migrations/versions/a9bb6625c57b_add_birthday.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""add birthday
-
-Revision ID: a9bb6625c57b
-Revises: c8516c888495
-Create Date: 2023-05-25 19:00:58.137464
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-
-
-# revision identifiers, used by Alembic.
-revision = 'a9bb6625c57b'
-down_revision = 'c8516c888495'
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.add_column('pet', sa.Column('birthday', sa.DateTime(), nullable=True))
- # ### end Alembic commands ###
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_column('pet', 'birthday')
- # ### end Alembic commands ###
diff --git a/migrations/versions/e26398d96dd0_add_x_y_size.py b/migrations/versions/c79214cdc7b3_add_hero_size.py
similarity index 50%
rename from migrations/versions/e26398d96dd0_add_x_y_size.py
rename to migrations/versions/c79214cdc7b3_add_hero_size.py
index 4219bde..643480b 100644
--- a/migrations/versions/e26398d96dd0_add_x_y_size.py
+++ b/migrations/versions/c79214cdc7b3_add_hero_size.py
@@ -1,33 +1,34 @@
-"""add x, y, size
+"""add hero.size
-Revision ID: e26398d96dd0
-Revises: a9bb6625c57b
-Create Date: 2023-06-10 18:37:04.751553
+Revision ID: c79214cdc7b3
+Revises: 3555f61aaa79
+Create Date: 2023-06-28 11:39:02.606001
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
+from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
+from learn_sql_model.config import get_config
+
# revision identifiers, used by Alembic.
-revision = 'e26398d96dd0'
-down_revision = 'a9bb6625c57b'
+revision = 'c79214cdc7b3'
+down_revision = '3555f61aaa79'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
- op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
- op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
- op.add_column('hero', sa.Column('size', sa.Integer(), nullable=False))
+ op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True))
# ### end Alembic commands ###
+ generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'size')
- op.drop_column('hero', 'y')
- op.drop_column('hero', 'x')
# ### end Alembic commands ###
diff --git a/migrations/versions/c79214cdc7b3_er_diagram.md b/migrations/versions/c79214cdc7b3_er_diagram.md
new file mode 100644
index 0000000..9969846
--- /dev/null
+++ b/migrations/versions/c79214cdc7b3_er_diagram.md
@@ -0,0 +1,74 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| 3555f61aaa79 |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id | x | y | size |
+|------|-------------|----|---|---|------|
+| tight-gold | successful-health | 1 | 6430 | 6231 | None |
+| hard-rope | green-research | 2 | 1395 | 2865 | None |
+| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
+| huge-library | adult-body | 4 | 656 | 2377 | None |
+| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+| x | INTEGER | | | |
+| y | INTEGER | | | |
+| size | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 1572 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/c79214cdc7b3_er_diagram.png b/migrations/versions/c79214cdc7b3_er_diagram.png
new file mode 100644
index 0000000..887cf61
Binary files /dev/null and b/migrations/versions/c79214cdc7b3_er_diagram.png differ
diff --git a/migrations/versions/c8516c888495_init.py b/migrations/versions/c8516c888495_init.py
deleted file mode 100644
index c351dd9..0000000
--- a/migrations/versions/c8516c888495_init.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""init
-
-Revision ID: c8516c888495
-Revises:
-Create Date: 2023-05-25 18:42:37.057225
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-
-
-# revision identifiers, used by Alembic.
-revision = 'c8516c888495'
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.create_table('pet',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.PrimaryKeyConstraint('id')
- )
- op.create_table('hero',
- sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column('age', sa.Integer(), nullable=True),
- sa.Column('shoe_size', sa.Integer(), nullable=True),
- sa.Column('pet_id', sa.Integer(), nullable=True),
- sa.ForeignKeyConstraint(['pet_id'], ['pet.id'], ),
- sa.PrimaryKeyConstraint('id')
- )
- # ### end Alembic commands ###
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_table('hero')
- op.drop_table('pet')
- # ### end Alembic commands ###
diff --git a/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py b/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py
new file mode 100644
index 0000000..f59d974
--- /dev/null
+++ b/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py
@@ -0,0 +1,34 @@
+"""add hero.flashlight_angle
+
+Revision ID: d79dd8e699d1
+Revises: e1af975310a1
+Create Date: 2023-06-28 19:54:19.322431
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
+from learn_sql_model.config import get_config
+
+
+
+# revision identifiers, used by Alembic.
+revision = 'd79dd8e699d1'
+down_revision = 'e1af975310a1'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
+ # ### end Alembic commands ###
+ generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('hero', 'flashlight_angle')
+ # ### end Alembic commands ###
diff --git a/migrations/versions/d79dd8e699d1_er_diagram.md b/migrations/versions/d79dd8e699d1_er_diagram.md
new file mode 100644
index 0000000..da209b0
--- /dev/null
+++ b/migrations/versions/d79dd8e699d1_er_diagram.md
@@ -0,0 +1,72 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| e1af975310a1 |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
+|------|-------------|----|---|---|------|---------------------|------------------|------------------|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+| x | INTEGER | | | |
+| y | INTEGER | | | |
+| size | INTEGER | | | |
+| flashlight_strength | INTEGER | | | |
+| lanturn_strength | INTEGER | | | |
+| flashlight_angle | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 0 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/d79dd8e699d1_er_diagram.png b/migrations/versions/d79dd8e699d1_er_diagram.png
new file mode 100644
index 0000000..f1560e3
Binary files /dev/null and b/migrations/versions/d79dd8e699d1_er_diagram.png differ
diff --git a/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py b/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py
new file mode 100644
index 0000000..e2df16f
--- /dev/null
+++ b/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py
@@ -0,0 +1,34 @@
+"""add hero.flashlight_angle
+
+Revision ID: e1af975310a1
+Revises: a1cd0a1947be
+Create Date: 2023-06-28 19:53:18.068873
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
+from learn_sql_model.config import get_config
+
+
+
+# revision identifiers, used by Alembic.
+revision = 'e1af975310a1'
+down_revision = 'a1cd0a1947be'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ pass
+ # ### end Alembic commands ###
+ generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
+ generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ pass
+ # ### end Alembic commands ###
diff --git a/migrations/versions/e1af975310a1_er_diagram.md b/migrations/versions/e1af975310a1_er_diagram.md
new file mode 100644
index 0000000..a5dd9d2
--- /dev/null
+++ b/migrations/versions/e1af975310a1_er_diagram.md
@@ -0,0 +1,71 @@
+
+
+---
+
+## Table: learn_sql_model_alembic_version
+
+### First 5 rows
+
+| version_num |
+|-------------|
+| a1cd0a1947be |
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| version_num | VARCHAR(32) | | | |
+
+### Records Count
+
+The table learn_sql_model_alembic_version contains 1 records.
+
+---
+
+## Table: hero
+
+### First 5 rows
+
+| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
+|------|-------------|----|---|---|------|---------------------|------------------|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| secret_name | VARCHAR | | | |
+| id | INTEGER | | | |
+| x | INTEGER | | | |
+| y | INTEGER | | | |
+| size | INTEGER | | | |
+| flashlight_strength | INTEGER | | | |
+| lanturn_strength | INTEGER | | | |
+
+### Records Count
+
+The table hero contains 0 records.
+
+---
+
+## Table: pet
+
+### First 5 rows
+
+| name | birthday | id |
+|------|----------|----|
+
+### Columns
+
+| Column Name | Type | Foreign Key | Example Value |
+|-------------|------|-------------|---------------|
+| name | VARCHAR | | | |
+| birthday | DATETIME | | | |
+| id | INTEGER | | | |
+
+### Records Count
+
+The table pet contains 0 records.
+
+---
+
diff --git a/migrations/versions/e1af975310a1_er_diagram.png b/migrations/versions/e1af975310a1_er_diagram.png
new file mode 100644
index 0000000..8e26ac5
Binary files /dev/null and b/migrations/versions/e1af975310a1_er_diagram.png differ
diff --git a/notify.py b/notify.py
new file mode 100644
index 0000000..8c1178d
--- /dev/null
+++ b/notify.py
@@ -0,0 +1,37 @@
+# # Import smtplib for the actual sending function
+# import smtplib
+# # Import the email modules we'll need
+# from email.mime.text import MIMEText
+
+# # Open a plain text file for reading. For this example, assume that
+# # the text file contains only ASCII characters.
+# # with open(textfile, 'rb') as fp:
+# # # Create a text/plain message
+# # msg = MIMEText(fp.read())
+# msg = MIMEText("hello there", "plain", "utf-8")
+
+# # me == the sender's email address
+# # you == the recipient's email address
+# me = "waylon@waylonwalker.com"
+# you = "3195728809@msg.fi.google.com"
+# msg["Subject"] = "Python SMTP test"
+# msg["From"] = me
+# msg["To"] = you
+
+# # Send the message via our own SMTP server, but don't include the
+# # envelope header.
+# s = smtplib.SMTP("localhost")
+# s.sendmail(me, [you], msg.as_string())
+# s.quit()
+import requests
+
+requests.post(
+ "https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
+ auth=("api", "YOUR_API_KEY"),
+ data={
+ "from": "Excited User ",
+ "to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
+ "subject": "Hello",
+ "text": "Testing some Mailgun awesomness!",
+ },
+)
diff --git a/pet.png b/pet.png
new file mode 100644
index 0000000..b22dd02
Binary files /dev/null and b/pet.png differ
diff --git a/pyproject.toml b/pyproject.toml
index 9263d99..53dbe66 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,13 +24,14 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy",
]
dependencies = [
+"black",
"python-socketio[client]",
"anyconfig",
"copier",
"engorgio",
"fastapi",
"httpx",
- "pydantic[dotenv]",
+ "pydantic<2.0.0",
"pyflyby",
"pyinstaller",
"rich",
diff --git a/rect.py b/rect.py
new file mode 100644
index 0000000..29def88
--- /dev/null
+++ b/rect.py
@@ -0,0 +1,23 @@
+import pygame
+
+pygame.init()
+
+screen = pygame.display.set_mode((500, 500))
+pygame.display.set_caption("draw a square")
+
+running = True
+
+while running:
+ for event in pygame.event.get():
+ if event.type == pygame.QUIT:
+ running = False
+
+ surface = pygame.Surface((500, 500))
+ surface.fill((255, 0, 0))
+
+ color = (0, 0, 255)
+ rect = (200, 200, 100, 100)
+ pygame.draw.rect(surface, color, rect)
+
+ screen.blit(surface, (0, 0))
+ pygame.display.flip()
diff --git a/templates/er_diagram.html b/templates/er_diagram.html
new file mode 100644
index 0000000..5dc1fed
--- /dev/null
+++ b/templates/er_diagram.html
@@ -0,0 +1,129 @@
+
+
+
+
+
+ ER Diagram
+
+
+
+
+
+
+
+
+
+
+
diff --git a/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
index e1b86b7..878f877 100644
--- a/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
@@ -1,86 +1,89 @@
from fastapi import APIRouter, Depends, HTTPException
-from sqlmodel import SQLModel, Session
+from sqlmodel import Session, select
from learn_sql_model.api.websocket_connection_manager import manager
-from learn_sql_model.config import get_config, get_session
-from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}, {{modelname}}Create, {{modelname}}Read, {{modelname}}Update
+from learn_sql_model.config import get_session
+from learn_sql_model.models.{{ modelname }} import {{ modelname }}, {{ modelname }}Create, {{ modelname }}Read, {{ modelname }}Update, {{ modelname }}s
-{{modelname.lower()}}_router = APIRouter()
+{{ modelname }}_router = APIRouter()
-@{{modelname.lower()}}_router.on_event("startup")
+@{{ modelname }}_router.on_event("startup")
def on_startup() -> None:
- SQLModel.metadata.create_all(get_config().database.engine)
+ # SQLModel.metadata.create_all(get_config().database.engine)
+ ...
-@{{modelname.lower()}}_router.get("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
-async def get_{{modelname.lower()}}(
+@{{ modelname }}_router.get("/{{ modelname }}/{{{ modelname }}_id}")
+def get_{{ modelname }}(
*,
session: Session = Depends(get_session),
- {{modelname.lower()}}_id: int,
-) -> {{modelname}}Read:
- "get one {{modelname.lower()}}"
- {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
- if not {{modelname.lower()}}:
- raise HTTPException(status_code=404, detail="{{modelname}} not found")
- return {{modelname.lower()}}
+ {{ modelname }}_id: int,
+) -> {{ modelname }}Read:
+ "get one {{ modelname }}"
+ {{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
+ if not {{ modelname }}:
+ raise HTTPException(status_code=404, detail="{{ modelname }} not found")
+ return {{ modelname }}
-@{{modelname.lower()}}_router.post("/{{modelname.lower()}}/")
-async def post_{{modelname.lower()}}(
+@{{ modelname }}_router.post("/{{ modelname }}/")
+def post_{{ modelname }}(
*,
session: Session = Depends(get_session),
- {{modelname.lower()}}: {{modelname}}Create,
-) -> {{modelname}}Read:
- "read all the {{modelname.lower()}}s"
- db_{{modelname.lower()}} = {{modelname}}.from_orm({{modelname.lower()}})
- session.add(db_{{modelname.lower()}})
+ {{ modelname }}: {{ modelname }}Create,
+) -> {{ modelname }}Read:
+ "create a {{ modelname }}"
+ db_{{ modelname }} = {{ modelname }}.from_orm({{ modelname }})
+ session.add(db_{{ modelname }})
session.commit()
- session.refresh(db_{{modelname.lower()}})
- await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
- return db_{{modelname.lower()}}
+ session.refresh(db_{{ modelname }})
+ await manager.broadcast({{{ modelname }}.json()}, id=1)
+ return db_{{ modelname }}
-@{{modelname.lower()}}_router.patch("/{{modelname.lower()}}/")
-async def patch_{{modelname.lower()}}(
+@{{ modelname }}_router.patch("/{{ modelname }}/")
+def patch_{{ modelname }}(
*,
session: Session = Depends(get_session),
- {{modelname.lower()}}: {{modelname}}Update,
-) -> {{modelname}}Read:
- "read all the {{modelname.lower()}}s"
- db_{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
- if not db_{{modelname.lower()}}:
- raise HTTPException(status_code=404, detail="{{modelname}} not found")
- for key, value in {{modelname.lower()}}.dict(exclude_unset=True).items():
- setattr(db_{{modelname.lower()}}, key, value)
- session.add(db_{{modelname.lower()}})
+ {{ modelname }}: {{ modelname }}Update,
+) -> {{ modelname }}Read:
+ "update a {{ modelname }}"
+ db_{{ modelname }} = session.get({{ modelname }}, {{ modelname }}.id)
+ if not db_{{ modelname }}:
+ raise HTTPException(status_code=404, detail="{{ modelname }} not found")
+ for key, value in {{ modelname }}.dict(exclude_unset=True).items():
+ setattr(db_{{ modelname }}, key, value)
+ session.add(db_{{ modelname }})
session.commit()
- session.refresh(db_{{modelname.lower()}})
- await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
- return db_{{modelname.lower()}}
+ session.refresh(db_{{ modelname }})
+ await manager.broadcast({{{ modelname }}.json()}, id=1)
+ return db_{{ modelname }}
-@{{modelname.lower()}}_router.delete("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
-async def delete_{{modelname.lower()}}(
+@{{ modelname }}_router.delete("/{{ modelname }}/{{{ modelname }}_id}")
+def delete_{{ modelname }}(
*,
session: Session = Depends(get_session),
- {{modelname.lower()}}_id: int,
+ {{ modelname }}_id: int,
):
- "read all the {{modelname.lower()}}s"
- {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
- if not {{modelname.lower()}}:
- raise HTTPException(status_code=404, detail="{{modelname}} not found")
- session.delete({{modelname.lower()}})
+ "delete a {{ modelname }}"
+ {{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
+ if not {{ modelname }}:
+ raise HTTPException(status_code=404, detail="{{ modelname }} not found")
+ session.delete({{ modelname }})
session.commit()
- await manager.broadcast(f"deleted {{modelname.lower()}} {{{modelname.lower()}}_id}", id=1)
+ await manager.broadcast(f"deleted {{ modelname }} {{{ modelname }}_id}", id=1)
return {"ok": True}
-@{{modelname.lower()}}_router.get("/{{modelname.lower()}}s/")
-async def get_{{modelname.lower()}}s(
+@{{ modelname }}_router.get("/{{ modelname }}s/")
+def get_{{ modelname }}s(
*,
session: Session = Depends(get_session),
-) -> list[{{modelname}}]:
- "get all {{modelname.lower()}}s"
- return {{modelname}}Read.list(session=session)
+) -> {{ modelname }}s:
+ "get all {{ modelname }}s"
+ statement = select({{ modelname }})
+ {{ modelname }}s = session.exec(statement).all()
+ return {{ modelname }}s(__root__={{ modelname }}s)
diff --git a/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
index 995f10b..8e807e0 100644
--- a/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
@@ -1,14 +1,12 @@
from faker import Faker
from polyfactory.factories.pydantic_factory import ModelFactory
-from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
+from learn_sql_model.factories.pet import PetFactory
+from learn_sql_model.models.{{ modelname }} import {{ modelname }}
+from learn_sql_model.models.pet import Pet
-class {{modelname}}Factory(ModelFactory[{{modelname.lower()}}]):
- __model__ = {{modelname}}
+class {{ modelname }}Factory(ModelFactory[{{ modelname }}]):
+ __model__ = {{ modelname }}
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
- id = None
-
- __random_seed__ = 10
-
diff --git a/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
index 6fed597..7d47e42 100644
--- a/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
@@ -1,93 +1,81 @@
-from typing import Optional
+from typing import Dict, Optional
-from fastapi import Depends, HTTPException
import httpx
from pydantic import BaseModel
-from sqlmodel import Field, Relationship, SQLModel, Session, select
+from sqlmodel import Field, SQLModel
-from learn_sql_model.config import config, get_config
-from learn_sql_model.models.pet import Pet
+from learn_sql_model.config import config
-class {{modelname}}Base(SQLModel, table=False):
+class {{ modelname }}Base(SQLModel, table=False):
+ # put model attributes here
-class {{modelname}}({{modelname}}Base, table=True):
- id: Optional[int] = Field(default=None, primary_key=True)
+class {{ modelname }}({{ modelname }}Base, table=True):
+ id: int = Field(default=None, primary_key=True)
-class {{modelname}}Create({{modelname}}Base):
+class {{ modelname }}Create({{ modelname }}Base):
...
- def post(self) -> {{modelname}}:
+ def post(self) -> {{ modelname }}:
r = httpx.post(
- f"{config.api_client.url}/{{modelname.lower()}}/",
+ f"{config.api_client.url}/{{ modelname }}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return {{ modelname }}.parse_obj(r.json())
-class {{modelname}}Read({{modelname}}Base):
+
+class {{ modelname }}Read({{ modelname }}Base):
id: int
@classmethod
def get(
cls,
id: int,
- ) -> {{modelname}}:
- with config.database.session as session:
- {{modelname.lower()}} = session.get({{modelname}}, id)
- if not {{modelname.lower()}}:
- raise HTTPException(status_code=404, detail="{{modelname}} not found")
- return {{modelname.lower()}}
+ ) -> {{ modelname }}:
+ r = httpx.get(f"{config.api_client.url}/{{ modelname }}/{id}")
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return {{ modelname }}Read.parse_obj(r.json())
+
+
+class {{ modelname }}s(BaseModel):
+ __root__: list[{{ modelname }}]
@classmethod
def list(
self,
- where=None,
- offset=0,
- limit=None,
- session: Session = None,
- ) -> {{modelname}}:
-
- if session is None:
- session = get_config().database.session
-
- statement = select({{modelname}})
- if where != "None" and where is not None:
- from sqlmodel import text
-
- statement = statement.where(text(where))
- statement = statement.offset(offset).limit(limit)
- {{modelname.lower()}}es = session.exec(statement).all()
- return {{modelname.lower()}}es
+ ) -> {{ modelname }}:
+ r = httpx.get(f"{config.api_client.url}/{{ modelname }}s/")
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return {{ modelname }}s.parse_obj({"__root__": r.json()})
-class {{modelname}}Update(SQLModel):
- # id is required to update the {{modelname.lower()}}
+class {{ modelname }}Update(SQLModel):
+ # id is required to update the {{ modelname }}
id: int
- # all other fields, must match the model, but with Optional default None
-
- pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- pet: Optional[Pet] = Relationship(back_populates="{{modelname.lower()}}")
-
- def update(self) -> {{modelname}}:
+ def update(self) -> {{ modelname }}:
r = httpx.patch(
- f"{config.api_client.url}/{{modelname.lower()}}/",
+ f"{config.api_client.url}/{{ modelname }}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
-class {{modelname}}Delete(BaseModel):
+class {{ modelname }}Delete(BaseModel):
id: int
- def delete(self) -> {{modelname}}:
+ @classmethod
+    def delete(cls, id: int) -> Dict[str, bool]:
r = httpx.delete(
- f"{config.api_client.url}/{{modelname.lower()}}/{self.id}",
+ f"{config.api_client.url}/{{ modelname }}/{id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
diff --git a/templates/model/tests/{{modelname.lower()}}.py.jinja b/templates/model/tests/{{modelname.lower()}}.py.jinja
index 39da70f..7aa9ba5 100644
--- a/templates/model/tests/{{modelname.lower()}}.py.jinja
+++ b/templates/model/tests/{{modelname.lower()}}.py.jinja
@@ -1,234 +1,103 @@
-from fastapi.testclient import TestClient
-import pytest
-from sqlalchemy import create_engine
-from sqlmodel import SQLModel, Session, select
-from sqlmodel.pool import StaticPool
-from typer.testing import CliRunner
+from typing import Optional
-from learn_sql_model.api.app import app
-from learn_sql_model.config import get_config, get_session
-from learn_sql_model.factories.{{modelname.lower()}} import {{modelname}}Factory
-from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
+import httpx
+from pydantic import BaseModel
+from sqlmodel import Field, Relationship, SQLModel
-runner = CliRunner()
-client = TestClient(app)
+from learn_sql_model.config import config
+from learn_sql_model.models.pet import Pet
-@pytest.fixture(name="session")
-def session_fixture():
- engine = create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- )
- SQLModel.metadata.create_all(engine)
- with Session(engine) as session:
- yield session
+class {{ model.lower() }}Base(SQLModel, table=False):
+ name: str
+ secret_name: str
+ x: int
+ y: int
+ size: int
+ age: Optional[int] = None
+ shoe_size: Optional[int] = None
+
+ pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
-@pytest.fixture(name="client")
-def client_fixture(session: Session):
- def get_session_override():
- return session
-
- app.dependency_overrides[get_session] = get_session_override
-
- client = TestClient(app)
- yield client
- app.dependency_overrides.clear()
+class {{ model.lower() }}({{ model.lower() }}Base, table=True):
+ id: Optional[int] = Field(default=None, primary_key=True)
-def test_api_post(client: TestClient):
- {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
- {{modelname.lower()}}_dict = {{modelname.lower()}}.dict()
- response = client.post("/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {{modelname.lower()}}_dict})
- response_{{modelname.lower()}} = {{modelname}}.parse_obj(response.json())
+class {{ model.lower() }}Create({{ model.lower() }}Base):
+ ...
- assert response.status_code == 200
- assert response_{{modelname.lower()}}.name == "Steelman"
- assert response_{{modelname.lower()}}.age == 25
+    def post(self) -> {{ model.lower() }}:
+ r = httpx.post(
+ f"{config.api_client.url}/{{ model.lower() }}/",
+ json=self.dict(),
+ )
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+
+        return {{ model.lower() }}.parse_obj(r.json())
-def test_api_read_{{modelname.lower()}}es(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- {{modelname.lower()}}_2 = {{modelname}}(name="Rusty-Man", secret_name="Tommy Sharp", age=48)
- session.add({{modelname.lower()}}_1)
- session.add({{modelname.lower()}}_2)
- session.commit()
+class {{ model.lower() }}Read({{ model.lower() }}Base):
+ id: int
- response = client.get("/{{modelname.lower()}}s/")
- data = response.json()
-
- assert response.status_code == 200
-
- assert len(data) == 2
- assert data[0]["name"] == {{modelname.lower()}}_1.name
- assert data[0]["secret_name"] == {{modelname.lower()}}_1.secret_name
- assert data[0]["age"] == {{modelname.lower()}}_1.age
- assert data[0]["id"] == {{modelname.lower()}}_1.id
- assert data[1]["name"] == {{modelname.lower()}}_2.name
- assert data[1]["secret_name"] == {{modelname.lower()}}_2.secret_name
- assert data[1]["age"] == {{modelname.lower()}}_2.age
- assert data[1]["id"] == {{modelname.lower()}}_2.id
+ @classmethod
+ def get(
+ cls,
+ id: int,
+    ) -> {{ model.lower() }}:
+ r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}/{id}")
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+        return {{ model.lower() }}Read.parse_obj(r.json())
-def test_api_read_{{modelname.lower()}}(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
+class {{ model.lower() }}s(BaseModel):
+    {{ model.lower() }}s: list[{{ model.lower() }}]
- response = client.get(f"/{{modelname.lower()}}/999")
- assert response.status_code == 404
+ @classmethod
+ def list(
+ self,
+    ) -> {{ model.lower() }}:
+ r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}s/")
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+        return {{ model.lower() }}s.parse_obj(r.json())
-def test_api_read_{{modelname.lower()}}_404(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
+class {{ model.lower() }}Update(SQLModel):
+ # id is required to update the {{ model.lower() }}
+ id: int
- response = client.get(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
- data = response.json()
+ # all other fields, must match the model, but with Optional default None
+ name: Optional[str] = None
+ secret_name: Optional[str] = None
+ age: Optional[int] = None
+ shoe_size: Optional[int] = None
+ x: int
+ y: int
- assert response.status_code == 200
- assert data["name"] == {{modelname.lower()}}_1.name
- assert data["secret_name"] == {{modelname.lower()}}_1.secret_name
- assert data["age"] == {{modelname.lower()}}_1.age
- assert data["id"] == {{modelname.lower()}}_1.id
+ pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
+
+    def update(self) -> {{ model.lower() }}:
+ r = httpx.patch(
+ f"{config.api_client.url}/{{ model.lower() }}/",
+ json=self.dict(),
+ )
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
-def test_api_update_{{modelname.lower()}}(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
+class {{ model.lower() }}Delete(BaseModel):
+ id: int
- response = client.patch(
- f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": {{modelname.lower()}}_1.id}}
- )
- data = response.json()
+    def delete(self) -> {{ model.lower() }}:
+ r = httpx.delete(
+ f"{config.api_client.url}/{{ model.lower() }}/{self.id}",
+ )
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return {"ok": True}
- assert response.status_code == 200
- assert data["name"] == "Deadpuddle"
- assert data["secret_name"] == "Dive Wilson"
- assert data["age"] is None
- assert data["id"] == {{modelname.lower()}}_1.id
-
-
-def test_api_update_{{modelname.lower()}}_404(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
-
- response = client.patch(f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": 999}})
- assert response.status_code == 404
-
-
-def test_delete_{{modelname.lower()}}(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
-
- response = client.delete(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
-
- {{modelname.lower()}}_in_db = session.get({{modelname}}, {{modelname.lower()}}_1.id)
-
- assert response.status_code == 200
-
- assert {{modelname.lower()}}_in_db is None
-
-
-def test_delete_{{modelname.lower()}}_404(session: Session, client: TestClient):
- {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
- session.add({{modelname.lower()}}_1)
- session.commit()
-
- response = client.delete(f"/{{modelname.lower()}}/999")
- assert response.status_code == 404
-
-
-def test_config_memory(mocker):
- mocker.patch(
- "learn_sql_model.config.Database.engine",
- new_callable=lambda: create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- ),
- )
- config = get_config()
- SQLModel.metadata.create_all(config.database.engine)
- {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
- with config.database.session as session:
- session.add({{modelname.lower()}})
- session.commit()
- {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
- {{modelname.lower()}}es = session.exec(select({{modelname}})).all()
- assert {{modelname.lower()}}.name == "Steelman"
- assert {{modelname.lower()}}.age == 25
- assert len({{modelname.lower()}}es) == 1
-
-
-def test_cli_get(mocker):
- mocker.patch(
- "learn_sql_model.config.Database.engine",
- new_callable=lambda: create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- ),
- )
-
- config = get_config()
- SQLModel.metadata.create_all(config.database.engine)
-
- {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
- with config.database.session as session:
- session.add({{modelname.lower()}})
- session.commit()
- {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
- result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "1"])
- assert result.exit_code == 0
- assert f"name='{{{modelname.lower()}}.name}'" in result.stdout
- assert f"secret_name='{{{modelname.lower()}}.secret_name}'" in result.stdout
-
-
-def test_cli_get_404(mocker):
- mocker.patch(
- "learn_sql_model.config.Database.engine",
- new_callable=lambda: create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- ),
- )
-
- config = get_config()
- SQLModel.metadata.create_all(config.database.engine)
-
- {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
- with config.database.session as session:
- session.add({{modelname.lower()}})
- session.commit()
- {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
- result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "999"])
- assert result.exception.status_code == 404
- assert result.exception.detail == "{{modelname}} not found"
-
-
-def test_cli_list(mocker):
- mocker.patch(
- "learn_sql_model.config.Database.engine",
- new_callable=lambda: create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- ),
- )
-
- config = get_config()
- SQLModel.metadata.create_all(config.database.engine)
-
- {{modelname.lower()}}_1 = {{modelname}}Factory().build(name="Steelman", age=25)
- {{modelname.lower()}}_2 = {{modelname}}Factory().build(name="Hunk", age=52)
-
- with config.database.session as session:
- session.add({{modelname.lower()}}_1)
- session.add({{modelname.lower()}}_2)
- session.commit()
- session.refresh({{modelname.lower()}}_1)
- session.refresh({{modelname.lower()}}_2)
- result = runner.invoke({{modelname.lower()}}_app, ["list"])
- assert result.exit_code == 0
- assert f"name='{{{modelname.lower()}}_1.name}'" in result.stdout
- assert f"secret_name='{{{modelname.lower()}}_1.secret_name}'" in result.stdout
- assert f"name='{{{modelname.lower()}}_2.name}'" in result.stdout
- assert f"secret_name='{{{modelname.lower()}}_2.secret_name}'" in result.stdout
diff --git a/tests/test_hero.py b/tests/test_hero.py
index 9f74a2b..80d8afe 100644
--- a/tests/test_hero.py
+++ b/tests/test_hero.py
@@ -1,16 +1,16 @@
from fastapi.testclient import TestClient
import pytest
from sqlalchemy import create_engine
-from sqlmodel import SQLModel, Session, select
+from sqlmodel import SQLModel, Session
from sqlmodel.pool import StaticPool
from typer.testing import CliRunner
from learn_sql_model.api.app import app
from learn_sql_model.cli.hero import hero_app
-from learn_sql_model.config import get_config, get_session
+from learn_sql_model.config import get_session
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models import hero as hero_models
-from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, Heros
+from learn_sql_model.models.hero import Hero, HeroCreate, HeroDelete, HeroRead
runner = CliRunner()
client = TestClient(app)
@@ -39,21 +39,19 @@ def client_fixture(session: Session):
def test_api_post(client: TestClient):
- hero = HeroFactory().build(name="Steelman", age=25)
+ hero = HeroFactory().build()
hero_dict = hero.dict()
response = client.post("/hero/", json=hero_dict)
response_hero = Hero.parse_obj(response.json())
assert response.status_code == 200
- assert response_hero.name == "Steelman"
- assert response_hero.age == 25
+ assert response_hero.name == hero.name
-def test_api_read_heroes(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
- hero_2 = HeroFactory().build(name="Rusty-Man", age=48)
- session.add(hero_1)
- session.add(hero_2)
+def test_api_read_heros(session: Session, client: TestClient):
+ heros = HeroFactory().batch(5)
+ for hero in heros:
+ session.add(hero)
session.commit()
response = client.get("/heros/")
@@ -61,35 +59,31 @@ def test_api_read_heroes(session: Session, client: TestClient):
assert response.status_code == 200
- assert len(data) == 2
- assert data[0]["name"] == hero_1.name
- assert data[0]["secret_name"] == hero_1.secret_name
- assert data[0]["age"] == hero_1.age
- assert data[0]["id"] == hero_1.id
- assert data[1]["name"] == hero_2.name
- assert data[1]["secret_name"] == hero_2.secret_name
- assert data[1]["age"] == hero_2.age
- assert data[1]["id"] == hero_2.id
+ assert len(data) == 5
+ for d in data:
+ api_hero = Hero.parse_obj(d)
+ my_hero = [hero for hero in heros if hero.id == api_hero.id][0]
+ for key, value in api_hero.dict(exclude_unset=True).items():
+ assert getattr(my_hero, key) == value
def test_api_read_hero(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
- session.add(hero_1)
+ hero = HeroFactory().build()
+ session.add(hero)
session.commit()
- response = client.get(f"/hero/{hero_1.id}")
+ response = client.get(f"/hero/{hero.id}")
data = response.json()
+ response_hero = Hero.parse_obj(data)
assert response.status_code == 200
- assert data["name"] == hero_1.name
- assert data["secret_name"] == hero_1.secret_name
- assert data["age"] == hero_1.age
- assert data["id"] == hero_1.id
+ for key, value in hero.dict(exclude_unset=True).items():
+ assert getattr(response_hero, key) == value
def test_api_read_hero_404(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
- session.add(hero_1)
+ hero = HeroFactory().build()
+ session.add(hero)
session.commit()
response = client.get(f"/hero/999")
@@ -97,22 +91,24 @@ def test_api_read_hero_404(session: Session, client: TestClient):
def test_api_update_hero(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
- session.add(hero_1)
+ hero = HeroFactory().build()
+ new_hero = HeroFactory().build()
+ session.add(hero)
session.commit()
- response = client.patch(f"/hero/", json={"name": "Deadpuddle", "id": hero_1.id})
+ response = client.patch(
+ f"/hero/", json={"id": hero.id, **new_hero.dict(exclude={"id"})}
+ )
data = response.json()
+ response_hero = Hero.parse_obj(data)
assert response.status_code == 200
- assert data["name"] == "Deadpuddle"
- assert data["secret_name"] == hero_1.secret_name
- assert data["age"] is hero_1.age
- assert data["id"] == hero_1.id
+    for key, value in new_hero.dict(exclude={"id"}, exclude_unset=True).items():
+ assert getattr(response_hero, key) == value
def test_api_update_hero_404(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
+ hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
@@ -121,7 +117,7 @@ def test_api_update_hero_404(session: Session, client: TestClient):
def test_delete_hero(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
+ hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
@@ -135,7 +131,7 @@ def test_delete_hero(session: Session, client: TestClient):
def test_delete_hero_404(session: Session, client: TestClient):
- hero_1 = HeroFactory().build(name="Steelman", age=25)
+ hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
@@ -143,28 +139,8 @@ def test_delete_hero_404(session: Session, client: TestClient):
assert response.status_code == 404
-def test_config_memory(mocker):
- mocker.patch(
- "learn_sql_model.config.Database.engine",
- new_callable=lambda: create_engine(
- "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
- ),
- )
- config = get_config()
- SQLModel.metadata.create_all(config.database.engine)
- hero = HeroFactory().build(name="Steelman", age=25)
- with config.database.session as session:
- session.add(hero)
- session.commit()
- hero = session.get(Hero, hero.id)
- heroes = session.exec(select(Hero)).all()
- assert hero.name == "Steelman"
- assert hero.age == 25
- assert len(heroes) == 1
-
-
def test_cli_get(mocker):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+ hero = HeroFactory().build()
hero = HeroRead(**hero.dict(exclude_none=True))
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
@@ -173,13 +149,18 @@ def test_cli_get(mocker):
result = runner.invoke(hero_app, ["get", "1"])
assert result.exit_code == 0
- assert f"name='{hero.name}'" in result.stdout
- assert f"secret_name='{hero.secret_name}'" in result.stdout
+ for key, value in hero.dict(exclude_unset=True).items():
+ if type(value) == str:
+ assert f"{key}='{value}'" in result.stdout
+ elif type(value) == int:
+ assert f"{key}={value}" in result.stdout
assert httpx.get.call_count == 1
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 0
def test_cli_get_404(mocker):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+ hero = HeroFactory().build()
hero = HeroRead(**hero.dict(exclude_none=True))
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
@@ -191,31 +172,30 @@ def test_cli_get_404(mocker):
assert result.exit_code == 1
assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
assert httpx.get.call_count == 1
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 0
def test_cli_list(mocker):
- hero_1 = HeroRead(
- **HeroFactory().build(name="Steelman", age=25, id=1).dict(exclude_none=True)
- )
- hero_2 = HeroRead(
- **HeroFactory().build(name="Hunk", age=52, id=2).dict(exclude_none=True)
- )
- heros = Heros(__root__=[hero_1, hero_2])
+ heros = HeroFactory().batch(5)
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
httpx.get.return_value.status_code = 200
- httpx.get.return_value.json.return_value = heros.dict()["__root__"]
+ httpx.get.return_value.json.return_value = heros
result = runner.invoke(hero_app, ["list"])
assert result.exit_code == 0
- assert f"name='{hero_1.name}'" in result.stdout
- assert f"secret_name='{hero_1.secret_name}'" in result.stdout
- assert f"name='{hero_2.name}'" in result.stdout
- assert f"secret_name='{hero_2.secret_name}'" in result.stdout
+
+ for hero in heros:
+ for key, value in hero.dict(exclude_unset=True).items():
+ if type(value) == str:
+ assert f"{key}='{value}'" in result.stdout
+ elif type(value) == int:
+ assert f"{key}={value}" in result.stdout
def test_model_post(mocker):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+ hero = HeroFactory().build()
hero_create = HeroCreate(**hero.dict())
httpx = mocker.patch.object(hero_models, "httpx")
@@ -226,10 +206,11 @@ def test_model_post(mocker):
assert result == hero
assert httpx.get.call_count == 0
assert httpx.post.call_count == 1
+ assert httpx.delete.call_count == 0
def test_model_post_500(mocker):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+ hero = HeroFactory().build()
hero_create = HeroCreate(**hero.dict())
httpx = mocker.patch.object(hero_models, "httpx")
@@ -240,10 +221,11 @@ def test_model_post_500(mocker):
hero_create.post()
assert httpx.get.call_count == 0
assert httpx.post.call_count == 1
+ assert httpx.delete.call_count == 0
-def test_model_read_hero(mocker, session: Session, client: TestClient):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+def test_model_read_hero(mocker):
+ hero = HeroFactory().build()
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
@@ -255,10 +237,11 @@ def test_model_read_hero(mocker, session: Session, client: TestClient):
assert hero_read.secret_name == hero.secret_name
assert httpx.get.call_count == 1
assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 0
-def test_model_read_hero_404(mocker, session: Session, client: TestClient):
- hero = HeroFactory().build(name="Steelman", age=25, id=1)
+def test_model_read_hero_404(mocker):
+ hero = HeroFactory().build()
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
httpx.get.return_value.status_code = 404
@@ -269,3 +252,68 @@ def test_model_read_hero_404(mocker, session: Session, client: TestClient):
assert e.value.args[0] == "404: Hero not found"
assert httpx.get.call_count == 1
assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 0
+
+
+def test_model_delete_hero(mocker):
+ hero = HeroFactory().build()
+
+ httpx = mocker.patch.object(hero_models, "httpx")
+ httpx.delete.return_value = mocker.Mock()
+ httpx.delete.return_value.status_code = 200
+ httpx.delete.return_value.json.return_value = hero.dict()
+
+ hero_delete = HeroDelete.delete(id=hero.id)
+ assert hero_delete == {"ok": True}
+ assert httpx.get.call_count == 0
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 1
+
+
+def test_model_delete_hero_404(mocker):
+ hero = HeroFactory().build()
+
+ httpx = mocker.patch.object(hero_models, "httpx")
+ httpx.delete.return_value = mocker.Mock()
+ httpx.delete.return_value.status_code = 404
+ httpx.get.return_value.text = "Hero not found"
+
+ with pytest.raises(RuntimeError) as e:
+ HeroDelete.delete(id=hero.id)
+ assert e.value.args[0] == "404: Hero not found"
+ assert httpx.get.call_count == 0
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 1
+
+
+def test_cli_delete_hero(mocker):
+ hero = HeroFactory().build()
+
+ httpx = mocker.patch.object(hero_models, "httpx")
+ httpx.delete.return_value = mocker.Mock()
+ httpx.delete.return_value.status_code = 200
+ httpx.delete.return_value.json.return_value = hero.dict()
+
+ result = runner.invoke(hero_app, ["delete", "--hero-id", "1"])
+ assert result.exit_code == 0
+ assert "{'ok': True}" in result.stdout
+ assert httpx.get.call_count == 0
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 1
+
+
+def test_cli_delete_hero_404(mocker):
+ hero = HeroFactory().build()
+
+ httpx = mocker.patch.object(hero_models, "httpx")
+ httpx.delete.return_value = mocker.Mock()
+ httpx.delete.return_value.status_code = 404
+ httpx.delete.return_value.text = "Hero not found"
+ httpx.delete.return_value.json.return_value = hero.dict()
+
+ result = runner.invoke(hero_app, ["delete", "--hero-id", "999"])
+ assert result.exit_code == 1
+ assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
+ assert httpx.get.call_count == 0
+ assert httpx.post.call_count == 0
+ assert httpx.delete.call_count == 1
diff --git a/tmp.py b/tmp.py
new file mode 100644
index 0000000..a4947c0
--- /dev/null
+++ b/tmp.py
@@ -0,0 +1,144 @@
+import sqlite3
+
+from graphviz import Digraph
+
+
+def generate_er_diagram(database_path, output_path):
+ # Connect to the SQLite database
+ conn = sqlite3.connect(database_path)
+ cursor = conn.cursor()
+
+ # Get the table names from the database
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+ tables = cursor.fetchall()
+
+ # Create a new Digraph
+ dot = Digraph(format="png")
+ dot.attr(rankdir="TD")
+
+ # Iterate over the tables
+ for table in tables:
+ table_name = table[0]
+ dot.node(table_name, shape="box")
+ cursor.execute(f"PRAGMA table_info({table_name});")
+ columns = cursor.fetchall()
+
+ # Add the columns to the table node
+ for column in columns:
+ column_name = column[1]
+ dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
+ dot.edge(table_name, f"{table_name}.{column_name}")
+
+ # Check for foreign key relationships
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+ foreign_keys = cursor.fetchall()
+
+ # Add dotted lines for foreign key relationships
+ for foreign_key in foreign_keys:
+ from_column = foreign_key[3]
+ to_table = foreign_key[2]
+ to_column = foreign_key[4]
+ dot.node(f"{to_table}.{to_column}", shape="oval")
+ dot.edge(
+ f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
+ )
+
+ # Render and save the diagram
+ dot.render(output_path.replace(".png", ""), cleanup=True)
+
+ # Close the database connection
+ cursor.close()
+ conn.close()
+
+
+def generate_markdown(database_path, output_path, er_diagram_path):
+ # Connect to the SQLite database
+ conn = sqlite3.connect(database_path)
+ cursor = conn.cursor()
+
+ # Get the table names from the database
+ cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
+ tables = cursor.fetchall()
+
+ with open(output_path, "w") as f:
+ # Write the ER Diagram image
+        f.write(f"![ER Diagram]({er_diagram_path})\n\n---\n\n")
+
+ # Iterate over the tables
+ for table in tables:
+ table_name = table[0]
+
+ f.write(f"## Table: {table_name}\n\n")
+
+ # Get the table columns
+ cursor.execute(f"PRAGMA table_info({table_name});")
+ columns = cursor.fetchall()
+
+ f.write("### First 5 rows\n\n")
+ cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
+ rows = cursor.fetchall()
+ f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
+ f.write("|")
+ for column in columns:
+ # ---
+ f.write(f'{"-"*(len(column[1]) + 2)}|')
+ f.write("\n")
+ for row in rows:
+ f.write(f'| {" | ".join([str(r) for r in row])} |\n')
+ f.write("\n")
+
+ cursor.execute(f"PRAGMA foreign_key_list({table_name});")
+ foreign_keys = cursor.fetchall()
+
+ # Add dotted lines for foreign key relationships
+ fkeys = {}
+ for foreign_key in foreign_keys:
+ from_column = foreign_key[3]
+ to_table = foreign_key[2]
+ to_column = foreign_key[4]
+ fkeys[from_column] = f"{to_table}.{to_column}"
+
+ # Replace 'description' with the actual column name in the table that contains the description, if applicable
+ try:
+ cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
+ description = cursor.fetchone()
+ if description:
+ f.write(f"### Description\n\n{description[0]}\n\n")
+            except Exception:
+ ...
+
+ # Write the table columns
+ f.write("### Columns\n\n")
+ f.write("| Column Name | Type | Foreign Key | Example Value |\n")
+ f.write("|-------------|------|-------------|---------------|\n")
+
+ for column in columns:
+
+ column_name = column[1]
+ column_type = column[2]
+ fkey = ""
+ if column_name in fkeys:
+ fkey = fkeys[column_name]
+ f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
+
+ f.write("\n")
+
+ # Get the count of records
+ cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
+ records_count = cursor.fetchone()[0]
+ f.write(
+ f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
+ )
+
+ # Close the database connection
+ cursor.close()
+ conn.close()
+
+
+# Usage example
+database_path = "database.db"
+md_output_path = "database.md"
+er_output_path = "er_diagram.png"
+
+generate_er_diagram(database_path, er_output_path)
+generate_markdown(database_path, md_output_path, er_output_path)
diff --git a/tmp.sh b/tmp.sh
new file mode 100644
index 0000000..7f36aa4
--- /dev/null
+++ b/tmp.sh
@@ -0,0 +1,20 @@
+max="$1"
+date
+echo "url: $2
+rate: $max calls / second"
+START=$(date +%s);
+
+get () {
+ curl -s -v "$1" 2>&1 | tr '\r\n' '\\n' | awk -v date="$(date +'%r')" '{print $0"\n-----", date}' >> /tmp/perf-test.log
+}
+
+while true
+do
+ echo $(($(date +%s) - START)) | awk '{print int($1/60)":"int($1%60)}'
+ sleep 1
+
+ for i in `seq 1 $max`
+ do
+ get $2 &
+ done
+done
diff --git a/wyatt.py b/wyatt.py
new file mode 100644
index 0000000..d84db3e
--- /dev/null
+++ b/wyatt.py
@@ -0,0 +1,84 @@
+import random
+import sys
+
+# Initialize player attributes
+player = {
+ "name": input("Enter your character's name: "),
+ "health": 100,
+ "food": 100,
+ "x": 5,
+ "y": 5,
+ "day": 1,
+}
+
+# Define game resources
+resources = {
+ "food": 50,
+ "water": 50,
+}
+
+# Define game constants
+MAP_WIDTH, MAP_HEIGHT = 20, 10
+PLAYER_CHAR = "(o)"
+ENEMY_CHAR = "(?)"
+
+# Game loop
+while player["health"] > 0:
+ # Create the game map
+ game_map = [[" " for _ in range(MAP_WIDTH)] for _ in range(MAP_HEIGHT)]
+ game_map[player["y"]][player["x"]] = PLAYER_CHAR
+
+ # Place enemies randomly on the map
+ for _ in range(random.randint(1, 3)):
+ enemy_x = random.randint(0, MAP_WIDTH - 1)
+ enemy_y = random.randint(0, MAP_HEIGHT - 1)
+ game_map[enemy_y][enemy_x] = ENEMY_CHAR
+
+ # Print the game map
+ for row in game_map:
+ print("".join(row))
+
+ print(f"\nDay {player['day']}")
+ print(f"Name: {player['name']}")
+ print(f"Health: {player['health']} HP {'*' * player['health']}")
+ print(f"Food: {player['food']} Hunger {'*' * player['food']}")
+ print(f"Coordinates: ({player['x']}, {player['y']})")
+
+ # Player input for movement
+ move = input("Move (W/A/S/D): ").upper()
+
+ # Update player position based on input
+ if move == "W" and player["y"] > 0:
+ player["y"] -= 1
+ elif move == "S" and player["y"] < MAP_HEIGHT - 1:
+ player["y"] += 1
+ elif move == "A" and player["x"] > 0:
+ player["x"] -= 1
+ elif move == "D" and player["x"] < MAP_WIDTH - 1:
+ player["x"] += 1
+
+ # Consume resources
+ player["food"] -= random.randint(5, 15)
+
+ # Check if the player has enough resources
+ if player["food"] < 0:
+ player["food"] = 0
+ player["health"] -= 10
+
+ # Check if the player encounters an enemy
+ if game_map[player["y"]][player["x"]] == ENEMY_CHAR:
+ enemy_damage = random.randint(10, 30)
+ player["health"] -= enemy_damage
+ print(f"You encountered an enemy and took {enemy_damage} damage!")
+
+ # Rest for the day
+ player["day"] += 1
+
+ # Exit the game if health reaches zero
+ if player["health"] <= 0:
+ print("Game Over. You did not survive.")
+ break
+
+ input("Press Enter to continue to the next day...")
+
+sys.exit()