diff --git a/.dockerignore b/.dockerignore
deleted file mode 100644
index 88328ed..0000000
--- a/.dockerignore
+++ /dev/null
@@ -1,978 +0,0 @@
-# flyctl launch added from .gitignore
-# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
-# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
-
-### Data ###
-**/*.csv
-**/*.dat
-**/*.efx
-**/*.gbr
-**/*.key
-**/*.pps
-**/*.ppt
-**/*.pptx
-**/*.sdf
-**/*.tax2010
-**/*.vcf
-**/*.xml
-
-### Emacs ###
-# -*- mode: gitignore; -*-
-**/*~
-**/\#*\#
-.emacs.desktop
-.emacs.desktop.lock
-**/*.elc
-**/auto-save-list
-**/tramp
-**/.\#*
-
-# Org-mode
-**/.org-id-locations
-**/*_archive
-
-# flymake-mode
-**/*_flymake.*
-
-# eshell files
-eshell/history
-eshell/lastdir
-
-# elpa packages
-elpa
-
-# reftex files
-**/*.rel
-
-# AUCTeX auto folder
-auto
-
-# cask packages
-**/.cask
-**/dist
-
-# Flycheck
-**/flycheck_*.el
-
-# server auth directory
-server
-
-# projectiles files
-**/.projectile
-
-# directory configuration
-**/.dir-locals.el
-
-# network security
-network-security.data
-
-
-### Executable ###
-**/*.app
-**/*.bat
-**/*.cgi
-**/*.com
-**/*.exe
-**/*.gadget
-**/*.jar
-**/*.pif
-**/*.vb
-**/*.wsf
-
-### Node ###
-# Logs
-**/logs
-**/*.log
-**/npm-debug.log*
-**/yarn-debug.log*
-**/yarn-error.log*
-**/lerna-debug.log*
-**/.pnpm-debug.log*
-
-# Diagnostic reports (https://nodejs.org/api/report.html)
-**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
-
-# Runtime data
-**/pids
-**/*.pid
-**/*.seed
-**/*.pid.lock
-
-# Directory for instrumented libs generated by jscoverage/JSCover
-**/lib-cov
-
-# Coverage directory used by tools like istanbul
-**/coverage
-**/*.lcov
-
-# nyc test coverage
-**/.nyc_output
-
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
-**/.grunt
-
-# Bower dependency directory (https://bower.io/)
-**/bower_components
-
-# node-waf configuration
-**/.lock-wscript
-
-# Compiled binary addons (https://nodejs.org/api/addons.html)
-**/build/Release
-
-# Dependency directories
-**/node_modules
-**/jspm_packages
-
-# Snowpack dependency directory (https://snowpack.dev/)
-**/web_modules
-
-# TypeScript cache
-**/*.tsbuildinfo
-
-# Optional npm cache directory
-**/.npm
-
-# Optional eslint cache
-**/.eslintcache
-
-# Optional stylelint cache
-**/.stylelintcache
-
-# Microbundle cache
-**/.rpt2_cache
-**/.rts2_cache_cjs
-**/.rts2_cache_es
-**/.rts2_cache_umd
-
-# Optional REPL history
-**/.node_repl_history
-
-# Output of 'npm pack'
-**/*.tgz
-
-# Yarn Integrity file
-**/.yarn-integrity
-
-# dotenv environment variable files
-**/.env
-**/.env.development.local
-**/.env.test.local
-**/.env.production.local
-**/.env.local
-
-# parcel-bundler cache (https://parceljs.org/)
-**/.cache
-**/.parcel-cache
-
-# Next.js build output
-**/.next
-**/out
-
-# Nuxt.js build / generate output
-**/.nuxt
-**/dist
-
-# Gatsby files
-**/.cache
-# Comment in the public line in if your project uses Gatsby and not Next.js
-# https://nextjs.org/blog/next-9-1#public-directory-support
-# public
-
-# vuepress build output
-**/.vuepress/dist
-
-# vuepress v2.x temp and cache directory
-**/.temp
-
-# Docusaurus cache and generated files
-**/.docusaurus
-
-# Serverless directories
-**/.serverless
-
-# FuseBox cache
-**/.fusebox
-
-# DynamoDB Local files
-**/.dynamodb
-
-# TernJS port file
-**/.tern-port
-
-# Stores VSCode versions used for testing VSCode extensions
-**/.vscode-test
-
-# yarn v2
-**/.yarn/cache
-**/.yarn/unplugged
-**/.yarn/build-state.yml
-**/.yarn/install-state.gz
-**/.pnp.*
-
-### Node Patch ###
-# Serverless Webpack directories
-**/.webpack
-
-# Optional stylelint cache
-
-# SvelteKit build / generate output
-**/.svelte-kit
-
-### PyCharm ###
-# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
-# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
-
-# User-specific stuff
-**/.idea/**/workspace.xml
-**/.idea/**/tasks.xml
-**/.idea/**/usage.statistics.xml
-**/.idea/**/dictionaries
-**/.idea/**/shelf
-
-# AWS User-specific
-**/.idea/**/aws.xml
-
-# Generated files
-**/.idea/**/contentModel.xml
-
-# Sensitive or high-churn files
-**/.idea/**/dataSources
-**/.idea/**/dataSources.ids
-**/.idea/**/dataSources.local.xml
-**/.idea/**/sqlDataSources.xml
-**/.idea/**/dynamic.xml
-**/.idea/**/uiDesigner.xml
-**/.idea/**/dbnavigator.xml
-
-# Gradle
-**/.idea/**/gradle.xml
-**/.idea/**/libraries
-
-# Gradle and Maven with auto-import
-# When using Gradle or Maven with auto-import, you should exclude module files,
-# since they will be recreated, and may cause churn. Uncomment if using
-# auto-import.
-# .idea/artifacts
-# .idea/compiler.xml
-# .idea/jarRepositories.xml
-# .idea/modules.xml
-# .idea/*.iml
-# .idea/modules
-# *.iml
-# *.ipr
-
-# CMake
-**/cmake-build-*
-
-# Mongo Explorer plugin
-**/.idea/**/mongoSettings.xml
-
-# File-based project format
-**/*.iws
-
-# IntelliJ
-**/out
-
-# mpeltonen/sbt-idea plugin
-**/.idea_modules
-
-# JIRA plugin
-**/atlassian-ide-plugin.xml
-
-# Cursive Clojure plugin
-**/.idea/replstate.xml
-
-# SonarLint plugin
-**/.idea/sonarlint
-
-# Crashlytics plugin (for Android Studio and IntelliJ)
-**/com_crashlytics_export_strings.xml
-**/crashlytics.properties
-**/crashlytics-build.properties
-**/fabric.properties
-
-# Editor-based Rest Client
-**/.idea/httpRequests
-
-# Android studio 3.1+ serialized cache file
-**/.idea/caches/build_file_checksums.ser
-
-### PyCharm Patch ###
-# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
-
-# *.iml
-# modules.xml
-# .idea/misc.xml
-# *.ipr
-
-# Sonarlint plugin
-# https://plugins.jetbrains.com/plugin/7973-sonarlint
-**/.idea/**/sonarlint
-
-# SonarQube Plugin
-# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
-**/.idea/**/sonarIssues.xml
-
-# Markdown Navigator plugin
-# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
-**/.idea/**/markdown-navigator.xml
-**/.idea/**/markdown-navigator-enh.xml
-**/.idea/**/markdown-navigator
-
-# Cache file creation bug
-# See https://youtrack.jetbrains.com/issue/JBR-2257
-**/.idea/$CACHE_FILE$
-
-# CodeStream plugin
-# https://plugins.jetbrains.com/plugin/12206-codestream
-**/.idea/codestream.xml
-
-# Azure Toolkit for IntelliJ plugin
-# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
-**/.idea/**/azureSettings.xml
-
-### Python ###
-# Byte-compiled / optimized / DLL files
-**/__pycache__
-**/*.py[cod]
-**/*$py.class
-
-# C extensions
-**/*.so
-
-# Distribution / packaging
-**/.Python
-**/build
-**/develop-eggs
-**/downloads
-**/eggs
-**/.eggs
-**/lib
-**/lib64
-**/parts
-**/sdist
-**/var
-**/wheels
-**/share/python-wheels
-**/*.egg-info
-**/.installed.cfg
-**/*.egg
-**/MANIFEST
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-**/*.manifest
-**/*.spec
-
-# Installer logs
-**/pip-log.txt
-**/pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-**/htmlcov
-**/.tox
-**/.nox
-**/.coverage
-**/.coverage.*
-**/nosetests.xml
-**/coverage.xml
-**/*.cover
-**/*.py,cover
-**/.hypothesis
-**/.pytest_cache
-**/cover
-
-# Translations
-**/*.mo
-**/*.pot
-
-# Django stuff:
-**/local_settings.py
-**/db.sqlite3
-**/db.sqlite3-journal
-
-# Flask stuff:
-**/instance
-**/.webassets-cache
-
-# Scrapy stuff:
-**/.scrapy
-
-# Sphinx documentation
-**/docs/_build
-
-# PyBuilder
-**/.pybuilder
-**/target
-
-# Jupyter Notebook
-**/.ipynb_checkpoints
-
-# IPython
-**/profile_default
-**/ipython_config.py
-
-# pyenv
-# For a library or package, you might want to ignore these files since the code is
-# intended to run in multiple environments; otherwise, check them in:
-# .python-version
-
-# pipenv
-# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-# However, in case of collaboration, if having platform-specific dependencies or dependencies
-# having no cross-platform support, pipenv may install dependencies that don't work, or not
-# install all needed dependencies.
-#Pipfile.lock
-
-# poetry
-# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
-# This is especially recommended for binary packages to ensure reproducibility, and is more
-# commonly ignored for libraries.
-# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
-#poetry.lock
-
-# pdm
-# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
-#pdm.lock
-# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
-# in version control.
-# https://pdm.fming.dev/#use-with-ide
-**/.pdm.toml
-
-# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
-**/__pypackages__
-
-# Celery stuff
-**/celerybeat-schedule
-**/celerybeat.pid
-
-# SageMath parsed files
-**/*.sage.py
-
-# Environments
-**/.venv
-**/env
-**/venv
-**/ENV
-**/env.bak
-**/venv.bak
-
-# Spyder project settings
-**/.spyderproject
-**/.spyproject
-
-# Rope project settings
-**/.ropeproject
-
-# mkdocs documentation
-site
-
-# mypy
-**/.mypy_cache
-**/.dmypy.json
-**/dmypy.json
-
-# Pyre type checker
-**/.pyre
-
-# pytype static type analyzer
-**/.pytype
-
-# Cython debug symbols
-**/cython_debug
-
-# PyCharm
-# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
-# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
-# and can be added to the global gitignore or merged into this file. For a more nuclear
-# option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
-
-### Python Patch ###
-# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
-**/poetry.toml
-
-# ruff
-**/.ruff_cache
-
-# LSP config files
-**/pyrightconfig.json
-
-### SublimeText ###
-# Cache files for Sublime Text
-**/*.tmlanguage.cache
-**/*.tmPreferences.cache
-**/*.stTheme.cache
-
-# Workspace files are user-specific
-**/*.sublime-workspace
-
-# Project files should be checked into the repository, unless a significant
-# proportion of contributors will probably not be using Sublime Text
-# *.sublime-project
-
-# SFTP configuration file
-**/sftp-config.json
-**/sftp-config-alt*.json
-
-# Package control specific files
-**/Package Control.last-run
-**/Package Control.ca-list
-**/Package Control.ca-bundle
-**/Package Control.system-ca-bundle
-**/Package Control.cache
-**/Package Control.ca-certs
-**/Package Control.merged-ca-bundle
-**/Package Control.user-ca-bundle
-**/oscrypto-ca-bundle.crt
-**/bh_unicode_properties.cache
-
-# Sublime-github package stores a github token in this file
-# https://packagecontrol.io/packages/sublime-github
-**/GitHub.sublime-settings
-
-### Vim ###
-# Swap
-**/[._]*.s[a-v][a-z]
-!**/*.svg # comment out if you don't need vector files
-**/[._]*.sw[a-p]
-**/[._]s[a-rt-v][a-z]
-**/[._]ss[a-gi-z]
-**/[._]sw[a-p]
-
-# Session
-**/Session.vim
-**/Sessionx.vim
-
-# Temporary
-**/.netrwhist
-# Auto-generated tag files
-**/tags
-# Persistent undo
-**/[._]*.un~
-
-### VisualStudioCode ###
-**/.vscode/*
-!**/.vscode/settings.json
-!**/.vscode/tasks.json
-!**/.vscode/launch.json
-!**/.vscode/extensions.json
-!**/.vscode/*.code-snippets
-
-# Local History for Visual Studio Code
-**/.history
-
-# Built Visual Studio Code Extensions
-**/*.vsix
-
-### VisualStudioCode Patch ###
-# Ignore all local history of files
-**/.history
-**/.ionide
-
-### VisualStudio ###
-## Ignore Visual Studio temporary files, build results, and
-## files generated by popular Visual Studio add-ons.
-##
-## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
-
-# User-specific files
-**/*.rsuser
-**/*.suo
-**/*.user
-**/*.userosscache
-**/*.sln.docstates
-
-# User-specific files (MonoDevelop/Xamarin Studio)
-**/*.userprefs
-
-# Mono auto generated files
-**/mono_crash.*
-
-# Build results
-**/[Dd]ebug
-**/[Dd]ebugPublic
-**/[Rr]elease
-**/[Rr]eleases
-**/x64
-**/x86
-**/[Ww][Ii][Nn]32
-**/[Aa][Rr][Mm]
-**/[Aa][Rr][Mm]64
-**/bld
-**/[Bb]in
-**/[Oo]bj
-**/[Ll]og
-**/[Ll]ogs
-
-# Visual Studio 2015/2017 cache/options directory
-**/.vs
-# Uncomment if you have tasks that create the project's static files in wwwroot
-#wwwroot/
-
-# Visual Studio 2017 auto generated files
-**/Generated\ Files
-
-# MSTest test Results
-**/[Tt]est[Rr]esult*
-**/[Bb]uild[Ll]og.*
-
-# NUnit
-**/*.VisualState.xml
-**/TestResult.xml
-**/nunit-*.xml
-
-# Build Results of an ATL Project
-**/[Dd]ebugPS
-**/[Rr]eleasePS
-**/dlldata.c
-
-# Benchmark Results
-**/BenchmarkDotNet.Artifacts
-
-# .NET Core
-**/project.lock.json
-**/project.fragment.lock.json
-**/artifacts
-
-# ASP.NET Scaffolding
-**/ScaffoldingReadMe.txt
-
-# StyleCop
-**/StyleCopReport.xml
-
-# Files built by Visual Studio
-**/*_i.c
-**/*_p.c
-**/*_h.h
-**/*.ilk
-**/*.meta
-**/*.obj
-**/*.iobj
-**/*.pch
-**/*.pdb
-**/*.ipdb
-**/*.pgc
-**/*.pgd
-**/*.rsp
-**/*.sbr
-**/*.tlb
-**/*.tli
-**/*.tlh
-**/*.tmp
-**/*.tmp_proj
-**/*_wpftmp.csproj
-**/*.tlog
-**/*.vspscc
-**/*.vssscc
-**/.builds
-**/*.pidb
-**/*.svclog
-**/*.scc
-
-# Chutzpah Test files
-**/_Chutzpah*
-
-# Visual C++ cache files
-**/ipch
-**/*.aps
-**/*.ncb
-**/*.opendb
-**/*.opensdf
-**/*.cachefile
-**/*.VC.db
-**/*.VC.VC.opendb
-
-# Visual Studio profiler
-**/*.psess
-**/*.vsp
-**/*.vspx
-**/*.sap
-
-# Visual Studio Trace Files
-**/*.e2e
-
-# TFS 2012 Local Workspace
-**/$tf
-
-# Guidance Automation Toolkit
-**/*.gpState
-
-# ReSharper is a .NET coding add-in
-**/_ReSharper*
-**/*.[Rr]e[Ss]harper
-**/*.DotSettings.user
-
-# TeamCity is a build add-in
-**/_TeamCity*
-
-# DotCover is a Code Coverage Tool
-**/*.dotCover
-
-# AxoCover is a Code Coverage Tool
-**/.axoCover/*
-!**/.axoCover/settings.json
-
-# Coverlet is a free, cross platform Code Coverage Tool
-**/coverage*.json
-**/coverage*.xml
-**/coverage*.info
-
-# Visual Studio code coverage results
-**/*.coverage
-**/*.coveragexml
-
-# NCrunch
-**/_NCrunch_*
-**/.*crunch*.local.xml
-**/nCrunchTemp_*
-
-# MightyMoose
-**/*.mm.*
-**/AutoTest.Net
-
-# Web workbench (sass)
-**/.sass-cache
-
-# Installshield output folder
-**/[Ee]xpress
-
-# DocProject is a documentation generator add-in
-**/DocProject/buildhelp
-**/DocProject/Help/*.HxT
-**/DocProject/Help/*.HxC
-**/DocProject/Help/*.hhc
-**/DocProject/Help/*.hhk
-**/DocProject/Help/*.hhp
-**/DocProject/Help/Html2
-**/DocProject/Help/html
-
-# Click-Once directory
-**/publish
-
-# Publish Web Output
-**/*.[Pp]ublish.xml
-**/*.azurePubxml
-# Note: Comment the next line if you want to checkin your web deploy settings,
-# but database connection strings (with potential passwords) will be unencrypted
-**/*.pubxml
-**/*.publishproj
-
-# Microsoft Azure Web App publish settings. Comment the next line if you want to
-# checkin your Azure Web App publish settings, but sensitive information contained
-# in these scripts will be unencrypted
-**/PublishScripts
-
-# NuGet Packages
-**/*.nupkg
-# NuGet Symbol Packages
-**/*.snupkg
-# The packages folder can be ignored because of Package Restore
-**/**/[Pp]ackages/*
-# except build/, which is used as an MSBuild target.
-!**/**/[Pp]ackages/build
-# Uncomment if necessary however generally it will be regenerated when needed
-#!**/[Pp]ackages/repositories.config
-# NuGet v3's project.json files produces more ignorable files
-**/*.nuget.props
-**/*.nuget.targets
-
-# Microsoft Azure Build Output
-**/csx
-**/*.build.csdef
-
-# Microsoft Azure Emulator
-**/ecf
-**/rcf
-
-# Windows Store app package directories and files
-**/AppPackages
-**/BundleArtifacts
-**/Package.StoreAssociation.xml
-**/_pkginfo.txt
-**/*.appx
-**/*.appxbundle
-**/*.appxupload
-
-# Visual Studio cache files
-# files ending in .cache can be ignored
-**/*.[Cc]ache
-# but keep track of directories ending in .cache
-!**/?*.[Cc]ache
-
-# Others
-**/ClientBin
-**/~$*
-**/*.dbmdl
-**/*.dbproj.schemaview
-**/*.jfm
-**/*.pfx
-**/*.publishsettings
-**/orleans.codegen.cs
-
-# Including strong name files can present a security risk
-# (https://github.com/github/gitignore/pull/2483#issue-259490424)
-#*.snk
-
-# Since there are multiple workflows, uncomment next line to ignore bower_components
-# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
-#bower_components/
-
-# RIA/Silverlight projects
-**/Generated_Code
-
-# Backup & report files from converting an old project file
-# to a newer Visual Studio version. Backup files are not needed,
-# because we have git ;-)
-**/_UpgradeReport_Files
-**/Backup*
-**/UpgradeLog*.XML
-**/UpgradeLog*.htm
-**/ServiceFabricBackup
-**/*.rptproj.bak
-
-# SQL Server files
-**/*.mdf
-**/*.ldf
-**/*.ndf
-
-# Business Intelligence projects
-**/*.rdl.data
-**/*.bim.layout
-**/*.bim_*.settings
-**/*.rptproj.rsuser
-**/*- [Bb]ackup.rdl
-**/*- [Bb]ackup ([0-9]).rdl
-**/*- [Bb]ackup ([0-9][0-9]).rdl
-
-# Microsoft Fakes
-**/FakesAssemblies
-
-# GhostDoc plugin setting file
-**/*.GhostDoc.xml
-
-# Node.js Tools for Visual Studio
-**/.ntvs_analysis.dat
-
-# Visual Studio 6 build log
-**/*.plg
-
-# Visual Studio 6 workspace options file
-**/*.opt
-
-# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
-**/*.vbw
-
-# Visual Studio 6 auto-generated project file (contains which files were open etc.)
-**/*.vbp
-
-# Visual Studio 6 workspace and project file (working project files containing files to include in project)
-**/*.dsw
-**/*.dsp
-
-# Visual Studio 6 technical files
-
-# Visual Studio LightSwitch build output
-**/**/*.HTMLClient/GeneratedArtifacts
-**/**/*.DesktopClient/GeneratedArtifacts
-**/**/*.DesktopClient/ModelManifest.xml
-**/**/*.Server/GeneratedArtifacts
-**/**/*.Server/ModelManifest.xml
-**/_Pvt_Extensions
-
-# Paket dependency manager
-**/.paket/paket.exe
-**/paket-files
-
-# FAKE - F# Make
-**/.fake
-
-# CodeRush personal settings
-**/.cr/personal
-
-# Python Tools for Visual Studio (PTVS)
-**/*.pyc
-
-# Cake - Uncomment if you are using it
-# tools/**
-# !tools/packages.config
-
-# Tabs Studio
-**/*.tss
-
-# Telerik's JustMock configuration file
-**/*.jmconfig
-
-# BizTalk build output
-**/*.btp.cs
-**/*.btm.cs
-**/*.odx.cs
-**/*.xsd.cs
-
-# OpenCover UI analysis results
-**/OpenCover
-
-# Azure Stream Analytics local run output
-**/ASALocalRun
-
-# MSBuild Binary and Structured Log
-**/*.binlog
-
-# NVidia Nsight GPU debugger configuration file
-**/*.nvuser
-
-# MFractors (Xamarin productivity tool) working folder
-**/.mfractor
-
-# Local History for Visual Studio
-**/.localhistory
-
-# Visual Studio History (VSHistory) files
-**/.vshistory
-
-# BeatPulse healthcheck temp database
-**/healthchecksdb
-
-# Backup folder for Package Reference Convert tool in Visual Studio 2017
-**/MigrationBackup
-
-# Ionide (cross platform F# VS Code tools) working folder
-**/.ionide
-
-# Fody - auto-generated XML schema
-**/FodyWeavers.xsd
-
-# VS Code files for those working on multiple tools
-**/*.code-workspace
-
-# Local History for Visual Studio Code
-
-# Windows Installer files from build outputs
-**/*.cab
-**/*.msi
-**/*.msix
-**/*.msm
-**/*.msp
-
-# JetBrains Rider
-**/*.sln.iml
-
-### VisualStudio Patch ###
-# Additional files built by Visual Studio
-
-# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
-**/database.db
-**/database.db
-**/database.db
-**/.markata.cache
-**/database.sqlite
-
-# flyctl launch added from .pytest_cache/.gitignore
-# Created by pytest automatically.
-.pytest_cache/**/*
-
-# flyctl launch added from .ruff_cache/.gitignore
-.ruff_cache/**/*
-fly.toml
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 962ef14..afa41f1 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -41,15 +41,13 @@ jobs:
with:
name: lsm-${{ matrix.os }}
path: dist
- - uses: vimtor/action-zip@v1
- with:
- files: dist/
- dest: lsm-${{ matrix.os }}.zip
+ - name: zip up dist
+ run: zip -r lsm-${{ matrix.os }}.zip dist
- name: Upload Release Asset to versioned release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: gh release upload v${{ github.run_number }} lsm-${{ matrix.os }}.zip
+ run: gh release upload v${{ github.run_number }} lsm-${{ matrix.os }}.zip
- name: Upload Release Asset to latest release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- run: gh release upload latest lsm-${{ matrix.os }}.zip
+ run: gh release upload latest lsm-${{ matrix.os }}.zip
diff --git a/.gitignore b/.gitignore
index 1683c90..9c4e139 100644
--- a/.gitignore
+++ b/.gitignore
@@ -967,5 +967,3 @@ database.db
database.db
.markata.cache
database.sqlite
-.env.dev
-.env.dev.docker
diff --git a/.pyflyby b/.pyflyby
index 41d35d2..db34f23 100644
--- a/.pyflyby
+++ b/.pyflyby
@@ -1,5 +1,3 @@
-from learn_sql_model.optional import _optional_import_
-
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import Config
diff --git a/Dockerfile b/Dockerfile
index 56a258e..16592c5 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,9 +4,9 @@ WORKDIR /app
Copy pyproject.toml /app
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
COPY README.md /app
-RUN pip3 install '.[all]'
+RUN pip3 install .
COPY . /app
-RUN pip3 install '.[all]'
+RUN pip3 install .
EXPOSE 5000
diff --git a/README.md b/README.md
index ad0cc9f..6792fe8 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,3 @@
-> [!IMPORTANT]
-> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
-
# Learn SQL Model
learning sql model
diff --git a/creeper.png b/creeper.png
deleted file mode 100644
index 79a2499..0000000
Binary files a/creeper.png and /dev/null differ
diff --git a/d3.py b/d3.py
deleted file mode 100644
index 3b41786..0000000
--- a/d3.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import sqlite3
-
-from jinja2 import Environment, FileSystemLoader
-
-
-def get_tables_and_columns(conn):
- cursor = conn.cursor()
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = [
- {
- "name": table[0],
- "columns": get_columns(conn, table[0]),
- "foreign_keys": get_foreign_keys(conn, table[0]),
- }
- for table in cursor.fetchall()
- ]
- return tables
-
-
-def get_columns(conn, table_name):
- cursor = conn.cursor()
- cursor.execute(f"PRAGMA table_info({table_name});")
- columns = [row[1] for row in cursor.fetchall()]
- return columns
-
-
-def get_foreign_keys(conn, table_name):
- cursor = conn.cursor()
- cursor.execute(f"PRAGMA foreign_key_list({table_name});")
- foreign_keys = [
- {"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]}
- for row in cursor.fetchall()
- ]
- return foreign_keys
-
-
-def generate_links(tables):
- links = []
- for t_index, table in enumerate(tables):
- for fk in table["foreign_keys"]:
- target_index = next(
- i for i, target in enumerate(tables) if target["name"] == fk["to_table"]
- )
- source_y = 40 + table["columns"].index(fk["from"]) * 20
- target_y = 40 + tables[target_index]["columns"].index(fk["to"]) * 20
- links.append(
- {
- "source": {"x": 50 + t_index * 150 + 120, "y": 50 + source_y},
- "target": {"x": 50 + target_index * 150, "y": 50 + target_y},
- }
- )
- return links
-
-
-def generate_er_diagram(database_path):
- conn = sqlite3.connect(database_path)
- tables = get_tables_and_columns(conn)
- links = [] # Currently, we won't extract relationships
- links = generate_links(tables)
-
- env = Environment(loader=FileSystemLoader("templates"))
- template = env.get_template("er_diagram.html")
-
- with open("index.html", "w") as f:
- f.write(template.render(tables=tables, links=links))
-
-
-if __name__ == "__main__":
- db_path = "database.db"
- generate_er_diagram(db_path)
diff --git a/database.md b/database.md
deleted file mode 100644
index 481c4a0..0000000
--- a/database.md
+++ /dev/null
@@ -1,72 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| f48730a783a5 |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
-|------|-------------|---|---|------|-----|-----------|--------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-| size | INTEGER | | | |
-| age | INTEGER | | | |
-| shoe_size | INTEGER | | | |
-| pet_id | INTEGER | pet.id | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 0 records.
-
----
-
diff --git a/er_diagram.png b/er_diagram.png
deleted file mode 100644
index afcf6e0..0000000
Binary files a/er_diagram.png and /dev/null differ
diff --git a/im.png b/im.png
deleted file mode 100644
index 9196f21..0000000
Binary files a/im.png and /dev/null differ
diff --git a/index.html b/index.html
deleted file mode 100644
index bb40a04..0000000
--- a/index.html
+++ /dev/null
@@ -1,129 +0,0 @@
-
-
-
-
-
- ER Diagram
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/learn_sql_model/api/hero.py b/learn_sql_model/api/hero.py
index 4ae0c8c..79db3d6 100644
--- a/learn_sql_model/api/hero.py
+++ b/learn_sql_model/api/hero.py
@@ -1,16 +1,16 @@
from fastapi import APIRouter, Depends, HTTPException
-from sqlmodel import Session, select
+from sqlmodel import SQLModel, Session
-from learn_sql_model.config import get_session
+from learn_sql_model.api.websocket_connection_manager import manager
+from learn_sql_model.config import get_config, get_session
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
hero_router = APIRouter()
@hero_router.on_event("startup")
-async def on_startup() -> None:
- # SQLModel.metadata.create_all(get_config().database.engine)
- ...
+def on_startup() -> None:
+ SQLModel.metadata.create_all(get_config().database.engine)
@hero_router.get("/hero/{hero_id}")
@@ -32,12 +32,12 @@ async def post_hero(
session: Session = Depends(get_session),
hero: HeroCreate,
) -> HeroRead:
- "create a hero"
+ "create a hero"
db_hero = Hero.from_orm(hero)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
- # await manager.broadcast({hero.json()}, id=1)
+ await manager.broadcast({hero.json()}, id=1)
return db_hero
@@ -47,7 +47,7 @@ async def patch_hero(
session: Session = Depends(get_session),
hero: HeroUpdate,
) -> HeroRead:
- "update a hero"
+ "update a hero"
db_hero = session.get(Hero, hero.id)
if not db_hero:
raise HTTPException(status_code=404, detail="Hero not found")
@@ -56,7 +56,7 @@ async def patch_hero(
session.add(db_hero)
session.commit()
session.refresh(db_hero)
- # await manager.broadcast({hero.json()}, id=1)
+ await manager.broadcast({hero.json()}, id=1)
return db_hero
@@ -66,13 +66,13 @@ async def delete_hero(
session: Session = Depends(get_session),
hero_id: int,
):
- "delete a hero"
+ "delete a hero"
hero = session.get(Hero, hero_id)
if not hero:
raise HTTPException(status_code=404, detail="Hero not found")
session.delete(hero)
session.commit()
- # await manager.broadcast(f"deleted hero {hero_id}", id=1)
+ await manager.broadcast(f"deleted hero {hero_id}", id=1)
return {"ok": True}
@@ -82,6 +82,4 @@ async def get_heros(
session: Session = Depends(get_session),
) -> Heros:
"get all heros"
- statement = select(Hero)
- heros = session.exec(statement).all()
- return Heros(__root__=heros)
+ return Heros.list(session=session)
diff --git a/learn_sql_model/api/websocket.py b/learn_sql_model/api/websocket.py
index ddfcb0d..962399c 100644
--- a/learn_sql_model/api/websocket.py
+++ b/learn_sql_model/api/websocket.py
@@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from rich.console import Console
-from sqlmodel import Session, select
+from sqlmodel import Session
from websockets.exceptions import ConnectionClosed
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.console import console
-from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros
+from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
web_socket_router = APIRouter()
@@ -46,9 +46,7 @@ async def websocket_endpoint_connect(
):
Console().log(f"Client #{id} connecting")
await manager.connect(websocket, channel)
- statement = select(Hero)
- heros = session.exec(statement).all()
- heros = Heros(__root__=heros)
+ heros = Heros.list(session=session)
await websocket.send_text(heros.json())
try:
@@ -85,18 +83,11 @@ async def websocket_endpoint_hero_echo(
while True:
data = await websocket.receive_text()
hero = HeroUpdate.parse_raw(data)
- statement = select(Hero)
- heros = session.exec(statement).all()
- heros = Heros(__root__=heros)
+ heros = Heros.list(session=session)
if heros != last_heros:
await manager.broadcast(heros.json(), "heros")
last_heros = heros
- db_hero = session.get(Hero, hero.id)
- for key, value in hero.dict(exclude_unset=True).items():
- setattr(db_hero, key, value)
- session.add(db_hero)
- session.commit()
- session.refresh(db_hero)
+ hero.update(session=session)
console.print(heros)
await websocket.send_text(heros.json())
@@ -105,9 +96,7 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
- statement = select(Hero)
- heros = session.exec(statement).all()
- heros = Heros(__root__=heros)
+ heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("disconnected")
except ConnectionClosed:
@@ -115,8 +104,6 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
- statement = select(Hero)
- heros = session.exec(statement).all()
- heros = Heros(__root__=heros)
+ heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("connection closed")
diff --git a/learn_sql_model/cli/api.py b/learn_sql_model/cli/api.py
index 2dadd71..24ccb13 100644
--- a/learn_sql_model/cli/api.py
+++ b/learn_sql_model/cli/api.py
@@ -1,12 +1,11 @@
import httpx
from rich.console import Console
import typer
+import uvicorn
from learn_sql_model.cli.common import verbose_callback
from learn_sql_model.config import get_config
-from learn_sql_model.optional import _optional_import_
-uvicorn = _optional_import_("uvicorn", group="api")
api_app = typer.Typer()
diff --git a/learn_sql_model/cli/hero.py b/learn_sql_model/cli/hero.py
index 40366ab..1815a2f 100644
--- a/learn_sql_model/cli/hero.py
+++ b/learn_sql_model/cli/hero.py
@@ -6,6 +6,7 @@ from rich.console import Console
import typer
from learn_sql_model.config import get_config
+from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import (
Hero,
HeroCreate,
@@ -14,13 +15,6 @@ from learn_sql_model.models.hero import (
HeroUpdate,
Heros,
)
-from learn_sql_model.optional import _optional_import_
-
-HeroFactory = _optional_import_(
- "learn_sql_model.factories.hero",
- "HeroFactory",
- group="api",
-)
hero_app = typer.Typer()
@@ -33,8 +27,9 @@ def hero():
@hero_app.command()
+@engorgio(typer=True)
def get(
- hero_id: Optional[int] = typer.Argument(),
+ hero_id: Optional[int] = typer.Argument(default=None),
) -> Union[Hero, List[Hero]]:
"get one hero"
hero = HeroRead.get(id=hero_id)
@@ -43,19 +38,25 @@ def get(
@hero_app.command()
-def list() -> Union[Hero, List[Hero]]:
+@engorgio(typer=True)
+def list(
+ where: Optional[str] = None,
+ offset: int = 0,
+ limit: Optional[int] = None,
+) -> Union[Hero, List[Hero]]:
"list many heros"
- heros = Heros.list()
+ heros = Heros.list(where=where, offset=offset, limit=limit)
Console().print(heros)
- return heros
+ return heros
@hero_app.command()
def clear() -> Union[Hero, List[Hero]]:
"list many heros"
heros = Heros.list()
- for hero in heros.__root__:
- HeroDelete.delete(id=hero.id)
+ for hero in heros.heros:
+ HeroDelete(id=hero.id).delete()
+
return hero
@@ -80,15 +81,14 @@ def update(
@hero_app.command()
@engorgio(typer=True)
def delete(
- hero_id: Optional[int] = typer.Argument(),
+ hero: HeroDelete,
) -> Hero:
"delete a hero by id"
- hero = HeroDelete.delete(id=hero_id)
- Console().print(hero)
- return hero
+ hero.delete()
@hero_app.command()
+@engorgio(typer=True)
def populate(
n: int = 10,
) -> Hero:
diff --git a/learn_sql_model/cli/model.py b/learn_sql_model/cli/model.py
index 786daff..eb5ea67 100644
--- a/learn_sql_model/cli/model.py
+++ b/learn_sql_model/cli/model.py
@@ -1,15 +1,11 @@
from pathlib import Path
-from typing import Annotated
-# import copier
+import alembic
+from alembic.config import Config
+import copier
import typer
from learn_sql_model.cli.common import verbose_callback
-from learn_sql_model.config import get_config
-from learn_sql_model.optional import _optional_import_
-
-alembic = _optional_import_("alembic", group="manage")
-Config = _optional_import_("alembic.config", "Config", group="manage")
model_app = typer.Typer()
@@ -44,18 +40,11 @@ def create_revision(
callback=verbose_callback,
help="show the log messages",
),
- message: Annotated[
- str,
- typer.Option(
- "--message",
- "-m",
- prompt=True,
- ),
- ] = None,
+ message: str = typer.Option(
+ prompt=True,
+ ),
):
alembic_cfg = Config("alembic.ini")
- config = get_config()
- alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.revision(
config=alembic_cfg,
message=message,
@@ -74,17 +63,7 @@ def checkout(
revision: str = typer.Option("head"),
):
alembic_cfg = Config("alembic.ini")
- config = get_config()
- alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
- alembic.command.upgrade(config=alembic_cfg, revision=revision)
-
-
-@model_app.command()
-def status():
- alembic_cfg = Config("alembic.ini")
- config = get_config()
- alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
- alembic.command.current(config=alembic_cfg)
+ alembic.command.upgrade(config=alembic_cfg, revision=revision)
@model_app.command()
@@ -94,4 +73,5 @@ def populate(
callback=verbose_callback,
help="show the log messages",
),
-): ...
+):
+ ...
diff --git a/learn_sql_model/config.py b/learn_sql_model/config.py
index 9619314..8deb688 100644
--- a/learn_sql_model/config.py
+++ b/learn_sql_model/config.py
@@ -4,8 +4,7 @@ from typing import TYPE_CHECKING
from fastapi import Depends
from pydantic import BaseModel, BaseSettings, validator
from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker
-from sqlmodel import Session
+from sqlmodel import SQLModel, Session
from learn_sql_model.standard_config import load
@@ -19,7 +18,6 @@ class ApiServer(BaseModel):
reload: bool = True
log_level: str = "info"
host: str = "0.0.0.0"
- workers: int = 1
class ApiClient(BaseModel):
@@ -27,6 +25,7 @@ class ApiClient(BaseModel):
protocol: str = "https"
url: str = f"{protocol}://{host}"
+
class Database:
def __init__(self, config: "Config" = None) -> None:
if config is None:
@@ -40,22 +39,10 @@ class Database:
"transactions": None,
}
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
-
- self.db_conf = {}
- if 'sqlite' in self.config.database_url:
- self.db_conf = {
- 'connect_args': {"check_same_thread": False},
- 'pool_recycle': 3600,
- 'pool_pre_ping': True,
- }
- self._engine = create_engine(
- self.config.database_url,
- **self.db_conf
- )
@property
def engine(self) -> "Engine":
- return self._engine
+ return create_engine(self.config.database_url)
@property
def session(self) -> "Session":
@@ -84,8 +71,7 @@ class Config(BaseSettings):
return get_database(config=self)
def init(self) -> None:
- # SQLModel.metadata.create_all(self.database.engine)
- ...
+ SQLModel.metadata.create_all(self.database.engine)
def get_database(config: Config = None) -> Database:
@@ -100,14 +86,9 @@ def get_config(overrides: dict = {}) -> Config:
return config
-config = get_config()
-database = get_database()
-
-SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
-
-
def get_session() -> "Session":
- with Session(database.engine) as session:
+ config = get_config()
+ with Session(config.database.engine) as session:
yield session
diff --git a/learn_sql_model/er_diagram.py b/learn_sql_model/er_diagram.py
deleted file mode 100644
index 4a48fa5..0000000
--- a/learn_sql_model/er_diagram.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import sqlite3
-
-from graphviz import Digraph
-
-from learn_sql_model.config import get_config
-
-config = get_config()
-
-
-def generate_er_diagram(output_path):
- # Connect to the SQLite database
- database_path = config.database_url.replace("sqlite:///", "")
- conn = sqlite3.connect(database_path)
- cursor = conn.cursor()
-
- # Get the table names from the database
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = cursor.fetchall()
-
- # Create a new Digraph
- dot = Digraph(format="png")
- dot.attr(rankdir="TD")
-
- # Iterate over the tables
- for table in tables:
- table_name = table[0]
- dot.node(table_name, shape="box")
- cursor.execute(f"PRAGMA table_info({table_name});")
- columns = cursor.fetchall()
-
- # Add the columns to the table node
- for column in columns:
- column_name = column[1]
- dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
- dot.edge(table_name, f"{table_name}.{column_name}")
-
- # Check for foreign key relationships
- cursor.execute(f"PRAGMA foreign_key_list({table_name});")
- foreign_keys = cursor.fetchall()
-
- # Add dotted lines for foreign key relationships
- for foreign_key in foreign_keys:
- from_column = foreign_key[3]
- to_table = foreign_key[2]
- to_column = foreign_key[4]
- dot.node(f"{to_table}.{to_column}", shape="oval")
- dot.edge(
- f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
- )
-
- # Render and save the diagram
- dot.render(output_path.replace(".png", ""), cleanup=True)
-
- # Close the database connection
- cursor.close()
- conn.close()
-
-
-def generate_er_markdown(output_path, er_diagram_path):
- # Connect to the SQLite database
- database_path = config.database_url.replace("sqlite:///", "")
- conn = sqlite3.connect(database_path)
- cursor = conn.cursor()
-
- # Get the table names from the database
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = cursor.fetchall()
-
- with open(output_path, "w") as f:
- # Write the ER Diagram image
- f.write(f"\n\n---\n\n")
-
- # Iterate over the tables
- for table in tables:
- table_name = table[0]
-
- f.write(f"## Table: {table_name}\n\n")
-
- # Get the table columns
- cursor.execute(f"PRAGMA table_info({table_name});")
- columns = cursor.fetchall()
-
- f.write("### First 5 rows\n\n")
- cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
- rows = cursor.fetchall()
- f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
- f.write("|")
- for column in columns:
- # ---
- f.write(f'{"-"*(len(column[1]) + 2)}|')
- f.write("\n")
- for row in rows:
- f.write(f'| {" | ".join([str(r) for r in row])} |\n')
- f.write("\n")
-
- cursor.execute(f"PRAGMA foreign_key_list({table_name});")
- foreign_keys = cursor.fetchall()
-
- # Add dotted lines for foreign key relationships
- fkeys = {}
- for foreign_key in foreign_keys:
- from_column = foreign_key[3]
- to_table = foreign_key[2]
- to_column = foreign_key[4]
- fkeys[from_column] = f"{to_table}.{to_column}"
-
- # Replace 'description' with the actual column name in the table that contains the description, if applicable
- try:
- cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
- description = cursor.fetchone()
- if description:
- f.write(f"### Description\n\n{description[0]}\n\n")
- except:
- ...
-
- # Write the table columns
- f.write("### Columns\n\n")
- f.write("| Column Name | Type | Foreign Key | Example Value |\n")
- f.write("|-------------|------|-------------|---------------|\n")
-
- for column in columns:
-
- column_name = column[1]
- column_type = column[2]
- fkey = ""
- if column_name in fkeys:
- fkey = fkeys[column_name]
- f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
-
- f.write("\n")
-
- # Get the count of records
- cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
- records_count = cursor.fetchone()[0]
- f.write(
- f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
- )
-
- # Close the database connection
- cursor.close()
- conn.close()
-
-
-if __name__ == "__main__":
- # Usage example
- database_path = "database.db"
- md_output_path = "database.md"
- er_output_path = "er_diagram.png"
-
- generate_er_diagram(database_path, er_output_path)
- generate_markdown(database_path, md_output_path, er_output_path)
diff --git a/learn_sql_model/factories/hero.py b/learn_sql_model/factories/hero.py
index 2c227d9..a167c84 100644
--- a/learn_sql_model/factories/hero.py
+++ b/learn_sql_model/factories/hero.py
@@ -10,6 +10,7 @@ class HeroFactory(ModelFactory[Hero]):
__model__ = Hero
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
+ id = None
pet_id = None
@classmethod
diff --git a/learn_sql_model/game/debug.py b/learn_sql_model/game/debug.py
deleted file mode 100644
index 8e01bf3..0000000
--- a/learn_sql_model/game/debug.py
+++ /dev/null
@@ -1,25 +0,0 @@
-import pygame
-
-
-class Debug:
- def __init__(self, game):
- self.game = game
- self.is_open = False
- self.debounce = False
-
- def handle_events(self, events):
- for event in events:
- if event.type == pygame.KEYDOWN:
- if event.key == pygame.K_F3 and not self.debounce:
- self.is_open = not self.is_open
- self.debounce = True
- if event.type == pygame.KEYUP:
- if event.key == pygame.K_F3:
- self.debounce = False
-
- def render(self):
- if self.is_open:
- text = self.game.font.render(
- str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
- )
- self.game.screen.blit(text, (20, 20))
diff --git a/learn_sql_model/game/game.py b/learn_sql_model/game/game.py
index 742d8f5..1e741a3 100644
--- a/learn_sql_model/game/game.py
+++ b/learn_sql_model/game/game.py
@@ -1,28 +1,29 @@
import atexit
+import pygame
from typer import Typer
from websocket import create_connection
from learn_sql_model.config import get_config
from learn_sql_model.console import console
-from learn_sql_model.game.debug import Debug
-from learn_sql_model.game.light import Light
-from learn_sql_model.game.map import Map
-from learn_sql_model.game.menu import Menu
-from learn_sql_model.game.player import Player
-from learn_sql_model.optional import _optional_import_
-
-pygame = _optional_import_("pygame", group="game")
+from learn_sql_model.factories.hero import HeroFactory
+from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
speed = 10
+pygame.font.init() # you have to call this at the start,
+# if you want to use this module.
+my_font = pygame.font.SysFont("Comic Sans MS", 30)
+
config = get_config()
class Client:
def __init__(self):
- # self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
- self.screen = pygame.display.set_mode((1280, 720))
+ hero = HeroFactory().build(size=50, x=100, y=100)
+ self.hero = HeroCreate(**hero.dict()).post()
+
+ self.screen = pygame.display.set_mode((800, 600))
pygame.display.set_caption("Learn SQL Model")
self.clock = pygame.time.Clock()
self.running = True
@@ -33,31 +34,16 @@ class Client:
self.moving_left = False
self.moving_right = False
self.ticks = 0
- self.player = Player(self)
- self.menu = Menu(self)
- self.map = Map(self)
- self.light = Light(self)
- self.font = pygame.font.SysFont("", 25)
- self.joysticks = {}
- self.darkness = pygame.Surface(
- (self.screen.get_width(), self.screen.get_height()),
- pygame.SRCALPHA,
- 32,
- )
- self.debug = Debug(self)
+ self.others = []
atexit.register(self.quit)
@property
def ws(self):
def connect():
- if "https" in config.api_client.url:
- url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
- elif "http" in config.api_client.url:
- url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
- else:
- url = f"ws://{config.api_client.url}/wsecho"
- self._ws = create_connection(url)
+ scheme = "wss" if "https" in config.api_client.url else "ws"
+ host = config.api_client.url.replace("https://", "").replace("http://", "")
+ self._ws = create_connection(f"{scheme}://{host}/wsecho")
if not hasattr(self, "_ws"):
connect()
@@ -66,13 +52,6 @@ class Client:
return self._ws
def run(self):
- try:
- from pyinstrument import Profiler
-
- profiler = Profiler()
- profiler.start()
- except ImportError:
- profiler = None
while self.running:
console.print("running")
console.print("handle_events")
@@ -80,60 +59,109 @@ class Client:
console.print("update")
self.update()
console.print("render")
-
self.render()
time = self.clock.tick(60)
- self.elapsed = time / 100
self.ticks += 1
console.print(f"time: {time}")
console.print(f"ticks: {self.ticks}")
- if profiler:
- profiler.stop()
- print(profiler.output_text())
self.quit()
def quit(self):
- self.running = False
- self.player.quit()
+ try:
+ HeroDelete(id=self.hero.id).delete()
+ except Exception:
+ pass
def update(self):
- ...
+ if self.moving_up:
+ self.hero.y -= speed
+ if self.moving_down:
+ self.hero.y += speed
+ if self.moving_left:
+ self.hero.x -= speed
+ if self.moving_right:
+ self.hero.x += speed
+
+ if self.ticks % 5 == 0 or self.ticks == 0:
+ console.print("updating")
+ update = HeroUpdate(**self.hero.dict(exclude_unset=True))
+ console.print(update)
+ self.ws.send(update.json())
+ console.print("sent")
+
+ raw_heros = self.ws.recv()
+ console.print(raw_heros)
+ self.others = Heros.parse_raw(raw_heros)
def render(self):
self.screen.fill((0, 0, 0))
- self.map.render()
- self.player.render()
- if self.ticks % 1 == 0 or self.ticks == 0:
- light_level = 0
- self.darkness.fill((light_level, light_level, light_level))
- self.light.render()
+ for other in self.others.heros:
+ if other.id != self.hero.id:
+ pygame.draw.circle(
+ self.screen, (255, 0, 0), (other.x, other.y), other.size
+ )
+ self.screen.blit(
+ my_font.render(other.name, False, (255, 255, 255), 1),
+ (other.x, other.y),
+ )
+
+ pygame.draw.circle(
+ self.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
+ )
self.screen.blit(
- self.darkness,
- (0, 0),
- special_flags=pygame.BLEND_MULT,
+ my_font.render(self.hero.name, False, (255, 255, 255)),
+ (self.hero.x, self.hero.y),
)
# update the screen
- self.menu.render()
- self.debug.render()
pygame.display.flip()
def handle_events(self):
self.events = pygame.event.get()
- self.menu.handle_events(self.events)
- self.debug.handle_events(self.events)
- self.player.handle_events()
for event in self.events:
if event.type == pygame.QUIT:
self.running = False
- if event.type == pygame.JOYDEVICEADDED:
- # This event will be generated when the program starts for every
- # joystick, filling up the list without needing to create them manually.
- joy = pygame.joystick.Joystick(event.device_index)
- self.joysticks[joy.get_instance_id()] = joy
- if event.type == pygame.JOYDEVICEREMOVED:
- del self.joysticks[event.instance_id]
+ if event.type == pygame.KEYDOWN:
+ if event.key == pygame.K_ESCAPE:
+ self.running = False
+ if event.key == pygame.K_LEFT:
+ self.moving_left = True
+ if event.key == pygame.K_RIGHT:
+ self.moving_right = True
+ if event.key == pygame.K_UP:
+ self.moving_up = True
+ if event.key == pygame.K_DOWN:
+ self.moving_down = True
+ # wasd
+ if event.key == pygame.K_w:
+ self.moving_up = True
+ if event.key == pygame.K_s:
+ self.moving_down = True
+ if event.key == pygame.K_a:
+ self.moving_left = True
+ if event.key == pygame.K_d:
+ self.moving_right = True
+ # controller left joystick
+
+ if event.type == pygame.KEYUP:
+ if event.key == pygame.K_LEFT:
+ self.moving_left = False
+ if event.key == pygame.K_RIGHT:
+ self.moving_right = False
+ if event.key == pygame.K_UP:
+ self.moving_up = False
+ if event.key == pygame.K_DOWN:
+ self.moving_down = False
+ # wasd
+ if event.key == pygame.K_w:
+ self.moving_up = False
+ if event.key == pygame.K_s:
+ self.moving_down = False
+ if event.key == pygame.K_a:
+ self.moving_left = False
+ if event.key == pygame.K_d:
+ self.moving_right = False
def check_events(self):
pass
diff --git a/learn_sql_model/game/light.py b/learn_sql_model/game/light.py
deleted file mode 100644
index abaa861..0000000
--- a/learn_sql_model/game/light.py
+++ /dev/null
@@ -1,219 +0,0 @@
-import bisect
-
-from PIL import Image, ImageFilter
-
-from learn_sql_model.optional import _optional_import_
-
-pygame = _optional_import_("pygame", group="game")
-
-
-def rot_center(image, angle):
- """rotate an image while keeping its center and size"""
- orig_rect = image.get_rect()
- rot_image = pygame.transform.rotate(image, angle)
- rot_rect = orig_rect.copy()
- rot_rect.center = rot_image.get_rect().center
- rot_image = rot_image.subsurface(rot_rect).copy()
- return rot_image
-
-
-class Light:
- def __init__(self, game):
- self.game = game
- self.surf = pygame.Surface(
- (self.game.screen.get_width(), self.game.screen.get_height()),
- pygame.SRCALPHA,
- 32,
- )
- self.surf.set_colorkey((0, 0, 0))
- self.pre_render()
-
- def pre_render(self):
-
- # self.lights = {}
- # for deg in range(-360, 360, 20):
- # print("loading light", deg)
- # self.lights[deg] = pygame.image.load(
- # f"lights/light-{deg}.png"
- # ).convert_alpha()
- # return
-
- light_surf = pygame.Surface(
- (
- self.game.player.hero.flashlight_strength * 3,
- self.game.player.hero.flashlight_strength * 3,
- ),
- pygame.SRCALPHA,
- 32,
- )
-
- v = pygame.math.Vector2(0, 1)
- v.scale_to_length(self.game.player.hero.flashlight_strength)
- for r in range(-90 - 25, -90 + 25):
- _v = v.rotate(r)
- pygame.draw.line(
- light_surf,
- (255, 250, 205),
- (light_surf.get_width() / 2, light_surf.get_height() / 2),
- (
- light_surf.get_width() / 2 + _v.x,
- light_surf.get_height() / 2 + _v.y,
- ),
- 50,
- )
- pygame.draw.circle(
- light_surf,
- (255, 250, 205),
- (light_surf.get_width() / 2, light_surf.get_height() / 2),
- self.game.player.hero.lanturn_strength,
- )
-
- light_surf_pil = Image.frombytes(
- "RGBA",
- (light_surf.get_width(), light_surf.get_height()),
- pygame.image.tostring(light_surf, "RGBA", False),
- )
- light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
- light_surf = pygame.image.fromstring(
- light_surf_blur.tobytes(),
- (light_surf.get_width(), light_surf.get_height()),
- "RGBA",
- ).convert_alpha()
-
- pygame.draw.circle(
- light_surf,
- (255, 250, 205),
- (light_surf.get_width() / 2, light_surf.get_height() / 2),
- self.game.player.hero.lanturn_strength,
- )
-
- light_surf_pil = Image.frombytes(
- "RGBA",
- (light_surf.get_width(), light_surf.get_height()),
- pygame.image.tostring(light_surf, "RGBA", False),
- )
- light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
- light_surf = pygame.image.fromstring(
- light_surf_blur.tobytes(),
- (light_surf.get_width(), light_surf.get_height()),
- "RGBA",
- ).convert_alpha()
-
- pygame.draw.circle(
- light_surf,
- (255, 250, 205),
- (light_surf.get_width() / 2, light_surf.get_height() / 2),
- self.game.player.hero.lanturn_strength,
- )
-
- light_surf_pil = Image.frombytes(
- "RGBA",
- (light_surf.get_width(), light_surf.get_height()),
- pygame.image.tostring(light_surf, "RGBA", False),
- )
- light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
- light_surf = pygame.image.fromstring(
- light_surf_blur.tobytes(),
- (light_surf.get_width(), light_surf.get_height()),
- "RGBA",
- ).convert_alpha()
-
- self.light_surf = light_surf
- self.light_surf.set_colorkey((0, 0, 0))
-
- self.lights = {
- deg: pygame.transform.rotate(self.light_surf, deg - 90)
- for deg in range(-360, 360, 20)
- }
-
- for deg, light in self.lights.items():
- pygame.image.save(light, f"lights/light-{deg}.png")
-
- def render(self):
- self.surf.fill((0, 0, 0))
- mx, my = pygame.mouse.get_pos()
- v = pygame.math.Vector2(
- mx - self.game.player.hero.x, my - self.game.player.hero.y
- )
- v.scale_to_length(self.game.player.hero.flashlight_strength)
- self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
-
- for other in self.game.player.others.__root__:
- if other.id == self.game.player.hero.id:
- continue
-
- light_index = list(self.lights.keys())[
- bisect.bisect_left(
- list(self.lights.keys()),
- other.flashlight_angle + 90,
- )
- ]
-
- my_light = self.lights[light_index]
- self.surf.blit(
- my_light,
- (
- other.x - my_light.get_width() / 2,
- other.y - my_light.get_height() / 2,
- ),
- )
-
- light_index = list(self.lights.keys())[
- bisect.bisect_left(
- list(self.lights.keys()),
- self.game.player.hero.flashlight_angle + 90,
- )
- ]
-
- my_light = self.lights[light_index]
- self.surf.blit(
- my_light,
- (
- self.game.player.hero.x - my_light.get_width() / 2,
- self.game.player.hero.y - my_light.get_height() / 2,
- ),
- )
-
- # for r in range(-25, 25):
- # _v = v.rotate(r)
- # pygame.draw.line(
- # self.surf,
- # (255, 250, 205),
- # (self.game.player.hero.x, self.game.player.hero.y),
- # (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
- # 50,
- # )
- # # draw a circle
- # pygame.draw.circle(
- # self.surf,
- # (255, 250, 205),
- # (self.game.player.hero.x, self.game.player.hero.y),
- # self.game.player.hero.lanturn_strength,
- # )
-
- # for other in self.game.player.others.__root__:
- # if other.id == self.game.player.hero.id:
- # continue
- # v = pygame.math.Vector2(0, 1)
- # v = v.rotate(-other.flashlight_angle)
- # v.scale_to_length(other.flashlight_strength)
- # for r in range(-25, 25):
- # _v = v.rotate(r)
- # pygame.draw.line(
- # self.surf,
- # (255, 250, 205),
- # (other.x, other.y),
- # (other.x + _v.x, other.y + _v.y),
- # 50,
- # )
- # pygame.draw.circle(
- # self.surf,
- # (255, 250, 205),
- # (other.x, other.y),
- # other.lanturn_strength,
- # )
-
- self.game.darkness.blit(
- self.surf,
- (0, 0),
- )
diff --git a/learn_sql_model/game/map.py b/learn_sql_model/game/map.py
deleted file mode 100644
index e792056..0000000
--- a/learn_sql_model/game/map.py
+++ /dev/null
@@ -1,134 +0,0 @@
-import pydantic
-from rich.console import Console
-
-from learn_sql_model.optional import _optional_import_
-
-snoise2 = _optional_import_("noise", "snoise2", group="game")
-pygame = _optional_import_("pygame", group="game")
-
-
-console = Console()
-
-
-class Point(pydantic.BaseModel):
- x: int
- y: int
-
-
-class Map:
- def __init__(self, game):
- self.game = game
- # self.grass = pygame.image.load("grass.webp").convert_alpha()
- # self.rock = pygame.image.load("rock.jpg").convert_alpha()
- # self.dirt = pygame.image.load("dirt.jpg").convert_alpha()
- self.brown = (204, 153, 102)
- self.grey = (128, 128, 128)
- self.green = (0, 255, 0)
- self.white = (255, 255, 255)
- self.resolution = 16
- self.scale = 0.14 # Determines the "smoothness" of the terrain
- self.scale = 0.05 # Determines the "smoothness" of the terrain
- self.offset = Point(x=0, y=0)
- self.last_offset = self.offset
- self.screen_width = self.game.screen.get_width()
- self.screen_height = self.game.screen.get_height()
- self.octaves = 2 # Number of layers of noise to combine
- self.persistence = 0.05 # Amplitude of each octave
- self.lacunarity = 1.0 # Frequency of each octave
- self.thresh = 125
- # try to load the map from map.png
- try:
- self.surf = pygame.image.load("map.png").convert_alpha()
-
- # self.surf_pil = Image.frombytes(
- # "RGBA",
- # (self.surf.get_width(), self.surf.get_height()),
- # pygame.image.tostring(self.surf, "RGBA", False),
- # )
- # self.surf_blur = (
- # self.surf_pil.filter(
- # ImageFilter.SMOOTH_MORE(),
- # )
- # .filter(ImageFilter.SMOOTH_MORE())
- # .filter(ImageFilter.SMOOTH_MORE())
- # .filter(ImageFilter.SMOOTH_MORE())
- # .filter(ImageFilter.SMOOTH_MORE())
- # .filter(ImageFilter.SMOOTH_MORE())
- # # sharpen
- # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
- # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
- # .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
- # )
-
- # self.surf = pygame.image.fromstring(
- # self.surf_blur.tobytes(),
- # (self.surf.get_width(), self.surf.get_height()),
- # "RGBA",
- # ).convert_alpha()
-
- except FileNotFoundError:
- self.pre_draw()
-
- def refresh_surf(self):
-
- self.surf = pygame.Surface((self.screen_width, self.screen_height))
-
- def get_noise(self, x, y):
- value = snoise2(
- (x + self.offset.x) * self.scale,
- (y + self.offset.y) * self.scale,
- self.octaves,
- self.persistence,
- self.lacunarity,
- )
- value = (value + 1) / 2 * 255
- return value
-
- def render(self):
- self.game.screen.blit(
- self.surf,
- (0, 0),
- )
-
- def point_check_collision(self, x, y, thresh=None):
- return self.get_noise(x / self.resolution, y / self.resolution) < (
- thresh or self.thresh
- )
-
- def pre_draw(self):
- self.refresh_surf()
-
- for x in range(int(self.screen_width)):
- for y in range(int(self.screen_height)):
- if not self.point_check_collision(x, y):
- pygame.draw.rect(
- self.surf,
- self.white,
- (
- x,
- y,
- 1,
- 1,
- ),
- )
- pygame.image.save(self.surf, "map.png")
-
- # av1 = (
- # Image.open("rock.jpg")
- # .convert("RGB")
- # .resize((self.screen_width, self.screen_height))
- # )
- # av2 = (
- # Image.open("dirt.jpg")
- # .convert("RGB")
- # .resize((self.screen_width, self.screen_height))
- # )
- # mask = (
- # Image.open("map.png")
- # .convert("L")
- # .resize((self.screen_width, self.screen_height))
- # .filter(ImageFilter.GaussianBlur(3))
- # )
- # Image.composite(av2, av1, mask).save("result.png")
- # result = pygame.image.load("result.png")
- # self.surf.blit(result, (0, 0))
diff --git a/learn_sql_model/game/menu.py b/learn_sql_model/game/menu.py
deleted file mode 100644
index c3c67f5..0000000
--- a/learn_sql_model/game/menu.py
+++ /dev/null
@@ -1,185 +0,0 @@
-from typing import Callable, Tuple
-
-from pydantic import BaseModel
-
-from learn_sql_model.optional import _optional_import_
-
-pygame = _optional_import_("pygame", group="game")
-
-
-screen_sizes = [
- (480, 360), # 360p
- (640, 480), # VGA
- (800, 600), # SVGA
- (1024, 768), # XGA
- (1280, 720), # HD 720p
- (1366, 768), # HD 1366x768
- (1600, 900), # HD+ 1600x900
- (1920, 1080), # Full HD 1080p
- (2560, 1440), # 2K / QHD 1440p
- (3840, 2160), # 4K / UHD 2160p
-]
-
-
-class MenuItem(BaseModel):
- display_text: str
- on_click: Callable = None
- text_color: Tuple[str, str, str] = (0, 0, 0)
-
-
-class Menu:
- def __init__(self, game):
- pygame.font.init()
-
- self.game = game
- self.hamburger = Hamburger(game)
-
- self.padding = 10
- self.font_size = 50
- self.line_height = 55
-
- self.menu_width = min(
- max(200, self.game.screen.get_width() * 0.8), self.game.screen.get_width()
- )
- self.menu_height = min(
- max(200, self.game.screen.get_height() * 0.8), self.game.screen.get_height()
- )
- self.x = (self.game.screen.get_width() - self.menu_width) / 2
- self.y = (self.game.screen.get_height() - self.menu_height) / 2
- self.color = (100, 100, 100)
- self.is_menu_open = False
-
- self.surface = pygame.Surface((self.menu_width, self.menu_height))
- self.font = pygame.font.SysFont("", self.font_size)
-
- self.screen_size_index = False
-
- @property
- def items(self) -> list[MenuItem]:
- return [
- MenuItem(
- display_text="Menu",
- on_click=lambda: print("clicked on me, the menu"),
- ),
- MenuItem(
- display_text="Screen Size",
- on_click=self.next_screen_size,
- ),
- MenuItem(
- display_text=f"{self.game.screen.get_width()}x{self.game.screen.get_height()}",
- color=(50, 0, 0),
- on_click=self.next_screen_size,
- ),
- MenuItem(
- display_text=f"{self.game.player.hero.name}",
- color=(50, 0, 0),
- on_click=self.game.player.rename_hero,
- ),
- MenuItem(
- display_text="quit",
- color=(50, 0, 0),
- on_click=lambda: self.game.quit(),
- ),
- ]
-
- def render(self):
- if self.is_menu_open:
- self.surface.fill(self.color)
-
- pos = (self.padding, self.padding)
- for item in self.items:
- text = self.font.render(item.display_text, True, item.text_color)
- self.surface.blit(text, pos)
- pos = (pos[0], pos[1] + self.line_height)
-
- self.game.screen.blit(self.surface, (self.x, self.y))
-
- self.hamburger.render()
-
- def next_screen_size(self):
- if self.screen_size_index is False:
- self.screen = pygame.display.set_mode(screen_sizes[0])
- self.screen_size_index = 0
- if self.screen_size_index == len(screen_sizes) - 1:
- self.screen_size_index = 0
- else:
- self.screen_size_index += 1
- self.screen = pygame.display.set_mode(screen_sizes[self.screen_size_index])
-
- def get_mouse_pos(self):
- "get mouse position relative to self.surface"
- x, y = pygame.mouse.get_pos()
- return x - self.x, y - self.y
-
- def handle_events(self, events):
- self.hamburger.handle_events(self, events)
- for event in events:
- if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open:
- if event.button == 1: # Left mouse button
- self.handle_click()
-
- def handle_click(self):
- pos = self.get_mouse_pos()
- pos_idx = int(pos[1] // self.line_height)
- if pos_idx > len(self.items):
- return
- if pos_idx < 0:
- return
- self.items[pos_idx].on_click()
-
-
-class Hamburger:
- def __init__(self, game):
- self.game = game
- self.hamburger_width = 50
- self.bar_height = self.hamburger_width / 4
- self.bar_spacing = self.hamburger_width / 20
- self.hamburger_height = self.bar_height * 3 + self.bar_spacing * 2
- self.x = self.game.screen.get_width() - self.hamburger_width - 20
- self.y = 20
- self.color = (100, 100, 100)
- self.rect = pygame.Rect(
- self.x, self.y, self.hamburger_width, self.hamburger_height
- )
- self.surface = pygame.Surface((self.hamburger_width, self.hamburger_height))
-
- def render(self):
- pygame.draw.rect(
- self.surface,
- self.color,
- (0, 0, self.hamburger_width, self.bar_height),
- )
- pygame.draw.rect(
- self.surface,
- self.color,
- (
- 0,
- self.bar_height + self.bar_spacing,
- self.hamburger_width,
- self.bar_height,
- ),
- )
- pygame.draw.rect(
- self.surface,
- self.color,
- (
- 0,
- 2 * (self.bar_height + self.bar_spacing),
- self.hamburger_width,
- self.bar_height,
- ),
- )
-
- self.game.screen.blit(self.surface, (self.x, self.y))
-
- def handle_events(self, menu: Menu, events):
- for event in events:
- if event.type == pygame.MOUSEBUTTONDOWN:
- if event.button == 1: # Left mouse button
- self.handle_click(menu)
-
- def handle_click(self, menu):
- pos = pygame.mouse.get_pos()
-
- if self.rect.collidepoint(pos):
- menu.is_menu_open = not menu.is_menu_open
diff --git a/learn_sql_model/game/player.py b/learn_sql_model/game/player.py
deleted file mode 100644
index c7e1a79..0000000
--- a/learn_sql_model/game/player.py
+++ /dev/null
@@ -1,261 +0,0 @@
-from learn_sql_model.console import console
-from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
-from learn_sql_model.optional import _optional_import_
-
-pygame = _optional_import_("pygame", group="game")
-HeroFactory = _optional_import_(
- "learn_sql_model.factories.hero",
- "HeroFactory",
- group="game",
-)
-
-
-class Player:
- def __init__(self, game):
- hero = HeroFactory().build(
- size=25,
- x=100,
- y=100,
- flashlight_strength=1000,
- lanturn_strength=100,
- flashlight_angle=0,
- )
- self.hero = HeroCreate(**hero.dict()).post()
- self.hero.size = 64
-
- self.game = game
- self.others = [] # Heros(heros=[])
- self.width = 16
- self.height = 16
- self.white = (255, 255, 255)
- self.x = self.game.screen.get_width() / 2
- self.y = self.game.screen.get_height() / 2
- self.speed = 10
- self.max_speed = 10
- self.image = pygame.image.load("creeper.png").convert_alpha()
- self.pet_image = pygame.image.load("pet.png").convert_alpha()
- self.image = pygame.transform.scale(
- self.image, (self.hero.size, self.hero.size)
- )
- self.pet_image = pygame.transform.scale(
- self.pet_image, (self.hero.size/1.5, self.hero.size/2)
- )
- self.x_last = self.x
- self.y_last = self.y
- self.hitbox_surface = pygame.Surface((self.width, self.height))
- self.hitbox_surface.fill(self.white)
- pygame.draw.rect(
- self.hitbox_surface, (255, 0, 0), (0, 0, self.width, self.height), 1
- )
- self.hitbox_surface.set_alpha(0)
- self.moving_up = False
- self.moving_down = False
- self.moving_left = False
- self.moving_right = False
- self.joysticks = {}
-
- def rename_hero(self):
- hero = HeroFactory().build(
- size=self.hero.size,
- x=self.hero.x,
- y=self.hero.y,
- id=self.hero.id,
- flashlight_strength=self.hero.flashlight_strength,
- lanturn_strength=self.hero.lanturn_strength,
- )
- self.hero = HeroUpdate(**hero.dict()).update()
-
- def quit(self):
- try:
- # session = get_config().database.session
- # hero = session.get(Hero, self.hero.id)
- # session.delete(hero)
- # session.commit()
- HeroDelete.delete(id=self.hero.id)
- except RuntimeError:
- pass
-
- def handle_events(self):
- # Update the self
- for event in self.game.events:
- if event.type == pygame.QUIT:
- self.running = False
- if event.type == pygame.KEYDOWN:
- if event.key == pygame.K_ESCAPE:
- self.running = False
- if event.key == pygame.K_LEFT:
- self.speed = self.max_speed
- self.moving_left = True
- if event.key == pygame.K_RIGHT:
- self.speed = self.max_speed
- self.moving_right = True
- if event.key == pygame.K_UP:
- self.speed = self.max_speed
- self.moving_up = True
- if event.key == pygame.K_DOWN:
- self.speed = self.max_speed
- self.moving_down = True
- # wasd
- if event.key == pygame.K_w:
- self.speed = self.max_speed
- self.moving_up = True
- if event.key == pygame.K_s:
- self.speed = self.max_speed
- self.moving_down = True
- if event.key == pygame.K_a:
- self.speed = self.max_speed
- self.moving_left = True
- if event.key == pygame.K_d:
- self.speed = self.max_speed
- self.moving_right = True
-
- if event.type == pygame.KEYUP:
- if event.key == pygame.K_LEFT:
- self.moving_left = False
- if event.key == pygame.K_RIGHT:
- self.moving_right = False
- if event.key == pygame.K_UP:
- self.moving_up = False
- if event.key == pygame.K_DOWN:
- self.moving_down = False
- # wasd
- if event.key == pygame.K_w:
- self.moving_up = False
- if event.key == pygame.K_s:
- self.moving_down = False
- if event.key == pygame.K_a:
- self.moving_left = False
- if event.key == pygame.K_d:
- self.moving_right = False
-
- for joystick in self.joysticks.values():
- if abs(joystick.get_axis(0)) > 0.2:
- self.x += joystick.get_axis(0) * 10 * self.speed * self.elapsed
- if abs(joystick.get_axis(1)) > 0.2:
- self.y += joystick.get_axis(1) * 10 * self.speed * self.elapsed
-
- if abs(joystick.get_axis(3)) > 0.2 and abs(joystick.get_axis(4)) > 0.2:
- pygame.mouse.set_pos(
- (
- pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
- pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
- )
- )
- elif abs(joystick.get_axis(3)) > 0.2:
- pygame.mouse.set_pos(
- (
- pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
- pygame.mouse.get_pos()[1],
- )
- )
- elif abs(joystick.get_axis(4)) > 0.2:
- pygame.mouse.set_pos(
- (
- pygame.mouse.get_pos()[0],
- pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
- )
- )
- if self.moving_left:
- self.hero.x -= self.speed
- if self.moving_right:
- self.hero.x += self.speed
- if self.moving_up:
- self.hero.y -= self.speed
- if self.moving_down:
- self.hero.y += self.speed
- # Check for self collisions with the walls and the black tiles on the map
- if self.hero.x < 0:
- self.hero.x = 0
- if self.hero.x > self.game.screen.get_width() - self.width:
- self.hero.x = self.game.screen.get_width() - self.width
- if self.hero.y < 0:
- self.hero.y = 0
- if self.hero.y > self.game.screen.get_height() - self.height:
- self.hero.y = self.game.screen.get_height() - self.height
-
- self.pos = pygame.math.Vector2(self.hero.x, self.hero.y)
-
- if self.game.map.point_check_collision(self.pos.x, self.pos.y):
- start_pos = pygame.math.Vector2(self.x_last, self.y_last)
- end_pos = pygame.math.Vector2(self.hero.x, self.hero.y)
- movement_vector = end_pos - start_pos
- try:
- movement_direction = movement_vector.normalize()
- except ValueError:
- end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
- movement_vector = end_pos - start_pos
- movement_direction = movement_vector.normalize()
- except ZeroDivisionError:
- end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
- movement_vector = end_pos - start_pos
- movement_direction = movement_vector.normalize()
- movement_speed = 0.05
-
- self.hero.x = self.x_last
- self.hero.y = self.y_last
-
- self.pos = pygame.math.Vector2(start_pos)
-
- while self.game.map.point_check_collision(self.pos.x, self.pos.y):
- self.pos += movement_speed * movement_direction
- self.hero.x = self.pos.x
- self.hero.y = self.pos.y
-
- self.pos -= movement_speed * movement_direction
- self.hero.x = self.pos.x
- self.hero.y = self.pos.y
-
- self.x_last = self.hero.x
- self.y_last = self.hero.y
-
- if self.game.ticks % 60 == 0 or self.game.ticks == 0:
- console.print("updating")
- update = HeroUpdate(**self.hero.dict(exclude_unset=True))
- console.print(update)
- self.game.ws.send(update.json())
- console.print("sent")
-
- raw_heros = self.game.ws.recv()
- console.print(raw_heros)
- self.others = Heros.parse_raw(raw_heros)
-
- def draw(self):
- self.move()
- self.game.screen.blit(
- pygame.transform.scale(self.image, (16, 16)),
- (self.x - 8 - self.game.map.offset.x, self.y - 8 - self.game.map.offset.y),
- )
-
- def render(self):
- for other in self.others.__root__:
- if other.id != self.hero.id:
- # put self.image on the game.screen
- self.game.screen.blit(
- self.image,
- (other.x - other.size / 2, other.y - other.size / 2),
- )
-
- # pygame.draw.circle(
- # self.game.screen, (255, 0, 0), (other.x, other.y), other.size
- # )
- self.game.screen.blit(
- self.game.font.render(other.name, False, (255, 255, 255), 1),
- (other.x - other.size / 2, other.y + other.size / 2),
- )
- self.game.screen.blit(
- self.image,
- (self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
- )
- self.game.screen.blit(
- self.pet_image,
- (self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
- )
-
- # pygame.draw.circle(
- # self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
- # )
-
- self.game.screen.blit(
- self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
- (self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2),
- )
diff --git a/learn_sql_model/models/hero.py b/learn_sql_model/models/hero.py
index 9b23250..4b0544b 100644
--- a/learn_sql_model/models/hero.py
+++ b/learn_sql_model/models/hero.py
@@ -1,12 +1,12 @@
-from typing import Dict, Optional
+from typing import Optional
+from fastapi import HTTPException
import httpx
-import pydantic
from pydantic import BaseModel
-from sqlmodel import Field, SQLModel
+from sqlmodel import Field, Relationship, SQLModel, Session, select
from learn_sql_model.config import config
-from learn_sql_model.optional import optional
+from learn_sql_model.models.pet import Pet
class HeroBase(SQLModel, table=False):
@@ -14,27 +14,16 @@ class HeroBase(SQLModel, table=False):
secret_name: str
x: int
y: int
- size: Optional[int]
- flashlight_strength: Optional[int] = 1000
- flashlight_angle: Optional[int] = 0
- lanturn_strength: Optional[int] = 100
- # age: Optional[int] = None
- # shoe_size: Optional[int] = None
+ size: int
+ age: Optional[int] = None
+ shoe_size: Optional[int] = None
- # pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- # pet: Optional[Pet] = Relationship(back_populates="hero")
-
- @pydantic.validator("size", pre=True, always=True)
- def validate_size(cls, v):
- if v is None:
- return 50
- if v <= 0:
- raise ValueError("size must be > 0")
- return v
+ pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ pet: Optional[Pet] = Relationship(back_populates="hero")
class Hero(HeroBase, table=True):
- id: int = Field(default=None, primary_key=True)
+ id: Optional[int] = Field(default=None, primary_key=True)
class HeroCreate(HeroBase):
@@ -59,46 +48,87 @@ class HeroRead(HeroBase):
cls,
id: int,
) -> Hero:
- r = httpx.get(f"{config.api_client.url}/hero/{id}")
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return HeroRead.parse_obj(r.json())
+ with config.database.session as session:
+ hero = session.get(Hero, id)
+ if not hero:
+ raise HTTPException(status_code=404, detail="Hero not found")
+ return hero
class Heros(BaseModel):
- __root__: list[Hero]
+ heros: list[Hero]
@classmethod
def list(
self,
+ where=None,
+ offset=0,
+ limit=None,
+ session: Session = None,
) -> Hero:
- r = httpx.get(f"{config.api_client.url}/heros/")
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return Heros.parse_obj({"__root__": r.json()})
+ # with config.database.session as session:
+
+ def get_heros(session, where, offset, limit):
+ statement = select(Hero)
+ if where != "None" and where is not None:
+ from sqlmodel import text
+
+ statement = statement.where(text(where))
+ statement = statement.offset(offset).limit(limit)
+ heros = session.exec(statement).all()
+ return Heros(heros=heros)
+
+ if session is None:
+
+ r = httpx.get(f"{config.api_client.url}/heros/")
+ if r.status_code != 200:
+ raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ return Heros.parse_obj(r.json())
+
+ return get_heros(session, where, offset, limit)
-@optional
-class HeroUpdate(HeroBase):
+class HeroUpdate(SQLModel):
+ # id is required to update the hero
id: int
- def update(self) -> Hero:
+ # all other fields, must match the model, but with Optional default None
+ name: Optional[str] = None
+ secret_name: Optional[str] = None
+ age: Optional[int] = None
+ shoe_size: Optional[int] = None
+ x: int
+ y: int
+
+ pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ pet: Optional[Pet] = Relationship(back_populates="hero")
+
+ def update(self, session: Session = None) -> Hero:
+ if session is not None:
+ db_hero = session.get(Hero, self.id)
+ if not db_hero:
+ raise HTTPException(status_code=404, detail="Hero not found")
+ for key, value in self.dict(exclude_unset=True).items():
+ setattr(db_hero, key, value)
+ session.add(db_hero)
+ session.commit()
+ session.refresh(db_hero)
+ return db_hero
+
r = httpx.patch(
f"{config.api_client.url}/hero/",
- json=self.dict(exclude_none=True),
+ json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return Hero.parse_obj(r.json())
class HeroDelete(BaseModel):
id: int
- @classmethod
- def delete(self, id: int) -> Dict[str, bool]:
+ def delete(self) -> Hero:
r = httpx.delete(
- f"{config.api_client.url}/hero/{id}",
+ f"{config.api_client.url}/hero/{self.id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
diff --git a/learn_sql_model/optional.py b/learn_sql_model/optional.py
deleted file mode 100644
index 83e6b60..0000000
--- a/learn_sql_model/optional.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from typing import List, Optional
-import textwrap
-import inspect
-
-from pydantic import BaseModel
-
-def _optional_import_(
- module: str,
- name: str = None,
- group: str = None,
- package="learn_sql_model",
-):
- """
- lazily throws import errors only then the optional import is used, and
- includes a group install command for the user to install all dependencies
- for the requested feature.
- """
- import importlib
-
- try:
- module = importlib.import_module(module)
- return module if name is None else getattr(module, name)
- except ImportError as e:
- msg = textwrap.dedent(
- f"""
- "pip install '{package}[{group}]'" package to make use of this feature
- Alternatively "pip install '{package}[all]'" package to install all optional dependencies
- """
- )
- import_error = e
-
- class _failed_import:
- """
- Lazily throw an import error. Errors should be thrown whether the
- user tries to call the module, get an attubute from the module, or
- getitem from the module.
-
- """
-
- def _failed_import(self, *args):
- raise ImportError(msg) from import_error
-
- def __call__(self, *args):
- """
- Throw error if the user tries to call the module i.e
- _optional_import_('dummy')()
- """
- self._failed_import(*args)
-
- def __getattr__(self, name):
- """
- Throw error if the user tries to get an attribute from the
- module i.e _optional_import_('dummy').dummy.
- """
- if name == "_failed_import":
- return object.__getattribute__(self, name)
- self._failed_import()
-
- def __getitem__(self, name):
- """
- Throw error if the user tries to get an item from the module
- i.e _optional_import_('dummy')['dummy']
- """
- self._failed_import()
-
- return _failed_import()
-
-
-# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
-# def decorator(cls):
-# def wrapper(*args, **kwargs):
-# if fields is None:
-# fields = cls.__fields__
-# if required is None:
-# required = []
-#
-# for field in fields:
-# if field not in required:
-# cls.__fields__[field].required = False
-# return _cls
-# return wrapper
-# return decorator
-#
- #
-def optional(*fields):
- def dec(_cls):
- for field in fields:
- _cls.__fields__[field].required = False
- return _cls
-
- if fields and inspect.isclass(fields[0]) and issubclass(fields[0], BaseModel):
- cls = fields[0]
- fields = cls.__fields__
- return dec(cls)
- return dec
-
diff --git a/lights/light--100.png b/lights/light--100.png
deleted file mode 100644
index de46434..0000000
Binary files a/lights/light--100.png and /dev/null differ
diff --git a/lights/light--120.png b/lights/light--120.png
deleted file mode 100644
index 963152f..0000000
Binary files a/lights/light--120.png and /dev/null differ
diff --git a/lights/light--140.png b/lights/light--140.png
deleted file mode 100644
index cda76ae..0000000
Binary files a/lights/light--140.png and /dev/null differ
diff --git a/lights/light--160.png b/lights/light--160.png
deleted file mode 100644
index 3643c9e..0000000
Binary files a/lights/light--160.png and /dev/null differ
diff --git a/lights/light--180.png b/lights/light--180.png
deleted file mode 100644
index 82f9e33..0000000
Binary files a/lights/light--180.png and /dev/null differ
diff --git a/lights/light--20.png b/lights/light--20.png
deleted file mode 100644
index 3e14e6b..0000000
Binary files a/lights/light--20.png and /dev/null differ
diff --git a/lights/light--200.png b/lights/light--200.png
deleted file mode 100644
index 6490af9..0000000
Binary files a/lights/light--200.png and /dev/null differ
diff --git a/lights/light--220.png b/lights/light--220.png
deleted file mode 100644
index a59b646..0000000
Binary files a/lights/light--220.png and /dev/null differ
diff --git a/lights/light--240.png b/lights/light--240.png
deleted file mode 100644
index 0f036b7..0000000
Binary files a/lights/light--240.png and /dev/null differ
diff --git a/lights/light--260.png b/lights/light--260.png
deleted file mode 100644
index 7033ac0..0000000
Binary files a/lights/light--260.png and /dev/null differ
diff --git a/lights/light--280.png b/lights/light--280.png
deleted file mode 100644
index 42509bf..0000000
Binary files a/lights/light--280.png and /dev/null differ
diff --git a/lights/light--300.png b/lights/light--300.png
deleted file mode 100644
index 0c63757..0000000
Binary files a/lights/light--300.png and /dev/null differ
diff --git a/lights/light--320.png b/lights/light--320.png
deleted file mode 100644
index 4112187..0000000
Binary files a/lights/light--320.png and /dev/null differ
diff --git a/lights/light--340.png b/lights/light--340.png
deleted file mode 100644
index 9a9bb4d..0000000
Binary files a/lights/light--340.png and /dev/null differ
diff --git a/lights/light--360.png b/lights/light--360.png
deleted file mode 100644
index 553fc60..0000000
Binary files a/lights/light--360.png and /dev/null differ
diff --git a/lights/light--40.png b/lights/light--40.png
deleted file mode 100644
index c142c08..0000000
Binary files a/lights/light--40.png and /dev/null differ
diff --git a/lights/light--60.png b/lights/light--60.png
deleted file mode 100644
index 8965054..0000000
Binary files a/lights/light--60.png and /dev/null differ
diff --git a/lights/light--80.png b/lights/light--80.png
deleted file mode 100644
index 9ed81e6..0000000
Binary files a/lights/light--80.png and /dev/null differ
diff --git a/lights/light-0.png b/lights/light-0.png
deleted file mode 100644
index 553fc60..0000000
Binary files a/lights/light-0.png and /dev/null differ
diff --git a/lights/light-100.png b/lights/light-100.png
deleted file mode 100644
index 7033ac0..0000000
Binary files a/lights/light-100.png and /dev/null differ
diff --git a/lights/light-120.png b/lights/light-120.png
deleted file mode 100644
index 0367763..0000000
Binary files a/lights/light-120.png and /dev/null differ
diff --git a/lights/light-140.png b/lights/light-140.png
deleted file mode 100644
index a59b646..0000000
Binary files a/lights/light-140.png and /dev/null differ
diff --git a/lights/light-160.png b/lights/light-160.png
deleted file mode 100644
index 6490af9..0000000
Binary files a/lights/light-160.png and /dev/null differ
diff --git a/lights/light-180.png b/lights/light-180.png
deleted file mode 100644
index 82f9e33..0000000
Binary files a/lights/light-180.png and /dev/null differ
diff --git a/lights/light-20.png b/lights/light-20.png
deleted file mode 100644
index 9a9bb4d..0000000
Binary files a/lights/light-20.png and /dev/null differ
diff --git a/lights/light-200.png b/lights/light-200.png
deleted file mode 100644
index 3643c9e..0000000
Binary files a/lights/light-200.png and /dev/null differ
diff --git a/lights/light-220.png b/lights/light-220.png
deleted file mode 100644
index cda76ae..0000000
Binary files a/lights/light-220.png and /dev/null differ
diff --git a/lights/light-240.png b/lights/light-240.png
deleted file mode 100644
index 7c35056..0000000
Binary files a/lights/light-240.png and /dev/null differ
diff --git a/lights/light-260.png b/lights/light-260.png
deleted file mode 100644
index de46434..0000000
Binary files a/lights/light-260.png and /dev/null differ
diff --git a/lights/light-280.png b/lights/light-280.png
deleted file mode 100644
index 9ed81e6..0000000
Binary files a/lights/light-280.png and /dev/null differ
diff --git a/lights/light-300.png b/lights/light-300.png
deleted file mode 100644
index 4c35301..0000000
Binary files a/lights/light-300.png and /dev/null differ
diff --git a/lights/light-320.png b/lights/light-320.png
deleted file mode 100644
index c142c08..0000000
Binary files a/lights/light-320.png and /dev/null differ
diff --git a/lights/light-340.png b/lights/light-340.png
deleted file mode 100644
index 3e14e6b..0000000
Binary files a/lights/light-340.png and /dev/null differ
diff --git a/lights/light-40.png b/lights/light-40.png
deleted file mode 100644
index 4112187..0000000
Binary files a/lights/light-40.png and /dev/null differ
diff --git a/lights/light-60.png b/lights/light-60.png
deleted file mode 100644
index 0c63757..0000000
Binary files a/lights/light-60.png and /dev/null differ
diff --git a/lights/light-80.png b/lights/light-80.png
deleted file mode 100644
index 42509bf..0000000
Binary files a/lights/light-80.png and /dev/null differ
diff --git a/load_test.py b/load_test.py
deleted file mode 100644
index 461db8f..0000000
--- a/load_test.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from locust import HttpUser, between, task
-
-from learn_sql_model.factories.hero import HeroFactory
-from learn_sql_model.models.hero import HeroCreate
-
-
-class QuickstartUser(HttpUser):
- wait_time = between(1, 2)
-
- @task
- def hello_world(self):
- self.client.get("/hero/1")
- self.client.get("/heros/")
-
- @task(3)
- def create_hero(self):
- hero = HeroFactory().build()
- HeroCreate(**hero.dict()).post()
diff --git a/locustfile.py b/locustfile.py
deleted file mode 100644
index 4f4f551..0000000
--- a/locustfile.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import random
-
-from locust import FastHttpUser, task
-
-from learn_sql_model.config import get_config
-from learn_sql_model.factories.hero import HeroFactory
-from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
-
-config = get_config()
-
-
-class QuickstartUser(FastHttpUser):
- # wait_time = between(1, 2)
- host = "http://localhost:5000"
- # host = "https://waylonwalker.com"
-
- def on_start(self):
- self.client.verify = False
-
- @task(6)
- def get_a_hero(self):
- # heros = Heros.list()
- id = 1
- # id = random.choice(heros.__root__).id
-
- self.client.get(f"/hero/{id}")
-
- # @task(2)
- # def get_all_hero(self):
- # self.client.get("/heros/")
-
- @task
- def create_hero(self):
- hero = HeroFactory().build()
- hero_create = HeroCreate(**hero.dict()).post()
-
- self.client.post(
- f"{config.api_client.url}/hero/",
- json=hero_create.dict(),
- )
-
- @task(3)
- def update_hero(self):
- hero = HeroFactory().build()
- hero_update = HeroUpdate(id=1, name=hero.name)
-
- self.client.patch(
- "/hero/",
- json=hero_update.dict(exclude_none=True),
- )
-
- @task
- def delete_hero(self):
- heros = Heros.list()
- id = random.choice(heros.__root__).id
- self.client.delete(
- f"/hero/{id}",
- )
diff --git a/map.pkl b/map.pkl
deleted file mode 100644
index e69de29..0000000
diff --git a/map.png b/map.png
deleted file mode 100644
index 04809ee..0000000
Binary files a/map.png and /dev/null differ
diff --git a/micro b/micro
deleted file mode 100644
index a06df4a..0000000
Binary files a/micro and /dev/null differ
diff --git a/migrations/env.py b/migrations/env.py
index 27494c8..4863934 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -79,7 +79,7 @@ def run_migrations_online() -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
- render_as_batch=True,
+ render_as_batch=False,
version_table=f'{config.get_main_option("project")}_alembic_version',
)
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
index 567e915..3124b62 100644
--- a/migrations/script.py.mako
+++ b/migrations/script.py.mako
@@ -8,9 +8,6 @@ Create Date: ${create_date}
from alembic import op
import sqlalchemy as sa
import sqlmodel
-from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
-from learn_sql_model.config import get_config
-
${imports if imports else ""}
# revision identifiers, used by Alembic.
@@ -22,8 +19,6 @@ depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
- generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
diff --git a/migrations/versions/3555f61aaa79_add_x_and_y.py b/migrations/versions/3555f61aaa79_add_x_and_y.py
deleted file mode 100644
index b6f112d..0000000
--- a/migrations/versions/3555f61aaa79_add_x_and_y.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""add x and y
-
-Revision ID: 3555f61aaa79
-Revises: 79972ec5f79d
-Create Date: 2023-06-22 15:03:27.338959
-
-"""
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision = "3555f61aaa79"
-down_revision = "79972ec5f79d"
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
- op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
- # ### end Alembic commands ###
- # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_column("hero", "y")
- op.drop_column("hero", "x")
- # ### end Alembic commands ###
diff --git a/migrations/versions/3555f61aaa79_er_diagram.md b/migrations/versions/3555f61aaa79_er_diagram.md
deleted file mode 100644
index caf6156..0000000
--- a/migrations/versions/3555f61aaa79_er_diagram.md
+++ /dev/null
@@ -1,68 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| 79972ec5f79d |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id | x | y |
-|------|-------------|----|---|---|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 0 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/3555f61aaa79_er_diagram.png b/migrations/versions/3555f61aaa79_er_diagram.png
deleted file mode 100644
index 7c6bdab..0000000
Binary files a/migrations/versions/3555f61aaa79_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/79972ec5f79d_er_diagram.md b/migrations/versions/79972ec5f79d_er_diagram.md
deleted file mode 100644
index 9f3ece5..0000000
--- a/migrations/versions/79972ec5f79d_er_diagram.md
+++ /dev/null
@@ -1,65 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 0 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id |
-|------|-------------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 0 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/79972ec5f79d_er_diagram.png b/migrations/versions/79972ec5f79d_er_diagram.png
deleted file mode 100644
index cc4f37b..0000000
Binary files a/migrations/versions/79972ec5f79d_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/79972ec5f79d_int.py b/migrations/versions/79972ec5f79d_int.py
deleted file mode 100644
index 1ccca25..0000000
--- a/migrations/versions/79972ec5f79d_int.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""int
-
-Revision ID: 79972ec5f79d
-Revises:
-Create Date: 2023-06-22 15:02:20.292322
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-
-
-# revision identifiers, used by Alembic.
-revision = "79972ec5f79d"
-down_revision = None
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.create_table(
- "hero",
- sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column("id", sa.Integer(), nullable=False),
- sa.PrimaryKeyConstraint("id"),
- )
- op.create_table(
- "pet",
- sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- sa.Column("birthday", sa.DateTime(), nullable=True),
- sa.Column("id", sa.Integer(), nullable=False),
- sa.PrimaryKeyConstraint("id"),
- )
- # ### end Alembic commands ###
- # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_table("pet")
- op.drop_table("hero")
- # ### end Alembic commands ###
diff --git a/migrations/versions/a1cd0a1947be_add_hero_lighting.py b/migrations/versions/a1cd0a1947be_add_hero_lighting.py
deleted file mode 100644
index 4e48a0c..0000000
--- a/migrations/versions/a1cd0a1947be_add_hero_lighting.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""add hero.lighting
-
-Revision ID: a1cd0a1947be
-Revises: c79214cdc7b3
-Create Date: 2023-06-28 19:43:47.108749
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
-from learn_sql_model.config import get_config
-
-
-
-# revision identifiers, used by Alembic.
-revision = 'a1cd0a1947be'
-down_revision = 'c79214cdc7b3'
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
- op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
- # ### end Alembic commands ###
- generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_column('hero', 'lanturn_strength')
- op.drop_column('hero', 'flashlight_strength')
- # ### end Alembic commands ###
diff --git a/migrations/versions/a1cd0a1947be_er_diagram.md b/migrations/versions/a1cd0a1947be_er_diagram.md
deleted file mode 100644
index 3fc470a..0000000
--- a/migrations/versions/a1cd0a1947be_er_diagram.md
+++ /dev/null
@@ -1,75 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| c79214cdc7b3 |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
-|------|-------------|----|---|---|------|---------------------|------------------|
-| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
-| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
-| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
-| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-| size | INTEGER | | | |
-| flashlight_strength | INTEGER | | | |
-| lanturn_strength | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 4 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/a1cd0a1947be_er_diagram.png b/migrations/versions/a1cd0a1947be_er_diagram.png
deleted file mode 100644
index 8e26ac5..0000000
Binary files a/migrations/versions/a1cd0a1947be_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/a9bb6625c57b_add_birthday.py b/migrations/versions/a9bb6625c57b_add_birthday.py
new file mode 100644
index 0000000..9e8feb4
--- /dev/null
+++ b/migrations/versions/a9bb6625c57b_add_birthday.py
@@ -0,0 +1,29 @@
+"""add birthday
+
+Revision ID: a9bb6625c57b
+Revises: c8516c888495
+Create Date: 2023-05-25 19:00:58.137464
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision = 'a9bb6625c57b'
+down_revision = 'c8516c888495'
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.add_column('pet', sa.Column('birthday', sa.DateTime(), nullable=True))
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_column('pet', 'birthday')
+ # ### end Alembic commands ###
diff --git a/migrations/versions/c79214cdc7b3_er_diagram.md b/migrations/versions/c79214cdc7b3_er_diagram.md
deleted file mode 100644
index 9969846..0000000
--- a/migrations/versions/c79214cdc7b3_er_diagram.md
+++ /dev/null
@@ -1,74 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| 3555f61aaa79 |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id | x | y | size |
-|------|-------------|----|---|---|------|
-| tight-gold | successful-health | 1 | 6430 | 6231 | None |
-| hard-rope | green-research | 2 | 1395 | 2865 | None |
-| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
-| huge-library | adult-body | 4 | 656 | 2377 | None |
-| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-| size | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 1572 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/c79214cdc7b3_er_diagram.png b/migrations/versions/c79214cdc7b3_er_diagram.png
deleted file mode 100644
index 887cf61..0000000
Binary files a/migrations/versions/c79214cdc7b3_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/c8516c888495_init.py b/migrations/versions/c8516c888495_init.py
new file mode 100644
index 0000000..c351dd9
--- /dev/null
+++ b/migrations/versions/c8516c888495_init.py
@@ -0,0 +1,44 @@
+"""init
+
+Revision ID: c8516c888495
+Revises:
+Create Date: 2023-05-25 18:42:37.057225
+
+"""
+from alembic import op
+import sqlalchemy as sa
+import sqlmodel
+
+
+# revision identifiers, used by Alembic.
+revision = 'c8516c888495'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('pet',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_table('hero',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+ sa.Column('age', sa.Integer(), nullable=True),
+ sa.Column('shoe_size', sa.Integer(), nullable=True),
+ sa.Column('pet_id', sa.Integer(), nullable=True),
+ sa.ForeignKeyConstraint(['pet_id'], ['pet.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('hero')
+ op.drop_table('pet')
+ # ### end Alembic commands ###
diff --git a/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py b/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py
deleted file mode 100644
index f59d974..0000000
--- a/migrations/versions/d79dd8e699d1_add_hero_flashlight_angle.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""add hero.flashlight_angle
-
-Revision ID: d79dd8e699d1
-Revises: e1af975310a1
-Create Date: 2023-06-28 19:54:19.322431
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
-from learn_sql_model.config import get_config
-
-
-
-# revision identifiers, used by Alembic.
-revision = 'd79dd8e699d1'
-down_revision = 'e1af975310a1'
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
- # ### end Alembic commands ###
- generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- op.drop_column('hero', 'flashlight_angle')
- # ### end Alembic commands ###
diff --git a/migrations/versions/d79dd8e699d1_er_diagram.md b/migrations/versions/d79dd8e699d1_er_diagram.md
deleted file mode 100644
index da209b0..0000000
--- a/migrations/versions/d79dd8e699d1_er_diagram.md
+++ /dev/null
@@ -1,72 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| e1af975310a1 |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
-|------|-------------|----|---|---|------|---------------------|------------------|------------------|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-| size | INTEGER | | | |
-| flashlight_strength | INTEGER | | | |
-| lanturn_strength | INTEGER | | | |
-| flashlight_angle | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 0 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/d79dd8e699d1_er_diagram.png b/migrations/versions/d79dd8e699d1_er_diagram.png
deleted file mode 100644
index f1560e3..0000000
Binary files a/migrations/versions/d79dd8e699d1_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py b/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py
deleted file mode 100644
index e2df16f..0000000
--- a/migrations/versions/e1af975310a1_add_hero_flashlight_angle.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""add hero.flashlight_angle
-
-Revision ID: e1af975310a1
-Revises: a1cd0a1947be
-Create Date: 2023-06-28 19:53:18.068873
-
-"""
-from alembic import op
-import sqlalchemy as sa
-import sqlmodel
-from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
-from learn_sql_model.config import get_config
-
-
-
-# revision identifiers, used by Alembic.
-revision = 'e1af975310a1'
-down_revision = 'a1cd0a1947be'
-branch_labels = None
-depends_on = None
-
-
-def upgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- pass
- # ### end Alembic commands ###
- generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
-
-
-def downgrade() -> None:
- # ### commands auto generated by Alembic - please adjust! ###
- pass
- # ### end Alembic commands ###
diff --git a/migrations/versions/e1af975310a1_er_diagram.md b/migrations/versions/e1af975310a1_er_diagram.md
deleted file mode 100644
index a5dd9d2..0000000
--- a/migrations/versions/e1af975310a1_er_diagram.md
+++ /dev/null
@@ -1,71 +0,0 @@
-
-
----
-
-## Table: learn_sql_model_alembic_version
-
-### First 5 rows
-
-| version_num |
-|-------------|
-| a1cd0a1947be |
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| version_num | VARCHAR(32) | | | |
-
-### Records Count
-
-The table learn_sql_model_alembic_version contains 1 records.
-
----
-
-## Table: hero
-
-### First 5 rows
-
-| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
-|------|-------------|----|---|---|------|---------------------|------------------|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| secret_name | VARCHAR | | | |
-| id | INTEGER | | | |
-| x | INTEGER | | | |
-| y | INTEGER | | | |
-| size | INTEGER | | | |
-| flashlight_strength | INTEGER | | | |
-| lanturn_strength | INTEGER | | | |
-
-### Records Count
-
-The table hero contains 0 records.
-
----
-
-## Table: pet
-
-### First 5 rows
-
-| name | birthday | id |
-|------|----------|----|
-
-### Columns
-
-| Column Name | Type | Foreign Key | Example Value |
-|-------------|------|-------------|---------------|
-| name | VARCHAR | | | |
-| birthday | DATETIME | | | |
-| id | INTEGER | | | |
-
-### Records Count
-
-The table pet contains 0 records.
-
----
-
diff --git a/migrations/versions/e1af975310a1_er_diagram.png b/migrations/versions/e1af975310a1_er_diagram.png
deleted file mode 100644
index 8e26ac5..0000000
Binary files a/migrations/versions/e1af975310a1_er_diagram.png and /dev/null differ
diff --git a/migrations/versions/c79214cdc7b3_add_hero_size.py b/migrations/versions/e26398d96dd0_add_x_y_size.py
similarity index 50%
rename from migrations/versions/c79214cdc7b3_add_hero_size.py
rename to migrations/versions/e26398d96dd0_add_x_y_size.py
index 643480b..4219bde 100644
--- a/migrations/versions/c79214cdc7b3_add_hero_size.py
+++ b/migrations/versions/e26398d96dd0_add_x_y_size.py
@@ -1,34 +1,33 @@
-"""add hero.size
+"""add x, y, size
-Revision ID: c79214cdc7b3
-Revises: 3555f61aaa79
-Create Date: 2023-06-28 11:39:02.606001
+Revision ID: e26398d96dd0
+Revises: a9bb6625c57b
+Create Date: 2023-06-10 18:37:04.751553
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
-from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
-from learn_sql_model.config import get_config
-
# revision identifiers, used by Alembic.
-revision = 'c79214cdc7b3'
-down_revision = '3555f61aaa79'
+revision = 'e26398d96dd0'
+down_revision = 'a9bb6625c57b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
- op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True))
+ op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
+ op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
+ op.add_column('hero', sa.Column('size', sa.Integer(), nullable=False))
# ### end Alembic commands ###
- generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
- generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'size')
+ op.drop_column('hero', 'y')
+ op.drop_column('hero', 'x')
# ### end Alembic commands ###
diff --git a/notify.py b/notify.py
deleted file mode 100644
index 8c1178d..0000000
--- a/notify.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# # Import smtplib for the actual sending function
-# import smtplib
-# # Import the email modules we'll need
-# from email.mime.text import MIMEText
-
-# # Open a plain text file for reading. For this example, assume that
-# # the text file contains only ASCII characters.
-# # with open(textfile, 'rb') as fp:
-# # # Create a text/plain message
-# # msg = MIMEText(fp.read())
-# msg = MIMEText("hello there", "plain", "utf-8")
-
-# # me == the sender's email address
-# # you == the recipient's email address
-# me = "waylon@waylonwalker.com"
-# you = "3195728809@msg.fi.google.com"
-# msg["Subject"] = "Python SMTP test"
-# msg["From"] = me
-# msg["To"] = you
-
-# # Send the message via our own SMTP server, but don't include the
-# # envelope header.
-# s = smtplib.SMTP("localhost")
-# s.sendmail(me, [you], msg.as_string())
-# s.quit()
-import requests
-
-requests.post(
- "https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
- auth=("api", "YOUR_API_KEY"),
- data={
- "from": "Excited User ",
- "to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
- "subject": "Hello",
- "text": "Testing some Mailgun awesomness!",
- },
-)
diff --git a/pet.png b/pet.png
deleted file mode 100644
index b22dd02..0000000
Binary files a/pet.png and /dev/null differ
diff --git a/player.png b/player.png
deleted file mode 100644
index 0d4b500..0000000
Binary files a/player.png and /dev/null differ
diff --git a/pyproject.toml b/pyproject.toml
index 53dbe66..549f520 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -24,52 +24,36 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy",
]
dependencies = [
-"black",
- "python-socketio[client]",
+"python-socketio[client]",
+"fastapi-socketio",
+"psycopg2-binary",
+'pygame',
+'black',
+'alembic',
+'pygame',
+'pyinstaller',
+ "pyflyby",
"anyconfig",
"copier",
"engorgio",
"fastapi",
"httpx",
- "pydantic<2.0.0",
- "pyflyby",
- "pyinstaller",
+ "passlib[bcrypt]",
+ "polyfactory",
+ "psycopg2",
+ "python-jose[cryptography]",
+ "python-multipart",
"rich",
"sqlmodel",
"textual",
"toml",
"trogon",
"typer",
+ "uvicorn[standard]",
]
dynamic = ["version"]
-[project.optional-dependencies]
-game = [
- "noise",
- "pygame",
- "polyfactory",
- "faker",
-]
-api = [
- "fastapi-socketio",
- "passlib[bcrypt]",
- "psycopg2",
- "psycopg2-binary",
- "python-jose[cryptography]",
- "python-multipart",
- "uvicorn[standard]",
-]
-manage = [
- "alembic",
- "polyfactory",
- "faker",
-]
-all = [
- "learn_sql_model[game, api, manage]",
-]
-
-
[project.urls]
Documentation = "https://github.com/waylonwalker/learn-sql-model#readme"
Issues = "https://github.com/waylonwalker/learn-sql-model/issues"
diff --git a/rect.py b/rect.py
deleted file mode 100644
index 29def88..0000000
--- a/rect.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import pygame
-
-pygame.init()
-
-screen = pygame.display.set_mode((500, 500))
-pygame.display.set_caption("draw a square")
-
-running = True
-
-while running:
- for event in pygame.event.get():
- if event.type == pygame.QUIT:
- running = False
-
- surface = pygame.Surface((500, 500))
- surface.fill((255, 0, 0))
-
- color = (0, 0, 255)
- rect = (200, 200, 100, 100)
- pygame.draw.rect(surface, color, rect)
-
- screen.blit(surface, (0, 0))
- pygame.display.flip()
diff --git a/templates/er_diagram.html b/templates/er_diagram.html
deleted file mode 100644
index 5dc1fed..0000000
--- a/templates/er_diagram.html
+++ /dev/null
@@ -1,129 +0,0 @@
-
-
-
-
-
- ER Diagram
-
-
-
-
-
-
-
-
-
-
-
diff --git a/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
index 878f877..e1b86b7 100644
--- a/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/api/{{modelname.lower()}}.py.jinja
@@ -1,89 +1,86 @@
from fastapi import APIRouter, Depends, HTTPException
-from sqlmodel import Session, select
+from sqlmodel import SQLModel, Session
from learn_sql_model.api.websocket_connection_manager import manager
-from learn_sql_model.config import get_session
-from learn_sql_model.models.{{ modelname }} import {{ modelname }}, {{ modelname }}Create, {{ modelname }}Read, {{ modelname }}Update, {{ modelname }}s
+from learn_sql_model.config import get_config, get_session
+from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}, {{modelname}}Create, {{modelname}}Read, {{modelname}}Update
-{{ modelname }}_router = APIRouter()
+{{modelname.lower()}}_router = APIRouter()
-@{{ modelname }}_router.on_event("startup")
+@{{modelname.lower()}}_router.on_event("startup")
def on_startup() -> None:
- # SQLModel.metadata.create_all(get_config().database.engine)
- ...
+ SQLModel.metadata.create_all(get_config().database.engine)
-@{{ modelname }}_router.get("/{{ modelname }}/{{{ modelname }}_id}")
-def get_{{ modelname }}(
+@{{modelname.lower()}}_router.get("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
+async def get_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
- {{ modelname }}_id: int,
-) -> {{ modelname }}Read:
- "get one {{ modelname }}"
- {{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
- if not {{ modelname }}:
- raise HTTPException(status_code=404, detail="{{ modelname }} not found")
- return {{ modelname }}
+ {{modelname.lower()}}_id: int,
+) -> {{modelname}}Read:
+ "get one {{modelname.lower()}}"
+ {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
+ if not {{modelname.lower()}}:
+ raise HTTPException(status_code=404, detail="{{modelname}} not found")
+ return {{modelname.lower()}}
-@{{ modelname }}_router.post("/{{ modelname }}/")
-def post_{{ modelname }}(
+@{{modelname.lower()}}_router.post("/{{modelname.lower()}}/")
+async def post_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
- {{ modelname }}: {{ modelname }}Create,
-) -> {{ modelname }}Read:
- "create a {{ modelname }}"
- db_{{ modelname }} = {{ modelname }}.from_orm({{ modelname }})
- session.add(db_{{ modelname }})
+ {{modelname.lower()}}: {{modelname}}Create,
+) -> {{modelname}}Read:
+ "read all the {{modelname.lower()}}s"
+ db_{{modelname.lower()}} = {{modelname}}.from_orm({{modelname.lower()}})
+ session.add(db_{{modelname.lower()}})
session.commit()
- session.refresh(db_{{ modelname }})
- await manager.broadcast({{{ modelname }}.json()}, id=1)
- return db_{{ modelname }}
+ session.refresh(db_{{modelname.lower()}})
+ await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
+ return db_{{modelname.lower()}}
-@{{ modelname }}_router.patch("/{{ modelname }}/")
-def patch_{{ modelname }}(
+@{{modelname.lower()}}_router.patch("/{{modelname.lower()}}/")
+async def patch_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
- {{ modelname }}: {{ modelname }}Update,
-) -> {{ modelname }}Read:
- "update a {{ modelname }}"
- db_{{ modelname }} = session.get({{ modelname }}, {{ modelname }}.id)
- if not db_{{ modelname }}:
- raise HTTPException(status_code=404, detail="{{ modelname }} not found")
- for key, value in {{ modelname }}.dict(exclude_unset=True).items():
- setattr(db_{{ modelname }}, key, value)
- session.add(db_{{ modelname }})
+ {{modelname.lower()}}: {{modelname}}Update,
+) -> {{modelname}}Read:
+ "read all the {{modelname.lower()}}s"
+ db_{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
+ if not db_{{modelname.lower()}}:
+ raise HTTPException(status_code=404, detail="{{modelname}} not found")
+ for key, value in {{modelname.lower()}}.dict(exclude_unset=True).items():
+ setattr(db_{{modelname.lower()}}, key, value)
+ session.add(db_{{modelname.lower()}})
session.commit()
- session.refresh(db_{{ modelname }})
- await manager.broadcast({{{ modelname }}.json()}, id=1)
- return db_{{ modelname }}
+ session.refresh(db_{{modelname.lower()}})
+ await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
+ return db_{{modelname.lower()}}
-@{{ modelname }}_router.delete("/{{ modelname }}/{{{ modelname }}_id}")
-def delete_{{ modelname }}(
+@{{modelname.lower()}}_router.delete("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
+async def delete_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
- {{ modelname }}_id: int,
+ {{modelname.lower()}}_id: int,
):
- "delete a {{ modelname }}"
- {{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
- if not {{ modelname }}:
- raise HTTPException(status_code=404, detail="{{ modelname }} not found")
- session.delete({{ modelname }})
+ "read all the {{modelname.lower()}}s"
+ {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
+ if not {{modelname.lower()}}:
+ raise HTTPException(status_code=404, detail="{{modelname}} not found")
+ session.delete({{modelname.lower()}})
session.commit()
- await manager.broadcast(f"deleted {{ modelname }} {{{ modelname }}_id}", id=1)
+ await manager.broadcast(f"deleted {{modelname.lower()}} {{{modelname.lower()}}_id}", id=1)
return {"ok": True}
-@{{ modelname }}_router.get("/{{ modelname }}s/")
-def get_{{ modelname }}s(
+@{{modelname.lower()}}_router.get("/{{modelname.lower()}}s/")
+async def get_{{modelname.lower()}}s(
*,
session: Session = Depends(get_session),
-) -> {{ modelname }}s:
- "get all {{ modelname }}s"
- statement = select({{ modelname }})
- {{ modelname }}s = session.exec(statement).all()
- return {{ modelname }}s(__root__={{ modelname }}s)
+) -> list[{{modelname}}]:
+ "get all {{modelname.lower()}}s"
+ return {{modelname}}Read.list(session=session)
diff --git a/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
index 8e807e0..995f10b 100644
--- a/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/factories/{{modelname.lower()}}.py.jinja
@@ -1,12 +1,14 @@
from faker import Faker
from polyfactory.factories.pydantic_factory import ModelFactory
-from learn_sql_model.factories.pet import PetFactory
-from learn_sql_model.models.{{ modelname }} import {{ modelname }}
-from learn_sql_model.models.pet import Pet
+from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
-class {{ modelname }}Factory(ModelFactory[{{ modelname }}]):
- __model__ = {{ modelname }}
+class {{modelname}}Factory(ModelFactory[{{modelname.lower()}}]):
+ __model__ = {{modelname}}
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
+ id = None
+
+ __random_seed__ = 10
+
diff --git a/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja b/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
index 7d47e42..6fed597 100644
--- a/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
+++ b/templates/model/learn_sql_model/models/{{modelname.lower()}}.py.jinja
@@ -1,81 +1,93 @@
-from typing import Dict, Optional
+from typing import Optional
+from fastapi import Depends, HTTPException
import httpx
from pydantic import BaseModel
-from sqlmodel import Field, SQLModel
+from sqlmodel import Field, Relationship, SQLModel, Session, select
-from learn_sql_model.config import config
+from learn_sql_model.config import config, get_config
+from learn_sql_model.models.pet import Pet
-class {{ modelname }}Base(SQLModel, table=False):
- # put model attributes here
+class {{modelname}}Base(SQLModel, table=False):
-class {{ modelname }}({{ modelname }}Base, table=True):
- id: int = Field(default=None, primary_key=True)
+class {{modelname}}({{modelname}}Base, table=True):
+ id: Optional[int] = Field(default=None, primary_key=True)
-class {{ modelname }}Create({{ modelname }}Base):
+class {{modelname}}Create({{modelname}}Base):
...
- def post(self) -> {{ modelname }}:
+ def post(self) -> {{modelname}}:
r = httpx.post(
- f"{config.api_client.url}/{{ modelname }}/",
+ f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return {{ modelname }}.parse_obj(r.json())
-
-class {{ modelname }}Read({{ modelname }}Base):
+class {{modelname}}Read({{modelname}}Base):
id: int
@classmethod
def get(
cls,
id: int,
- ) -> {{ modelname }}:
- r = httpx.get(f"{config.api_client.url}/{{ modelname }}/{id}")
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return {{ modelname }}Read.parse_obj(r.json())
-
-
-class {{ modelname }}s(BaseModel):
- __root__: list[{{ modelname }}]
+ ) -> {{modelname}}:
+ with config.database.session as session:
+ {{modelname.lower()}} = session.get({{modelname}}, id)
+ if not {{modelname.lower()}}:
+ raise HTTPException(status_code=404, detail="{{modelname}} not found")
+ return {{modelname.lower()}}
@classmethod
def list(
self,
- ) -> {{ modelname }}:
- r = httpx.get(f"{config.api_client.url}/{{ modelname }}s/")
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return {{ modelname }}s.parse_obj({"__root__": r.json()})
+ where=None,
+ offset=0,
+ limit=None,
+ session: Session = None,
+ ) -> {{modelname}}:
+
+ if session is None:
+ session = get_config().database.session
+
+ statement = select({{modelname}})
+ if where != "None" and where is not None:
+ from sqlmodel import text
+
+ statement = statement.where(text(where))
+ statement = statement.offset(offset).limit(limit)
+ {{modelname.lower()}}es = session.exec(statement).all()
+ return {{modelname.lower()}}es
-class {{ modelname }}Update(SQLModel):
- # id is required to update the {{ modelname }}
+class {{modelname}}Update(SQLModel):
+ # id is required to update the {{modelname.lower()}}
id: int
- def update(self) -> {{ modelname }}:
+ # all other fields, must match the model, but with Optional default None
+
+ pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
+ pet: Optional[Pet] = Relationship(back_populates="{{modelname.lower()}}")
+
+ def update(self) -> {{modelname}}:
r = httpx.patch(
- f"{config.api_client.url}/{{ modelname }}/",
+ f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
-class {{ modelname }}Delete(BaseModel):
+class {{modelname}}Delete(BaseModel):
id: int
- @classmethod
- def delete(self, id: int) -> Dict[str, bool]:
+ def delete(self) -> {{modelname}}:
r = httpx.delete(
- f"{config.api_client.url}/{{ modelname }}/{id}",
+ f"{config.api_client.url}/{{modelname.lower()}}/{self.id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
diff --git a/templates/model/tests/{{modelname.lower()}}.py.jinja b/templates/model/tests/{{modelname.lower()}}.py.jinja
index 7aa9ba5..39da70f 100644
--- a/templates/model/tests/{{modelname.lower()}}.py.jinja
+++ b/templates/model/tests/{{modelname.lower()}}.py.jinja
@@ -1,103 +1,235 @@
-from typing import Optional
+from fastapi.testclient import TestClient
+import pytest
+from sqlalchemy import create_engine
+from sqlmodel import SQLModel, Session, select
+from sqlmodel.pool import StaticPool
+from typer.testing import CliRunner
-import httpx
-from pydantic import BaseModel
-from sqlmodel import Field, Relationship, SQLModel
+from learn_sql_model.api.app import app
+from learn_sql_model.cli.{{modelname.lower()}} import {{modelname.lower()}}_app
+from learn_sql_model.config import get_config, get_session
+from learn_sql_model.factories.{{modelname.lower()}} import {{modelname}}Factory
+from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
-from learn_sql_model.config import config
-from learn_sql_model.models.pet import Pet
+runner = CliRunner()
+client = TestClient(app)
-class {{ model.lower }}Base(SQLModel, table=False):
- name: str
- secret_name: str
- x: int
- y: int
- size: int
- age: Optional[int] = None
- shoe_size: Optional[int] = None
-
- pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
+@pytest.fixture(name="session")
+def session_fixture():
+ engine = create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ )
+ SQLModel.metadata.create_all(engine)
+ with Session(engine) as session:
+ yield session
-class {{ model.lower }}({{ model.lower }}Base, table=True):
- id: Optional[int] = Field(default=None, primary_key=True)
+@pytest.fixture(name="client")
+def client_fixture(session: Session):
+ def get_session_override():
+ return session
+
+ app.dependency_overrides[get_session] = get_session_override
+
+ client = TestClient(app)
+ yield client
+ app.dependency_overrides.clear()
-class {{ model.lower }}Create({{ model.lower }}Base):
- ...
+def test_api_post(client: TestClient):
+ {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
+ {{modelname.lower()}}_dict = {{modelname.lower()}}.dict()
+ response = client.post("/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {{modelname.lower()}}_dict})
+ response_{{modelname.lower()}} = {{modelname}}.parse_obj(response.json())
- def post(self) -> {{ model.lower }}:
- r = httpx.post(
- f"{config.api_client.url}/{{ model.lower() }}/",
- json=self.dict(),
- )
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
-
- return {{ model.lower }}.parse_obj(r.json())
+ assert response.status_code == 200
+ assert response_{{modelname.lower()}}.name == "Steelman"
+ assert response_{{modelname.lower()}}.age == 25
-class {{ model.lower }}Read({{ model.lower }}Base):
- id: int
+def test_api_read_{{modelname.lower()}}es(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ {{modelname.lower()}}_2 = {{modelname}}(name="Rusty-Man", secret_name="Tommy Sharp", age=48)
+ session.add({{modelname.lower()}}_1)
+ session.add({{modelname.lower()}}_2)
+ session.commit()
- @classmethod
- def get(
- cls,
- id: int,
- ) -> {{ model.lower }}:
- r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}/{id}")
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return {{ model.lower() }}
+ response = client.get("/{{modelname.lower()}}s/")
+ data = response.json()
+
+ assert response.status_code == 200
+
+ assert len(data) == 2
+ assert data[0]["name"] == {{modelname.lower()}}_1.name
+ assert data[0]["secret_name"] == {{modelname.lower()}}_1.secret_name
+ assert data[0]["age"] == {{modelname.lower()}}_1.age
+ assert data[0]["id"] == {{modelname.lower()}}_1.id
+ assert data[1]["name"] == {{modelname.lower()}}_2.name
+ assert data[1]["secret_name"] == {{modelname.lower()}}_2.secret_name
+ assert data[1]["age"] == {{modelname.lower()}}_2.age
+ assert data[1]["id"] == {{modelname.lower()}}_2.id
-class {{ model.lower }}s(BaseModel):
- {{ model.lower() }}s: list[{{ model.lower }}]
+def test_api_read_{{modelname.lower()}}_404(session: Session, client: TestClient):
+    {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+    session.add({{modelname.lower()}}_1)
+    session.commit()
-    @classmethod
-    def list(
-        self,
-    ) -> {{ model.lower }}:
-        r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}s/")
-        if r.status_code != 200:
-            raise RuntimeError(f"{r.status_code}:\n {r.text}")
-        return {{ model.lower }}s.parse_obj(r.json())
+    response = client.get(f"/{{modelname.lower()}}/999")
+    assert response.status_code == 404
-class {{ model.lower }}Update(SQLModel):
-    # id is required to update the {{ model.lower() }}
-    id: int
+def test_api_read_{{modelname.lower()}}(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ session.add({{modelname.lower()}}_1)
+ session.commit()
- # all other fields, must match the model, but with Optional default None
- name: Optional[str] = None
- secret_name: Optional[str] = None
- age: Optional[int] = None
- shoe_size: Optional[int] = None
- x: int
- y: int
+ response = client.get(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
+ data = response.json()
- pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
- pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
-
- def update(self) -> {{ model.lower }}:
- r = httpx.patch(
- f"{config.api_client.url}/{{ model.lower() }}/",
- json=self.dict(),
- )
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
+ assert response.status_code == 200
+ assert data["name"] == {{modelname.lower()}}_1.name
+ assert data["secret_name"] == {{modelname.lower()}}_1.secret_name
+ assert data["age"] == {{modelname.lower()}}_1.age
+ assert data["id"] == {{modelname.lower()}}_1.id
-class {{ model.lower }}Delete(BaseModel):
- id: int
+def test_api_update_{{modelname.lower()}}(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ session.add({{modelname.lower()}}_1)
+ session.commit()
- def delete(self) -> {{ model.lower }}:
- r = httpx.delete(
- f"{config.api_client.url}/{{ model.lower() }}/{self.id}",
- )
- if r.status_code != 200:
- raise RuntimeError(f"{r.status_code}:\n {r.text}")
- return {"ok": True}
+ response = client.patch(
+ f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": {{modelname.lower()}}_1.id}}
+ )
+ data = response.json()
+ assert response.status_code == 200
+ assert data["name"] == "Deadpuddle"
+ assert data["secret_name"] == "Dive Wilson"
+ assert data["age"] is None
+ assert data["id"] == {{modelname.lower()}}_1.id
+
+
+def test_api_update_{{modelname.lower()}}_404(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ session.add({{modelname.lower()}}_1)
+ session.commit()
+
+ response = client.patch(f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": 999}})
+ assert response.status_code == 404
+
+
+def test_delete_{{modelname.lower()}}(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ session.add({{modelname.lower()}}_1)
+ session.commit()
+
+ response = client.delete(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
+
+ {{modelname.lower()}}_in_db = session.get({{modelname}}, {{modelname.lower()}}_1.id)
+
+ assert response.status_code == 200
+
+ assert {{modelname.lower()}}_in_db is None
+
+
+def test_delete_{{modelname.lower()}}_404(session: Session, client: TestClient):
+ {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
+ session.add({{modelname.lower()}}_1)
+ session.commit()
+
+ response = client.delete(f"/{{modelname.lower()}}/999")
+ assert response.status_code == 404
+
+
+def test_config_memory(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+ {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add({{modelname.lower()}})
+ session.commit()
+ {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
+ {{modelname.lower()}}es = session.exec(select({{modelname}})).all()
+ assert {{modelname.lower()}}.name == "Steelman"
+ assert {{modelname.lower()}}.age == 25
+ assert len({{modelname.lower()}}es) == 1
+
+
+def test_cli_get(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add({{modelname.lower()}})
+ session.commit()
+ {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
+ result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "1"])
+ assert result.exit_code == 0
+ assert f"name='{{{modelname.lower()}}.name}'" in result.stdout
+ assert f"secret_name='{{{modelname.lower()}}.secret_name}'" in result.stdout
+
+
+def test_cli_get_404(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add({{modelname.lower()}})
+ session.commit()
+ {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
+ result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "999"])
+ assert result.exception.status_code == 404
+ assert result.exception.detail == "{{modelname}} not found"
+
+
+def test_cli_list(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ {{modelname.lower()}}_1 = {{modelname}}Factory().build(name="Steelman", age=25)
+ {{modelname.lower()}}_2 = {{modelname}}Factory().build(name="Hunk", age=52)
+
+ with config.database.session as session:
+ session.add({{modelname.lower()}}_1)
+ session.add({{modelname.lower()}}_2)
+ session.commit()
+ session.refresh({{modelname.lower()}}_1)
+ session.refresh({{modelname.lower()}}_2)
+ result = runner.invoke({{modelname.lower()}}_app, ["list"])
+ assert result.exit_code == 0
+ assert f"name='{{{modelname.lower()}}_1.name}'" in result.stdout
+ assert f"secret_name='{{{modelname.lower()}}_1.secret_name}'" in result.stdout
+ assert f"name='{{{modelname.lower()}}_2.name}'" in result.stdout
+ assert f"secret_name='{{{modelname.lower()}}_2.secret_name}'" in result.stdout
diff --git a/tests/test_console.py b/tests/test_console.py
index 008d362..14274bd 100644
--- a/tests/test_console.py
+++ b/tests/test_console.py
@@ -1,16 +1,14 @@
from learn_sql_model.console import console
-def test_default_console_is_quiet(capsys):
-
- console.print("hello")
- captured = capsys.readouterr()
- assert captured.out == ""
-
-
def test_default_console_not_quiet(capsys):
-
- console.quiet = False
console.print("hello")
captured = capsys.readouterr()
assert captured.out == "hello\n"
+
+
+def test_default_console_is_quiet(capsys):
+ console.quiet = True
+ console.print("hello")
+ captured = capsys.readouterr()
+ assert captured.out == ""
diff --git a/tests/test_hero.py b/tests/test_hero.py
index 80d8afe..831a563 100644
--- a/tests/test_hero.py
+++ b/tests/test_hero.py
@@ -1,16 +1,15 @@
from fastapi.testclient import TestClient
import pytest
from sqlalchemy import create_engine
-from sqlmodel import SQLModel, Session
+from sqlmodel import SQLModel, Session, select
from sqlmodel.pool import StaticPool
from typer.testing import CliRunner
from learn_sql_model.api.app import app
from learn_sql_model.cli.hero import hero_app
-from learn_sql_model.config import get_session
+from learn_sql_model.config import get_config, get_session
from learn_sql_model.factories.hero import HeroFactory
-from learn_sql_model.models import hero as hero_models
-from learn_sql_model.models.hero import Hero, HeroCreate, HeroDelete, HeroRead
+from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead
runner = CliRunner()
client = TestClient(app)
@@ -39,19 +38,21 @@ def client_fixture(session: Session):
def test_api_post(client: TestClient):
- hero = HeroFactory().build()
+ hero = HeroFactory().build(name="Steelman", age=25)
hero_dict = hero.dict()
- response = client.post("/hero/", json=hero_dict)
+ response = client.post("/hero/", json={"hero": hero_dict})
response_hero = Hero.parse_obj(response.json())
assert response.status_code == 200
- assert response_hero.name == hero.name
+ assert response_hero.name == "Steelman"
+ assert response_hero.age == 25
-def test_api_read_heros(session: Session, client: TestClient):
- heros = HeroFactory().batch(5)
- for hero in heros:
- session.add(hero)
+def test_api_read_heroes(session: Session, client: TestClient):
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
+ hero_2 = Hero(name="Rusty-Man", secret_name="Tommy Sharp", age=48)
+ session.add(hero_1)
+ session.add(hero_2)
session.commit()
response = client.get("/heros/")
@@ -59,31 +60,35 @@ def test_api_read_heros(session: Session, client: TestClient):
assert response.status_code == 200
- assert len(data) == 5
- for d in data:
- api_hero = Hero.parse_obj(d)
- my_hero = [hero for hero in heros if hero.id == api_hero.id][0]
- for key, value in api_hero.dict(exclude_unset=True).items():
- assert getattr(my_hero, key) == value
+ assert len(data) == 2
+ assert data[0]["name"] == hero_1.name
+ assert data[0]["secret_name"] == hero_1.secret_name
+ assert data[0]["age"] == hero_1.age
+ assert data[0]["id"] == hero_1.id
+ assert data[1]["name"] == hero_2.name
+ assert data[1]["secret_name"] == hero_2.secret_name
+ assert data[1]["age"] == hero_2.age
+ assert data[1]["id"] == hero_2.id
def test_api_read_hero(session: Session, client: TestClient):
- hero = HeroFactory().build()
- session.add(hero)
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
+ session.add(hero_1)
session.commit()
- response = client.get(f"/hero/{hero.id}")
+ response = client.get(f"/hero/{hero_1.id}")
data = response.json()
- response_hero = Hero.parse_obj(data)
assert response.status_code == 200
- for key, value in hero.dict(exclude_unset=True).items():
- assert getattr(response_hero, key) == value
+ assert data["name"] == hero_1.name
+ assert data["secret_name"] == hero_1.secret_name
+ assert data["age"] == hero_1.age
+ assert data["id"] == hero_1.id
def test_api_read_hero_404(session: Session, client: TestClient):
- hero = HeroFactory().build()
- session.add(hero)
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
+ session.add(hero_1)
session.commit()
response = client.get(f"/hero/999")
@@ -91,33 +96,33 @@ def test_api_read_hero_404(session: Session, client: TestClient):
def test_api_update_hero(session: Session, client: TestClient):
- hero = HeroFactory().build()
- new_hero = HeroFactory().build()
- session.add(hero)
- session.commit()
-
- response = client.patch(
- f"/hero/", json={"id": hero.id, **new_hero.dict(exclude={"id"})}
- )
- data = response.json()
- response_hero = Hero.parse_obj(data)
-
- assert response.status_code == 200
- for key, value in hero.dict(exclude_unset=True).items():
- assert getattr(response_hero, key) == value
-
-
-def test_api_update_hero_404(session: Session, client: TestClient):
- hero_1 = HeroFactory().build()
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1)
session.commit()
- response = client.patch(f"/hero/", json={"name": "Deadpuddle", "id": 999})
+ response = client.patch(
+ f"/hero/", json={"hero": {"name": "Deadpuddle", "id": hero_1.id}}
+ )
+ data = response.json()
+
+ assert response.status_code == 200
+ assert data["name"] == "Deadpuddle"
+ assert data["secret_name"] == "Dive Wilson"
+ assert data["age"] is None
+ assert data["id"] == hero_1.id
+
+
+def test_api_update_hero_404(session: Session, client: TestClient):
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
+ session.add(hero_1)
+ session.commit()
+
+ response = client.patch(f"/hero/", json={"hero": {"name": "Deadpuddle", "id": 999}})
assert response.status_code == 404
def test_delete_hero(session: Session, client: TestClient):
- hero_1 = HeroFactory().build()
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1)
session.commit()
@@ -131,7 +136,7 @@ def test_delete_hero(session: Session, client: TestClient):
def test_delete_hero_404(session: Session, client: TestClient):
- hero_1 = HeroFactory().build()
+ hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1)
session.commit()
@@ -139,181 +144,135 @@ def test_delete_hero_404(session: Session, client: TestClient):
assert response.status_code == 404
-def test_cli_get(mocker):
- hero = HeroFactory().build()
- hero = HeroRead(**hero.dict(exclude_none=True))
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.get.return_value = mocker.Mock()
- httpx.get.return_value.status_code = 200
- httpx.get.return_value.json.return_value = hero.dict()
+def test_config_memory(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+ hero = HeroFactory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add(hero)
+ session.commit()
+ hero = session.get(Hero, hero.id)
+ heroes = session.exec(select(Hero)).all()
+ assert hero.name == "Steelman"
+ assert hero.age == 25
+ assert len(heroes) == 1
- result = runner.invoke(hero_app, ["get", "1"])
+
+def test_cli_get(mocker):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ hero = HeroFactory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add(hero)
+ session.commit()
+ hero = session.get(Hero, hero.id)
+ result = runner.invoke(hero_app, ["get", "--hero-id", "1"])
assert result.exit_code == 0
- for key, value in hero.dict(exclude_unset=True).items():
- if type(value) == str:
- assert f"{key}='{value}'" in result.stdout
- elif type(value) == int:
- assert f"{key}={value}" in result.stdout
- assert httpx.get.call_count == 1
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 0
+ assert f"name='{hero.name}'" in result.stdout
+ assert f"secret_name='{hero.secret_name}'" in result.stdout
def test_cli_get_404(mocker):
- hero = HeroFactory().build()
- hero = HeroRead(**hero.dict(exclude_none=True))
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.get.return_value = mocker.Mock()
- httpx.get.return_value.status_code = 404
- httpx.get.return_value.text = "Hero not found"
- httpx.get.return_value.json.return_value = hero.dict()
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
- result = runner.invoke(hero_app, ["get", "999"])
- assert result.exit_code == 1
- assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
- assert httpx.get.call_count == 1
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 0
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ hero = HeroFactory().build(name="Steelman", age=25)
+ with config.database.session as session:
+ session.add(hero)
+ session.commit()
+ hero = session.get(Hero, hero.id)
+ result = runner.invoke(hero_app, ["get", "--hero-id", "999"])
+ assert result.exception.status_code == 404
+ assert result.exception.detail == "Hero not found"
def test_cli_list(mocker):
- heros = HeroFactory().batch(5)
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.get.return_value = mocker.Mock()
- httpx.get.return_value.status_code = 200
- httpx.get.return_value.json.return_value = heros
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ hero_1 = HeroFactory().build(name="Steelman", age=25)
+ hero_2 = HeroFactory().build(name="Hunk", age=52)
+
+ with config.database.session as session:
+ session.add(hero_1)
+ session.add(hero_2)
+ session.commit()
+ session.refresh(hero_1)
+ session.refresh(hero_2)
result = runner.invoke(hero_app, ["list"])
assert result.exit_code == 0
-
- for hero in heros:
- for key, value in hero.dict(exclude_unset=True).items():
- if type(value) == str:
- assert f"{key}='{value}'" in result.stdout
- elif type(value) == int:
- assert f"{key}={value}" in result.stdout
+ assert f"name='{hero_1.name}'" in result.stdout
+ assert f"secret_name='{hero_1.secret_name}'" in result.stdout
+ assert f"name='{hero_2.name}'" in result.stdout
+ assert f"secret_name='{hero_2.secret_name}'" in result.stdout
def test_model_post(mocker):
- hero = HeroFactory().build()
+ patch_httpx_post = mocker.patch(
+ "httpx.post", return_value=mocker.Mock(status_code=200)
+ )
+ hero = HeroFactory().build(name="Steelman", age=25)
hero_create = HeroCreate(**hero.dict())
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.post.return_value = mocker.Mock()
- httpx.post.return_value.status_code = 200
- httpx.post.return_value.json.return_value = hero.dict()
- result = hero_create.post()
- assert result == hero
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 1
- assert httpx.delete.call_count == 0
+ hero_create.post()
+ assert patch_httpx_post.call_count == 1
def test_model_post_500(mocker):
- hero = HeroFactory().build()
+ patch_httpx_post = mocker.patch(
+ "httpx.post", return_value=mocker.Mock(status_code=500)
+ )
+ hero = HeroFactory().build(name="Steelman", age=25)
hero_create = HeroCreate(**hero.dict())
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.post.return_value = mocker.Mock()
- httpx.post.return_value.status_code = 500
- httpx.post.return_value.json.return_value = hero.dict()
with pytest.raises(RuntimeError):
hero_create.post()
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 1
- assert httpx.delete.call_count == 0
+ assert patch_httpx_post.call_count == 1
-def test_model_read_hero(mocker):
- hero = HeroFactory().build()
+def test_model_read_hero(mocker, session: Session, client: TestClient):
+ mocker.patch(
+ "learn_sql_model.config.Database.engine",
+ new_callable=lambda: create_engine(
+ "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
+ ),
+ )
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.get.return_value = mocker.Mock()
- httpx.get.return_value.status_code = 200
- httpx.get.return_value.json.return_value = hero.dict()
+ config = get_config()
+ SQLModel.metadata.create_all(config.database.engine)
+
+ hero = Hero(name="Deadpond", secret_name="Dive Wilson")
+ session = config.database.session
+ session.add(hero)
+ session.commit()
+ session.refresh(hero)
hero_read = HeroRead.get(id=hero.id)
- assert hero_read.name == hero.name
- assert hero_read.secret_name == hero.secret_name
- assert httpx.get.call_count == 1
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 0
-
-
-def test_model_read_hero_404(mocker):
- hero = HeroFactory().build()
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.get.return_value = mocker.Mock()
- httpx.get.return_value.status_code = 404
- httpx.get.return_value.text = "Hero not found"
-
- with pytest.raises(RuntimeError) as e:
- HeroRead.get(id=hero.id)
- assert e.value.args[0] == "404: Hero not found"
- assert httpx.get.call_count == 1
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 0
-
-
-def test_model_delete_hero(mocker):
- hero = HeroFactory().build()
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.delete.return_value = mocker.Mock()
- httpx.delete.return_value.status_code = 200
- httpx.delete.return_value.json.return_value = hero.dict()
-
- hero_delete = HeroDelete.delete(id=hero.id)
- assert hero_delete == {"ok": True}
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 1
-
-
-def test_model_delete_hero_404(mocker):
- hero = HeroFactory().build()
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.delete.return_value = mocker.Mock()
- httpx.delete.return_value.status_code = 404
- httpx.get.return_value.text = "Hero not found"
-
- with pytest.raises(RuntimeError) as e:
- HeroDelete.delete(id=hero.id)
- assert e.value.args[0] == "404: Hero not found"
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 1
-
-
-def test_cli_delete_hero(mocker):
- hero = HeroFactory().build()
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.delete.return_value = mocker.Mock()
- httpx.delete.return_value.status_code = 200
- httpx.delete.return_value.json.return_value = hero.dict()
-
- result = runner.invoke(hero_app, ["delete", "--hero-id", "1"])
- assert result.exit_code == 0
- assert "{'ok': True}" in result.stdout
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 1
-
-
-def test_cli_delete_hero_404(mocker):
- hero = HeroFactory().build()
-
- httpx = mocker.patch.object(hero_models, "httpx")
- httpx.delete.return_value = mocker.Mock()
- httpx.delete.return_value.status_code = 404
- httpx.delete.return_value.text = "Hero not found"
- httpx.delete.return_value.json.return_value = hero.dict()
-
- result = runner.invoke(hero_app, ["delete", "--hero-id", "999"])
- assert result.exit_code == 1
- assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
- assert httpx.get.call_count == 0
- assert httpx.post.call_count == 0
- assert httpx.delete.call_count == 1
+ assert hero_read.name == "Deadpond"
+ assert hero_read.secret_name == "Dive Wilson"
diff --git a/tmp.py b/tmp.py
deleted file mode 100644
index a4947c0..0000000
--- a/tmp.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import sqlite3
-
-from graphviz import Digraph
-
-
-def generate_er_diagram(database_path, output_path):
- # Connect to the SQLite database
- conn = sqlite3.connect(database_path)
- cursor = conn.cursor()
-
- # Get the table names from the database
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = cursor.fetchall()
-
- # Create a new Digraph
- dot = Digraph(format="png")
- dot.attr(rankdir="TD")
-
- # Iterate over the tables
- for table in tables:
- table_name = table[0]
- dot.node(table_name, shape="box")
- cursor.execute(f"PRAGMA table_info({table_name});")
- columns = cursor.fetchall()
-
- # Add the columns to the table node
- for column in columns:
- column_name = column[1]
- dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
- dot.edge(table_name, f"{table_name}.{column_name}")
-
- # Check for foreign key relationships
- cursor.execute(f"PRAGMA foreign_key_list({table_name});")
- foreign_keys = cursor.fetchall()
-
- # Add dotted lines for foreign key relationships
- for foreign_key in foreign_keys:
- from_column = foreign_key[3]
- to_table = foreign_key[2]
- to_column = foreign_key[4]
- dot.node(f"{to_table}.{to_column}", shape="oval")
- dot.edge(
- f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
- )
-
- # Render and save the diagram
- dot.render(output_path.replace(".png", ""), cleanup=True)
-
- # Close the database connection
- cursor.close()
- conn.close()
-
-
-def generate_markdown(database_path, output_path, er_diagram_path):
- # Connect to the SQLite database
- conn = sqlite3.connect(database_path)
- cursor = conn.cursor()
-
- # Get the table names from the database
- cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
- tables = cursor.fetchall()
-
- with open(output_path, "w") as f:
- # Write the ER Diagram image
- f.write(f"\n\n---\n\n")
-
- # Iterate over the tables
- for table in tables:
- table_name = table[0]
-
- f.write(f"## Table: {table_name}\n\n")
-
- # Get the table columns
- cursor.execute(f"PRAGMA table_info({table_name});")
- columns = cursor.fetchall()
-
- f.write("### First 5 rows\n\n")
- cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
- rows = cursor.fetchall()
- f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
- f.write("|")
- for column in columns:
- # ---
- f.write(f'{"-"*(len(column[1]) + 2)}|')
- f.write("\n")
- for row in rows:
- f.write(f'| {" | ".join([str(r) for r in row])} |\n')
- f.write("\n")
-
- cursor.execute(f"PRAGMA foreign_key_list({table_name});")
- foreign_keys = cursor.fetchall()
-
- # Add dotted lines for foreign key relationships
- fkeys = {}
- for foreign_key in foreign_keys:
- from_column = foreign_key[3]
- to_table = foreign_key[2]
- to_column = foreign_key[4]
- fkeys[from_column] = f"{to_table}.{to_column}"
-
- # Replace 'description' with the actual column name in the table that contains the description, if applicable
- try:
- cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
- description = cursor.fetchone()
- if description:
- f.write(f"### Description\n\n{description[0]}\n\n")
- except:
- ...
-
- # Write the table columns
- f.write("### Columns\n\n")
- f.write("| Column Name | Type | Foreign Key | Example Value |\n")
- f.write("|-------------|------|-------------|---------------|\n")
-
- for column in columns:
-
- column_name = column[1]
- column_type = column[2]
- fkey = ""
- if column_name in fkeys:
- fkey = fkeys[column_name]
- f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
-
- f.write("\n")
-
- # Get the count of records
- cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
- records_count = cursor.fetchone()[0]
- f.write(
- f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
- )
-
- # Close the database connection
- cursor.close()
- conn.close()
-
-
-# Usage example
-database_path = "database.db"
-md_output_path = "database.md"
-er_output_path = "er_diagram.png"
-
-generate_er_diagram(database_path, er_output_path)
-generate_markdown(database_path, md_output_path, er_output_path)
diff --git a/tmp.sh b/tmp.sh
deleted file mode 100644
index 7f36aa4..0000000
--- a/tmp.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-max="$1"
-date
-echo "url: $2
-rate: $max calls / second"
-START=$(date +%s);
-
-get () {
- curl -s -v "$1" 2>&1 | tr '\r\n' '\\n' | awk -v date="$(date +'%r')" '{print $0"\n-----", date}' >> /tmp/perf-test.log
-}
-
-while true
-do
- echo $(($(date +%s) - START)) | awk '{print int($1/60)":"int($1%60)}'
- sleep 1
-
- for i in `seq 1 $max`
- do
- get $2 &
- done
-done
diff --git a/wyatt.py b/wyatt.py
deleted file mode 100644
index d84db3e..0000000
--- a/wyatt.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import random
-import sys
-
-# Initialize player attributes
-player = {
- "name": input("Enter your character's name: "),
- "health": 100,
- "food": 100,
- "x": 5,
- "y": 5,
- "day": 1,
-}
-
-# Define game resources
-resources = {
- "food": 50,
- "water": 50,
-}
-
-# Define game constants
-MAP_WIDTH, MAP_HEIGHT = 20, 10
-PLAYER_CHAR = "(o)"
-ENEMY_CHAR = "(?)"
-
-# Game loop
-while player["health"] > 0:
- # Create the game map
- game_map = [[" " for _ in range(MAP_WIDTH)] for _ in range(MAP_HEIGHT)]
- game_map[player["y"]][player["x"]] = PLAYER_CHAR
-
- # Place enemies randomly on the map
- for _ in range(random.randint(1, 3)):
- enemy_x = random.randint(0, MAP_WIDTH - 1)
- enemy_y = random.randint(0, MAP_HEIGHT - 1)
- game_map[enemy_y][enemy_x] = ENEMY_CHAR
-
- # Print the game map
- for row in game_map:
- print("".join(row))
-
- print(f"\nDay {player['day']}")
- print(f"Name: {player['name']}")
- print(f"Health: {player['health']} HP {'*' * player['health']}")
- print(f"Food: {player['food']} Hunger {'*' * player['food']}")
- print(f"Coordinates: ({player['x']}, {player['y']})")
-
- # Player input for movement
- move = input("Move (W/A/S/D): ").upper()
-
- # Update player position based on input
- if move == "W" and player["y"] > 0:
- player["y"] -= 1
- elif move == "S" and player["y"] < MAP_HEIGHT - 1:
- player["y"] += 1
- elif move == "A" and player["x"] > 0:
- player["x"] -= 1
- elif move == "D" and player["x"] < MAP_WIDTH - 1:
- player["x"] += 1
-
- # Consume resources
- player["food"] -= random.randint(5, 15)
-
- # Check if the player has enough resources
- if player["food"] < 0:
- player["food"] = 0
- player["health"] -= 10
-
- # Check if the player encounters an enemy
- if game_map[player["y"]][player["x"]] == ENEMY_CHAR:
- enemy_damage = random.randint(10, 30)
- player["health"] -= enemy_damage
- print(f"You encountered an enemy and took {enemy_damage} damage!")
-
- # Rest for the day
- player["day"] += 1
-
- # Exit the game if health reaches zero
- if player["health"] <= 0:
- print("Game Over. You did not survive.")
- break
-
- input("Press Enter to continue to the next day...")
-
-sys.exit()