Compare commits

..

No commits in common. "main" and "v5" have entirely different histories.
main ... v5

106 changed files with 811 additions and 4107 deletions

View file

@ -1,978 +0,0 @@
# flyctl launch added from .gitignore
# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
### Data ###
**/*.csv
**/*.dat
**/*.efx
**/*.gbr
**/*.key
**/*.pps
**/*.ppt
**/*.pptx
**/*.sdf
**/*.tax2010
**/*.vcf
**/*.xml
### Emacs ###
# -*- mode: gitignore; -*-
**/*~
**/\#*\#
.emacs.desktop
.emacs.desktop.lock
**/*.elc
**/auto-save-list
**/tramp
**/.\#*
# Org-mode
**/.org-id-locations
**/*_archive
# flymake-mode
**/*_flymake.*
# eshell files
eshell/history
eshell/lastdir
# elpa packages
elpa
# reftex files
**/*.rel
# AUCTeX auto folder
auto
# cask packages
**/.cask
**/dist
# Flycheck
**/flycheck_*.el
# server auth directory
server
# projectiles files
**/.projectile
# directory configuration
**/.dir-locals.el
# network security
network-security.data
### Executable ###
**/*.app
**/*.bat
**/*.cgi
**/*.com
**/*.exe
**/*.gadget
**/*.jar
**/*.pif
**/*.vb
**/*.wsf
### Node ###
# Logs
**/logs
**/*.log
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/lerna-debug.log*
**/.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
**/pids
**/*.pid
**/*.seed
**/*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
**/lib-cov
# Coverage directory used by tools like istanbul
**/coverage
**/*.lcov
# nyc test coverage
**/.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
**/.grunt
# Bower dependency directory (https://bower.io/)
**/bower_components
# node-waf configuration
**/.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
**/build/Release
# Dependency directories
**/node_modules
**/jspm_packages
# Snowpack dependency directory (https://snowpack.dev/)
**/web_modules
# TypeScript cache
**/*.tsbuildinfo
# Optional npm cache directory
**/.npm
# Optional eslint cache
**/.eslintcache
# Optional stylelint cache
**/.stylelintcache
# Microbundle cache
**/.rpt2_cache
**/.rts2_cache_cjs
**/.rts2_cache_es
**/.rts2_cache_umd
# Optional REPL history
**/.node_repl_history
# Output of 'npm pack'
**/*.tgz
# Yarn Integrity file
**/.yarn-integrity
# dotenv environment variable files
**/.env
**/.env.development.local
**/.env.test.local
**/.env.production.local
**/.env.local
# parcel-bundler cache (https://parceljs.org/)
**/.cache
**/.parcel-cache
# Next.js build output
**/.next
**/out
# Nuxt.js build / generate output
**/.nuxt
**/dist
# Gatsby files
**/.cache
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
**/.vuepress/dist
# vuepress v2.x temp and cache directory
**/.temp
# Docusaurus cache and generated files
**/.docusaurus
# Serverless directories
**/.serverless
# FuseBox cache
**/.fusebox
# DynamoDB Local files
**/.dynamodb
# TernJS port file
**/.tern-port
# Stores VSCode versions used for testing VSCode extensions
**/.vscode-test
# yarn v2
**/.yarn/cache
**/.yarn/unplugged
**/.yarn/build-state.yml
**/.yarn/install-state.gz
**/.pnp.*
### Node Patch ###
# Serverless Webpack directories
**/.webpack
# Optional stylelint cache
# SvelteKit build / generate output
**/.svelte-kit
### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
**/.idea/**/workspace.xml
**/.idea/**/tasks.xml
**/.idea/**/usage.statistics.xml
**/.idea/**/dictionaries
**/.idea/**/shelf
# AWS User-specific
**/.idea/**/aws.xml
# Generated files
**/.idea/**/contentModel.xml
# Sensitive or high-churn files
**/.idea/**/dataSources
**/.idea/**/dataSources.ids
**/.idea/**/dataSources.local.xml
**/.idea/**/sqlDataSources.xml
**/.idea/**/dynamic.xml
**/.idea/**/uiDesigner.xml
**/.idea/**/dbnavigator.xml
# Gradle
**/.idea/**/gradle.xml
**/.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
**/cmake-build-*
# Mongo Explorer plugin
**/.idea/**/mongoSettings.xml
# File-based project format
**/*.iws
# IntelliJ
**/out
# mpeltonen/sbt-idea plugin
**/.idea_modules
# JIRA plugin
**/atlassian-ide-plugin.xml
# Cursive Clojure plugin
**/.idea/replstate.xml
# SonarLint plugin
**/.idea/sonarlint
# Crashlytics plugin (for Android Studio and IntelliJ)
**/com_crashlytics_export_strings.xml
**/crashlytics.properties
**/crashlytics-build.properties
**/fabric.properties
# Editor-based Rest Client
**/.idea/httpRequests
# Android studio 3.1+ serialized cache file
**/.idea/caches/build_file_checksums.ser
### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
**/.idea/**/sonarlint
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
**/.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
**/.idea/**/markdown-navigator.xml
**/.idea/**/markdown-navigator-enh.xml
**/.idea/**/markdown-navigator
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
**/.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
**/.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
**/.idea/**/azureSettings.xml
### Python ###
# Byte-compiled / optimized / DLL files
**/__pycache__
**/*.py[cod]
**/*$py.class
# C extensions
**/*.so
# Distribution / packaging
**/.Python
**/build
**/develop-eggs
**/downloads
**/eggs
**/.eggs
**/lib
**/lib64
**/parts
**/sdist
**/var
**/wheels
**/share/python-wheels
**/*.egg-info
**/.installed.cfg
**/*.egg
**/MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
**/*.manifest
**/*.spec
# Installer logs
**/pip-log.txt
**/pip-delete-this-directory.txt
# Unit test / coverage reports
**/htmlcov
**/.tox
**/.nox
**/.coverage
**/.coverage.*
**/nosetests.xml
**/coverage.xml
**/*.cover
**/*.py,cover
**/.hypothesis
**/.pytest_cache
**/cover
# Translations
**/*.mo
**/*.pot
# Django stuff:
**/local_settings.py
**/db.sqlite3
**/db.sqlite3-journal
# Flask stuff:
**/instance
**/.webassets-cache
# Scrapy stuff:
**/.scrapy
# Sphinx documentation
**/docs/_build
# PyBuilder
**/.pybuilder
**/target
# Jupyter Notebook
**/.ipynb_checkpoints
# IPython
**/profile_default
**/ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
**/.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
**/__pypackages__
# Celery stuff
**/celerybeat-schedule
**/celerybeat.pid
# SageMath parsed files
**/*.sage.py
# Environments
**/.venv
**/env
**/venv
**/ENV
**/env.bak
**/venv.bak
# Spyder project settings
**/.spyderproject
**/.spyproject
# Rope project settings
**/.ropeproject
# mkdocs documentation
site
# mypy
**/.mypy_cache
**/.dmypy.json
**/dmypy.json
# Pyre type checker
**/.pyre
# pytype static type analyzer
**/.pytype
# Cython debug symbols
**/cython_debug
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
**/poetry.toml
# ruff
**/.ruff_cache
# LSP config files
**/pyrightconfig.json
### SublimeText ###
# Cache files for Sublime Text
**/*.tmlanguage.cache
**/*.tmPreferences.cache
**/*.stTheme.cache
# Workspace files are user-specific
**/*.sublime-workspace
# Project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using Sublime Text
# *.sublime-project
# SFTP configuration file
**/sftp-config.json
**/sftp-config-alt*.json
# Package control specific files
**/Package Control.last-run
**/Package Control.ca-list
**/Package Control.ca-bundle
**/Package Control.system-ca-bundle
**/Package Control.cache
**/Package Control.ca-certs
**/Package Control.merged-ca-bundle
**/Package Control.user-ca-bundle
**/oscrypto-ca-bundle.crt
**/bh_unicode_properties.cache
# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
**/GitHub.sublime-settings
### Vim ###
# Swap
**/[._]*.s[a-v][a-z]
!**/*.svg # comment out if you don't need vector files
**/[._]*.sw[a-p]
**/[._]s[a-rt-v][a-z]
**/[._]ss[a-gi-z]
**/[._]sw[a-p]
# Session
**/Session.vim
**/Sessionx.vim
# Temporary
**/.netrwhist
# Auto-generated tag files
**/tags
# Persistent undo
**/[._]*.un~
### VisualStudioCode ###
**/.vscode/*
!**/.vscode/settings.json
!**/.vscode/tasks.json
!**/.vscode/launch.json
!**/.vscode/extensions.json
!**/.vscode/*.code-snippets
# Local History for Visual Studio Code
**/.history
# Built Visual Studio Code Extensions
**/*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
**/.history
**/.ionide
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
**/*.rsuser
**/*.suo
**/*.user
**/*.userosscache
**/*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
**/*.userprefs
# Mono auto generated files
**/mono_crash.*
# Build results
**/[Dd]ebug
**/[Dd]ebugPublic
**/[Rr]elease
**/[Rr]eleases
**/x64
**/x86
**/[Ww][Ii][Nn]32
**/[Aa][Rr][Mm]
**/[Aa][Rr][Mm]64
**/bld
**/[Bb]in
**/[Oo]bj
**/[Ll]og
**/[Ll]ogs
# Visual Studio 2015/2017 cache/options directory
**/.vs
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
**/Generated\ Files
# MSTest test Results
**/[Tt]est[Rr]esult*
**/[Bb]uild[Ll]og.*
# NUnit
**/*.VisualState.xml
**/TestResult.xml
**/nunit-*.xml
# Build Results of an ATL Project
**/[Dd]ebugPS
**/[Rr]eleasePS
**/dlldata.c
# Benchmark Results
**/BenchmarkDotNet.Artifacts
# .NET Core
**/project.lock.json
**/project.fragment.lock.json
**/artifacts
# ASP.NET Scaffolding
**/ScaffoldingReadMe.txt
# StyleCop
**/StyleCopReport.xml
# Files built by Visual Studio
**/*_i.c
**/*_p.c
**/*_h.h
**/*.ilk
**/*.meta
**/*.obj
**/*.iobj
**/*.pch
**/*.pdb
**/*.ipdb
**/*.pgc
**/*.pgd
**/*.rsp
**/*.sbr
**/*.tlb
**/*.tli
**/*.tlh
**/*.tmp
**/*.tmp_proj
**/*_wpftmp.csproj
**/*.tlog
**/*.vspscc
**/*.vssscc
**/.builds
**/*.pidb
**/*.svclog
**/*.scc
# Chutzpah Test files
**/_Chutzpah*
# Visual C++ cache files
**/ipch
**/*.aps
**/*.ncb
**/*.opendb
**/*.opensdf
**/*.cachefile
**/*.VC.db
**/*.VC.VC.opendb
# Visual Studio profiler
**/*.psess
**/*.vsp
**/*.vspx
**/*.sap
# Visual Studio Trace Files
**/*.e2e
# TFS 2012 Local Workspace
**/$tf
# Guidance Automation Toolkit
**/*.gpState
# ReSharper is a .NET coding add-in
**/_ReSharper*
**/*.[Rr]e[Ss]harper
**/*.DotSettings.user
# TeamCity is a build add-in
**/_TeamCity*
# DotCover is a Code Coverage Tool
**/*.dotCover
# AxoCover is a Code Coverage Tool
**/.axoCover/*
!**/.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
**/coverage*.json
**/coverage*.xml
**/coverage*.info
# Visual Studio code coverage results
**/*.coverage
**/*.coveragexml
# NCrunch
**/_NCrunch_*
**/.*crunch*.local.xml
**/nCrunchTemp_*
# MightyMoose
**/*.mm.*
**/AutoTest.Net
# Web workbench (sass)
**/.sass-cache
# Installshield output folder
**/[Ee]xpress
# DocProject is a documentation generator add-in
**/DocProject/buildhelp
**/DocProject/Help/*.HxT
**/DocProject/Help/*.HxC
**/DocProject/Help/*.hhc
**/DocProject/Help/*.hhk
**/DocProject/Help/*.hhp
**/DocProject/Help/Html2
**/DocProject/Help/html
# Click-Once directory
**/publish
# Publish Web Output
**/*.[Pp]ublish.xml
**/*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
**/*.pubxml
**/*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
**/PublishScripts
# NuGet Packages
**/*.nupkg
# NuGet Symbol Packages
**/*.snupkg
# The packages folder can be ignored because of Package Restore
**/**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/**/[Pp]ackages/build
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
**/*.nuget.props
**/*.nuget.targets
# Microsoft Azure Build Output
**/csx
**/*.build.csdef
# Microsoft Azure Emulator
**/ecf
**/rcf
# Windows Store app package directories and files
**/AppPackages
**/BundleArtifacts
**/Package.StoreAssociation.xml
**/_pkginfo.txt
**/*.appx
**/*.appxbundle
**/*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
**/*.[Cc]ache
# but keep track of directories ending in .cache
!**/?*.[Cc]ache
# Others
**/ClientBin
**/~$*
**/*.dbmdl
**/*.dbproj.schemaview
**/*.jfm
**/*.pfx
**/*.publishsettings
**/orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
**/Generated_Code
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
**/_UpgradeReport_Files
**/Backup*
**/UpgradeLog*.XML
**/UpgradeLog*.htm
**/ServiceFabricBackup
**/*.rptproj.bak
# SQL Server files
**/*.mdf
**/*.ldf
**/*.ndf
# Business Intelligence projects
**/*.rdl.data
**/*.bim.layout
**/*.bim_*.settings
**/*.rptproj.rsuser
**/*- [Bb]ackup.rdl
**/*- [Bb]ackup ([0-9]).rdl
**/*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
**/FakesAssemblies
# GhostDoc plugin setting file
**/*.GhostDoc.xml
# Node.js Tools for Visual Studio
**/.ntvs_analysis.dat
# Visual Studio 6 build log
**/*.plg
# Visual Studio 6 workspace options file
**/*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
**/*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
**/*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
**/*.dsw
**/*.dsp
# Visual Studio 6 technical files
# Visual Studio LightSwitch build output
**/**/*.HTMLClient/GeneratedArtifacts
**/**/*.DesktopClient/GeneratedArtifacts
**/**/*.DesktopClient/ModelManifest.xml
**/**/*.Server/GeneratedArtifacts
**/**/*.Server/ModelManifest.xml
**/_Pvt_Extensions
# Paket dependency manager
**/.paket/paket.exe
**/paket-files
# FAKE - F# Make
**/.fake
# CodeRush personal settings
**/.cr/personal
# Python Tools for Visual Studio (PTVS)
**/*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
**/*.tss
# Telerik's JustMock configuration file
**/*.jmconfig
# BizTalk build output
**/*.btp.cs
**/*.btm.cs
**/*.odx.cs
**/*.xsd.cs
# OpenCover UI analysis results
**/OpenCover
# Azure Stream Analytics local run output
**/ASALocalRun
# MSBuild Binary and Structured Log
**/*.binlog
# NVidia Nsight GPU debugger configuration file
**/*.nvuser
# MFractors (Xamarin productivity tool) working folder
**/.mfractor
# Local History for Visual Studio
**/.localhistory
# Visual Studio History (VSHistory) files
**/.vshistory
# BeatPulse healthcheck temp database
**/healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
**/MigrationBackup
# Ionide (cross platform F# VS Code tools) working folder
**/.ionide
# Fody - auto-generated XML schema
**/FodyWeavers.xsd
# VS Code files for those working on multiple tools
**/*.code-workspace
# Local History for Visual Studio Code
# Windows Installer files from build outputs
**/*.cab
**/*.msi
**/*.msix
**/*.msm
**/*.msp
# JetBrains Rider
**/*.sln.iml
### VisualStudio Patch ###
# Additional files built by Visual Studio
# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
**/database.db
**/database.db
**/database.db
**/.markata.cache
**/database.sqlite
# flyctl launch added from .pytest_cache/.gitignore
# Created by pytest automatically.
.pytest_cache/**/*
# flyctl launch added from .ruff_cache/.gitignore
.ruff_cache/**/*
fly.toml

View file

@ -11,18 +11,22 @@ jobs:
contents: write # release changes require contents write
steps:
- uses: actions/checkout@v3
- name: create versioned release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release create v${{ github.run_number }} --notes "Release ${{ github.run_number }}"
- name: remove last latest release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release delete latest
# - name: remove last latest release
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# run: gh release delete latest
- name: create latest release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release create latest --notes "latest release"
- name: create versioned release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release create v${{ github.run_number }} --notes "Release ${{ github.run_number }}"
- name: Upload Release Asset
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release upload latest dist/
release:
needs: setup
strategy:
@ -41,15 +45,7 @@ jobs:
with:
name: lsm-${{ matrix.os }}
path: dist
- uses: vimtor/action-zip@v1
with:
files: dist/
dest: lsm-${{ matrix.os }}.zip
- name: Upload Release Asset to versioned release
- name: Upload Release Asset
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release upload v${{ github.run_number }} lsm-${{ matrix.os }}.zip
- name: Upload Release Asset to latest release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh release upload latest lsm-${{ matrix.os }}.zip
run: gh release upload ${{ github.run_number }} dist/

2
.gitignore vendored
View file

@ -967,5 +967,3 @@ database.db
database.db
.markata.cache
database.sqlite
.env.dev
.env.dev.docker

View file

@ -1,5 +1,3 @@
from learn_sql_model.optional import _optional_import_
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import Config

View file

@ -4,9 +4,9 @@ WORKDIR /app
Copy pyproject.toml /app
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
COPY README.md /app
RUN pip3 install '.[all]'
RUN pip3 install .
COPY . /app
RUN pip3 install '.[all]'
RUN pip3 install .
EXPOSE 5000

View file

@ -1,6 +1,3 @@
> [!IMPORTANT]
> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
# Learn SQL Model
learning sql model

Binary file not shown.

Before

Width:  |  Height:  |  Size: 769 B

70
d3.py
View file

@ -1,70 +0,0 @@
import sqlite3
from jinja2 import Environment, FileSystemLoader
def get_tables_and_columns(conn):
    """Return metadata for every table in the SQLite database.

    Each entry is a dict with the table's ``name``, its ``columns``
    (list of column names) and its ``foreign_keys`` (see
    :func:`get_foreign_keys`).
    """
    cursor = conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    result = []
    for (table_name,) in cursor.fetchall():
        result.append(
            {
                "name": table_name,
                "columns": get_columns(conn, table_name),
                "foreign_keys": get_foreign_keys(conn, table_name),
            }
        )
    return result
def get_columns(conn, table_name):
    """Return the column names of ``table_name`` in declaration order."""
    # PRAGMA table_info rows are (cid, name, type, notnull, dflt_value, pk);
    # index 1 is the column name.
    rows = conn.cursor().execute(f"PRAGMA table_info({table_name});").fetchall()
    return [row[1] for row in rows]
def get_foreign_keys(conn, table_name):
    """Return the foreign keys declared on ``table_name``.

    Each entry maps ``id`` (FK ordinal), ``from`` (local column),
    ``to_table`` (referenced table) and ``to`` (referenced column).
    """
    cur = conn.cursor()
    cur.execute(f"PRAGMA foreign_key_list({table_name});")
    result = []
    # PRAGMA foreign_key_list rows are
    # (id, seq, table, from, to, on_update, on_delete, match).
    for row in cur.fetchall():
        result.append({"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]})
    return result
def generate_links(tables):
    """Compute pixel endpoints for each foreign-key edge between tables.

    Layout assumptions (shared with the template): tables sit on a
    horizontal grid starting at x=50 with 150px spacing; each table box is
    120px wide; the table header occupies the first 40px and each column
    row is 20px tall.

    Foreign keys whose target table is not present in ``tables`` are
    skipped (the original raised StopIteration via ``next()`` with no
    default).
    """
    links = []
    for t_index, table in enumerate(tables):
        for fk in table["foreign_keys"]:
            target_index = next(
                (
                    i
                    for i, target in enumerate(tables)
                    if target["name"] == fk["to_table"]
                ),
                None,
            )
            if target_index is None:
                # Referenced table is not in the diagram; draw nothing
                # rather than crashing.
                continue
            source_y = 40 + table["columns"].index(fk["from"]) * 20
            target_y = 40 + tables[target_index]["columns"].index(fk["to"]) * 20
            links.append(
                {
                    # Edge leaves the right side of the source box (+120)
                    # and enters the left side of the target box.
                    "source": {"x": 50 + t_index * 150 + 120, "y": 50 + source_y},
                    "target": {"x": 50 + target_index * 150, "y": 50 + target_y},
                }
            )
    return links
def generate_er_diagram(database_path):
    """Render an ER diagram of the SQLite database to ``index.html``.

    Reads schema metadata from ``database_path``, computes FK link
    endpoints, and renders the ``templates/er_diagram.html`` Jinja
    template into ``index.html`` in the current directory.
    """
    conn = sqlite3.connect(database_path)
    try:
        tables = get_tables_and_columns(conn)
    finally:
        # The original leaked the connection; close it as soon as the
        # schema has been read.
        conn.close()
    links = generate_links(tables)
    env = Environment(loader=FileSystemLoader("templates"))
    template = env.get_template("er_diagram.html")
    with open("index.html", "w") as f:
        f.write(template.render(tables=tables, links=links))
if __name__ == "__main__":
    # Script entry point: diagram the local development database.
    generate_er_diagram("database.db")

View file

@ -1,72 +0,0 @@
![ER Diagram](er_diagram.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| f48730a783a5 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| birthday | DATETIME | | |
| id | INTEGER | | |
### Records Count
The table pet contains 0 records.
---
## Table: hero
### First 5 rows
| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
|------|-------------|---|---|------|-----|-----------|--------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| secret_name | VARCHAR | | |
| x | INTEGER | | |
| y | INTEGER | | |
| size | INTEGER | | |
| age | INTEGER | | |
| shoe_size | INTEGER | | |
| pet_id | INTEGER | pet.id | |
| id | INTEGER | | |
### Records Count
The table hero contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 67 KiB

BIN
im.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<!-- Interactive ER diagram: D3 force layout of tables with curved FK links.
     Data (tables/links) is embedded as literals below. -->
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>ER Diagram</title>
<!-- Include d3.js -->
<script src="https://d3js.org/d3.v6.min.js"></script>
<style>
body {
margin: 0;
}
/* One <g class="table"> per table; clicking toggles column visibility. */
.table {
font-family: Arial, sans-serif;
font-size: 14px;
cursor: pointer;
}
.table-name {
font-weight: bold;
font-size: 16px;
}
/* Foreign-key columns are drawn in dark red. */
.foreign-key {
fill: #b30000;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 2px;
fill: none;
}
.link-curved-path {
pointer-events: none;
}
</style>
</head>
<body>
<div id="er-diagram"></div>
<script>
// Generate ER diagram
// NOTE(review): the data below looks baked in by the generator
// (d3.py renders this file through Jinja) — confirm before editing by hand.
const tables = [{'name': 'pet', 'columns': ['name', 'birthday', 'id'], 'foreign_keys': []}, {'name': 'hero', 'columns': ['name', 'secret_name', 'x', 'y', 'size', 'age', 'shoe_size', 'pet_id', 'id'], 'foreign_keys': [{'id': 0, 'from': 'pet_id', 'to_table': 'pet', 'to': 'id'}]}];
const links = [{'source': {'x': 320, 'y': 230}, 'target': {'x': 50, 'y': 130}}];
const width = window.innerWidth;
const height = window.innerHeight;
// Box geometry: fixed width, 20px per row plus one header row.
const tableElemWidth = 120;
const tableElemHeight = d => 20 * (d.columns.length + 1);
let svg = d3.select("#er-diagram")
.append("svg")
.attr("width", width)
.attr("height", height);
// Single root group so pan/zoom transforms everything at once.
let g = svg.append("g");
let linkGroup = g.selectAll(".link")
.data(links)
.join("path")
.attr("class", "link");
// Click on a table toggles its "collapsed" class (columns hidden below).
let tableGroup = g.selectAll(".table")
.data(tables)
.join("g")
.attr("class", "table")
.classed("collapsed", false)
.on("click", (event, d) => {
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
});
// Pan/zoom between 0.1x and 4x, applied to the root group.
let zoomBehavior = d3.zoom()
.scaleExtent([0.1, 4])
.on("zoom", function (event) {
g.attr("transform", event.transform);
});
svg.call(zoomBehavior);
let rect = tableGroup.append("rect")
.attr("width", tableElemWidth)
.attr("height", tableElemHeight)
.attr("fill", "#eee");
let text = tableGroup.append("text")
.attr("class", "table-name")
.attr("x", 10)
.attr("y", 20)
.text(d => d.name);
// One text node per column; FK columns get the red .foreign-key class.
let columnText = tableGroup.selectAll(".column")
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
.join("text")
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
.attr("x", 10)
.attr("y", (d, i) => 40 + i * 20)
.text(d => d.name);
// Physics simulation and force layout
// Repositions table groups each tick and redraws curved FK paths.
let simulation = d3.forceSimulation(tables)
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
.force("charge", d3.forceManyBody().strength(-800))
.force("x", d3.forceX(width / 2).strength(0.1))
.force("y", d3.forceY(height / 2).strength(0.1))
.on("tick", () => {
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
linkGroup.attr("d", d => {
const srcX = d.source.x + tableElemWidth;
// NOTE(review): link objects above carry only {x, y}; d.source.columns,
// d.source_col and d.target_col are never defined, so findIndex yields -1
// and the anchor falls back to 40 - 20 = 20px — confirm this is intended.
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
const tgtX = d.target.x;
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
const deltaX = tgtX - srcX;
const deltaY = tgtY - srcY;
// Cubic Bezier with a 50px vertical bow toward the target.
const curveFactor = 50;
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
});
// Hide column labels of collapsed tables (class toggled by click handler).
columnText.style("display", (d, i, nodes) => {
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
});
});
</script>
</body>
</html>

View file

@ -1,16 +1,16 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
from sqlmodel import SQLModel, Session
from learn_sql_model.config import get_session
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_config, get_session
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
hero_router = APIRouter()
@hero_router.on_event("startup")
async def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine)
...
def on_startup() -> None:
SQLModel.metadata.create_all(get_config().database.engine)
@hero_router.get("/hero/{hero_id}")
@ -32,12 +32,12 @@ async def post_hero(
session: Session = Depends(get_session),
hero: HeroCreate,
) -> HeroRead:
"create a hero"
"read all the heros"
db_hero = Hero.from_orm(hero)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
# await manager.broadcast({hero.json()}, id=1)
await manager.broadcast({hero.json()}, id=1)
return db_hero
@ -47,7 +47,7 @@ async def patch_hero(
session: Session = Depends(get_session),
hero: HeroUpdate,
) -> HeroRead:
"update a hero"
"read all the heros"
db_hero = session.get(Hero, hero.id)
if not db_hero:
raise HTTPException(status_code=404, detail="Hero not found")
@ -56,7 +56,7 @@ async def patch_hero(
session.add(db_hero)
session.commit()
session.refresh(db_hero)
# await manager.broadcast({hero.json()}, id=1)
await manager.broadcast({hero.json()}, id=1)
return db_hero
@ -66,13 +66,13 @@ async def delete_hero(
session: Session = Depends(get_session),
hero_id: int,
):
"delete a hero"
"read all the heros"
hero = session.get(Hero, hero_id)
if not hero:
raise HTTPException(status_code=404, detail="Hero not found")
session.delete(hero)
session.commit()
# await manager.broadcast(f"deleted hero {hero_id}", id=1)
await manager.broadcast(f"deleted hero {hero_id}", id=1)
return {"ok": True}
@ -82,6 +82,4 @@ async def get_heros(
session: Session = Depends(get_session),
) -> Heros:
"get all heros"
statement = select(Hero)
heros = session.exec(statement).all()
return Heros(__root__=heros)
return Heros.list(session=session)

View file

@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from rich.console import Console
from sqlmodel import Session, select
from sqlmodel import Session
from websockets.exceptions import ConnectionClosed
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.console import console
from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros
from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
web_socket_router = APIRouter()
@ -46,9 +46,7 @@ async def websocket_endpoint_connect(
):
Console().log(f"Client #{id} connecting")
await manager.connect(websocket, channel)
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await websocket.send_text(heros.json())
try:
@ -85,18 +83,11 @@ async def websocket_endpoint_hero_echo(
while True:
data = await websocket.receive_text()
hero = HeroUpdate.parse_raw(data)
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
if heros != last_heros:
await manager.broadcast(heros.json(), "heros")
last_heros = heros
db_hero = session.get(Hero, hero.id)
for key, value in hero.dict(exclude_unset=True).items():
setattr(db_hero, key, value)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
hero.update(session=session)
console.print(heros)
await websocket.send_text(heros.json())
@ -105,9 +96,7 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("disconnected")
except ConnectionClosed:
@ -115,8 +104,6 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("connection closed")

View file

@ -1,12 +1,11 @@
import httpx
from rich.console import Console
import typer
import uvicorn
from learn_sql_model.cli.common import verbose_callback
from learn_sql_model.config import get_config
from learn_sql_model.optional import _optional_import_
uvicorn = _optional_import_("uvicorn", group="api")
api_app = typer.Typer()

View file

@ -6,6 +6,7 @@ from rich.console import Console
import typer
from learn_sql_model.config import get_config
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import (
Hero,
HeroCreate,
@ -14,13 +15,6 @@ from learn_sql_model.models.hero import (
HeroUpdate,
Heros,
)
from learn_sql_model.optional import _optional_import_
HeroFactory = _optional_import_(
"learn_sql_model.factories.hero",
"HeroFactory",
group="api",
)
hero_app = typer.Typer()
@ -33,8 +27,9 @@ def hero():
@hero_app.command()
@engorgio(typer=True)
def get(
hero_id: Optional[int] = typer.Argument(),
hero_id: Optional[int] = typer.Argument(default=None),
) -> Union[Hero, List[Hero]]:
"get one hero"
hero = HeroRead.get(id=hero_id)
@ -43,19 +38,25 @@ def get(
@hero_app.command()
def list() -> Union[Hero, List[Hero]]:
@engorgio(typer=True)
def list(
where: Optional[str] = None,
offset: int = 0,
limit: Optional[int] = None,
) -> Union[Hero, List[Hero]]:
"list many heros"
heros = Heros.list()
heros = Heros.list(where=where, offset=offset, limit=limit)
Console().print(heros)
return heros
return hero
@hero_app.command()
def clear() -> Union[Hero, List[Hero]]:
"list many heros"
heros = Heros.list()
for hero in heros.__root__:
HeroDelete.delete(id=hero.id)
for hero in heros.heros:
HeroDelete(id=hero.id).delete()
return hero
@ -80,15 +81,14 @@ def update(
@hero_app.command()
@engorgio(typer=True)
def delete(
hero_id: Optional[int] = typer.Argument(),
hero: HeroDelete,
) -> Hero:
"delete a hero by id"
hero = HeroDelete.delete(id=hero_id)
Console().print(hero)
return hero
hero.delete()
@hero_app.command()
@engorgio(typer=True)
def populate(
n: int = 10,
) -> Hero:

View file

@ -1,15 +1,11 @@
from pathlib import Path
from typing import Annotated
# import copier
import alembic
from alembic.config import Config
import copier
import typer
from learn_sql_model.cli.common import verbose_callback
from learn_sql_model.config import get_config
from learn_sql_model.optional import _optional_import_
alembic = _optional_import_("alembic", group="manage")
Config = _optional_import_("alembic.config", "Config", group="manage")
model_app = typer.Typer()
@ -44,18 +40,11 @@ def create_revision(
callback=verbose_callback,
help="show the log messages",
),
message: Annotated[
str,
typer.Option(
"--message",
"-m",
prompt=True,
),
] = None,
message: str = typer.Option(
prompt=True,
),
):
alembic_cfg = Config("alembic.ini")
config = get_config()
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.revision(
config=alembic_cfg,
message=message,
@ -74,17 +63,7 @@ def checkout(
revision: str = typer.Option("head"),
):
alembic_cfg = Config("alembic.ini")
config = get_config()
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.upgrade(config=alembic_cfg, revision=revision)
@model_app.command()
def status():
alembic_cfg = Config("alembic.ini")
config = get_config()
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.current(config=alembic_cfg)
alembic.command.upgrade(config=alembic_cfg, revision="head")
@model_app.command()
@ -94,4 +73,5 @@ def populate(
callback=verbose_callback,
help="show the log messages",
),
): ...
):
...

View file

@ -4,8 +4,7 @@ from typing import TYPE_CHECKING
from fastapi import Depends
from pydantic import BaseModel, BaseSettings, validator
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session
from sqlmodel import SQLModel, Session
from learn_sql_model.standard_config import load
@ -19,7 +18,6 @@ class ApiServer(BaseModel):
reload: bool = True
log_level: str = "info"
host: str = "0.0.0.0"
workers: int = 1
class ApiClient(BaseModel):
@ -27,6 +25,7 @@ class ApiClient(BaseModel):
protocol: str = "https"
url: str = f"{protocol}://{host}"
class Database:
def __init__(self, config: "Config" = None) -> None:
if config is None:
@ -40,22 +39,10 @@ class Database:
"transactions": None,
}
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
self.db_conf = {}
if 'sqlite' in self.config.database_url:
self.db_conf = {
'connect_args': {"check_same_thread": False},
'pool_recycle': 3600,
'pool_pre_ping': True,
}
self._engine = create_engine(
self.config.database_url,
**self.db_conf
)
@property
def engine(self) -> "Engine":
return self._engine
return create_engine(self.config.database_url)
@property
def session(self) -> "Session":
@ -84,8 +71,7 @@ class Config(BaseSettings):
return get_database(config=self)
def init(self) -> None:
# SQLModel.metadata.create_all(self.database.engine)
...
SQLModel.metadata.create_all(self.database.engine)
def get_database(config: Config = None) -> Database:
@ -100,14 +86,9 @@ def get_config(overrides: dict = {}) -> Config:
return config
config = get_config()
database = get_database()
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
def get_session() -> "Session":
with Session(database.engine) as session:
config = get_config()
with Session(config.database.engine) as session:
yield session

View file

@ -1,151 +0,0 @@
import sqlite3
from graphviz import Digraph
from learn_sql_model.config import get_config
config = get_config()
def generate_er_diagram(output_path):
# Connect to the SQLite database
database_path = config.database_url.replace("sqlite:///", "")
conn = sqlite3.connect(database_path)
cursor = conn.cursor()
# Get the table names from the database
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cursor.fetchall()
# Create a new Digraph
dot = Digraph(format="png")
dot.attr(rankdir="TD")
# Iterate over the tables
for table in tables:
table_name = table[0]
dot.node(table_name, shape="box")
cursor.execute(f"PRAGMA table_info({table_name});")
columns = cursor.fetchall()
# Add the columns to the table node
for column in columns:
column_name = column[1]
dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
dot.edge(table_name, f"{table_name}.{column_name}")
# Check for foreign key relationships
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
foreign_keys = cursor.fetchall()
# Add dotted lines for foreign key relationships
for foreign_key in foreign_keys:
from_column = foreign_key[3]
to_table = foreign_key[2]
to_column = foreign_key[4]
dot.node(f"{to_table}.{to_column}", shape="oval")
dot.edge(
f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
)
# Render and save the diagram
dot.render(output_path.replace(".png", ""), cleanup=True)
# Close the database connection
cursor.close()
conn.close()
def generate_er_markdown(output_path, er_diagram_path):
# Connect to the SQLite database
database_path = config.database_url.replace("sqlite:///", "")
conn = sqlite3.connect(database_path)
cursor = conn.cursor()
# Get the table names from the database
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cursor.fetchall()
with open(output_path, "w") as f:
# Write the ER Diagram image
f.write(f"![ER Diagram]({er_diagram_path})\n\n---\n\n")
# Iterate over the tables
for table in tables:
table_name = table[0]
f.write(f"## Table: {table_name}\n\n")
# Get the table columns
cursor.execute(f"PRAGMA table_info({table_name});")
columns = cursor.fetchall()
f.write("### First 5 rows\n\n")
cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
rows = cursor.fetchall()
f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
f.write("|")
for column in columns:
# ---
f.write(f'{"-"*(len(column[1]) + 2)}|')
f.write("\n")
for row in rows:
f.write(f'| {" | ".join([str(r) for r in row])} |\n')
f.write("\n")
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
foreign_keys = cursor.fetchall()
# Add dotted lines for foreign key relationships
fkeys = {}
for foreign_key in foreign_keys:
from_column = foreign_key[3]
to_table = foreign_key[2]
to_column = foreign_key[4]
fkeys[from_column] = f"{to_table}.{to_column}"
# Replace 'description' with the actual column name in the table that contains the description, if applicable
try:
cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
description = cursor.fetchone()
if description:
f.write(f"### Description\n\n{description[0]}\n\n")
except:
...
# Write the table columns
f.write("### Columns\n\n")
f.write("| Column Name | Type | Foreign Key | Example Value |\n")
f.write("|-------------|------|-------------|---------------|\n")
for column in columns:
column_name = column[1]
column_type = column[2]
fkey = ""
if column_name in fkeys:
fkey = fkeys[column_name]
f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
f.write("\n")
# Get the count of records
cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
records_count = cursor.fetchone()[0]
f.write(
f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
)
# Close the database connection
cursor.close()
conn.close()
if __name__ == "__main__":
# Usage example
database_path = "database.db"
md_output_path = "database.md"
er_output_path = "er_diagram.png"
generate_er_diagram(database_path, er_output_path)
generate_markdown(database_path, md_output_path, er_output_path)

View file

@ -10,6 +10,7 @@ class HeroFactory(ModelFactory[Hero]):
__model__ = Hero
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
id = None
pet_id = None
@classmethod

View file

@ -1,25 +0,0 @@
import pygame
class Debug:
def __init__(self, game):
self.game = game
self.is_open = False
self.debounce = False
def handle_events(self, events):
for event in events:
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_F3 and not self.debounce:
self.is_open = not self.is_open
self.debounce = True
if event.type == pygame.KEYUP:
if event.key == pygame.K_F3:
self.debounce = False
def render(self):
if self.is_open:
text = self.game.font.render(
str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
)
self.game.screen.blit(text, (20, 20))

View file

@ -1,28 +1,29 @@
import atexit
import pygame
from typer import Typer
from websocket import create_connection
from learn_sql_model.config import get_config
from learn_sql_model.console import console
from learn_sql_model.game.debug import Debug
from learn_sql_model.game.light import Light
from learn_sql_model.game.map import Map
from learn_sql_model.game.menu import Menu
from learn_sql_model.game.player import Player
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
speed = 10
pygame.font.init() # you have to call this at the start,
# if you want to use this module.
my_font = pygame.font.SysFont("Comic Sans MS", 30)
config = get_config()
class Client:
def __init__(self):
# self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
self.screen = pygame.display.set_mode((1280, 720))
hero = HeroFactory().build(size=50, x=100, y=100)
self.hero = HeroCreate(**hero.dict()).post()
self.screen = pygame.display.set_mode((800, 600))
pygame.display.set_caption("Learn SQL Model")
self.clock = pygame.time.Clock()
self.running = True
@ -33,31 +34,16 @@ class Client:
self.moving_left = False
self.moving_right = False
self.ticks = 0
self.player = Player(self)
self.menu = Menu(self)
self.map = Map(self)
self.light = Light(self)
self.font = pygame.font.SysFont("", 25)
self.joysticks = {}
self.darkness = pygame.Surface(
(self.screen.get_width(), self.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.debug = Debug(self)
self.others = []
atexit.register(self.quit)
@property
def ws(self):
def connect():
if "https" in config.api_client.url:
url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
elif "http" in config.api_client.url:
url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
else:
url = f"ws://{config.api_client.url}/wsecho"
self._ws = create_connection(url)
self._ws = create_connection(
f"ws://{config.api_client.url.replace('https://', '')}/wsecho"
)
if not hasattr(self, "_ws"):
connect()
@ -66,13 +52,6 @@ class Client:
return self._ws
def run(self):
try:
from pyinstrument import Profiler
profiler = Profiler()
profiler.start()
except ImportError:
profiler = None
while self.running:
console.print("running")
console.print("handle_events")
@ -80,60 +59,109 @@ class Client:
console.print("update")
self.update()
console.print("render")
self.render()
time = self.clock.tick(60)
self.elapsed = time / 100
self.ticks += 1
console.print(f"time: {time}")
console.print(f"ticks: {self.ticks}")
if profiler:
profiler.stop()
print(profiler.output_text())
self.quit()
def quit(self):
self.running = False
self.player.quit()
try:
HeroDelete(id=self.hero.id).delete()
except:
pass
def update(self):
...
if self.moving_up:
self.hero.y -= speed
if self.moving_down:
self.hero.y += speed
if self.moving_left:
self.hero.x -= speed
if self.moving_right:
self.hero.x += speed
if self.ticks % 5 == 0 or self.ticks == 0:
console.print("updating")
update = HeroUpdate(**self.hero.dict(exclude_unset=True))
console.print(update)
self.ws.send(update.json())
console.print("sent")
raw_heros = self.ws.recv()
console.print(raw_heros)
self.others = Heros.parse_raw(raw_heros)
def render(self):
self.screen.fill((0, 0, 0))
self.map.render()
self.player.render()
if self.ticks % 1 == 0 or self.ticks == 0:
light_level = 0
self.darkness.fill((light_level, light_level, light_level))
self.light.render()
for other in self.others.heros:
if other.id != self.hero.id:
pygame.draw.circle(
self.screen, (255, 0, 0), (other.x, other.y), other.size
)
self.screen.blit(
my_font.render(other.name, False, (255, 255, 255), 1),
(other.x, other.y),
)
pygame.draw.circle(
self.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
)
self.screen.blit(
self.darkness,
(0, 0),
special_flags=pygame.BLEND_MULT,
my_font.render(self.hero.name, False, (255, 255, 255)),
(self.hero.x, self.hero.y),
)
# update the screen
self.menu.render()
self.debug.render()
pygame.display.flip()
def handle_events(self):
self.events = pygame.event.get()
self.menu.handle_events(self.events)
self.debug.handle_events(self.events)
self.player.handle_events()
for event in self.events:
if event.type == pygame.QUIT:
self.running = False
if event.type == pygame.JOYDEVICEADDED:
# This event will be generated when the program starts for every
# joystick, filling up the list without needing to create them manually.
joy = pygame.joystick.Joystick(event.device_index)
self.joysticks[joy.get_instance_id()] = joy
if event.type == pygame.JOYDEVICEREMOVED:
del self.joysticks[event.instance_id]
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self.running = False
if event.key == pygame.K_LEFT:
self.moving_left = True
if event.key == pygame.K_RIGHT:
self.moving_right = True
if event.key == pygame.K_UP:
self.moving_up = True
if event.key == pygame.K_DOWN:
self.moving_down = True
# wasd
if event.key == pygame.K_w:
self.moving_up = True
if event.key == pygame.K_s:
self.moving_down = True
if event.key == pygame.K_a:
self.moving_left = True
if event.key == pygame.K_d:
self.moving_right = True
# controller left joystick
if event.type == pygame.KEYUP:
if event.key == pygame.K_LEFT:
self.moving_left = False
if event.key == pygame.K_RIGHT:
self.moving_right = False
if event.key == pygame.K_UP:
self.moving_up = False
if event.key == pygame.K_DOWN:
self.moving_down = False
# wasd
if event.key == pygame.K_w:
self.moving_up = False
if event.key == pygame.K_s:
self.moving_down = False
if event.key == pygame.K_a:
self.moving_left = False
if event.key == pygame.K_d:
self.moving_right = False
def check_events(self):
pass

View file

@ -1,219 +0,0 @@
import bisect
from PIL import Image, ImageFilter
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
def rot_center(image, angle):
"""rotate an image while keeping its center and size"""
orig_rect = image.get_rect()
rot_image = pygame.transform.rotate(image, angle)
rot_rect = orig_rect.copy()
rot_rect.center = rot_image.get_rect().center
rot_image = rot_image.subsurface(rot_rect).copy()
return rot_image
class Light:
def __init__(self, game):
self.game = game
self.surf = pygame.Surface(
(self.game.screen.get_width(), self.game.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.surf.set_colorkey((0, 0, 0))
self.pre_render()
def pre_render(self):
# self.lights = {}
# for deg in range(-360, 360, 20):
# print("loading light", deg)
# self.lights[deg] = pygame.image.load(
# f"lights/light-{deg}.png"
# ).convert_alpha()
# return
light_surf = pygame.Surface(
(
self.game.player.hero.flashlight_strength * 3,
self.game.player.hero.flashlight_strength * 3,
),
pygame.SRCALPHA,
32,
)
v = pygame.math.Vector2(0, 1)
v.scale_to_length(self.game.player.hero.flashlight_strength)
for r in range(-90 - 25, -90 + 25):
_v = v.rotate(r)
pygame.draw.line(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
(
light_surf.get_width() / 2 + _v.x,
light_surf.get_height() / 2 + _v.y,
),
50,
)
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
self.light_surf = light_surf
self.light_surf.set_colorkey((0, 0, 0))
self.lights = {
deg: pygame.transform.rotate(self.light_surf, deg - 90)
for deg in range(-360, 360, 20)
}
for deg, light in self.lights.items():
pygame.image.save(light, f"lights/light-{deg}.png")
def render(self):
self.surf.fill((0, 0, 0))
mx, my = pygame.mouse.get_pos()
v = pygame.math.Vector2(
mx - self.game.player.hero.x, my - self.game.player.hero.y
)
v.scale_to_length(self.game.player.hero.flashlight_strength)
self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
for other in self.game.player.others.__root__:
if other.id == self.game.player.hero.id:
continue
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
other.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
other.x - my_light.get_width() / 2,
other.y - my_light.get_height() / 2,
),
)
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
self.game.player.hero.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
self.game.player.hero.x - my_light.get_width() / 2,
self.game.player.hero.y - my_light.get_height() / 2,
),
)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
# 50,
# )
# # draw a circle
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# self.game.player.hero.lanturn_strength,
# )
# for other in self.game.player.others.__root__:
# if other.id == self.game.player.hero.id:
# continue
# v = pygame.math.Vector2(0, 1)
# v = v.rotate(-other.flashlight_angle)
# v.scale_to_length(other.flashlight_strength)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# (other.x + _v.x, other.y + _v.y),
# 50,
# )
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# other.lanturn_strength,
# )
self.game.darkness.blit(
self.surf,
(0, 0),
)

View file

@ -1,134 +0,0 @@
import pydantic
from rich.console import Console
from learn_sql_model.optional import _optional_import_
snoise2 = _optional_import_("noise", "snoise2", group="game")
pygame = _optional_import_("pygame", group="game")
console = Console()
class Point(pydantic.BaseModel):
x: int
y: int
class Map:
def __init__(self, game):
self.game = game
# self.grass = pygame.image.load("grass.webp").convert_alpha()
# self.rock = pygame.image.load("rock.jpg").convert_alpha()
# self.dirt = pygame.image.load("dirt.jpg").convert_alpha()
self.brown = (204, 153, 102)
self.grey = (128, 128, 128)
self.green = (0, 255, 0)
self.white = (255, 255, 255)
self.resolution = 16
self.scale = 0.14 # Determines the "smoothness" of the terrain
self.scale = 0.05 # Determines the "smoothness" of the terrain
self.offset = Point(x=0, y=0)
self.last_offset = self.offset
self.screen_width = self.game.screen.get_width()
self.screen_height = self.game.screen.get_height()
self.octaves = 2 # Number of layers of noise to combine
self.persistence = 0.05 # Amplitude of each octave
self.lacunarity = 1.0 # Frequency of each octave
self.thresh = 125
# try to load the map from map.png
try:
self.surf = pygame.image.load("map.png").convert_alpha()
# self.surf_pil = Image.frombytes(
# "RGBA",
# (self.surf.get_width(), self.surf.get_height()),
# pygame.image.tostring(self.surf, "RGBA", False),
# )
# self.surf_blur = (
# self.surf_pil.filter(
# ImageFilter.SMOOTH_MORE(),
# )
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# # sharpen
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# )
# self.surf = pygame.image.fromstring(
# self.surf_blur.tobytes(),
# (self.surf.get_width(), self.surf.get_height()),
# "RGBA",
# ).convert_alpha()
except FileNotFoundError:
self.pre_draw()
def refresh_surf(self):
self.surf = pygame.Surface((self.screen_width, self.screen_height))
def get_noise(self, x, y):
value = snoise2(
(x + self.offset.x) * self.scale,
(y + self.offset.y) * self.scale,
self.octaves,
self.persistence,
self.lacunarity,
)
value = (value + 1) / 2 * 255
return value
def render(self):
self.game.screen.blit(
self.surf,
(0, 0),
)
def point_check_collision(self, x, y, thresh=None):
return self.get_noise(x / self.resolution, y / self.resolution) < (
thresh or self.thresh
)
def pre_draw(self):
self.refresh_surf()
for x in range(int(self.screen_width)):
for y in range(int(self.screen_height)):
if not self.point_check_collision(x, y):
pygame.draw.rect(
self.surf,
self.white,
(
x,
y,
1,
1,
),
)
pygame.image.save(self.surf, "map.png")
# av1 = (
# Image.open("rock.jpg")
# .convert("RGB")
# .resize((self.screen_width, self.screen_height))
# )
# av2 = (
# Image.open("dirt.jpg")
# .convert("RGB")
# .resize((self.screen_width, self.screen_height))
# )
# mask = (
# Image.open("map.png")
# .convert("L")
# .resize((self.screen_width, self.screen_height))
# .filter(ImageFilter.GaussianBlur(3))
# )
# Image.composite(av2, av1, mask).save("result.png")
# result = pygame.image.load("result.png")
# self.surf.blit(result, (0, 0))

View file

@ -1,185 +0,0 @@
from typing import Callable, Tuple
from pydantic import BaseModel
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
screen_sizes = [
(480, 360), # 360p
(640, 480), # VGA
(800, 600), # SVGA
(1024, 768), # XGA
(1280, 720), # HD 720p
(1366, 768), # HD 1366x768
(1600, 900), # HD+ 1600x900
(1920, 1080), # Full HD 1080p
(2560, 1440), # 2K / QHD 1440p
(3840, 2160), # 4K / UHD 2160p
]
class MenuItem(BaseModel):
display_text: str
on_click: Callable = None
text_color: Tuple[str, str, str] = (0, 0, 0)
class Menu:
def __init__(self, game):
pygame.font.init()
self.game = game
self.hamburger = Hamburger(game)
self.padding = 10
self.font_size = 50
self.line_height = 55
self.menu_width = min(
max(200, self.game.screen.get_width() * 0.8), self.game.screen.get_width()
)
self.menu_height = min(
max(200, self.game.screen.get_height() * 0.8), self.game.screen.get_height()
)
self.x = (self.game.screen.get_width() - self.menu_width) / 2
self.y = (self.game.screen.get_height() - self.menu_height) / 2
self.color = (100, 100, 100)
self.is_menu_open = False
self.surface = pygame.Surface((self.menu_width, self.menu_height))
self.font = pygame.font.SysFont("", self.font_size)
self.screen_size_index = False
@property
def items(self) -> list[MenuItem]:
return [
MenuItem(
display_text="Menu",
on_click=lambda: print("clicked on me, the menu"),
),
MenuItem(
display_text="Screen Size",
on_click=self.next_screen_size,
),
MenuItem(
display_text=f"{self.game.screen.get_width()}x{self.game.screen.get_height()}",
color=(50, 0, 0),
on_click=self.next_screen_size,
),
MenuItem(
display_text=f"{self.game.player.hero.name}",
color=(50, 0, 0),
on_click=self.game.player.rename_hero,
),
MenuItem(
display_text="quit",
color=(50, 0, 0),
on_click=lambda: self.game.quit(),
),
]
def render(self):
if self.is_menu_open:
self.surface.fill(self.color)
pos = (self.padding, self.padding)
for item in self.items:
text = self.font.render(item.display_text, True, item.text_color)
self.surface.blit(text, pos)
pos = (pos[0], pos[1] + self.line_height)
self.game.screen.blit(self.surface, (self.x, self.y))
self.hamburger.render()
def next_screen_size(self):
if self.screen_size_index is False:
self.screen = pygame.display.set_mode(screen_sizes[0])
self.screen_size_index = 0
if self.screen_size_index == len(screen_sizes) - 1:
self.screen_size_index = 0
else:
self.screen_size_index += 1
self.screen = pygame.display.set_mode(screen_sizes[self.screen_size_index])
def get_mouse_pos(self):
"get mouse position relative to self.surface"
x, y = pygame.mouse.get_pos()
return x - self.x, y - self.y
def handle_events(self, events):
self.hamburger.handle_events(self, events)
for event in events:
if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open:
if event.button == 1: # Left mouse button
self.handle_click()
def handle_click(self):
pos = self.get_mouse_pos()
pos_idx = int(pos[1] // self.line_height)
if pos_idx > len(self.items):
return
if pos_idx < 0:
return
self.items[pos_idx].on_click()
class Hamburger:
def __init__(self, game):
self.game = game
self.hamburger_width = 50
self.bar_height = self.hamburger_width / 4
self.bar_spacing = self.hamburger_width / 20
self.hamburger_height = self.bar_height * 3 + self.bar_spacing * 2
self.x = self.game.screen.get_width() - self.hamburger_width - 20
self.y = 20
self.color = (100, 100, 100)
self.rect = pygame.Rect(
self.x, self.y, self.hamburger_width, self.hamburger_height
)
self.surface = pygame.Surface((self.hamburger_width, self.hamburger_height))
def render(self):
pygame.draw.rect(
self.surface,
self.color,
(0, 0, self.hamburger_width, self.bar_height),
)
pygame.draw.rect(
self.surface,
self.color,
(
0,
self.bar_height + self.bar_spacing,
self.hamburger_width,
self.bar_height,
),
)
pygame.draw.rect(
self.surface,
self.color,
(
0,
2 * (self.bar_height + self.bar_spacing),
self.hamburger_width,
self.bar_height,
),
)
self.game.screen.blit(self.surface, (self.x, self.y))
def handle_events(self, menu: Menu, events):
for event in events:
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # Left mouse button
self.handle_click(menu)
def handle_click(self, menu):
pos = pygame.mouse.get_pos()
if self.rect.collidepoint(pos):
menu.is_menu_open = not menu.is_menu_open

View file

@ -1,261 +0,0 @@
from learn_sql_model.console import console
from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
HeroFactory = _optional_import_(
"learn_sql_model.factories.hero",
"HeroFactory",
group="game",
)
class Player:
    """Local player for the networked game client.

    Owns the server-side Hero record (created via the API in __init__) and
    is responsible for input handling, movement, map-collision resolution,
    rendering, and periodic websocket sync of all heroes.
    """

    def __init__(self, game):
        """Create and persist a randomized hero, load sprites, and set up
        movement/joystick state.

        game: the owning game object; must expose `screen`, `events`,
        `map`, `ticks`, `ws`, and `font` (used throughout this class).
        """
        # Build a randomized hero via the factory, then POST it to the API;
        # the returned record (with server-assigned id) becomes self.hero.
        hero = HeroFactory().build(
            size=25,
            x=100,
            y=100,
            flashlight_strength=1000,
            lanturn_strength=100,
            flashlight_angle=0,
        )
        self.hero = HeroCreate(**hero.dict()).post()
        # Local override of the size used for sprite scaling below.
        self.hero.size = 64
        self.game = game
        # Other players' heroes; replaced with a Heros model after the
        # first websocket sync in handle_events().
        self.others = []  # Heros(heros=[])
        # Hitbox dimensions (screen pixels), also used for clamping.
        self.width = 16
        self.height = 16
        self.white = (255, 255, 255)
        # Screen-space position (distinct from self.hero.x/y world position).
        self.x = self.game.screen.get_width() / 2
        self.y = self.game.screen.get_height() / 2
        self.speed = 10
        self.max_speed = 10
        self.image = pygame.image.load("creeper.png").convert_alpha()
        self.pet_image = pygame.image.load("pet.png").convert_alpha()
        self.image = pygame.transform.scale(
            self.image, (self.hero.size, self.hero.size)
        )
        self.pet_image = pygame.transform.scale(
            self.pet_image, (self.hero.size/1.5, self.hero.size/2)
        )
        # Last known non-colliding position, used to back out of collisions.
        self.x_last = self.x
        self.y_last = self.y
        # Debug hitbox surface; alpha 0 keeps it invisible in normal play.
        self.hitbox_surface = pygame.Surface((self.width, self.height))
        self.hitbox_surface.fill(self.white)
        pygame.draw.rect(
            self.hitbox_surface, (255, 0, 0), (0, 0, self.width, self.height), 1
        )
        self.hitbox_surface.set_alpha(0)
        # Held-key movement flags, toggled in handle_events().
        self.moving_up = False
        self.moving_down = False
        self.moving_left = False
        self.moving_right = False
        self.joysticks = {}

    def rename_hero(self):
        """Rebuild the hero with a fresh factory-generated name (keeping
        id, position, size, and lighting) and PATCH it to the API."""
        hero = HeroFactory().build(
            size=self.hero.size,
            x=self.hero.x,
            y=self.hero.y,
            id=self.hero.id,
            flashlight_strength=self.hero.flashlight_strength,
            lanturn_strength=self.hero.lanturn_strength,
        )
        self.hero = HeroUpdate(**hero.dict()).update()

    def quit(self):
        """Best-effort deletion of this player's hero on shutdown."""
        try:
            # session = get_config().database.session
            # hero = session.get(Hero, self.hero.id)
            # session.delete(hero)
            # session.commit()
            HeroDelete.delete(id=self.hero.id)
        except RuntimeError:
            # The API client raises RuntimeError on non-200 responses;
            # swallow it so quitting never crashes the game.
            pass

    def handle_events(self):
        """Consume this frame's input events, move the hero, resolve map
        collisions, and (once every 60 ticks) sync state over the websocket."""
        # Update the self
        for event in self.game.events:
            if event.type == pygame.QUIT:
                # NOTE(review): self.running is never initialized on Player;
                # presumably the game loop owns/reads this flag — confirm.
                self.running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    self.running = False
                if event.key == pygame.K_LEFT:
                    self.speed = self.max_speed
                    self.moving_left = True
                if event.key == pygame.K_RIGHT:
                    self.speed = self.max_speed
                    self.moving_right = True
                if event.key == pygame.K_UP:
                    self.speed = self.max_speed
                    self.moving_up = True
                if event.key == pygame.K_DOWN:
                    self.speed = self.max_speed
                    self.moving_down = True
                # wasd
                if event.key == pygame.K_w:
                    self.speed = self.max_speed
                    self.moving_up = True
                if event.key == pygame.K_s:
                    self.speed = self.max_speed
                    self.moving_down = True
                if event.key == pygame.K_a:
                    self.speed = self.max_speed
                    self.moving_left = True
                if event.key == pygame.K_d:
                    self.speed = self.max_speed
                    self.moving_right = True
            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT:
                    self.moving_left = False
                if event.key == pygame.K_RIGHT:
                    self.moving_right = False
                if event.key == pygame.K_UP:
                    self.moving_up = False
                if event.key == pygame.K_DOWN:
                    self.moving_down = False
                # wasd
                if event.key == pygame.K_w:
                    self.moving_up = False
                if event.key == pygame.K_s:
                    self.moving_down = False
                if event.key == pygame.K_a:
                    self.moving_left = False
                if event.key == pygame.K_d:
                    self.moving_right = False
        # Analog input: left stick (axes 0/1) pans the screen-space
        # self.x/self.y; right stick (axes 3/4) nudges the mouse cursor.
        # NOTE(review): self.elapsed is not set in __init__ — presumably a
        # per-frame delta assigned elsewhere; confirm.
        for joystick in self.joysticks.values():
            if abs(joystick.get_axis(0)) > 0.2:
                self.x += joystick.get_axis(0) * 10 * self.speed * self.elapsed
            if abs(joystick.get_axis(1)) > 0.2:
                self.y += joystick.get_axis(1) * 10 * self.speed * self.elapsed
            if abs(joystick.get_axis(3)) > 0.2 and abs(joystick.get_axis(4)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
                        pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
                    )
                )
            elif abs(joystick.get_axis(3)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
                        pygame.mouse.get_pos()[1],
                    )
                )
            elif abs(joystick.get_axis(4)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0],
                        pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
                    )
                )
        # Apply held-key flags to the hero's world position.
        if self.moving_left:
            self.hero.x -= self.speed
        if self.moving_right:
            self.hero.x += self.speed
        if self.moving_up:
            self.hero.y -= self.speed
        if self.moving_down:
            self.hero.y += self.speed
        # Check for self collisions with the walls and the black tiles on the map
        if self.hero.x < 0:
            self.hero.x = 0
        if self.hero.x > self.game.screen.get_width() - self.width:
            self.hero.x = self.game.screen.get_width() - self.width
        if self.hero.y < 0:
            self.hero.y = 0
        if self.hero.y > self.game.screen.get_height() - self.height:
            self.hero.y = self.game.screen.get_height() - self.height
        self.pos = pygame.math.Vector2(self.hero.x, self.hero.y)
        if self.game.map.point_check_collision(self.pos.x, self.pos.y):
            # Collided with the map: reset to the last good position, then
            # step along the movement direction in small increments until
            # out of the colliding region, and back off one step.
            start_pos = pygame.math.Vector2(self.x_last, self.y_last)
            end_pos = pygame.math.Vector2(self.hero.x, self.hero.y)
            movement_vector = end_pos - start_pos
            try:
                movement_direction = movement_vector.normalize()
            except ValueError:
                # Zero-length movement: bias the end point so normalize()
                # has a direction to work with.
                end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
                movement_vector = end_pos - start_pos
                movement_direction = movement_vector.normalize()
            except ZeroDivisionError:
                end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
                movement_vector = end_pos - start_pos
                movement_direction = movement_vector.normalize()
            movement_speed = 0.05
            self.hero.x = self.x_last
            self.hero.y = self.y_last
            self.pos = pygame.math.Vector2(start_pos)
            while self.game.map.point_check_collision(self.pos.x, self.pos.y):
                self.pos += movement_speed * movement_direction
                self.hero.x = self.pos.x
                self.hero.y = self.pos.y
            self.pos -= movement_speed * movement_direction
            self.hero.x = self.pos.x
            self.hero.y = self.pos.y
        self.x_last = self.hero.x
        self.y_last = self.hero.y
        # Throttled network sync: push our hero, pull everyone else's.
        if self.game.ticks % 60 == 0 or self.game.ticks == 0:
            console.print("updating")
            update = HeroUpdate(**self.hero.dict(exclude_unset=True))
            console.print(update)
            self.game.ws.send(update.json())
            console.print("sent")
            raw_heros = self.game.ws.recv()
            console.print(raw_heros)
            self.others = Heros.parse_raw(raw_heros)

    def draw(self):
        """Draw the player's sprite at its screen-space position, offset by
        the map camera."""
        # NOTE(review): Player defines no move(); this raises AttributeError
        # unless move() is provided elsewhere (e.g. a subclass) — confirm.
        self.move()
        self.game.screen.blit(
            pygame.transform.scale(self.image, (16, 16)),
            (self.x - 8 - self.game.map.offset.x, self.y - 8 - self.game.map.offset.y),
        )

    def render(self):
        """Render every other hero plus our own hero, pet, and name label.

        Assumes handle_events() has already replaced self.others with a
        Heros model (the initial [] has no __root__ attribute).
        """
        for other in self.others.__root__:
            if other.id != self.hero.id:
                # put self.image on the game.screen
                self.game.screen.blit(
                    self.image,
                    (other.x - other.size / 2, other.y - other.size / 2),
                )
                # pygame.draw.circle(
                #     self.game.screen, (255, 0, 0), (other.x, other.y), other.size
                # )
                self.game.screen.blit(
                    self.game.font.render(other.name, False, (255, 255, 255), 1),
                    (other.x - other.size / 2, other.y + other.size / 2),
                )
        self.game.screen.blit(
            self.image,
            (self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
        )
        self.game.screen.blit(
            self.pet_image,
            (self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
        )
        # pygame.draw.circle(
        #     self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
        # )
        self.game.screen.blit(
            self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
            (self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2),
        )

View file

@ -1,12 +1,12 @@
from typing import Dict, Optional
from typing import Optional
from fastapi import HTTPException
import httpx
import pydantic
from pydantic import BaseModel
from sqlmodel import Field, SQLModel
from sqlmodel import Field, Relationship, SQLModel, Session, select
from learn_sql_model.config import config
from learn_sql_model.optional import optional
from learn_sql_model.models.pet import Pet
class HeroBase(SQLModel, table=False):
@ -14,27 +14,16 @@ class HeroBase(SQLModel, table=False):
secret_name: str
x: int
y: int
size: Optional[int]
flashlight_strength: Optional[int] = 1000
flashlight_angle: Optional[int] = 0
lanturn_strength: Optional[int] = 100
# age: Optional[int] = None
# shoe_size: Optional[int] = None
size: int
age: Optional[int] = None
shoe_size: Optional[int] = None
# pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
# pet: Optional[Pet] = Relationship(back_populates="hero")
@pydantic.validator("size", pre=True, always=True)
def validate_size(cls, v):
if v is None:
return 50
if v <= 0:
raise ValueError("size must be > 0")
return v
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="hero")
class Hero(HeroBase, table=True):
id: int = Field(default=None, primary_key=True)
id: Optional[int] = Field(default=None, primary_key=True)
class HeroCreate(HeroBase):
@ -59,46 +48,87 @@ class HeroRead(HeroBase):
cls,
id: int,
) -> Hero:
r = httpx.get(f"{config.api_client.url}/hero/{id}")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return HeroRead.parse_obj(r.json())
with config.database.session as session:
hero = session.get(Hero, id)
if not hero:
raise HTTPException(status_code=404, detail="Hero not found")
return hero
class Heros(BaseModel):
__root__: list[Hero]
heros: list[Hero]
@classmethod
def list(
self,
where=None,
offset=0,
limit=None,
session: Session = None,
) -> Hero:
r = httpx.get(f"{config.api_client.url}/heros/")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Heros.parse_obj({"__root__": r.json()})
# with config.database.session as session:
def get_heros(session, where, offset, limit):
statement = select(Hero)
if where != "None" and where is not None:
from sqlmodel import text
statement = statement.where(text(where))
statement = statement.offset(offset).limit(limit)
heros = session.exec(statement).all()
return Heros(heros=heros)
if session is None:
r = httpx.get(f"{config.api_client.url}/heros/")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Heros.parse_obj(r.json())
return get_heros(session, where, offset, limit)
@optional
class HeroUpdate(HeroBase):
class HeroUpdate(SQLModel):
# id is required to update the hero
id: int
def update(self) -> Hero:
# all other fields, must match the model, but with Optional default None
name: Optional[str] = None
secret_name: Optional[str] = None
age: Optional[int] = None
shoe_size: Optional[int] = None
x: int
y: int
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="hero")
def update(self, session: Session = None) -> Hero:
if session is not None:
db_hero = session.get(Hero, self.id)
if not db_hero:
raise HTTPException(status_code=404, detail="Hero not found")
for key, value in self.dict(exclude_unset=True).items():
setattr(db_hero, key, value)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
return db_hero
r = httpx.patch(
f"{config.api_client.url}/hero/",
json=self.dict(exclude_none=True),
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Hero.parse_obj(r.json())
class HeroDelete(BaseModel):
id: int
@classmethod
def delete(self, id: int) -> Dict[str, bool]:
def delete(self) -> Hero:
r = httpx.delete(
f"{config.api_client.url}/hero/{id}",
f"{config.api_client.url}/hero/{self.id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")

View file

@ -1,96 +0,0 @@
from typing import List, Optional
import textwrap
import inspect
from pydantic import BaseModel
def _optional_import_(
module: str,
name: str = None,
group: str = None,
package="learn_sql_model",
):
"""
lazily throws import errors only then the optional import is used, and
includes a group install command for the user to install all dependencies
for the requested feature.
"""
import importlib
try:
module = importlib.import_module(module)
return module if name is None else getattr(module, name)
except ImportError as e:
msg = textwrap.dedent(
f"""
"pip install '{package}[{group}]'" package to make use of this feature
Alternatively "pip install '{package}[all]'" package to install all optional dependencies
"""
)
import_error = e
class _failed_import:
"""
Lazily throw an import error. Errors should be thrown whether the
user tries to call the module, get an attubute from the module, or
getitem from the module.
"""
def _failed_import(self, *args):
raise ImportError(msg) from import_error
def __call__(self, *args):
"""
Throw error if the user tries to call the module i.e
_optional_import_('dummy')()
"""
self._failed_import(*args)
def __getattr__(self, name):
"""
Throw error if the user tries to get an attribute from the
module i.e _optional_import_('dummy').dummy.
"""
if name == "_failed_import":
return object.__getattribute__(self, name)
self._failed_import()
def __getitem__(self, name):
"""
Throw error if the user tries to get an item from the module
i.e _optional_import_('dummy')['dummy']
"""
self._failed_import()
return _failed_import()
# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
# def decorator(cls):
# def wrapper(*args, **kwargs):
# if fields is None:
# fields = cls.__fields__
# if required is None:
# required = []
#
# for field in fields:
# if field not in required:
# cls.__fields__[field].required = False
# return _cls
# return wrapper
# return decorator
#
#
def optional(*fields):
    """Decorator that marks pydantic model fields as not required.

    Usable two ways: bare on a BaseModel subclass (``@optional``), which
    makes every field optional, or with explicit field names
    (``@optional("name", "age")``).
    """

    def apply(model_cls):
        # Flip the required flag on each selected field of the model.
        for field_name in fields:
            model_cls.__fields__[field_name].required = False
        return model_cls

    used_bare = (
        bool(fields)
        and inspect.isclass(fields[0])
        and issubclass(fields[0], BaseModel)
    )
    if used_bare:
        model_cls = fields[0]
        # Rebind the closure variable so apply() touches every field.
        fields = model_cls.__fields__
        return apply(model_cls)
    return apply

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 804 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 809 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 803 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

View file

@ -1,18 +0,0 @@
from locust import HttpUser, between, task
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate
class QuickstartUser(HttpUser):
    """Minimal load-test user: mostly creates heroes, occasionally reads."""

    wait_time = between(1, 2)

    @task
    def hello_world(self):
        """Hit the single-hero and hero-list read endpoints back to back."""
        for endpoint in ("/hero/1", "/heros/"):
            self.client.get(endpoint)

    @task(3)
    def create_hero(self):
        """Generate a random hero and persist it through the model API."""
        built = HeroFactory().build()
        HeroCreate(**built.dict()).post()

View file

@ -1,58 +0,0 @@
import random
from locust import FastHttpUser, task
from learn_sql_model.config import get_config
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
config = get_config()
class QuickstartUser(FastHttpUser):
    """Load-test user exercising the hero CRUD endpoints over FastHttp."""

    # wait_time = between(1, 2)
    host = "http://localhost:5000"
    # host = "https://waylonwalker.com"

    def on_start(self):
        # Skip TLS verification so local self-signed certs don't fail runs.
        self.client.verify = False

    @task(6)
    def get_a_hero(self):
        """Read one hero; weighted heaviest to mimic read-mostly traffic."""
        # heros = Heros.list()
        hero_id = 1
        # hero_id = random.choice(heros.__root__).id
        self.client.get(f"/hero/{hero_id}")

    # @task(2)
    # def get_all_hero(self):
    #     self.client.get("/heros/")

    @task
    def create_hero(self):
        """Create a hero via the model API, then POST the payload directly."""
        built = HeroFactory().build()
        created = HeroCreate(**built.dict()).post()
        self.client.post(
            f"{config.api_client.url}/hero/",
            json=created.dict(),
        )

    @task(3)
    def update_hero(self):
        """Rename hero 1 with a freshly generated name."""
        built = HeroFactory().build()
        patch_payload = HeroUpdate(id=1, name=built.name)
        self.client.patch(
            "/hero/",
            json=patch_payload.dict(exclude_none=True),
        )

    @task
    def delete_hero(self):
        """Delete a randomly chosen existing hero."""
        all_heros = Heros.list()
        target_id = random.choice(all_heros.__root__).id
        self.client.delete(f"/hero/{target_id}")

View file

BIN
map.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

BIN
micro

Binary file not shown.

View file

@ -79,7 +79,7 @@ def run_migrations_online() -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
render_as_batch=True,
render_as_batch=False,
version_table=f'{config.get_main_option("project")}_alembic_version',
)

View file

@ -8,9 +8,6 @@ Create Date: ${create_date}
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
${imports if imports else ""}
# revision identifiers, used by Alembic.
@ -22,8 +19,6 @@ depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:

View file

@ -1,32 +0,0 @@
"""add x and y
Revision ID: 3555f61aaa79
Revises: 79972ec5f79d
Create Date: 2023-06-22 15:03:27.338959
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "3555f61aaa79"
down_revision = "79972ec5f79d"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision 3555f61aaa79: add integer ``x`` and ``y`` position
    columns (NOT NULL) to the ``hero`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
    op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
    # ### end Alembic commands ###
    # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert revision 3555f61aaa79: drop the ``y`` and ``x`` columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("hero", "y")
    op.drop_column("hero", "x")
    # ### end Alembic commands ###

View file

@ -1,68 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_3555f61aaa79.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| 79972ec5f79d |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y |
|------|-------------|----|---|---|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 39 KiB

View file

@ -1,65 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_79972ec5f79d.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 0 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id |
|------|-------------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

View file

@ -1,45 +0,0 @@
"""int
Revision ID: 79972ec5f79d
Revises:
Create Date: 2023-06-22 15:02:20.292322
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = "79972ec5f79d"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision 79972ec5f79d: create the ``hero`` and ``pet`` tables
    (name/secret_name strings plus integer primary keys)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "hero",
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "pet",
        sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
        sa.Column("birthday", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
    # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert revision 79972ec5f79d: drop ``pet`` then ``hero``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("pet")
    op.drop_table("hero")
    # ### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""add hero.lighting
Revision ID: a1cd0a1947be
Revises: c79214cdc7b3
Create Date: 2023-06-28 19:43:47.108749
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'a1cd0a1947be'
down_revision = 'c79214cdc7b3'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision a1cd0a1947be: add nullable lighting columns to
    ``hero``, then regenerate the ER diagram artifacts for this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
    op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
    generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert revision a1cd0a1947be: drop the lighting columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('hero', 'lanturn_strength')
    op.drop_column('hero', 'flashlight_strength')
    # ### end Alembic commands ###

View file

@ -1,75 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_a1cd0a1947be.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| c79214cdc7b3 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
### Records Count
The table hero contains 4 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -0,0 +1,29 @@
"""add birthday
Revision ID: a9bb6625c57b
Revises: c8516c888495
Create Date: 2023-05-25 19:00:58.137464
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a9bb6625c57b'
down_revision = 'c8516c888495'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision a9bb6625c57b: add a nullable ``birthday`` datetime
    column to ``pet``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('pet', sa.Column('birthday', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert revision a9bb6625c57b: drop ``pet.birthday``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('pet', 'birthday')
    # ### end Alembic commands ###

View file

@ -1,74 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_c79214cdc7b3.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| 3555f61aaa79 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size |
|------|-------------|----|---|---|------|
| tight-gold | successful-health | 1 | 6430 | 6231 | None |
| hard-rope | green-research | 2 | 1395 | 2865 | None |
| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
| huge-library | adult-body | 4 | 656 | 2377 | None |
| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
### Records Count
The table hero contains 1572 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 41 KiB

View file

@ -0,0 +1,44 @@
"""init
Revision ID: c8516c888495
Revises:
Create Date: 2023-05-25 18:42:37.057225
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'c8516c888495'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision c8516c888495 (initial schema): create ``pet`` and
    ``hero`` tables; hero carries a nullable FK to pet."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pet',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('hero',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
    sa.Column('age', sa.Integer(), nullable=True),
    sa.Column('shoe_size', sa.Integer(), nullable=True),
    sa.Column('pet_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['pet_id'], ['pet.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def downgrade() -> None:
    """Revert revision c8516c888495: drop ``hero`` (FK holder) then ``pet``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('hero')
    op.drop_table('pet')
    # ### end Alembic commands ###

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: d79dd8e699d1
Revises: e1af975310a1
Create Date: 2023-06-28 19:54:19.322431
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'd79dd8e699d1'
down_revision = 'e1af975310a1'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision d79dd8e699d1: add nullable ``flashlight_angle`` to
    ``hero``, then regenerate the ER diagram artifacts."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
    # ### end Alembic commands ###
    generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert revision d79dd8e699d1: drop ``hero.flashlight_angle``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('hero', 'flashlight_angle')
    # ### end Alembic commands ###

View file

@ -1,72 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_d79dd8e699d1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| e1af975310a1 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
|------|-------------|----|---|---|------|---------------------|------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
| flashlight_angle | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 61 KiB

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: e1af975310a1
Revises: a1cd0a1947be
Create Date: 2023-06-28 19:53:18.068873
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'e1af975310a1'
down_revision = 'a1cd0a1947be'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Apply revision e1af975310a1: no schema change; only regenerates
    the ER diagram artifacts for this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
    generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert revision e1af975310a1: nothing to undo."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###

View file

@ -1,71 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_e1af975310a1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| a1cd0a1947be |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -1,34 +1,33 @@
"""add hero.size
"""add x, y, size
Revision ID: c79214cdc7b3
Revises: 3555f61aaa79
Create Date: 2023-06-28 11:39:02.606001
Revision ID: e26398d96dd0
Revises: a9bb6625c57b
Create Date: 2023-06-10 18:37:04.751553
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'c79214cdc7b3'
down_revision = '3555f61aaa79'
revision = 'e26398d96dd0'
down_revision = 'a9bb6625c57b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True))
op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=False))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'size')
op.drop_column('hero', 'y')
op.drop_column('hero', 'x')
# ### end Alembic commands ###

View file

@ -1,37 +0,0 @@
# # Import smtplib for the actual sending function
# import smtplib
# # Import the email modules we'll need
# from email.mime.text import MIMEText
# # Open a plain text file for reading. For this example, assume that
# # the text file contains only ASCII characters.
# # with open(textfile, 'rb') as fp:
# # # Create a text/plain message
# # msg = MIMEText(fp.read())
# msg = MIMEText("hello there", "plain", "utf-8")
# # me == the sender's email address
# # you == the recipient's email address
# me = "waylon@waylonwalker.com"
# you = "3195728809@msg.fi.google.com"
# msg["Subject"] = "Python SMTP test"
# msg["From"] = me
# msg["To"] = you
# # Send the message via our own SMTP server, but don't include the
# # envelope header.
# s = smtplib.SMTP("localhost")
# s.sendmail(me, [you], msg.as_string())
# s.quit()
import requests
requests.post(
"https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
auth=("api", "YOUR_API_KEY"),
data={
"from": "Excited User <mailgun@YOUR_DOMAIN_NAME>",
"to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
"subject": "Hello",
"text": "Testing some Mailgun awesomness!",
},
)

BIN
pet.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 738 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

View file

@ -24,52 +24,36 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy",
]
dependencies = [
"black",
"python-socketio[client]",
"python-socketio[client]",
"fastapi-socketio",
"psycopg2-binary",
'pygame',
'black',
'alembic',
'pygame',
'pyinstaller',
"pyflyby",
"anyconfig",
"copier",
"engorgio",
"fastapi",
"httpx",
"pydantic<2.0.0",
"pyflyby",
"pyinstaller",
"passlib[bcrypt]",
"polyfactory",
"psycopg2",
"python-jose[cryptography]",
"python-multipart",
"rich",
"sqlmodel",
"textual",
"toml",
"trogon",
"typer",
"uvicorn[standard]",
]
dynamic = ["version"]
[project.optional-dependencies]
game = [
"noise",
"pygame",
"polyfactory",
"faker",
]
api = [
"fastapi-socketio",
"passlib[bcrypt]",
"psycopg2",
"psycopg2-binary",
"python-jose[cryptography]",
"python-multipart",
"uvicorn[standard]",
]
manage = [
"alembic",
"polyfactory",
"faker",
]
all = [
"learn_sql_model[game, api, manage]",
]
[project.urls]
Documentation = "https://github.com/waylonwalker/learn-sql-model#readme"
Issues = "https://github.com/waylonwalker/learn-sql-model/issues"

23
rect.py
View file

@ -1,23 +0,0 @@
import pygame
pygame.init()
screen = pygame.display.set_mode((500, 500))
pygame.display.set_caption("draw a square")
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
surface = pygame.Surface((500, 500))
surface.fill((255, 0, 0))
color = (0, 0, 255)
rect = (200, 200, 100, 100)
pygame.draw.rect(surface, color, rect)
screen.blit(surface, (0, 0))
pygame.display.flip()

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>ER Diagram</title>
<!-- Include d3.js -->
<script src="https://d3js.org/d3.v6.min.js"></script>
<style>
body {
margin: 0;
}
.table {
font-family: Arial, sans-serif;
font-size: 14px;
cursor: pointer;
}
.table-name {
font-weight: bold;
font-size: 16px;
}
.foreign-key {
fill: #b30000;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 2px;
fill: none;
}
.link-curved-path {
pointer-events: none;
}
</style>
</head>
<body>
<div id="er-diagram"></div>
<script>
// Generate ER diagram
const tables = {{tables}};
const links = {{links}};
const width = window.innerWidth;
const height = window.innerHeight;
const tableElemWidth = 120;
const tableElemHeight = d => 20 * (d.columns.length + 1);
let svg = d3.select("#er-diagram")
.append("svg")
.attr("width", width)
.attr("height", height);
let g = svg.append("g");
let linkGroup = g.selectAll(".link")
.data(links)
.join("path")
.attr("class", "link");
let tableGroup = g.selectAll(".table")
.data(tables)
.join("g")
.attr("class", "table")
.classed("collapsed", false)
.on("click", (event, d) => {
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
});
let zoomBehavior = d3.zoom()
.scaleExtent([0.1, 4])
.on("zoom", function (event) {
g.attr("transform", event.transform);
});
svg.call(zoomBehavior);
let rect = tableGroup.append("rect")
.attr("width", tableElemWidth)
.attr("height", tableElemHeight)
.attr("fill", "#eee");
let text = tableGroup.append("text")
.attr("class", "table-name")
.attr("x", 10)
.attr("y", 20)
.text(d => d.name);
let columnText = tableGroup.selectAll(".column")
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
.join("text")
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
.attr("x", 10)
.attr("y", (d, i) => 40 + i * 20)
.text(d => d.name);
// Physics simulation and force layout
let simulation = d3.forceSimulation(tables)
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
.force("charge", d3.forceManyBody().strength(-800))
.force("x", d3.forceX(width / 2).strength(0.1))
.force("y", d3.forceY(height / 2).strength(0.1))
.on("tick", () => {
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
linkGroup.attr("d", d => {
const srcX = d.source.x + tableElemWidth;
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
const tgtX = d.target.x;
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
const deltaX = tgtX - srcX;
const deltaY = tgtY - srcY;
const curveFactor = 50;
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
});
columnText.style("display", (d, i, nodes) => {
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
});
});
</script>
</body>
</html>

View file

@ -1,89 +1,86 @@
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select
from sqlmodel import SQLModel, Session
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.models.{{ modelname }} import {{ modelname }}, {{ modelname }}Create, {{ modelname }}Read, {{ modelname }}Update, {{ modelname }}s
from learn_sql_model.config import get_config, get_session
from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}, {{modelname}}Create, {{modelname}}Read, {{modelname}}Update
{{ modelname }}_router = APIRouter()
{{modelname.lower()}}_router = APIRouter()
@{{ modelname }}_router.on_event("startup")
@{{modelname.lower()}}_router.on_event("startup")
def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine)
...
SQLModel.metadata.create_all(get_config().database.engine)
@{{ modelname }}_router.get("/{{ modelname }}/{{{ modelname }}_id}")
def get_{{ modelname }}(
@{{modelname.lower()}}_router.get("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
async def get_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
{{ modelname }}_id: int,
) -> {{ modelname }}Read:
"get one {{ modelname }}"
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
if not {{ modelname }}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
return {{ modelname }}
{{modelname.lower()}}_id: int,
) -> {{modelname}}Read:
"get one {{modelname.lower()}}"
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
if not {{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{modelname}} not found")
return {{modelname.lower()}}
@{{ modelname }}_router.post("/{{ modelname }}/")
def post_{{ modelname }}(
@{{modelname.lower()}}_router.post("/{{modelname.lower()}}/")
async def post_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
{{ modelname }}: {{ modelname }}Create,
) -> {{ modelname }}Read:
"create a {{ modelname }}"
db_{{ modelname }} = {{ modelname }}.from_orm({{ modelname }})
session.add(db_{{ modelname }})
{{modelname.lower()}}: {{modelname}}Create,
) -> {{modelname}}Read:
"read all the {{modelname.lower()}}s"
db_{{modelname.lower()}} = {{modelname}}.from_orm({{modelname.lower()}})
session.add(db_{{modelname.lower()}})
session.commit()
session.refresh(db_{{ modelname }})
await manager.broadcast({{{ modelname }}.json()}, id=1)
return db_{{ modelname }}
session.refresh(db_{{modelname.lower()}})
await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
return db_{{modelname.lower()}}
@{{ modelname }}_router.patch("/{{ modelname }}/")
def patch_{{ modelname }}(
@{{modelname.lower()}}_router.patch("/{{modelname.lower()}}/")
async def patch_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
{{ modelname }}: {{ modelname }}Update,
) -> {{ modelname }}Read:
"update a {{ modelname }}"
db_{{ modelname }} = session.get({{ modelname }}, {{ modelname }}.id)
if not db_{{ modelname }}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
for key, value in {{ modelname }}.dict(exclude_unset=True).items():
setattr(db_{{ modelname }}, key, value)
session.add(db_{{ modelname }})
{{modelname.lower()}}: {{modelname}}Update,
) -> {{modelname}}Read:
"read all the {{modelname.lower()}}s"
db_{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
if not db_{{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{modelname}} not found")
for key, value in {{modelname.lower()}}.dict(exclude_unset=True).items():
setattr(db_{{modelname.lower()}}, key, value)
session.add(db_{{modelname.lower()}})
session.commit()
session.refresh(db_{{ modelname }})
await manager.broadcast({{{ modelname }}.json()}, id=1)
return db_{{ modelname }}
session.refresh(db_{{modelname.lower()}})
await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
return db_{{modelname.lower()}}
@{{ modelname }}_router.delete("/{{ modelname }}/{{{ modelname }}_id}")
def delete_{{ modelname }}(
@{{modelname.lower()}}_router.delete("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
async def delete_{{modelname.lower()}}(
*,
session: Session = Depends(get_session),
{{ modelname }}_id: int,
{{modelname.lower()}}_id: int,
):
"delete a {{ modelname }}"
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
if not {{ modelname }}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
session.delete({{ modelname }})
"read all the {{modelname.lower()}}s"
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
if not {{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{modelname}} not found")
session.delete({{modelname.lower()}})
session.commit()
await manager.broadcast(f"deleted {{ modelname }} {{{ modelname }}_id}", id=1)
await manager.broadcast(f"deleted {{modelname.lower()}} {{{modelname.lower()}}_id}", id=1)
return {"ok": True}
@{{ modelname }}_router.get("/{{ modelname }}s/")
def get_{{ modelname }}s(
@{{modelname.lower()}}_router.get("/{{modelname.lower()}}s/")
async def get_{{modelname.lower()}}s(
*,
session: Session = Depends(get_session),
) -> {{ modelname }}s:
"get all {{ modelname }}s"
statement = select({{ modelname }})
{{ modelname }}s = session.exec(statement).all()
return {{ modelname }}s(__root__={{ modelname }}s)
) -> list[{{modelname}}]:
"get all {{modelname.lower()}}s"
return {{modelname}}Read.list(session=session)

View file

@ -1,12 +1,14 @@
from faker import Faker
from polyfactory.factories.pydantic_factory import ModelFactory
from learn_sql_model.factories.pet import PetFactory
from learn_sql_model.models.{{ modelname }} import {{ modelname }}
from learn_sql_model.models.pet import Pet
from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
class {{ modelname }}Factory(ModelFactory[{{ modelname }}]):
__model__ = {{ modelname }}
class {{modelname}}Factory(ModelFactory[{{modelname.lower()}}]):
__model__ = {{modelname}}
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
id = None
__random_seed__ = 10

View file

@ -1,81 +1,93 @@
from typing import Dict, Optional
from typing import Optional
from fastapi import Depends, HTTPException
import httpx
from pydantic import BaseModel
from sqlmodel import Field, SQLModel
from sqlmodel import Field, Relationship, SQLModel, Session, select
from learn_sql_model.config import config
from learn_sql_model.config import config, get_config
from learn_sql_model.models.pet import Pet
class {{ modelname }}Base(SQLModel, table=False):
# put model attributes here
class {{modelname}}Base(SQLModel, table=False):
class {{ modelname }}({{ modelname }}Base, table=True):
id: int = Field(default=None, primary_key=True)
class {{modelname}}({{modelname}}Base, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
class {{ modelname }}Create({{ modelname }}Base):
class {{modelname}}Create({{modelname}}Base):
...
def post(self) -> {{ modelname }}:
def post(self) -> {{modelname}}:
r = httpx.post(
f"{config.api_client.url}/{{ modelname }}/",
f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ modelname }}.parse_obj(r.json())
class {{ modelname }}Read({{ modelname }}Base):
class {{modelname}}Read({{modelname}}Base):
id: int
@classmethod
def get(
cls,
id: int,
) -> {{ modelname }}:
r = httpx.get(f"{config.api_client.url}/{{ modelname }}/{id}")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ modelname }}Read.parse_obj(r.json())
class {{ modelname }}s(BaseModel):
__root__: list[{{ modelname }}]
) -> {{modelname}}:
with config.database.session as session:
{{modelname.lower()}} = session.get({{modelname}}, id)
if not {{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{modelname}} not found")
return {{modelname.lower()}}
@classmethod
def list(
self,
) -> {{ modelname }}:
r = httpx.get(f"{config.api_client.url}/{{ modelname }}s/")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ modelname }}s.parse_obj({"__root__": r.json()})
where=None,
offset=0,
limit=None,
session: Session = None,
) -> {{modelname}}:
if session is None:
session = get_config().database.session
statement = select({{modelname}})
if where != "None" and where is not None:
from sqlmodel import text
statement = statement.where(text(where))
statement = statement.offset(offset).limit(limit)
{{modelname.lower()}}es = session.exec(statement).all()
return {{modelname.lower()}}es
class {{ modelname }}Update(SQLModel):
# id is required to update the {{ modelname }}
class {{modelname}}Update(SQLModel):
# id is required to update the {{modelname.lower()}}
id: int
def update(self) -> {{ modelname }}:
# all other fields, must match the model, but with Optional default None
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="{{modelname.lower()}}")
def update(self) -> {{modelname}}:
r = httpx.patch(
f"{config.api_client.url}/{{ modelname }}/",
f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
class {{ modelname }}Delete(BaseModel):
class {{modelname}}Delete(BaseModel):
id: int
@classmethod
def delete(self, id: int) -> Dict[str, bool]:
def delete(self) -> {{modelname}}:
r = httpx.delete(
f"{config.api_client.url}/{{ modelname }}/{id}",
f"{config.api_client.url}/{{modelname.lower()}}/{self.id}",
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")

Some files were not shown because too many files have changed in this diff Show more