Compare commits

..

No commits in common. "main" and "v17" have entirely different histories.
main ... v17

103 changed files with 720 additions and 3490 deletions

View file

@ -1,978 +0,0 @@
# flyctl launch added from .gitignore
# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
### Data ###
**/*.csv
**/*.dat
**/*.efx
**/*.gbr
**/*.key
**/*.pps
**/*.ppt
**/*.pptx
**/*.sdf
**/*.tax2010
**/*.vcf
**/*.xml
### Emacs ###
# -*- mode: gitignore; -*-
**/*~
**/\#*\#
.emacs.desktop
.emacs.desktop.lock
**/*.elc
**/auto-save-list
**/tramp
**/.\#*
# Org-mode
**/.org-id-locations
**/*_archive
# flymake-mode
**/*_flymake.*
# eshell files
eshell/history
eshell/lastdir
# elpa packages
elpa
# reftex files
**/*.rel
# AUCTeX auto folder
auto
# cask packages
**/.cask
**/dist
# Flycheck
**/flycheck_*.el
# server auth directory
server
# projectiles files
**/.projectile
# directory configuration
**/.dir-locals.el
# network security
network-security.data
### Executable ###
**/*.app
**/*.bat
**/*.cgi
**/*.com
**/*.exe
**/*.gadget
**/*.jar
**/*.pif
**/*.vb
**/*.wsf
### Node ###
# Logs
**/logs
**/*.log
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/lerna-debug.log*
**/.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
**/pids
**/*.pid
**/*.seed
**/*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
**/lib-cov
# Coverage directory used by tools like istanbul
**/coverage
**/*.lcov
# nyc test coverage
**/.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
**/.grunt
# Bower dependency directory (https://bower.io/)
**/bower_components
# node-waf configuration
**/.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
**/build/Release
# Dependency directories
**/node_modules
**/jspm_packages
# Snowpack dependency directory (https://snowpack.dev/)
**/web_modules
# TypeScript cache
**/*.tsbuildinfo
# Optional npm cache directory
**/.npm
# Optional eslint cache
**/.eslintcache
# Optional stylelint cache
**/.stylelintcache
# Microbundle cache
**/.rpt2_cache
**/.rts2_cache_cjs
**/.rts2_cache_es
**/.rts2_cache_umd
# Optional REPL history
**/.node_repl_history
# Output of 'npm pack'
**/*.tgz
# Yarn Integrity file
**/.yarn-integrity
# dotenv environment variable files
**/.env
**/.env.development.local
**/.env.test.local
**/.env.production.local
**/.env.local
# parcel-bundler cache (https://parceljs.org/)
**/.cache
**/.parcel-cache
# Next.js build output
**/.next
**/out
# Nuxt.js build / generate output
**/.nuxt
**/dist
# Gatsby files
**/.cache
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
**/.vuepress/dist
# vuepress v2.x temp and cache directory
**/.temp
# Docusaurus cache and generated files
**/.docusaurus
# Serverless directories
**/.serverless
# FuseBox cache
**/.fusebox
# DynamoDB Local files
**/.dynamodb
# TernJS port file
**/.tern-port
# Stores VSCode versions used for testing VSCode extensions
**/.vscode-test
# yarn v2
**/.yarn/cache
**/.yarn/unplugged
**/.yarn/build-state.yml
**/.yarn/install-state.gz
**/.pnp.*
### Node Patch ###
# Serverless Webpack directories
**/.webpack
# Optional stylelint cache
# SvelteKit build / generate output
**/.svelte-kit
### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
**/.idea/**/workspace.xml
**/.idea/**/tasks.xml
**/.idea/**/usage.statistics.xml
**/.idea/**/dictionaries
**/.idea/**/shelf
# AWS User-specific
**/.idea/**/aws.xml
# Generated files
**/.idea/**/contentModel.xml
# Sensitive or high-churn files
**/.idea/**/dataSources
**/.idea/**/dataSources.ids
**/.idea/**/dataSources.local.xml
**/.idea/**/sqlDataSources.xml
**/.idea/**/dynamic.xml
**/.idea/**/uiDesigner.xml
**/.idea/**/dbnavigator.xml
# Gradle
**/.idea/**/gradle.xml
**/.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
**/cmake-build-*
# Mongo Explorer plugin
**/.idea/**/mongoSettings.xml
# File-based project format
**/*.iws
# IntelliJ
**/out
# mpeltonen/sbt-idea plugin
**/.idea_modules
# JIRA plugin
**/atlassian-ide-plugin.xml
# Cursive Clojure plugin
**/.idea/replstate.xml
# SonarLint plugin
**/.idea/sonarlint
# Crashlytics plugin (for Android Studio and IntelliJ)
**/com_crashlytics_export_strings.xml
**/crashlytics.properties
**/crashlytics-build.properties
**/fabric.properties
# Editor-based Rest Client
**/.idea/httpRequests
# Android studio 3.1+ serialized cache file
**/.idea/caches/build_file_checksums.ser
### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
**/.idea/**/sonarlint
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
**/.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
**/.idea/**/markdown-navigator.xml
**/.idea/**/markdown-navigator-enh.xml
**/.idea/**/markdown-navigator
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
**/.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
**/.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
**/.idea/**/azureSettings.xml
### Python ###
# Byte-compiled / optimized / DLL files
**/__pycache__
**/*.py[cod]
**/*$py.class
# C extensions
**/*.so
# Distribution / packaging
**/.Python
**/build
**/develop-eggs
**/downloads
**/eggs
**/.eggs
**/lib
**/lib64
**/parts
**/sdist
**/var
**/wheels
**/share/python-wheels
**/*.egg-info
**/.installed.cfg
**/*.egg
**/MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
**/*.manifest
**/*.spec
# Installer logs
**/pip-log.txt
**/pip-delete-this-directory.txt
# Unit test / coverage reports
**/htmlcov
**/.tox
**/.nox
**/.coverage
**/.coverage.*
**/nosetests.xml
**/coverage.xml
**/*.cover
**/*.py,cover
**/.hypothesis
**/.pytest_cache
**/cover
# Translations
**/*.mo
**/*.pot
# Django stuff:
**/local_settings.py
**/db.sqlite3
**/db.sqlite3-journal
# Flask stuff:
**/instance
**/.webassets-cache
# Scrapy stuff:
**/.scrapy
# Sphinx documentation
**/docs/_build
# PyBuilder
**/.pybuilder
**/target
# Jupyter Notebook
**/.ipynb_checkpoints
# IPython
**/profile_default
**/ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
**/.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
**/__pypackages__
# Celery stuff
**/celerybeat-schedule
**/celerybeat.pid
# SageMath parsed files
**/*.sage.py
# Environments
**/.venv
**/env
**/venv
**/ENV
**/env.bak
**/venv.bak
# Spyder project settings
**/.spyderproject
**/.spyproject
# Rope project settings
**/.ropeproject
# mkdocs documentation
site
# mypy
**/.mypy_cache
**/.dmypy.json
**/dmypy.json
# Pyre type checker
**/.pyre
# pytype static type analyzer
**/.pytype
# Cython debug symbols
**/cython_debug
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
**/poetry.toml
# ruff
**/.ruff_cache
# LSP config files
**/pyrightconfig.json
### SublimeText ###
# Cache files for Sublime Text
**/*.tmlanguage.cache
**/*.tmPreferences.cache
**/*.stTheme.cache
# Workspace files are user-specific
**/*.sublime-workspace
# Project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using Sublime Text
# *.sublime-project
# SFTP configuration file
**/sftp-config.json
**/sftp-config-alt*.json
# Package control specific files
**/Package Control.last-run
**/Package Control.ca-list
**/Package Control.ca-bundle
**/Package Control.system-ca-bundle
**/Package Control.cache
**/Package Control.ca-certs
**/Package Control.merged-ca-bundle
**/Package Control.user-ca-bundle
**/oscrypto-ca-bundle.crt
**/bh_unicode_properties.cache
# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
**/GitHub.sublime-settings
### Vim ###
# Swap
**/[._]*.s[a-v][a-z]
# comment out if you don't need vector files
!**/*.svg
**/[._]*.sw[a-p]
**/[._]s[a-rt-v][a-z]
**/[._]ss[a-gi-z]
**/[._]sw[a-p]
# Session
**/Session.vim
**/Sessionx.vim
# Temporary
**/.netrwhist
# Auto-generated tag files
**/tags
# Persistent undo
**/[._]*.un~
### VisualStudioCode ###
**/.vscode/*
!**/.vscode/settings.json
!**/.vscode/tasks.json
!**/.vscode/launch.json
!**/.vscode/extensions.json
!**/.vscode/*.code-snippets
# Local History for Visual Studio Code
**/.history
# Built Visual Studio Code Extensions
**/*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
**/.history
**/.ionide
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
**/*.rsuser
**/*.suo
**/*.user
**/*.userosscache
**/*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
**/*.userprefs
# Mono auto generated files
**/mono_crash.*
# Build results
**/[Dd]ebug
**/[Dd]ebugPublic
**/[Rr]elease
**/[Rr]eleases
**/x64
**/x86
**/[Ww][Ii][Nn]32
**/[Aa][Rr][Mm]
**/[Aa][Rr][Mm]64
**/bld
**/[Bb]in
**/[Oo]bj
**/[Ll]og
**/[Ll]ogs
# Visual Studio 2015/2017 cache/options directory
**/.vs
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
**/Generated\ Files
# MSTest test Results
**/[Tt]est[Rr]esult*
**/[Bb]uild[Ll]og.*
# NUnit
**/*.VisualState.xml
**/TestResult.xml
**/nunit-*.xml
# Build Results of an ATL Project
**/[Dd]ebugPS
**/[Rr]eleasePS
**/dlldata.c
# Benchmark Results
**/BenchmarkDotNet.Artifacts
# .NET Core
**/project.lock.json
**/project.fragment.lock.json
**/artifacts
# ASP.NET Scaffolding
**/ScaffoldingReadMe.txt
# StyleCop
**/StyleCopReport.xml
# Files built by Visual Studio
**/*_i.c
**/*_p.c
**/*_h.h
**/*.ilk
**/*.meta
**/*.obj
**/*.iobj
**/*.pch
**/*.pdb
**/*.ipdb
**/*.pgc
**/*.pgd
**/*.rsp
**/*.sbr
**/*.tlb
**/*.tli
**/*.tlh
**/*.tmp
**/*.tmp_proj
**/*_wpftmp.csproj
**/*.tlog
**/*.vspscc
**/*.vssscc
**/.builds
**/*.pidb
**/*.svclog
**/*.scc
# Chutzpah Test files
**/_Chutzpah*
# Visual C++ cache files
**/ipch
**/*.aps
**/*.ncb
**/*.opendb
**/*.opensdf
**/*.cachefile
**/*.VC.db
**/*.VC.VC.opendb
# Visual Studio profiler
**/*.psess
**/*.vsp
**/*.vspx
**/*.sap
# Visual Studio Trace Files
**/*.e2e
# TFS 2012 Local Workspace
**/$tf
# Guidance Automation Toolkit
**/*.gpState
# ReSharper is a .NET coding add-in
**/_ReSharper*
**/*.[Rr]e[Ss]harper
**/*.DotSettings.user
# TeamCity is a build add-in
**/_TeamCity*
# DotCover is a Code Coverage Tool
**/*.dotCover
# AxoCover is a Code Coverage Tool
**/.axoCover/*
!**/.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
**/coverage*.json
**/coverage*.xml
**/coverage*.info
# Visual Studio code coverage results
**/*.coverage
**/*.coveragexml
# NCrunch
**/_NCrunch_*
**/.*crunch*.local.xml
**/nCrunchTemp_*
# MightyMoose
**/*.mm.*
**/AutoTest.Net
# Web workbench (sass)
**/.sass-cache
# Installshield output folder
**/[Ee]xpress
# DocProject is a documentation generator add-in
**/DocProject/buildhelp
**/DocProject/Help/*.HxT
**/DocProject/Help/*.HxC
**/DocProject/Help/*.hhc
**/DocProject/Help/*.hhk
**/DocProject/Help/*.hhp
**/DocProject/Help/Html2
**/DocProject/Help/html
# Click-Once directory
**/publish
# Publish Web Output
**/*.[Pp]ublish.xml
**/*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
**/*.pubxml
**/*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
**/PublishScripts
# NuGet Packages
**/*.nupkg
# NuGet Symbol Packages
**/*.snupkg
# The packages folder can be ignored because of Package Restore
**/**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/**/[Pp]ackages/build
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
**/*.nuget.props
**/*.nuget.targets
# Microsoft Azure Build Output
**/csx
**/*.build.csdef
# Microsoft Azure Emulator
**/ecf
**/rcf
# Windows Store app package directories and files
**/AppPackages
**/BundleArtifacts
**/Package.StoreAssociation.xml
**/_pkginfo.txt
**/*.appx
**/*.appxbundle
**/*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
**/*.[Cc]ache
# but keep track of directories ending in .cache
!**/?*.[Cc]ache
# Others
**/ClientBin
**/~$*
**/*.dbmdl
**/*.dbproj.schemaview
**/*.jfm
**/*.pfx
**/*.publishsettings
**/orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
**/Generated_Code
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
**/_UpgradeReport_Files
**/Backup*
**/UpgradeLog*.XML
**/UpgradeLog*.htm
**/ServiceFabricBackup
**/*.rptproj.bak
# SQL Server files
**/*.mdf
**/*.ldf
**/*.ndf
# Business Intelligence projects
**/*.rdl.data
**/*.bim.layout
**/*.bim_*.settings
**/*.rptproj.rsuser
**/*- [Bb]ackup.rdl
**/*- [Bb]ackup ([0-9]).rdl
**/*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
**/FakesAssemblies
# GhostDoc plugin setting file
**/*.GhostDoc.xml
# Node.js Tools for Visual Studio
**/.ntvs_analysis.dat
# Visual Studio 6 build log
**/*.plg
# Visual Studio 6 workspace options file
**/*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
**/*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
**/*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
**/*.dsw
**/*.dsp
# Visual Studio 6 technical files
# Visual Studio LightSwitch build output
**/**/*.HTMLClient/GeneratedArtifacts
**/**/*.DesktopClient/GeneratedArtifacts
**/**/*.DesktopClient/ModelManifest.xml
**/**/*.Server/GeneratedArtifacts
**/**/*.Server/ModelManifest.xml
**/_Pvt_Extensions
# Paket dependency manager
**/.paket/paket.exe
**/paket-files
# FAKE - F# Make
**/.fake
# CodeRush personal settings
**/.cr/personal
# Python Tools for Visual Studio (PTVS)
**/*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
**/*.tss
# Telerik's JustMock configuration file
**/*.jmconfig
# BizTalk build output
**/*.btp.cs
**/*.btm.cs
**/*.odx.cs
**/*.xsd.cs
# OpenCover UI analysis results
**/OpenCover
# Azure Stream Analytics local run output
**/ASALocalRun
# MSBuild Binary and Structured Log
**/*.binlog
# NVidia Nsight GPU debugger configuration file
**/*.nvuser
# MFractors (Xamarin productivity tool) working folder
**/.mfractor
# Local History for Visual Studio
**/.localhistory
# Visual Studio History (VSHistory) files
**/.vshistory
# BeatPulse healthcheck temp database
**/healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
**/MigrationBackup
# Ionide (cross platform F# VS Code tools) working folder
**/.ionide
# Fody - auto-generated XML schema
**/FodyWeavers.xsd
# VS Code files for those working on multiple tools
**/*.code-workspace
# Local History for Visual Studio Code
# Windows Installer files from build outputs
**/*.cab
**/*.msi
**/*.msix
**/*.msm
**/*.msp
# JetBrains Rider
**/*.sln.iml
### VisualStudio Patch ###
# Additional files built by Visual Studio
# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
**/database.db
**/.markata.cache
**/database.sqlite
# flyctl launch added from .pytest_cache/.gitignore
# Created by pytest automatically.
.pytest_cache/**/*
# flyctl launch added from .ruff_cache/.gitignore
.ruff_cache/**/*
fly.toml

2
.gitignore vendored
View file

@ -967,5 +967,3 @@ database.db
database.db database.db
.markata.cache .markata.cache
database.sqlite database.sqlite
.env.dev
.env.dev.docker

View file

@ -4,9 +4,9 @@ WORKDIR /app
Copy pyproject.toml /app Copy pyproject.toml /app
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
COPY README.md /app COPY README.md /app
RUN pip3 install '.[all]' RUN pip3 install '.[api]'
COPY . /app COPY . /app
RUN pip3 install '.[all]' RUN pip3 install '.[api]'
EXPOSE 5000 EXPOSE 5000

View file

@ -1,6 +1,3 @@
> [!IMPORTANT]
> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
# Learn SQL Model # Learn SQL Model
learning sql model learning sql model

Binary file not shown.

Before

Width:  |  Height:  |  Size: 769 B

70
d3.py
View file

@ -1,70 +0,0 @@
import sqlite3
from jinja2 import Environment, FileSystemLoader
def get_tables_and_columns(conn):
    """Return schema metadata for every table in the connected SQLite database.

    Each entry is a dict with the table's name, its ordered column names,
    and its foreign-key descriptors, gathered via the helper functions.
    """
    rows = conn.cursor().execute(
        "SELECT name FROM sqlite_master WHERE type='table';"
    ).fetchall()
    tables = []
    for (table_name,) in rows:
        tables.append(
            {
                "name": table_name,
                "columns": get_columns(conn, table_name),
                "foreign_keys": get_foreign_keys(conn, table_name),
            }
        )
    return tables
def get_columns(conn, table_name):
    """Return the column names of *table_name*, in declaration order.

    The identifier is double-quoted (with embedded quotes doubled) so table
    names containing spaces, quotes, or reserved words still work; PRAGMA
    statements cannot use bound parameters, so quoting is the only safe option.
    """
    cursor = conn.cursor()
    quoted = table_name.replace('"', '""')
    cursor.execute(f'PRAGMA table_info("{quoted}");')
    # table_info rows are (cid, name, type, notnull, dflt_value, pk); keep name.
    return [row[1] for row in cursor.fetchall()]
def get_foreign_keys(conn, table_name):
    """Describe the foreign keys declared on *table_name*.

    Each descriptor maps the local column ("from") to the referenced
    table and column ("to_table" / "to"), keyed by the constraint id
    reported by SQLite's foreign_key_list pragma.
    """
    cursor = conn.cursor()
    cursor.execute(f"PRAGMA foreign_key_list({table_name});")
    descriptors = []
    for row in cursor.fetchall():
        # foreign_key_list rows: (id, seq, table, from, to, on_update, on_delete, match)
        descriptors.append(
            {"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]}
        )
    return descriptors
def generate_links(tables):
    """Compute source/target pixel coordinates for every foreign-key edge.

    The layout mirrors the boxes drawn by the HTML template: boxes start at
    x=50 spaced 150 px apart, are 120 px wide, and each column row sits
    20 px below the previous one after a 40 px header offset.
    """

    def row_y(columns, column_name):
        # Vertical offset of a column row inside its table box.
        return 40 + columns.index(column_name) * 20

    links = []
    for source_index, source_table in enumerate(tables):
        for fk in source_table["foreign_keys"]:
            target_index = next(
                i
                for i, candidate in enumerate(tables)
                if candidate["name"] == fk["to_table"]
            )
            target_table = tables[target_index]
            link = {
                "source": {
                    "x": 50 + source_index * 150 + 120,
                    "y": 50 + row_y(source_table["columns"], fk["from"]),
                },
                "target": {
                    "x": 50 + target_index * 150,
                    "y": 50 + row_y(target_table["columns"], fk["to"]),
                },
            }
            links.append(link)
    return links
def generate_er_diagram(database_path):
    """Render an ER diagram of the SQLite database at *database_path*.

    Introspects the schema, computes link coordinates, and writes the
    rendered ``templates/er_diagram.html`` template to ``index.html`` in
    the current working directory.
    """
    conn = sqlite3.connect(database_path)
    try:
        tables = get_tables_and_columns(conn)
        links = generate_links(tables)
    finally:
        # The original leaked the connection; always release it, even if
        # schema introspection raises.
        conn.close()
    env = Environment(loader=FileSystemLoader("templates"))
    template = env.get_template("er_diagram.html")
    with open("index.html", "w") as f:
        f.write(template.render(tables=tables, links=links))
if __name__ == "__main__":
    # Script entry point: render the diagram for the local SQLite file.
    db_path = "database.db"
    generate_er_diagram(db_path)

View file

@ -1,72 +0,0 @@
![ER Diagram](er_diagram.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| f48730a783a5 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| birthday | DATETIME | | |
| id | INTEGER | | |
### Records Count
The table pet contains 0 records.
---
## Table: hero
### First 5 rows
| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
|------|-------------|---|---|------|-----|-----------|--------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| secret_name | VARCHAR | | |
| x | INTEGER | | |
| y | INTEGER | | |
| size | INTEGER | | |
| age | INTEGER | | |
| shoe_size | INTEGER | | |
| pet_id | INTEGER | pet.id | |
| id | INTEGER | | |
### Records Count
The table hero contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 67 KiB

BIN
im.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>ER Diagram</title>
<!-- Include d3.js -->
<script src="https://d3js.org/d3.v6.min.js"></script>
<style>
body {
margin: 0;
}
.table {
font-family: Arial, sans-serif;
font-size: 14px;
cursor: pointer;
}
.table-name {
font-weight: bold;
font-size: 16px;
}
.foreign-key {
fill: #b30000;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 2px;
fill: none;
}
.link-curved-path {
pointer-events: none;
}
</style>
</head>
<body>
<div id="er-diagram"></div>
<script>
// Generate ER diagram
// NOTE(review): the tables/links constants below look like values inlined at
// generation time (presumably rendered from templates/er_diagram.html by
// d3.py's generate_er_diagram) — confirm against the template.
const tables = [{'name': 'pet', 'columns': ['name', 'birthday', 'id'], 'foreign_keys': []}, {'name': 'hero', 'columns': ['name', 'secret_name', 'x', 'y', 'size', 'age', 'shoe_size', 'pet_id', 'id'], 'foreign_keys': [{'id': 0, 'from': 'pet_id', 'to_table': 'pet', 'to': 'id'}]}];
const links = [{'source': {'x': 320, 'y': 230}, 'target': {'x': 50, 'y': 130}}];
const width = window.innerWidth;
const height = window.innerHeight;
// Box geometry: fixed width, one 20px row per column plus a header row.
const tableElemWidth = 120;
const tableElemHeight = d => 20 * (d.columns.length + 1);
let svg = d3.select("#er-diagram")
.append("svg")
.attr("width", width)
.attr("height", height);
let g = svg.append("g");
let linkGroup = g.selectAll(".link")
.data(links)
.join("path")
.attr("class", "link");
// One <g> per table; clicking toggles the "collapsed" class, which hides
// the column labels in the tick handler below.
let tableGroup = g.selectAll(".table")
.data(tables)
.join("g")
.attr("class", "table")
.classed("collapsed", false)
.on("click", (event, d) => {
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
});
// Pan/zoom on the whole diagram group (10%–400%).
let zoomBehavior = d3.zoom()
.scaleExtent([0.1, 4])
.on("zoom", function (event) {
g.attr("transform", event.transform);
});
svg.call(zoomBehavior);
let rect = tableGroup.append("rect")
.attr("width", tableElemWidth)
.attr("height", tableElemHeight)
.attr("fill", "#eee");
let text = tableGroup.append("text")
.attr("class", "table-name")
.attr("x", 10)
.attr("y", 20)
.text(d => d.name);
// Column labels; FK columns get the red .foreign-key styling.
let columnText = tableGroup.selectAll(".column")
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
.join("text")
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
.attr("x", 10)
.attr("y", (d, i) => 40 + i * 20)
.text(d => d.name);
// Physics simulation and force layout
let simulation = d3.forceSimulation(tables)
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
.force("charge", d3.forceManyBody().strength(-800))
.force("x", d3.forceX(width / 2).strength(0.1))
.force("y", d3.forceY(height / 2).strength(0.1))
.on("tick", () => {
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
// NOTE(review): each link's source/target are plain {x, y} objects and
// d.source_col / d.target_col are never assigned anywhere in this file,
// so d.source.columns is undefined here — this path generator looks
// broken as written; confirm against a rendered page before relying on it.
linkGroup.attr("d", d => {
const srcX = d.source.x + tableElemWidth;
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
const tgtX = d.target.x;
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
const deltaX = tgtX - srcX;
const deltaY = tgtY - srcY;
const curveFactor = 50;
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
});
// Hide column labels for tables the user has collapsed by clicking.
columnText.style("display", (d, i, nodes) => {
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
});
});
</script>
</body>
</html>

View file

@ -1,16 +1,16 @@
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select from sqlmodel import SQLModel, Session
from learn_sql_model.config import get_session from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_config, get_session
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
hero_router = APIRouter() hero_router = APIRouter()
@hero_router.on_event("startup") @hero_router.on_event("startup")
async def on_startup() -> None: def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine) SQLModel.metadata.create_all(get_config().database.engine)
...
@hero_router.get("/hero/{hero_id}") @hero_router.get("/hero/{hero_id}")
@ -32,12 +32,12 @@ async def post_hero(
session: Session = Depends(get_session), session: Session = Depends(get_session),
hero: HeroCreate, hero: HeroCreate,
) -> HeroRead: ) -> HeroRead:
"create a hero" "read all the heros"
db_hero = Hero.from_orm(hero) db_hero = Hero.from_orm(hero)
session.add(db_hero) session.add(db_hero)
session.commit() session.commit()
session.refresh(db_hero) session.refresh(db_hero)
# await manager.broadcast({hero.json()}, id=1) await manager.broadcast({hero.json()}, id=1)
return db_hero return db_hero
@ -47,7 +47,7 @@ async def patch_hero(
session: Session = Depends(get_session), session: Session = Depends(get_session),
hero: HeroUpdate, hero: HeroUpdate,
) -> HeroRead: ) -> HeroRead:
"update a hero" "read all the heros"
db_hero = session.get(Hero, hero.id) db_hero = session.get(Hero, hero.id)
if not db_hero: if not db_hero:
raise HTTPException(status_code=404, detail="Hero not found") raise HTTPException(status_code=404, detail="Hero not found")
@ -56,7 +56,7 @@ async def patch_hero(
session.add(db_hero) session.add(db_hero)
session.commit() session.commit()
session.refresh(db_hero) session.refresh(db_hero)
# await manager.broadcast({hero.json()}, id=1) await manager.broadcast({hero.json()}, id=1)
return db_hero return db_hero
@ -66,13 +66,13 @@ async def delete_hero(
session: Session = Depends(get_session), session: Session = Depends(get_session),
hero_id: int, hero_id: int,
): ):
"delete a hero" "read all the heros"
hero = session.get(Hero, hero_id) hero = session.get(Hero, hero_id)
if not hero: if not hero:
raise HTTPException(status_code=404, detail="Hero not found") raise HTTPException(status_code=404, detail="Hero not found")
session.delete(hero) session.delete(hero)
session.commit() session.commit()
# await manager.broadcast(f"deleted hero {hero_id}", id=1) await manager.broadcast(f"deleted hero {hero_id}", id=1)
return {"ok": True} return {"ok": True}
@ -82,6 +82,4 @@ async def get_heros(
session: Session = Depends(get_session), session: Session = Depends(get_session),
) -> Heros: ) -> Heros:
"get all heros" "get all heros"
statement = select(Hero) return Heros.list(session=session)
heros = session.exec(statement).all()
return Heros(__root__=heros)

View file

@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse from fastapi.responses import HTMLResponse
from rich.console import Console from rich.console import Console
from sqlmodel import Session, select from sqlmodel import Session
from websockets.exceptions import ConnectionClosed from websockets.exceptions import ConnectionClosed
from learn_sql_model.api.websocket_connection_manager import manager from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session from learn_sql_model.config import get_session
from learn_sql_model.console import console from learn_sql_model.console import console
from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
web_socket_router = APIRouter() web_socket_router = APIRouter()
@ -46,9 +46,7 @@ async def websocket_endpoint_connect(
): ):
Console().log(f"Client #{id} connecting") Console().log(f"Client #{id} connecting")
await manager.connect(websocket, channel) await manager.connect(websocket, channel)
statement = select(Hero) heros = Heros.list(session=session)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
await websocket.send_text(heros.json()) await websocket.send_text(heros.json())
try: try:
@ -85,18 +83,11 @@ async def websocket_endpoint_hero_echo(
while True: while True:
data = await websocket.receive_text() data = await websocket.receive_text()
hero = HeroUpdate.parse_raw(data) hero = HeroUpdate.parse_raw(data)
statement = select(Hero) heros = Heros.list(session=session)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
if heros != last_heros: if heros != last_heros:
await manager.broadcast(heros.json(), "heros") await manager.broadcast(heros.json(), "heros")
last_heros = heros last_heros = heros
db_hero = session.get(Hero, hero.id) hero.update(session=session)
for key, value in hero.dict(exclude_unset=True).items():
setattr(db_hero, key, value)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
console.print(heros) console.print(heros)
await websocket.send_text(heros.json()) await websocket.send_text(heros.json())
@ -105,9 +96,7 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session) HeroDelete(id=hero.id).delete(session=session)
except Exception: except Exception:
... ...
statement = select(Hero) heros = Heros.list(session=session)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
await manager.broadcast(heros.json(), "heros") await manager.broadcast(heros.json(), "heros")
print("disconnected") print("disconnected")
except ConnectionClosed: except ConnectionClosed:
@ -115,8 +104,6 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session) HeroDelete(id=hero.id).delete(session=session)
except Exception: except Exception:
... ...
statement = select(Hero) heros = Heros.list(session=session)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
await manager.broadcast(heros.json(), "heros") await manager.broadcast(heros.json(), "heros")
print("connection closed") print("connection closed")

View file

@ -33,8 +33,9 @@ def hero():
@hero_app.command() @hero_app.command()
@engorgio(typer=True)
def get( def get(
hero_id: Optional[int] = typer.Argument(), hero_id: Optional[int] = typer.Argument(default=None),
) -> Union[Hero, List[Hero]]: ) -> Union[Hero, List[Hero]]:
"get one hero" "get one hero"
hero = HeroRead.get(id=hero_id) hero = HeroRead.get(id=hero_id)
@ -43,19 +44,25 @@ def get(
@hero_app.command() @hero_app.command()
def list() -> Union[Hero, List[Hero]]: @engorgio(typer=True)
def list(
where: Optional[str] = None,
offset: int = 0,
limit: Optional[int] = None,
) -> Union[Hero, List[Hero]]:
"list many heros" "list many heros"
heros = Heros.list() heros = Heros.list(where=where, offset=offset, limit=limit)
Console().print(heros) Console().print(hero)
return heros return hero
@hero_app.command() @hero_app.command()
def clear() -> Union[Hero, List[Hero]]: def clear() -> Union[Hero, List[Hero]]:
"list many heros" "list many heros"
heros = Heros.list() heros = Heros.list()
for hero in heros.__root__: for hero in heros.heros:
HeroDelete.delete(id=hero.id) HeroDelete(id=hero.id).delete()
return hero return hero
@ -80,15 +87,14 @@ def update(
@hero_app.command() @hero_app.command()
@engorgio(typer=True) @engorgio(typer=True)
def delete( def delete(
hero_id: Optional[int] = typer.Argument(), hero: HeroDelete,
) -> Hero: ) -> Hero:
"delete a hero by id" "delete a hero by id"
hero = HeroDelete.delete(id=hero_id) hero.delete()
Console().print(hero)
return hero
@hero_app.command() @hero_app.command()
@engorgio(typer=True)
def populate( def populate(
n: int = 10, n: int = 10,
) -> Hero: ) -> Hero:

View file

@ -1,15 +1,11 @@
from pathlib import Path from pathlib import Path
from typing import Annotated
# import copier import alembic
from alembic.config import Config
import copier
import typer import typer
from learn_sql_model.cli.common import verbose_callback from learn_sql_model.cli.common import verbose_callback
from learn_sql_model.config import get_config
from learn_sql_model.optional import _optional_import_
alembic = _optional_import_("alembic", group="manage")
Config = _optional_import_("alembic.config", "Config", group="manage")
model_app = typer.Typer() model_app = typer.Typer()
@ -44,18 +40,11 @@ def create_revision(
callback=verbose_callback, callback=verbose_callback,
help="show the log messages", help="show the log messages",
), ),
message: Annotated[ message: str = typer.Option(
str,
typer.Option(
"--message",
"-m",
prompt=True, prompt=True,
), ),
] = None,
): ):
alembic_cfg = Config("alembic.ini") alembic_cfg = Config("alembic.ini")
config = get_config()
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.revision( alembic.command.revision(
config=alembic_cfg, config=alembic_cfg,
message=message, message=message,
@ -74,17 +63,7 @@ def checkout(
revision: str = typer.Option("head"), revision: str = typer.Option("head"),
): ):
alembic_cfg = Config("alembic.ini") alembic_cfg = Config("alembic.ini")
config = get_config() alembic.command.upgrade(config=alembic_cfg, revision="head")
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.upgrade(config=alembic_cfg, revision=revision)
@model_app.command()
def status():
alembic_cfg = Config("alembic.ini")
config = get_config()
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
alembic.command.current(config=alembic_cfg)
@model_app.command() @model_app.command()
@ -94,4 +73,5 @@ def populate(
callback=verbose_callback, callback=verbose_callback,
help="show the log messages", help="show the log messages",
), ),
): ... ):
...

View file

@ -4,8 +4,7 @@ from typing import TYPE_CHECKING
from fastapi import Depends from fastapi import Depends
from pydantic import BaseModel, BaseSettings, validator from pydantic import BaseModel, BaseSettings, validator
from sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker from sqlmodel import SQLModel, Session
from sqlmodel import Session
from learn_sql_model.standard_config import load from learn_sql_model.standard_config import load
@ -19,7 +18,6 @@ class ApiServer(BaseModel):
reload: bool = True reload: bool = True
log_level: str = "info" log_level: str = "info"
host: str = "0.0.0.0" host: str = "0.0.0.0"
workers: int = 1
class ApiClient(BaseModel): class ApiClient(BaseModel):
@ -27,6 +25,7 @@ class ApiClient(BaseModel):
protocol: str = "https" protocol: str = "https"
url: str = f"{protocol}://{host}" url: str = f"{protocol}://{host}"
class Database: class Database:
def __init__(self, config: "Config" = None) -> None: def __init__(self, config: "Config" = None) -> None:
if config is None: if config is None:
@ -41,21 +40,9 @@ class Database:
} }
self.db_state = ContextVar("db_state", default=self.db_state_default.copy()) self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
self.db_conf = {}
if 'sqlite' in self.config.database_url:
self.db_conf = {
'connect_args': {"check_same_thread": False},
'pool_recycle': 3600,
'pool_pre_ping': True,
}
self._engine = create_engine(
self.config.database_url,
**self.db_conf
)
@property @property
def engine(self) -> "Engine": def engine(self) -> "Engine":
return self._engine return create_engine(self.config.database_url)
@property @property
def session(self) -> "Session": def session(self) -> "Session":
@ -84,8 +71,7 @@ class Config(BaseSettings):
return get_database(config=self) return get_database(config=self)
def init(self) -> None: def init(self) -> None:
# SQLModel.metadata.create_all(self.database.engine) SQLModel.metadata.create_all(self.database.engine)
...
def get_database(config: Config = None) -> Database: def get_database(config: Config = None) -> Database:
@ -100,14 +86,9 @@ def get_config(overrides: dict = {}) -> Config:
return config return config
config = get_config()
database = get_database()
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
def get_session() -> "Session": def get_session() -> "Session":
with Session(database.engine) as session: config = get_config()
with Session(config.database.engine) as session:
yield session yield session

View file

@ -1,151 +0,0 @@
import sqlite3
from graphviz import Digraph
from learn_sql_model.config import get_config
config = get_config()
def generate_er_diagram(output_path):
# Connect to the SQLite database
database_path = config.database_url.replace("sqlite:///", "")
conn = sqlite3.connect(database_path)
cursor = conn.cursor()
# Get the table names from the database
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cursor.fetchall()
# Create a new Digraph
dot = Digraph(format="png")
dot.attr(rankdir="TD")
# Iterate over the tables
for table in tables:
table_name = table[0]
dot.node(table_name, shape="box")
cursor.execute(f"PRAGMA table_info({table_name});")
columns = cursor.fetchall()
# Add the columns to the table node
for column in columns:
column_name = column[1]
dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
dot.edge(table_name, f"{table_name}.{column_name}")
# Check for foreign key relationships
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
foreign_keys = cursor.fetchall()
# Add dotted lines for foreign key relationships
for foreign_key in foreign_keys:
from_column = foreign_key[3]
to_table = foreign_key[2]
to_column = foreign_key[4]
dot.node(f"{to_table}.{to_column}", shape="oval")
dot.edge(
f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
)
# Render and save the diagram
dot.render(output_path.replace(".png", ""), cleanup=True)
# Close the database connection
cursor.close()
conn.close()
def generate_er_markdown(output_path, er_diagram_path):
# Connect to the SQLite database
database_path = config.database_url.replace("sqlite:///", "")
conn = sqlite3.connect(database_path)
cursor = conn.cursor()
# Get the table names from the database
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables = cursor.fetchall()
with open(output_path, "w") as f:
# Write the ER Diagram image
f.write(f"![ER Diagram]({er_diagram_path})\n\n---\n\n")
# Iterate over the tables
for table in tables:
table_name = table[0]
f.write(f"## Table: {table_name}\n\n")
# Get the table columns
cursor.execute(f"PRAGMA table_info({table_name});")
columns = cursor.fetchall()
f.write("### First 5 rows\n\n")
cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
rows = cursor.fetchall()
f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
f.write("|")
for column in columns:
# ---
f.write(f'{"-"*(len(column[1]) + 2)}|')
f.write("\n")
for row in rows:
f.write(f'| {" | ".join([str(r) for r in row])} |\n')
f.write("\n")
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
foreign_keys = cursor.fetchall()
# Add dotted lines for foreign key relationships
fkeys = {}
for foreign_key in foreign_keys:
from_column = foreign_key[3]
to_table = foreign_key[2]
to_column = foreign_key[4]
fkeys[from_column] = f"{to_table}.{to_column}"
# Replace 'description' with the actual column name in the table that contains the description, if applicable
try:
cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
description = cursor.fetchone()
if description:
f.write(f"### Description\n\n{description[0]}\n\n")
except:
...
# Write the table columns
f.write("### Columns\n\n")
f.write("| Column Name | Type | Foreign Key | Example Value |\n")
f.write("|-------------|------|-------------|---------------|\n")
for column in columns:
column_name = column[1]
column_type = column[2]
fkey = ""
if column_name in fkeys:
fkey = fkeys[column_name]
f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
f.write("\n")
# Get the count of records
cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
records_count = cursor.fetchone()[0]
f.write(
f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
)
# Close the database connection
cursor.close()
conn.close()
if __name__ == "__main__":
# Usage example
database_path = "database.db"
md_output_path = "database.md"
er_output_path = "er_diagram.png"
generate_er_diagram(database_path, er_output_path)
generate_markdown(database_path, md_output_path, er_output_path)

View file

@ -10,6 +10,7 @@ class HeroFactory(ModelFactory[Hero]):
__model__ = Hero __model__ = Hero
__faker__ = Faker(locale="en_US") __faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True __set_as_default_factory_for_type__ = True
id = None
pet_id = None pet_id = None
@classmethod @classmethod

View file

@ -1,25 +0,0 @@
import pygame
class Debug:
def __init__(self, game):
self.game = game
self.is_open = False
self.debounce = False
def handle_events(self, events):
for event in events:
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_F3 and not self.debounce:
self.is_open = not self.is_open
self.debounce = True
if event.type == pygame.KEYUP:
if event.key == pygame.K_F3:
self.debounce = False
def render(self):
if self.is_open:
text = self.game.font.render(
str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
)
self.game.screen.blit(text, (20, 20))

View file

@ -5,8 +5,6 @@ from websocket import create_connection
from learn_sql_model.config import get_config from learn_sql_model.config import get_config
from learn_sql_model.console import console from learn_sql_model.console import console
from learn_sql_model.game.debug import Debug
from learn_sql_model.game.light import Light
from learn_sql_model.game.map import Map from learn_sql_model.game.map import Map
from learn_sql_model.game.menu import Menu from learn_sql_model.game.menu import Menu
from learn_sql_model.game.player import Player from learn_sql_model.game.player import Player
@ -21,8 +19,7 @@ config = get_config()
class Client: class Client:
def __init__(self): def __init__(self):
# self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN) self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
self.screen = pygame.display.set_mode((1280, 720))
pygame.display.set_caption("Learn SQL Model") pygame.display.set_caption("Learn SQL Model")
self.clock = pygame.time.Clock() self.clock = pygame.time.Clock()
self.running = True self.running = True
@ -36,28 +33,17 @@ class Client:
self.player = Player(self) self.player = Player(self)
self.menu = Menu(self) self.menu = Menu(self)
self.map = Map(self) self.map = Map(self)
self.light = Light(self) self.font = pygame.font.SysFont("", 50)
self.font = pygame.font.SysFont("", 25)
self.joysticks = {} self.joysticks = {}
self.darkness = pygame.Surface(
(self.screen.get_width(), self.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.debug = Debug(self)
atexit.register(self.quit) atexit.register(self.quit)
@property @property
def ws(self): def ws(self):
def connect(): def connect():
if "https" in config.api_client.url: self._ws = create_connection(
url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho" f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
elif "http" in config.api_client.url: )
url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
else:
url = f"ws://{config.api_client.url}/wsecho"
self._ws = create_connection(url)
if not hasattr(self, "_ws"): if not hasattr(self, "_ws"):
connect() connect()
@ -66,13 +52,6 @@ class Client:
return self._ws return self._ws
def run(self): def run(self):
try:
from pyinstrument import Profiler
profiler = Profiler()
profiler.start()
except ImportError:
profiler = None
while self.running: while self.running:
console.print("running") console.print("running")
console.print("handle_events") console.print("handle_events")
@ -80,16 +59,12 @@ class Client:
console.print("update") console.print("update")
self.update() self.update()
console.print("render") console.print("render")
self.render() self.render()
time = self.clock.tick(60) time = self.clock.tick(60)
self.elapsed = time / 100 self.elapsed = time / 100
self.ticks += 1 self.ticks += 1
console.print(f"time: {time}") console.print(f"time: {time}")
console.print(f"ticks: {self.ticks}") console.print(f"ticks: {self.ticks}")
if profiler:
profiler.stop()
print(profiler.output_text())
self.quit() self.quit()
def quit(self): def quit(self):
@ -104,25 +79,13 @@ class Client:
self.map.render() self.map.render()
self.player.render() self.player.render()
if self.ticks % 1 == 0 or self.ticks == 0:
light_level = 0
self.darkness.fill((light_level, light_level, light_level))
self.light.render()
self.screen.blit(
self.darkness,
(0, 0),
special_flags=pygame.BLEND_MULT,
)
# update the screen # update the screen
self.menu.render() self.menu.render()
self.debug.render()
pygame.display.flip() pygame.display.flip()
def handle_events(self): def handle_events(self):
self.events = pygame.event.get() self.events = pygame.event.get()
self.menu.handle_events(self.events) self.menu.handle_events(self.events)
self.debug.handle_events(self.events)
self.player.handle_events() self.player.handle_events()
for event in self.events: for event in self.events:
if event.type == pygame.QUIT: if event.type == pygame.QUIT:

View file

@ -1,219 +0,0 @@
import bisect
from PIL import Image, ImageFilter
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
def rot_center(image, angle):
"""rotate an image while keeping its center and size"""
orig_rect = image.get_rect()
rot_image = pygame.transform.rotate(image, angle)
rot_rect = orig_rect.copy()
rot_rect.center = rot_image.get_rect().center
rot_image = rot_image.subsurface(rot_rect).copy()
return rot_image
class Light:
def __init__(self, game):
self.game = game
self.surf = pygame.Surface(
(self.game.screen.get_width(), self.game.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.surf.set_colorkey((0, 0, 0))
self.pre_render()
def pre_render(self):
# self.lights = {}
# for deg in range(-360, 360, 20):
# print("loading light", deg)
# self.lights[deg] = pygame.image.load(
# f"lights/light-{deg}.png"
# ).convert_alpha()
# return
light_surf = pygame.Surface(
(
self.game.player.hero.flashlight_strength * 3,
self.game.player.hero.flashlight_strength * 3,
),
pygame.SRCALPHA,
32,
)
v = pygame.math.Vector2(0, 1)
v.scale_to_length(self.game.player.hero.flashlight_strength)
for r in range(-90 - 25, -90 + 25):
_v = v.rotate(r)
pygame.draw.line(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
(
light_surf.get_width() / 2 + _v.x,
light_surf.get_height() / 2 + _v.y,
),
50,
)
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
self.light_surf = light_surf
self.light_surf.set_colorkey((0, 0, 0))
self.lights = {
deg: pygame.transform.rotate(self.light_surf, deg - 90)
for deg in range(-360, 360, 20)
}
for deg, light in self.lights.items():
pygame.image.save(light, f"lights/light-{deg}.png")
def render(self):
self.surf.fill((0, 0, 0))
mx, my = pygame.mouse.get_pos()
v = pygame.math.Vector2(
mx - self.game.player.hero.x, my - self.game.player.hero.y
)
v.scale_to_length(self.game.player.hero.flashlight_strength)
self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
for other in self.game.player.others.__root__:
if other.id == self.game.player.hero.id:
continue
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
other.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
other.x - my_light.get_width() / 2,
other.y - my_light.get_height() / 2,
),
)
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
self.game.player.hero.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
self.game.player.hero.x - my_light.get_width() / 2,
self.game.player.hero.y - my_light.get_height() / 2,
),
)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
# 50,
# )
# # draw a circle
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# self.game.player.hero.lanturn_strength,
# )
# for other in self.game.player.others.__root__:
# if other.id == self.game.player.hero.id:
# continue
# v = pygame.math.Vector2(0, 1)
# v = v.rotate(-other.flashlight_angle)
# v.scale_to_length(other.flashlight_strength)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# (other.x + _v.x, other.y + _v.y),
# 50,
# )
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# other.lanturn_strength,
# )
self.game.darkness.blit(
self.surf,
(0, 0),
)

View file

@ -1,8 +1,7 @@
from learn_sql_model.optional import _optional_import_
import pydantic import pydantic
from rich.console import Console from rich.console import Console
from learn_sql_model.optional import _optional_import_
snoise2 = _optional_import_("noise", "snoise2", group="game") snoise2 = _optional_import_("noise", "snoise2", group="game")
pygame = _optional_import_("pygame", group="game") pygame = _optional_import_("pygame", group="game")
@ -36,41 +35,9 @@ class Map:
self.persistence = 0.05 # Amplitude of each octave self.persistence = 0.05 # Amplitude of each octave
self.lacunarity = 1.0 # Frequency of each octave self.lacunarity = 1.0 # Frequency of each octave
self.thresh = 125 self.thresh = 125
# try to load the map from map.png
try:
self.surf = pygame.image.load("map.png").convert_alpha()
# self.surf_pil = Image.frombytes(
# "RGBA",
# (self.surf.get_width(), self.surf.get_height()),
# pygame.image.tostring(self.surf, "RGBA", False),
# )
# self.surf_blur = (
# self.surf_pil.filter(
# ImageFilter.SMOOTH_MORE(),
# )
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# # sharpen
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# )
# self.surf = pygame.image.fromstring(
# self.surf_blur.tobytes(),
# (self.surf.get_width(), self.surf.get_height()),
# "RGBA",
# ).convert_alpha()
except FileNotFoundError:
self.pre_draw() self.pre_draw()
def refresh_surf(self): def refresh_surf(self):
self.surf = pygame.Surface((self.screen_width, self.screen_height)) self.surf = pygame.Surface((self.screen_width, self.screen_height))
def get_noise(self, x, y): def get_noise(self, x, y):
@ -86,7 +53,7 @@ class Map:
def render(self): def render(self):
self.game.screen.blit( self.game.screen.blit(
self.surf, pygame.transform.scale(self.surf, (self.screen_width, self.screen_height)),
(0, 0), (0, 0),
) )
@ -97,7 +64,6 @@ class Map:
def pre_draw(self): def pre_draw(self):
self.refresh_surf() self.refresh_surf()
for x in range(int(self.screen_width)): for x in range(int(self.screen_width)):
for y in range(int(self.screen_height)): for y in range(int(self.screen_height)):
if not self.point_check_collision(x, y): if not self.point_check_collision(x, y):
@ -112,7 +78,6 @@ class Map:
), ),
) )
pygame.image.save(self.surf, "map.png") pygame.image.save(self.surf, "map.png")
# av1 = ( # av1 = (
# Image.open("rock.jpg") # Image.open("rock.jpg")
# .convert("RGB") # .convert("RGB")

View file

@ -2,10 +2,6 @@ from typing import Callable, Tuple
from pydantic import BaseModel from pydantic import BaseModel
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
screen_sizes = [ screen_sizes = [
(480, 360), # 360p (480, 360), # 360p
@ -114,7 +110,7 @@ class Menu:
def handle_events(self, events): def handle_events(self, events):
self.hamburger.handle_events(self, events) self.hamburger.handle_events(self, events)
for event in events: for event in events:
if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open: if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # Left mouse button if event.button == 1: # Left mouse button
self.handle_click() self.handle_click()
@ -130,6 +126,7 @@ class Menu:
class Hamburger: class Hamburger:
def __init__(self, game): def __init__(self, game):
self.game = game self.game = game
self.hamburger_width = 50 self.hamburger_width = 50
self.bar_height = self.hamburger_width / 4 self.bar_height = self.hamburger_width / 4

View file

@ -12,34 +12,19 @@ HeroFactory = _optional_import_(
class Player: class Player:
def __init__(self, game): def __init__(self, game):
hero = HeroFactory().build( hero = HeroFactory().build(size=25, x=100, y=100)
size=25,
x=100,
y=100,
flashlight_strength=1000,
lanturn_strength=100,
flashlight_angle=0,
)
self.hero = HeroCreate(**hero.dict()).post() self.hero = HeroCreate(**hero.dict()).post()
self.hero.size = 64
self.game = game self.game = game
self.others = [] # Heros(heros=[]) self.others = Heros(heros=[])
self.width = 16 self.width = 16
self.height = 16 self.height = 16
self.white = (255, 255, 255) self.white = (255, 255, 255)
self.x = self.game.screen.get_width() / 2 self.x = self.game.screen.get_width() / 2
self.y = self.game.screen.get_height() / 2 self.y = self.game.screen.get_height() / 2
self.speed = 10 self.speed = 5
self.max_speed = 10 self.max_speed = 5
self.image = pygame.image.load("creeper.png").convert_alpha() self.image = pygame.image.load("player.png").convert_alpha()
self.pet_image = pygame.image.load("pet.png").convert_alpha()
self.image = pygame.transform.scale(
self.image, (self.hero.size, self.hero.size)
)
self.pet_image = pygame.transform.scale(
self.pet_image, (self.hero.size/1.5, self.hero.size/2)
)
self.x_last = self.x self.x_last = self.x
self.y_last = self.y self.y_last = self.y
self.hitbox_surface = pygame.Surface((self.width, self.height)) self.hitbox_surface = pygame.Surface((self.width, self.height))
@ -52,27 +37,18 @@ class Player:
self.moving_down = False self.moving_down = False
self.moving_left = False self.moving_left = False
self.moving_right = False self.moving_right = False
self.joysticks = {}
def rename_hero(self): def rename_hero(self):
old_hero = self.hero
hero = HeroFactory().build( hero = HeroFactory().build(
size=self.hero.size, size=self.hero.size, x=self.hero.x, y=self.hero.y, id=old_hero.id
x=self.hero.x,
y=self.hero.y,
id=self.hero.id,
flashlight_strength=self.hero.flashlight_strength,
lanturn_strength=self.hero.lanturn_strength,
) )
self.hero = HeroUpdate(**hero.dict()).update() self.hero = HeroCreate(**hero.dict()).post()
def quit(self): def quit(self):
try: try:
# session = get_config().database.session HeroDelete(id=self.hero.id).delete()
# hero = session.get(Hero, self.hero.id) except:
# session.delete(hero)
# session.commit()
HeroDelete.delete(id=self.hero.id)
except RuntimeError:
pass pass
def handle_events(self): def handle_events(self):
@ -176,16 +152,13 @@ class Player:
self.pos = pygame.math.Vector2(self.hero.x, self.hero.y) self.pos = pygame.math.Vector2(self.hero.x, self.hero.y)
if self.game.map.point_check_collision(self.pos.x, self.pos.y): if self.game.map.point_check_collision(self.pos.x, self.pos.y):
start_pos = pygame.math.Vector2(self.x_last, self.y_last) start_pos = pygame.math.Vector2(self.x_last, self.y_last)
end_pos = pygame.math.Vector2(self.hero.x, self.hero.y) end_pos = pygame.math.Vector2(self.hero.x, self.hero.y)
movement_vector = end_pos - start_pos movement_vector = end_pos - start_pos
try: try:
movement_direction = movement_vector.normalize() movement_direction = movement_vector.normalize()
except ValueError: except:
end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
movement_vector = end_pos - start_pos
movement_direction = movement_vector.normalize()
except ZeroDivisionError:
end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128) end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
movement_vector = end_pos - start_pos movement_vector = end_pos - start_pos
movement_direction = movement_vector.normalize() movement_direction = movement_vector.normalize()
@ -208,7 +181,7 @@ class Player:
self.x_last = self.hero.x self.x_last = self.hero.x
self.y_last = self.hero.y self.y_last = self.hero.y
if self.game.ticks % 60 == 0 or self.game.ticks == 0: if self.game.ticks % 5 == 0 or self.game.ticks == 0:
console.print("updating") console.print("updating")
update = HeroUpdate(**self.hero.dict(exclude_unset=True)) update = HeroUpdate(**self.hero.dict(exclude_unset=True))
console.print(update) console.print(update)
@ -227,35 +200,20 @@ class Player:
) )
def render(self): def render(self):
for other in self.others.__root__: for other in self.others.heros:
if other.id != self.hero.id: if other.id != self.hero.id:
# put self.image on the game.screen pygame.draw.circle(
self.game.screen.blit( self.game.screen, (255, 0, 0), (other.x, other.y), other.size
self.image,
(other.x - other.size / 2, other.y - other.size / 2),
) )
# pygame.draw.circle(
# self.game.screen, (255, 0, 0), (other.x, other.y), other.size
# )
self.game.screen.blit( self.game.screen.blit(
self.game.font.render(other.name, False, (255, 255, 255), 1), self.game.font.render(other.name, False, (255, 255, 255), 1),
(other.x - other.size / 2, other.y + other.size / 2), (other.x, other.y),
)
self.game.screen.blit(
self.image,
(self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
)
self.game.screen.blit(
self.pet_image,
(self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
) )
# pygame.draw.circle( pygame.draw.circle(
# self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
# ) )
self.game.screen.blit( self.game.screen.blit(
self.game.font.render(self.hero.name, False, (255, 255, 255), 1), self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
(self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2), (self.hero.x, self.hero.y),
) )

View file

@ -1,12 +1,12 @@
from typing import Dict, Optional from typing import Optional
from fastapi import HTTPException
import httpx import httpx
import pydantic
from pydantic import BaseModel from pydantic import BaseModel
from sqlmodel import Field, SQLModel from sqlmodel import Field, Relationship, SQLModel, Session, select
from learn_sql_model.config import config from learn_sql_model.config import config
from learn_sql_model.optional import optional from learn_sql_model.models.pet import Pet
class HeroBase(SQLModel, table=False): class HeroBase(SQLModel, table=False):
@ -14,27 +14,16 @@ class HeroBase(SQLModel, table=False):
secret_name: str secret_name: str
x: int x: int
y: int y: int
size: Optional[int] size: int
flashlight_strength: Optional[int] = 1000 age: Optional[int] = None
flashlight_angle: Optional[int] = 0 shoe_size: Optional[int] = None
lanturn_strength: Optional[int] = 100
# age: Optional[int] = None
# shoe_size: Optional[int] = None
# pet_id: Optional[int] = Field(default=None, foreign_key="pet.id") pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
# pet: Optional[Pet] = Relationship(back_populates="hero") pet: Optional[Pet] = Relationship(back_populates="hero")
@pydantic.validator("size", pre=True, always=True)
def validate_size(cls, v):
if v is None:
return 50
if v <= 0:
raise ValueError("size must be > 0")
return v
class Hero(HeroBase, table=True): class Hero(HeroBase, table=True):
id: int = Field(default=None, primary_key=True) id: Optional[int] = Field(default=None, primary_key=True)
class HeroCreate(HeroBase): class HeroCreate(HeroBase):
@ -59,46 +48,87 @@ class HeroRead(HeroBase):
cls, cls,
id: int, id: int,
) -> Hero: ) -> Hero:
r = httpx.get(f"{config.api_client.url}/hero/{id}") with config.database.session as session:
if r.status_code != 200: hero = session.get(Hero, id)
raise RuntimeError(f"{r.status_code}:\n {r.text}") if not hero:
return HeroRead.parse_obj(r.json()) raise HTTPException(status_code=404, detail="Hero not found")
return hero
class Heros(BaseModel): class Heros(BaseModel):
__root__: list[Hero] heros: list[Hero]
@classmethod @classmethod
def list( def list(
self, self,
where=None,
offset=0,
limit=None,
session: Session = None,
) -> Hero: ) -> Hero:
# with config.database.session as session:
def get_heros(session, where, offset, limit):
statement = select(Hero)
if where != "None" and where is not None:
from sqlmodel import text
statement = statement.where(text(where))
statement = statement.offset(offset).limit(limit)
heros = session.exec(statement).all()
return Heros(heros=heros)
if session is None:
r = httpx.get(f"{config.api_client.url}/heros/") r = httpx.get(f"{config.api_client.url}/heros/")
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Heros.parse_obj({"__root__": r.json()}) return Heros.parse_obj(r.json())
return get_heros(session, where, offset, limit)
@optional class HeroUpdate(SQLModel):
class HeroUpdate(HeroBase): # id is required to update the hero
id: int id: int
def update(self) -> Hero: # all other fields, must match the model, but with Optional default None
name: Optional[str] = None
secret_name: Optional[str] = None
age: Optional[int] = None
shoe_size: Optional[int] = None
x: int
y: int
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="hero")
def update(self, session: Session = None) -> Hero:
if session is not None:
db_hero = session.get(Hero, self.id)
if not db_hero:
raise HTTPException(status_code=404, detail="Hero not found")
for key, value in self.dict(exclude_unset=True).items():
setattr(db_hero, key, value)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
return db_hero
r = httpx.patch( r = httpx.patch(
f"{config.api_client.url}/hero/", f"{config.api_client.url}/hero/",
json=self.dict(exclude_none=True), json=self.dict(),
) )
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Hero.parse_obj(r.json())
class HeroDelete(BaseModel): class HeroDelete(BaseModel):
id: int id: int
@classmethod def delete(self) -> Hero:
def delete(self, id: int) -> Dict[str, bool]:
r = httpx.delete( r = httpx.delete(
f"{config.api_client.url}/hero/{id}", f"{config.api_client.url}/hero/{self.id}",
) )
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")

View file

@ -1,8 +1,5 @@
from typing import List, Optional
import textwrap import textwrap
import inspect
from pydantic import BaseModel
def _optional_import_( def _optional_import_(
module: str, module: str,
@ -64,33 +61,3 @@ def _optional_import_(
self._failed_import() self._failed_import()
return _failed_import() return _failed_import()
# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
# def decorator(cls):
# def wrapper(*args, **kwargs):
# if fields is None:
# fields = cls.__fields__
# if required is None:
# required = []
#
# for field in fields:
# if field not in required:
# cls.__fields__[field].required = False
# return _cls
# return wrapper
# return decorator
#
#
def optional(*fields):
def dec(_cls):
for field in fields:
_cls.__fields__[field].required = False
return _cls
if fields and inspect.isclass(fields[0]) and issubclass(fields[0], BaseModel):
cls = fields[0]
fields = cls.__fields__
return dec(cls)
return dec

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 804 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 809 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 803 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

View file

@ -1,18 +0,0 @@
from locust import HttpUser, between, task
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate
class QuickstartUser(HttpUser):
wait_time = between(1, 2)
@task
def hello_world(self):
self.client.get("/hero/1")
self.client.get("/heros/")
@task(3)
def create_hero(self):
hero = HeroFactory().build()
HeroCreate(**hero.dict()).post()

View file

@ -1,58 +0,0 @@
import random
from locust import FastHttpUser, task
from learn_sql_model.config import get_config
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
config = get_config()
class QuickstartUser(FastHttpUser):
# wait_time = between(1, 2)
host = "http://localhost:5000"
# host = "https://waylonwalker.com"
def on_start(self):
self.client.verify = False
@task(6)
def get_a_hero(self):
# heros = Heros.list()
id = 1
# id = random.choice(heros.__root__).id
self.client.get(f"/hero/{id}")
# @task(2)
# def get_all_hero(self):
# self.client.get("/heros/")
@task
def create_hero(self):
hero = HeroFactory().build()
hero_create = HeroCreate(**hero.dict()).post()
self.client.post(
f"{config.api_client.url}/hero/",
json=hero_create.dict(),
)
@task(3)
def update_hero(self):
hero = HeroFactory().build()
hero_update = HeroUpdate(id=1, name=hero.name)
self.client.patch(
"/hero/",
json=hero_update.dict(exclude_none=True),
)
@task
def delete_hero(self):
heros = Heros.list()
id = random.choice(heros.__root__).id
self.client.delete(
f"/hero/{id}",
)

View file

BIN
map.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

BIN
micro

Binary file not shown.

View file

@ -79,7 +79,7 @@ def run_migrations_online() -> None:
context.configure( context.configure(
connection=connection, connection=connection,
target_metadata=target_metadata, target_metadata=target_metadata,
render_as_batch=True, render_as_batch=False,
version_table=f'{config.get_main_option("project")}_alembic_version', version_table=f'{config.get_main_option("project")}_alembic_version',
) )

View file

@ -8,9 +8,6 @@ Create Date: ${create_date}
from alembic import op from alembic import op
import sqlalchemy as sa import sqlalchemy as sa
import sqlmodel import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
${imports if imports else ""} ${imports if imports else ""}
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
@ -22,8 +19,6 @@ depends_on = ${repr(depends_on)}
def upgrade() -> None: def upgrade() -> None:
${upgrades if upgrades else "pass"} ${upgrades if upgrades else "pass"}
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None: def downgrade() -> None:

View file

@ -1,32 +0,0 @@
"""add x and y
Revision ID: 3555f61aaa79
Revises: 79972ec5f79d
Create Date: 2023-06-22 15:03:27.338959
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "3555f61aaa79"
down_revision = "79972ec5f79d"
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
# ### end Alembic commands ###
# generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
# generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("hero", "y")
op.drop_column("hero", "x")
# ### end Alembic commands ###

View file

@ -1,68 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_3555f61aaa79.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| 79972ec5f79d |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y |
|------|-------------|----|---|---|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 39 KiB

View file

@ -1,65 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_79972ec5f79d.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 0 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id |
|------|-------------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 34 KiB

View file

@ -1,45 +0,0 @@
"""int
Revision ID: 79972ec5f79d
Revises:
Create Date: 2023-06-22 15:02:20.292322
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = "79972ec5f79d"
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"hero",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"pet",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("birthday", sa.DateTime(), nullable=True),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###
# generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
# generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("pet")
op.drop_table("hero")
# ### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""add hero.lighting
Revision ID: a1cd0a1947be
Revises: c79214cdc7b3
Create Date: 2023-06-28 19:43:47.108749
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'a1cd0a1947be'
down_revision = 'c79214cdc7b3'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'lanturn_strength')
op.drop_column('hero', 'flashlight_strength')
# ### end Alembic commands ###

View file

@ -1,75 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_a1cd0a1947be.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| c79214cdc7b3 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
### Records Count
The table hero contains 4 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -0,0 +1,29 @@
"""add birthday
Revision ID: a9bb6625c57b
Revises: c8516c888495
Create Date: 2023-05-25 19:00:58.137464
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'a9bb6625c57b'
down_revision = 'c8516c888495'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('pet', sa.Column('birthday', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('pet', 'birthday')
# ### end Alembic commands ###

View file

@ -1,74 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_c79214cdc7b3.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| 3555f61aaa79 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size |
|------|-------------|----|---|---|------|
| tight-gold | successful-health | 1 | 6430 | 6231 | None |
| hard-rope | green-research | 2 | 1395 | 2865 | None |
| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
| huge-library | adult-body | 4 | 656 | 2377 | None |
| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
### Records Count
The table hero contains 1572 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 41 KiB

View file

@ -0,0 +1,44 @@
"""init
Revision ID: c8516c888495
Revises:
Create Date: 2023-05-25 18:42:37.057225
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
# revision identifiers, used by Alembic.
revision = 'c8516c888495'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('pet',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('hero',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('age', sa.Integer(), nullable=True),
sa.Column('shoe_size', sa.Integer(), nullable=True),
sa.Column('pet_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['pet_id'], ['pet.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('hero')
op.drop_table('pet')
# ### end Alembic commands ###

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: d79dd8e699d1
Revises: e1af975310a1
Create Date: 2023-06-28 19:54:19.322431
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'd79dd8e699d1'
down_revision = 'e1af975310a1'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'flashlight_angle')
# ### end Alembic commands ###

View file

@ -1,72 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_d79dd8e699d1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| e1af975310a1 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
|------|-------------|----|---|---|------|---------------------|------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
| flashlight_angle | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 61 KiB

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: e1af975310a1
Revises: a1cd0a1947be
Create Date: 2023-06-28 19:53:18.068873
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'e1af975310a1'
down_revision = 'a1cd0a1947be'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View file

@ -1,71 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_e1af975310a1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| a1cd0a1947be |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -1,34 +1,33 @@
"""add hero.size """add x, y, size
Revision ID: c79214cdc7b3 Revision ID: e26398d96dd0
Revises: 3555f61aaa79 Revises: a9bb6625c57b
Create Date: 2023-06-28 11:39:02.606001 Create Date: 2023-06-10 18:37:04.751553
""" """
from alembic import op from alembic import op
import sqlalchemy as sa import sqlalchemy as sa
import sqlmodel import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'c79214cdc7b3' revision = 'e26398d96dd0'
down_revision = '3555f61aaa79' down_revision = 'a9bb6625c57b'
branch_labels = None branch_labels = None
depends_on = None depends_on = None
def upgrade() -> None: def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True)) op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=False))
# ### end Alembic commands ### # ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None: def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'size') op.drop_column('hero', 'size')
op.drop_column('hero', 'y')
op.drop_column('hero', 'x')
# ### end Alembic commands ### # ### end Alembic commands ###

View file

@ -1,37 +0,0 @@
# # Import smtplib for the actual sending function
# import smtplib
# # Import the email modules we'll need
# from email.mime.text import MIMEText
# # Open a plain text file for reading. For this example, assume that
# # the text file contains only ASCII characters.
# # with open(textfile, 'rb') as fp:
# # # Create a text/plain message
# # msg = MIMEText(fp.read())
# msg = MIMEText("hello there", "plain", "utf-8")
# # me == the sender's email address
# # you == the recipient's email address
# me = "waylon@waylonwalker.com"
# you = "3195728809@msg.fi.google.com"
# msg["Subject"] = "Python SMTP test"
# msg["From"] = me
# msg["To"] = you
# # Send the message via our own SMTP server, but don't include the
# # envelope header.
# s = smtplib.SMTP("localhost")
# s.sendmail(me, [you], msg.as_string())
# s.quit()
import requests
requests.post(
"https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
auth=("api", "YOUR_API_KEY"),
data={
"from": "Excited User <mailgun@YOUR_DOMAIN_NAME>",
"to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
"subject": "Hello",
"text": "Testing some Mailgun awesomness!",
},
)

BIN
pet.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 738 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

View file

@ -24,14 +24,13 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: PyPy",
] ]
dependencies = [ dependencies = [
"black",
"python-socketio[client]", "python-socketio[client]",
"anyconfig", "anyconfig",
"copier", "copier",
"engorgio", "engorgio",
"fastapi", "fastapi",
"httpx", "httpx",
"pydantic<2.0.0", "pydantic[dotenv]",
"pyflyby", "pyflyby",
"pyinstaller", "pyinstaller",
"rich", "rich",

23
rect.py
View file

@ -1,23 +0,0 @@
import pygame
pygame.init()
screen = pygame.display.set_mode((500, 500))
pygame.display.set_caption("draw a square")
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
surface = pygame.Surface((500, 500))
surface.fill((255, 0, 0))
color = (0, 0, 255)
rect = (200, 200, 100, 100)
pygame.draw.rect(surface, color, rect)
screen.blit(surface, (0, 0))
pygame.display.flip()

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>ER Diagram</title>
<!-- Include d3.js -->
<script src="https://d3js.org/d3.v6.min.js"></script>
<style>
body {
margin: 0;
}
.table {
font-family: Arial, sans-serif;
font-size: 14px;
cursor: pointer;
}
.table-name {
font-weight: bold;
font-size: 16px;
}
.foreign-key {
fill: #b30000;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 2px;
fill: none;
}
.link-curved-path {
pointer-events: none;
}
</style>
</head>
<body>
<div id="er-diagram"></div>
<script>
// Generate ER diagram
const tables = {{tables}};
// --- D3 entity-relationship diagram (tail of an inline <script>) ---
// NOTE(review): `tables`, the d3 global, and the surrounding <script>/<body>
// are defined earlier in the template, outside this chunk.
const links = {{links}};  // Jinja-injected: [{source, target, source_col, target_col}, ...] -- presumably; verify against the template context
const width = window.innerWidth;
const height = window.innerHeight;
const tableElemWidth = 120;  // fixed pixel width of every table box
// Box height: one 20px row per column plus one header row for the table name.
const tableElemHeight = d => 20 * (d.columns.length + 1);
// Root SVG sized to the viewport; `g` is the single pannable/zoomable layer.
let svg = d3.select("#er-diagram")
    .append("svg")
    .attr("width", width)
    .attr("height", height);
let g = svg.append("g");
// One <path> per foreign-key link; geometry is filled in on each tick below.
let linkGroup = g.selectAll(".link")
    .data(links)
    .join("path")
    .attr("class", "link");
// One <g> per table. Clicking toggles the "collapsed" class, which the tick
// handler uses to hide column labels (CSS presumably styles the rest).
let tableGroup = g.selectAll(".table")
    .data(tables)
    .join("g")
    .attr("class", "table")
    .classed("collapsed", false)
    .on("click", (event, d) => {
        // Re-reads the current class state so repeated clicks keep toggling.
        d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
    });
// Pan/zoom on the whole diagram, clamped to 10%-400% scale.
let zoomBehavior = d3.zoom()
    .scaleExtent([0.1, 4])
    .on("zoom", function (event) {
        g.attr("transform", event.transform);
    });
svg.call(zoomBehavior);
// Background rectangle per table, sized to fit all column rows.
// NOTE(review): height does not shrink when a table is collapsed -- confirm
// whether CSS handles that or it is a known cosmetic gap.
let rect = tableGroup.append("rect")
    .attr("width", tableElemWidth)
    .attr("height", tableElemHeight)
    .attr("fill", "#eee");
// Table name in the 20px header row.
let text = tableGroup.append("text")
    .attr("class", "table-name")
    .attr("x", 10)
    .attr("y", 20)
    .text(d => d.name);
// One label per column; columns that appear as the `from` side of any FK get
// the extra "foreign-key" class for styling.
let columnText = tableGroup.selectAll(".column")
    .data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
    .join("text")
    .attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
    .attr("x", 10)
    .attr("y", (d, i) => 40 + i * 20)  // rows start below the 20px header
    .text(d => d.name);
// Physics simulation and force layout
// Link force joins nodes by table name; charge repels tables; x/y forces
// gently pull the layout toward the viewport center.
let simulation = d3.forceSimulation(tables)
    .force("link", d3.forceLink(links).id(d => d.name).distance(200))
    .force("charge", d3.forceManyBody().strength(-800))
    .force("x", d3.forceX(width / 2).strength(0.1))
    .force("y", d3.forceY(height / 2).strength(0.1))
    .on("tick", () => {
        // Move each table group to its simulated position.
        tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
        // Redraw each FK link as a cubic Bezier from the right edge of the
        // source column's row to the left edge of the target column's row.
        linkGroup.attr("d", d => {
            const srcX = d.source.x + tableElemWidth;
            // NOTE(review): findIndex yields -1 if source_col/target_col is
            // not in the column list, anchoring the link at the header row --
            // confirm the link data always names real columns.
            const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
            const tgtX = d.target.x;
            const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
            const deltaX = tgtX - srcX;
            const deltaY = tgtY - srcY;
            const curveFactor = 50;  // vertical bow of the curve, in px
            const curveY = deltaY < 0 ? -curveFactor : curveFactor;
            return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
        });
        // Hide column labels of collapsed tables; `null` restores the
        // stylesheet's display value for expanded ones.
        columnText.style("display", (d, i, nodes) => {
            return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
        });
    });
</script>
</body>
</html>

View file

@ -1,89 +1,86 @@
from fastapi import APIRouter, Depends, HTTPException from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select from sqlmodel import SQLModel, Session
from learn_sql_model.api.websocket_connection_manager import manager from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session from learn_sql_model.config import get_config, get_session
from learn_sql_model.models.{{ modelname }} import {{ modelname }}, {{ modelname }}Create, {{ modelname }}Read, {{ modelname }}Update, {{ modelname }}s from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}, {{modelname}}Create, {{modelname}}Read, {{modelname}}Update
{{ modelname }}_router = APIRouter() {{modelname.lower()}}_router = APIRouter()
@{{ modelname }}_router.on_event("startup") @{{modelname.lower()}}_router.on_event("startup")
def on_startup() -> None: def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine) SQLModel.metadata.create_all(get_config().database.engine)
...
@{{ modelname }}_router.get("/{{ modelname }}/{{{ modelname }}_id}") @{{modelname.lower()}}_router.get("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
def get_{{ modelname }}( async def get_{{modelname.lower()}}(
*, *,
session: Session = Depends(get_session), session: Session = Depends(get_session),
{{ modelname }}_id: int, {{modelname.lower()}}_id: int,
) -> {{ modelname }}Read: ) -> {{modelname}}Read:
"get one {{ modelname }}" "get one {{modelname.lower()}}"
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id) {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
if not {{ modelname }}: if not {{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found") raise HTTPException(status_code=404, detail="{{modelname}} not found")
return {{ modelname }} return {{modelname.lower()}}
@{{ modelname }}_router.post("/{{ modelname }}/") @{{modelname.lower()}}_router.post("/{{modelname.lower()}}/")
def post_{{ modelname }}( async def post_{{modelname.lower()}}(
*, *,
session: Session = Depends(get_session), session: Session = Depends(get_session),
{{ modelname }}: {{ modelname }}Create, {{modelname.lower()}}: {{modelname}}Create,
) -> {{ modelname }}Read: ) -> {{modelname}}Read:
"create a {{ modelname }}" "read all the {{modelname.lower()}}s"
db_{{ modelname }} = {{ modelname }}.from_orm({{ modelname }}) db_{{modelname.lower()}} = {{modelname}}.from_orm({{modelname.lower()}})
session.add(db_{{ modelname }}) session.add(db_{{modelname.lower()}})
session.commit() session.commit()
session.refresh(db_{{ modelname }}) session.refresh(db_{{modelname.lower()}})
await manager.broadcast({{{ modelname }}.json()}, id=1) await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
return db_{{ modelname }} return db_{{modelname.lower()}}
@{{ modelname }}_router.patch("/{{ modelname }}/") @{{modelname.lower()}}_router.patch("/{{modelname.lower()}}/")
def patch_{{ modelname }}( async def patch_{{modelname.lower()}}(
*, *,
session: Session = Depends(get_session), session: Session = Depends(get_session),
{{ modelname }}: {{ modelname }}Update, {{modelname.lower()}}: {{modelname}}Update,
) -> {{ modelname }}Read: ) -> {{modelname}}Read:
"update a {{ modelname }}" "read all the {{modelname.lower()}}s"
db_{{ modelname }} = session.get({{ modelname }}, {{ modelname }}.id) db_{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
if not db_{{ modelname }}: if not db_{{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found") raise HTTPException(status_code=404, detail="{{modelname}} not found")
for key, value in {{ modelname }}.dict(exclude_unset=True).items(): for key, value in {{modelname.lower()}}.dict(exclude_unset=True).items():
setattr(db_{{ modelname }}, key, value) setattr(db_{{modelname.lower()}}, key, value)
session.add(db_{{ modelname }}) session.add(db_{{modelname.lower()}})
session.commit() session.commit()
session.refresh(db_{{ modelname }}) session.refresh(db_{{modelname.lower()}})
await manager.broadcast({{{ modelname }}.json()}, id=1) await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
return db_{{ modelname }} return db_{{modelname.lower()}}
@{{ modelname }}_router.delete("/{{ modelname }}/{{{ modelname }}_id}") @{{modelname.lower()}}_router.delete("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
def delete_{{ modelname }}( async def delete_{{modelname.lower()}}(
*, *,
session: Session = Depends(get_session), session: Session = Depends(get_session),
{{ modelname }}_id: int, {{modelname.lower()}}_id: int,
): ):
"delete a {{ modelname }}" "read all the {{modelname.lower()}}s"
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id) {{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
if not {{ modelname }}: if not {{modelname.lower()}}:
raise HTTPException(status_code=404, detail="{{ modelname }} not found") raise HTTPException(status_code=404, detail="{{modelname}} not found")
session.delete({{ modelname }}) session.delete({{modelname.lower()}})
session.commit() session.commit()
await manager.broadcast(f"deleted {{ modelname }} {{{ modelname }}_id}", id=1) await manager.broadcast(f"deleted {{modelname.lower()}} {{{modelname.lower()}}_id}", id=1)
return {"ok": True} return {"ok": True}
@{{ modelname }}_router.get("/{{ modelname }}s/") @{{modelname.lower()}}_router.get("/{{modelname.lower()}}s/")
def get_{{ modelname }}s( async def get_{{modelname.lower()}}s(
*, *,
session: Session = Depends(get_session), session: Session = Depends(get_session),
) -> {{ modelname }}s: ) -> list[{{modelname}}]:
"get all {{ modelname }}s" "get all {{modelname.lower()}}s"
statement = select({{ modelname }}) return {{modelname}}Read.list(session=session)
{{ modelname }}s = session.exec(statement).all()
return {{ modelname }}s(__root__={{ modelname }}s)

View file

@ -1,12 +1,14 @@
from faker import Faker from faker import Faker
from polyfactory.factories.pydantic_factory import ModelFactory from polyfactory.factories.pydantic_factory import ModelFactory
from learn_sql_model.factories.pet import PetFactory from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
from learn_sql_model.models.{{ modelname }} import {{ modelname }}
from learn_sql_model.models.pet import Pet
class {{ modelname }}Factory(ModelFactory[{{ modelname }}]): class {{modelname}}Factory(ModelFactory[{{modelname.lower()}}]):
__model__ = {{ modelname }} __model__ = {{modelname}}
__faker__ = Faker(locale="en_US") __faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True __set_as_default_factory_for_type__ = True
id = None
__random_seed__ = 10

View file

@ -1,81 +1,93 @@
from typing import Dict, Optional from typing import Optional
from fastapi import Depends, HTTPException
import httpx import httpx
from pydantic import BaseModel from pydantic import BaseModel
from sqlmodel import Field, SQLModel from sqlmodel import Field, Relationship, SQLModel, Session, select
from learn_sql_model.config import config from learn_sql_model.config import config, get_config
from learn_sql_model.models.pet import Pet
class {{ modelname }}Base(SQLModel, table=False): class {{modelname}}Base(SQLModel, table=False):
# put model attributes here
class {{ modelname }}({{ modelname }}Base, table=True): class {{modelname}}({{modelname}}Base, table=True):
id: int = Field(default=None, primary_key=True) id: Optional[int] = Field(default=None, primary_key=True)
class {{ modelname }}Create({{ modelname }}Base): class {{modelname}}Create({{modelname}}Base):
... ...
def post(self) -> {{ modelname }}: def post(self) -> {{modelname}}:
r = httpx.post( r = httpx.post(
f"{config.api_client.url}/{{ modelname }}/", f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(), json=self.dict(),
) )
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ modelname }}.parse_obj(r.json())
class {{modelname}}Read({{modelname}}Base):
class {{ modelname }}Read({{ modelname }}Base):
id: int id: int
@classmethod @classmethod
def get( def get(
cls, cls,
id: int, id: int,
) -> {{ modelname }}: ) -> {{modelname}}:
r = httpx.get(f"{config.api_client.url}/{{ modelname }}/{id}") with config.database.session as session:
if r.status_code != 200: {{modelname.lower()}} = session.get({{modelname}}, id)
raise RuntimeError(f"{r.status_code}:\n {r.text}") if not {{modelname.lower()}}:
return {{ modelname }}Read.parse_obj(r.json()) raise HTTPException(status_code=404, detail="{{modelname}} not found")
return {{modelname.lower()}}
class {{ modelname }}s(BaseModel):
__root__: list[{{ modelname }}]
@classmethod @classmethod
def list( def list(
self, self,
) -> {{ modelname }}: where=None,
r = httpx.get(f"{config.api_client.url}/{{ modelname }}s/") offset=0,
if r.status_code != 200: limit=None,
raise RuntimeError(f"{r.status_code}:\n {r.text}") session: Session = None,
return {{ modelname }}s.parse_obj({"__root__": r.json()}) ) -> {{modelname}}:
if session is None:
session = get_config().database.session
statement = select({{modelname}})
if where != "None" and where is not None:
from sqlmodel import text
statement = statement.where(text(where))
statement = statement.offset(offset).limit(limit)
{{modelname.lower()}}es = session.exec(statement).all()
return {{modelname.lower()}}es
class {{ modelname }}Update(SQLModel): class {{modelname}}Update(SQLModel):
# id is required to update the {{ modelname }} # id is required to update the {{modelname.lower()}}
id: int id: int
def update(self) -> {{ modelname }}: # all other fields, must match the model, but with Optional default None
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="{{modelname.lower()}}")
def update(self) -> {{modelname}}:
r = httpx.patch( r = httpx.patch(
f"{config.api_client.url}/{{ modelname }}/", f"{config.api_client.url}/{{modelname.lower()}}/",
json=self.dict(), json=self.dict(),
) )
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")
class {{ modelname }}Delete(BaseModel): class {{modelname}}Delete(BaseModel):
id: int id: int
@classmethod def delete(self) -> {{modelname}}:
def delete(self, id: int) -> Dict[str, bool]:
r = httpx.delete( r = httpx.delete(
f"{config.api_client.url}/{{ modelname }}/{id}", f"{config.api_client.url}/{{modelname.lower()}}/{self.id}",
) )
if r.status_code != 200: if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}") raise RuntimeError(f"{r.status_code}:\n {r.text}")

View file

@ -1,103 +1,234 @@
from typing import Optional from fastapi.testclient import TestClient
import pytest
from sqlalchemy import create_engine
from sqlmodel import SQLModel, Session, select
from sqlmodel.pool import StaticPool
from typer.testing import CliRunner
import httpx from learn_sql_model.api.app import app
from pydantic import BaseModel from learn_sql_model.config import get_config, get_session
from sqlmodel import Field, Relationship, SQLModel from learn_sql_model.factories.{{modelname.lower()}} import {{modelname}}Factory
from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
from learn_sql_model.config import config runner = CliRunner()
from learn_sql_model.models.pet import Pet client = TestClient(app)
class {{ model.lower }}Base(SQLModel, table=False): @pytest.fixture(name="session")
name: str def session_fixture():
secret_name: str engine = create_engine(
x: int "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
y: int
size: int
age: Optional[int] = None
shoe_size: Optional[int] = None
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
class {{ model.lower }}({{ model.lower }}Base, table=True):
id: Optional[int] = Field(default=None, primary_key=True)
class {{ model.lower }}Create({{ model.lower }}Base):
...
def post(self) -> {{ model.lower }}:
r = httpx.post(
f"{config.api_client.url}/{{ model.lower() }}/",
json=self.dict(),
) )
if r.status_code != 200: SQLModel.metadata.create_all(engine)
raise RuntimeError(f"{r.status_code}:\n {r.text}") with Session(engine) as session:
yield session
return {{ model.lower }}.parse_obj(r.json())
class {{ model.lower }}Read({{ model.lower }}Base): @pytest.fixture(name="client")
id: int def client_fixture(session: Session):
def get_session_override():
return session
@classmethod app.dependency_overrides[get_session] = get_session_override
def get(
cls, client = TestClient(app)
id: int, yield client
) -> {{ model.lower }}: app.dependency_overrides.clear()
r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}/{id}")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ model.lower() }}
class {{ model.lower }}s(BaseModel): def test_api_post(client: TestClient):
{{ model.lower() }}s: list[{{ model.lower }}] {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
{{modelname.lower()}}_dict = {{modelname.lower()}}.dict()
response = client.post("/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {{modelname.lower()}}_dict})
response_{{modelname.lower()}} = {{modelname}}.parse_obj(response.json())
@classmethod assert response.status_code == 200
def list( assert response_{{modelname.lower()}}.name == "Steelman"
self, assert response_{{modelname.lower()}}.age == 25
) -> {{ model.lower }}:
r = httpx.get(f"{config.api_client.url}/{{ model.lower() }}s/")
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return {{ model.lower }}s.parse_obj(r.json())
class {{ model.lower }}Update(SQLModel): def test_api_read_{{modelname.lower()}}es(session: Session, client: TestClient):
# id is required to update the {{ model.lower() }} {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
id: int {{modelname.lower()}}_2 = {{modelname}}(name="Rusty-Man", secret_name="Tommy Sharp", age=48)
session.add({{modelname.lower()}}_1)
session.add({{modelname.lower()}}_2)
session.commit()
# all other fields, must match the model, but with Optional default None response = client.get("/{{modelname.lower()}}s/")
name: Optional[str] = None data = response.json()
secret_name: Optional[str] = None
age: Optional[int] = None
shoe_size: Optional[int] = None
x: int
y: int
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id") assert response.status_code == 200
pet: Optional[Pet] = Relationship(back_populates="{{ model.lower() }}")
def update(self) -> {{ model.lower }}: assert len(data) == 2
r = httpx.patch( assert data[0]["name"] == {{modelname.lower()}}_1.name
f"{config.api_client.url}/{{ model.lower() }}/", assert data[0]["secret_name"] == {{modelname.lower()}}_1.secret_name
json=self.dict(), assert data[0]["age"] == {{modelname.lower()}}_1.age
assert data[0]["id"] == {{modelname.lower()}}_1.id
assert data[1]["name"] == {{modelname.lower()}}_2.name
assert data[1]["secret_name"] == {{modelname.lower()}}_2.secret_name
assert data[1]["age"] == {{modelname.lower()}}_2.age
assert data[1]["id"] == {{modelname.lower()}}_2.id
def test_api_read_{{modelname.lower()}}(session: Session, client: TestClient):
{{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
response = client.get(f"/{{modelname.lower()}}/999")
assert response.status_code == 404
def test_api_read_{{modelname.lower()}}_404(session: Session, client: TestClient):
{{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
response = client.get(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
data = response.json()
assert response.status_code == 200
assert data["name"] == {{modelname.lower()}}_1.name
assert data["secret_name"] == {{modelname.lower()}}_1.secret_name
assert data["age"] == {{modelname.lower()}}_1.age
assert data["id"] == {{modelname.lower()}}_1.id
def test_api_update_{{modelname.lower()}}(session: Session, client: TestClient):
{{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
response = client.patch(
f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": {{modelname.lower()}}_1.id}}
) )
if r.status_code != 200: data = response.json()
raise RuntimeError(f"{r.status_code}:\n {r.text}")
assert response.status_code == 200
assert data["name"] == "Deadpuddle"
assert data["secret_name"] == "Dive Wilson"
assert data["age"] is None
assert data["id"] == {{modelname.lower()}}_1.id
class {{ model.lower }}Delete(BaseModel): def test_api_update_{{modelname.lower()}}_404(session: Session, client: TestClient):
id: int {{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
def delete(self) -> {{ model.lower }}: response = client.patch(f"/{{modelname.lower()}}/", json={"{{modelname.lower()}}": {"name": "Deadpuddle", "id": 999}})
r = httpx.delete( assert response.status_code == 404
f"{config.api_client.url}/{{ model.lower() }}/{self.id}",
def test_delete_{{modelname.lower()}}(session: Session, client: TestClient):
{{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
response = client.delete(f"/{{modelname.lower()}}/{{{modelname.lower()}}_1.id}")
{{modelname.lower()}}_in_db = session.get({{modelname}}, {{modelname.lower()}}_1.id)
assert response.status_code == 200
assert {{modelname.lower()}}_in_db is None
def test_delete_{{modelname.lower()}}_404(session: Session, client: TestClient):
{{modelname.lower()}}_1 = {{modelname}}(name="Deadpond", secret_name="Dive Wilson")
session.add({{modelname.lower()}}_1)
session.commit()
response = client.delete(f"/{{modelname.lower()}}/999")
assert response.status_code == 404
def test_config_memory(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
) )
if r.status_code != 200: config = get_config()
raise RuntimeError(f"{r.status_code}:\n {r.text}") SQLModel.metadata.create_all(config.database.engine)
return {"ok": True} {{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
with config.database.session as session:
session.add({{modelname.lower()}})
session.commit()
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
{{modelname.lower()}}es = session.exec(select({{modelname}})).all()
assert {{modelname.lower()}}.name == "Steelman"
assert {{modelname.lower()}}.age == 25
assert len({{modelname.lower()}}es) == 1
def test_cli_get(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
)
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
{{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
with config.database.session as session:
session.add({{modelname.lower()}})
session.commit()
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "1"])
assert result.exit_code == 0
assert f"name='{{{modelname.lower()}}.name}'" in result.stdout
assert f"secret_name='{{{modelname.lower()}}.secret_name}'" in result.stdout
def test_cli_get_404(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
)
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
{{modelname.lower()}} = {{modelname}}Factory().build(name="Steelman", age=25)
with config.database.session as session:
session.add({{modelname.lower()}})
session.commit()
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
result = runner.invoke({{modelname.lower()}}_app, ["get", "--{{modelname.lower()}}-id", "999"])
assert result.exception.status_code == 404
assert result.exception.detail == "{{modelname}} not found"
def test_cli_list(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
)
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
{{modelname.lower()}}_1 = {{modelname}}Factory().build(name="Steelman", age=25)
{{modelname.lower()}}_2 = {{modelname}}Factory().build(name="Hunk", age=52)
with config.database.session as session:
session.add({{modelname.lower()}}_1)
session.add({{modelname.lower()}}_2)
session.commit()
session.refresh({{modelname.lower()}}_1)
session.refresh({{modelname.lower()}}_2)
result = runner.invoke({{modelname.lower()}}_app, ["list"])
assert result.exit_code == 0
assert f"name='{{{modelname.lower()}}_1.name}'" in result.stdout
assert f"secret_name='{{{modelname.lower()}}_1.secret_name}'" in result.stdout
assert f"name='{{{modelname.lower()}}_2.name}'" in result.stdout
assert f"secret_name='{{{modelname.lower()}}_2.secret_name}'" in result.stdout

View file

@ -1,16 +1,14 @@
from learn_sql_model.console import console from learn_sql_model.console import console
def test_default_console_is_quiet(capsys):
console.print("hello")
captured = capsys.readouterr()
assert captured.out == ""
def test_default_console_not_quiet(capsys): def test_default_console_not_quiet(capsys):
console.quiet = False
console.print("hello") console.print("hello")
captured = capsys.readouterr() captured = capsys.readouterr()
assert captured.out == "hello\n" assert captured.out == "hello\n"
def test_default_console_is_quiet(capsys):
console.quiet = True
console.print("hello")
captured = capsys.readouterr()
assert captured.out == ""

View file

@ -1,16 +1,15 @@
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
import pytest import pytest
from sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlmodel import SQLModel, Session from sqlmodel import SQLModel, Session, select
from sqlmodel.pool import StaticPool from sqlmodel.pool import StaticPool
from typer.testing import CliRunner from typer.testing import CliRunner
from learn_sql_model.api.app import app from learn_sql_model.api.app import app
from learn_sql_model.cli.hero import hero_app from learn_sql_model.cli.hero import hero_app
from learn_sql_model.config import get_session from learn_sql_model.config import get_config, get_session
from learn_sql_model.factories.hero import HeroFactory from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models import hero as hero_models from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead
from learn_sql_model.models.hero import Hero, HeroCreate, HeroDelete, HeroRead
runner = CliRunner() runner = CliRunner()
client = TestClient(app) client = TestClient(app)
@ -39,19 +38,21 @@ def client_fixture(session: Session):
def test_api_post(client: TestClient): def test_api_post(client: TestClient):
hero = HeroFactory().build() hero = HeroFactory().build(name="Steelman", age=25)
hero_dict = hero.dict() hero_dict = hero.dict()
response = client.post("/hero/", json=hero_dict) response = client.post("/hero/", json={"hero": hero_dict})
response_hero = Hero.parse_obj(response.json()) response_hero = Hero.parse_obj(response.json())
assert response.status_code == 200 assert response.status_code == 200
assert response_hero.name == hero.name assert response_hero.name == "Steelman"
assert response_hero.age == 25
def test_api_read_heros(session: Session, client: TestClient): def test_api_read_heroes(session: Session, client: TestClient):
heros = HeroFactory().batch(5) hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
for hero in heros: hero_2 = Hero(name="Rusty-Man", secret_name="Tommy Sharp", age=48)
session.add(hero) session.add(hero_1)
session.add(hero_2)
session.commit() session.commit()
response = client.get("/heros/") response = client.get("/heros/")
@ -59,31 +60,35 @@ def test_api_read_heros(session: Session, client: TestClient):
assert response.status_code == 200 assert response.status_code == 200
assert len(data) == 5 assert len(data) == 2
for d in data: assert data[0]["name"] == hero_1.name
api_hero = Hero.parse_obj(d) assert data[0]["secret_name"] == hero_1.secret_name
my_hero = [hero for hero in heros if hero.id == api_hero.id][0] assert data[0]["age"] == hero_1.age
for key, value in api_hero.dict(exclude_unset=True).items(): assert data[0]["id"] == hero_1.id
assert getattr(my_hero, key) == value assert data[1]["name"] == hero_2.name
assert data[1]["secret_name"] == hero_2.secret_name
assert data[1]["age"] == hero_2.age
assert data[1]["id"] == hero_2.id
def test_api_read_hero(session: Session, client: TestClient): def test_api_read_hero(session: Session, client: TestClient):
hero = HeroFactory().build() hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero) session.add(hero_1)
session.commit() session.commit()
response = client.get(f"/hero/{hero.id}") response = client.get(f"/hero/{hero_1.id}")
data = response.json() data = response.json()
response_hero = Hero.parse_obj(data)
assert response.status_code == 200 assert response.status_code == 200
for key, value in hero.dict(exclude_unset=True).items(): assert data["name"] == hero_1.name
assert getattr(response_hero, key) == value assert data["secret_name"] == hero_1.secret_name
assert data["age"] == hero_1.age
assert data["id"] == hero_1.id
def test_api_read_hero_404(session: Session, client: TestClient): def test_api_read_hero_404(session: Session, client: TestClient):
hero = HeroFactory().build() hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero) session.add(hero_1)
session.commit() session.commit()
response = client.get(f"/hero/999") response = client.get(f"/hero/999")
@ -91,33 +96,33 @@ def test_api_read_hero_404(session: Session, client: TestClient):
def test_api_update_hero(session: Session, client: TestClient): def test_api_update_hero(session: Session, client: TestClient):
hero = HeroFactory().build() hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
new_hero = HeroFactory().build()
session.add(hero)
session.commit()
response = client.patch(
f"/hero/", json={"id": hero.id, **new_hero.dict(exclude={"id"})}
)
data = response.json()
response_hero = Hero.parse_obj(data)
assert response.status_code == 200
for key, value in hero.dict(exclude_unset=True).items():
assert getattr(response_hero, key) == value
def test_api_update_hero_404(session: Session, client: TestClient):
hero_1 = HeroFactory().build()
session.add(hero_1) session.add(hero_1)
session.commit() session.commit()
response = client.patch(f"/hero/", json={"name": "Deadpuddle", "id": 999}) response = client.patch(
f"/hero/", json={"hero": {"name": "Deadpuddle", "id": hero_1.id}}
)
data = response.json()
assert response.status_code == 200
assert data["name"] == "Deadpuddle"
assert data["secret_name"] == "Dive Wilson"
assert data["age"] is None
assert data["id"] == hero_1.id
def test_api_update_hero_404(session: Session, client: TestClient):
hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1)
session.commit()
response = client.patch(f"/hero/", json={"hero": {"name": "Deadpuddle", "id": 999}})
assert response.status_code == 404 assert response.status_code == 404
def test_delete_hero(session: Session, client: TestClient): def test_delete_hero(session: Session, client: TestClient):
hero_1 = HeroFactory().build() hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1) session.add(hero_1)
session.commit() session.commit()
@ -131,7 +136,7 @@ def test_delete_hero(session: Session, client: TestClient):
def test_delete_hero_404(session: Session, client: TestClient): def test_delete_hero_404(session: Session, client: TestClient):
hero_1 = HeroFactory().build() hero_1 = Hero(name="Deadpond", secret_name="Dive Wilson")
session.add(hero_1) session.add(hero_1)
session.commit() session.commit()
@ -139,181 +144,135 @@ def test_delete_hero_404(session: Session, client: TestClient):
assert response.status_code == 404 assert response.status_code == 404
def test_cli_get(mocker): def test_config_memory(mocker):
hero = HeroFactory().build() mocker.patch(
hero = HeroRead(**hero.dict(exclude_none=True)) "learn_sql_model.config.Database.engine",
httpx = mocker.patch.object(hero_models, "httpx") new_callable=lambda: create_engine(
httpx.get.return_value = mocker.Mock() "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
httpx.get.return_value.status_code = 200 ),
httpx.get.return_value.json.return_value = hero.dict() )
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
hero = HeroFactory().build(name="Steelman", age=25)
with config.database.session as session:
session.add(hero)
session.commit()
hero = session.get(Hero, hero.id)
heroes = session.exec(select(Hero)).all()
assert hero.name == "Steelman"
assert hero.age == 25
assert len(heroes) == 1
result = runner.invoke(hero_app, ["get", "1"])
def test_cli_get(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
)
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
hero = HeroFactory().build(name="Steelman", age=25)
with config.database.session as session:
session.add(hero)
session.commit()
hero = session.get(Hero, hero.id)
result = runner.invoke(hero_app, ["get", "--hero-id", "1"])
assert result.exit_code == 0 assert result.exit_code == 0
for key, value in hero.dict(exclude_unset=True).items(): assert f"name='{hero.name}'" in result.stdout
if type(value) == str: assert f"secret_name='{hero.secret_name}'" in result.stdout
assert f"{key}='{value}'" in result.stdout
elif type(value) == int:
assert f"{key}={value}" in result.stdout
assert httpx.get.call_count == 1
assert httpx.post.call_count == 0
assert httpx.delete.call_count == 0
def test_cli_get_404(mocker): def test_cli_get_404(mocker):
hero = HeroFactory().build() mocker.patch(
hero = HeroRead(**hero.dict(exclude_none=True)) "learn_sql_model.config.Database.engine",
httpx = mocker.patch.object(hero_models, "httpx") new_callable=lambda: create_engine(
httpx.get.return_value = mocker.Mock() "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
httpx.get.return_value.status_code = 404 ),
httpx.get.return_value.text = "Hero not found" )
httpx.get.return_value.json.return_value = hero.dict()
result = runner.invoke(hero_app, ["get", "999"]) config = get_config()
assert result.exit_code == 1 SQLModel.metadata.create_all(config.database.engine)
assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
assert httpx.get.call_count == 1 hero = HeroFactory().build(name="Steelman", age=25)
assert httpx.post.call_count == 0 with config.database.session as session:
assert httpx.delete.call_count == 0 session.add(hero)
session.commit()
hero = session.get(Hero, hero.id)
result = runner.invoke(hero_app, ["get", "--hero-id", "999"])
assert result.exception.status_code == 404
assert result.exception.detail == "Hero not found"
def test_cli_list(mocker):
    """`hero list` prints every hero returned by the (mocked) API.

    NOTE(review): this span was a fused side-by-side diff; reconstructed
    from the left/"main" column (httpx-mock style).
    """
    heros = HeroFactory().batch(5)
    httpx = mocker.patch.object(hero_models, "httpx")
    httpx.get.return_value = mocker.Mock()
    httpx.get.return_value.status_code = 200
    httpx.get.return_value.json.return_value = heros
    result = runner.invoke(hero_app, ["list"])
    assert result.exit_code == 0
    # Every explicitly-set field of every hero must appear in the output,
    # repr-style: strings quoted, ints bare.  type() (not isinstance) is
    # deliberate so bool fields are matched by neither branch.
    for hero in heros:
        for key, value in hero.dict(exclude_unset=True).items():
            if type(value) == str:
                assert f"{key}='{value}'" in result.stdout
            elif type(value) == int:
                assert f"{key}={value}" in result.stdout
def test_model_post(mocker):
    """HeroCreate.post() returns the created hero on a 200 response.

    NOTE(review): this span was a fused side-by-side diff; reconstructed
    from the left/"main" column.
    """
    hero = HeroFactory().build()
    hero_create = HeroCreate(**hero.dict())
    httpx = mocker.patch.object(hero_models, "httpx")
    httpx.post.return_value = mocker.Mock()
    httpx.post.return_value.status_code = 200
    httpx.post.return_value.json.return_value = hero.dict()
    result = hero_create.post()
    # The hero round-trips through the mocked response body unchanged.
    assert result == hero
    # Only the POST endpoint should have been hit.
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 1
    assert httpx.delete.call_count == 0
def test_model_post_500(mocker):
    """HeroCreate.post() raises RuntimeError when the API answers 500.

    NOTE(review): this span was a fused side-by-side diff; reconstructed
    from the left/"main" column.
    """
    hero = HeroFactory().build()
    hero_create = HeroCreate(**hero.dict())
    httpx = mocker.patch.object(hero_models, "httpx")
    httpx.post.return_value = mocker.Mock()
    httpx.post.return_value.status_code = 500
    httpx.post.return_value.json.return_value = hero.dict()
    with pytest.raises(RuntimeError):
        hero_create.post()
    # The failed POST is still exactly one request; nothing else is called.
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 1
    assert httpx.delete.call_count == 0
def test_model_read_hero(mocker):
    """HeroRead.get() returns the hero parsed from a 200 response.

    NOTE(review): this span was a fused side-by-side diff; reconstructed
    from the left/"main" column (httpx-mock style, consistent with the
    404 variant below).
    """
    hero = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    httpx.get.return_value = mocker.Mock()
    httpx.get.return_value.status_code = 200
    httpx.get.return_value.json.return_value = hero.dict()
    hero_read = HeroRead.get(id=hero.id)
    assert hero_read.name == hero.name
    assert hero_read.secret_name == hero.secret_name
    # Only the GET endpoint should have been hit.
    assert httpx.get.call_count == 1
    assert httpx.post.call_count == 0
    assert httpx.delete.call_count == 0
def test_model_read_hero_404(mocker):
    """HeroRead.get() raises RuntimeError when the API answers 404."""
    target = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    # Build the fake 404 response once, then hand it to the mocked GET.
    response = mocker.Mock()
    response.status_code = 404
    response.text = "Hero not found"
    httpx.get.return_value = response
    with pytest.raises(RuntimeError) as excinfo:
        HeroRead.get(id=target.id)
    assert excinfo.value.args[0] == "404: Hero not found"
    # Exactly one GET; no other verbs touched.
    assert httpx.get.call_count == 1
    assert httpx.post.call_count == 0
    assert httpx.delete.call_count == 0
def test_model_delete_hero(mocker):
    """HeroDelete.delete() returns the ok payload on a 200 response."""
    target = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    # Fake a successful DELETE whose body echoes the hero.
    response = mocker.Mock()
    response.status_code = 200
    response.json.return_value = target.dict()
    httpx.delete.return_value = response
    assert HeroDelete.delete(id=target.id) == {"ok": True}
    # Exactly one DELETE; no other verbs touched.
    assert httpx.delete.call_count == 1
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 0
def test_model_delete_hero_404(mocker):
    """HeroDelete.delete() raises RuntimeError when the API answers 404."""
    hero = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    httpx.delete.return_value = mocker.Mock()
    httpx.delete.return_value.status_code = 404
    # BUG FIX: the error text must live on the DELETE response.  The
    # original set httpx.get.return_value.text, which this code path never
    # reads, so the assertion below would have compared against an
    # auto-created Mock attribute instead of the message.
    httpx.delete.return_value.text = "Hero not found"
    with pytest.raises(RuntimeError) as e:
        HeroDelete.delete(id=hero.id)
    assert e.value.args[0] == "404: Hero not found"
    # Exactly one DELETE; no other verbs touched.
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 0
    assert httpx.delete.call_count == 1
def test_cli_delete_hero(mocker):
    """`hero delete` prints the ok payload and exits 0 on success."""
    target = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    # Fake a successful DELETE whose body echoes the hero.
    response = mocker.Mock(status_code=200)
    response.json.return_value = target.dict()
    httpx.delete.return_value = response
    result = runner.invoke(hero_app, ["delete", "--hero-id", "1"])
    assert result.exit_code == 0
    assert "{'ok': True}" in result.stdout
    # Exactly one DELETE; no other verbs touched.
    assert httpx.delete.call_count == 1
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 0
def test_cli_delete_hero_404(mocker):
    """`hero delete` on an unknown id exits 1 with the API's 404 message."""
    target = HeroFactory().build()
    httpx = mocker.patch.object(hero_models, "httpx")
    # Fake a 404 DELETE response carrying the error text.
    response = mocker.Mock(status_code=404, text="Hero not found")
    response.json.return_value = target.dict()
    httpx.delete.return_value = response
    result = runner.invoke(hero_app, ["delete", "--hero-id", "999"])
    # Typer reports the uncaught exception as exit code 1.
    assert result.exit_code == 1
    # Collapse whitespace: the message may be wrapped across lines.
    assert " ".join(result.exception.args[0].split()) == "404: Hero not found"
    # Exactly one DELETE; no other verbs touched.
    assert httpx.delete.call_count == 1
    assert httpx.get.call_count == 0
    assert httpx.post.call_count == 0

Some files were not shown because too many files have changed in this diff Show more