Compare commits

..

No commits in common. "main" and "v25" have entirely different histories.
main ... v25

89 changed files with 173 additions and 2605 deletions

View file

@ -1,978 +0,0 @@
# flyctl launch added from .gitignore
# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
### Data ###
**/*.csv
**/*.dat
**/*.efx
**/*.gbr
**/*.key
**/*.pps
**/*.ppt
**/*.pptx
**/*.sdf
**/*.tax2010
**/*.vcf
**/*.xml
### Emacs ###
# -*- mode: gitignore; -*-
**/*~
**/\#*\#
.emacs.desktop
.emacs.desktop.lock
**/*.elc
**/auto-save-list
**/tramp
**/.\#*
# Org-mode
**/.org-id-locations
**/*_archive
# flymake-mode
**/*_flymake.*
# eshell files
eshell/history
eshell/lastdir
# elpa packages
elpa
# reftex files
**/*.rel
# AUCTeX auto folder
auto
# cask packages
**/.cask
**/dist
# Flycheck
**/flycheck_*.el
# server auth directory
server
# projectiles files
**/.projectile
# directory configuration
**/.dir-locals.el
# network security
network-security.data
### Executable ###
**/*.app
**/*.bat
**/*.cgi
**/*.com
**/*.exe
**/*.gadget
**/*.jar
**/*.pif
**/*.vb
**/*.wsf
### Node ###
# Logs
**/logs
**/*.log
**/npm-debug.log*
**/yarn-debug.log*
**/yarn-error.log*
**/lerna-debug.log*
**/.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
**/pids
**/*.pid
**/*.seed
**/*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
**/lib-cov
# Coverage directory used by tools like istanbul
**/coverage
**/*.lcov
# nyc test coverage
**/.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
**/.grunt
# Bower dependency directory (https://bower.io/)
**/bower_components
# node-waf configuration
**/.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
**/build/Release
# Dependency directories
**/node_modules
**/jspm_packages
# Snowpack dependency directory (https://snowpack.dev/)
**/web_modules
# TypeScript cache
**/*.tsbuildinfo
# Optional npm cache directory
**/.npm
# Optional eslint cache
**/.eslintcache
# Optional stylelint cache
**/.stylelintcache
# Microbundle cache
**/.rpt2_cache
**/.rts2_cache_cjs
**/.rts2_cache_es
**/.rts2_cache_umd
# Optional REPL history
**/.node_repl_history
# Output of 'npm pack'
**/*.tgz
# Yarn Integrity file
**/.yarn-integrity
# dotenv environment variable files
**/.env
**/.env.development.local
**/.env.test.local
**/.env.production.local
**/.env.local
# parcel-bundler cache (https://parceljs.org/)
**/.cache
**/.parcel-cache
# Next.js build output
**/.next
**/out
# Nuxt.js build / generate output
**/.nuxt
**/dist
# Gatsby files
**/.cache
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
**/.vuepress/dist
# vuepress v2.x temp and cache directory
**/.temp
# Docusaurus cache and generated files
**/.docusaurus
# Serverless directories
**/.serverless
# FuseBox cache
**/.fusebox
# DynamoDB Local files
**/.dynamodb
# TernJS port file
**/.tern-port
# Stores VSCode versions used for testing VSCode extensions
**/.vscode-test
# yarn v2
**/.yarn/cache
**/.yarn/unplugged
**/.yarn/build-state.yml
**/.yarn/install-state.gz
**/.pnp.*
### Node Patch ###
# Serverless Webpack directories
**/.webpack
# Optional stylelint cache
# SvelteKit build / generate output
**/.svelte-kit
### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
**/.idea/**/workspace.xml
**/.idea/**/tasks.xml
**/.idea/**/usage.statistics.xml
**/.idea/**/dictionaries
**/.idea/**/shelf
# AWS User-specific
**/.idea/**/aws.xml
# Generated files
**/.idea/**/contentModel.xml
# Sensitive or high-churn files
**/.idea/**/dataSources
**/.idea/**/dataSources.ids
**/.idea/**/dataSources.local.xml
**/.idea/**/sqlDataSources.xml
**/.idea/**/dynamic.xml
**/.idea/**/uiDesigner.xml
**/.idea/**/dbnavigator.xml
# Gradle
**/.idea/**/gradle.xml
**/.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
**/cmake-build-*
# Mongo Explorer plugin
**/.idea/**/mongoSettings.xml
# File-based project format
**/*.iws
# IntelliJ
**/out
# mpeltonen/sbt-idea plugin
**/.idea_modules
# JIRA plugin
**/atlassian-ide-plugin.xml
# Cursive Clojure plugin
**/.idea/replstate.xml
# SonarLint plugin
**/.idea/sonarlint
# Crashlytics plugin (for Android Studio and IntelliJ)
**/com_crashlytics_export_strings.xml
**/crashlytics.properties
**/crashlytics-build.properties
**/fabric.properties
# Editor-based Rest Client
**/.idea/httpRequests
# Android studio 3.1+ serialized cache file
**/.idea/caches/build_file_checksums.ser
### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
**/.idea/**/sonarlint
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
**/.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
**/.idea/**/markdown-navigator.xml
**/.idea/**/markdown-navigator-enh.xml
**/.idea/**/markdown-navigator
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
**/.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
**/.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
**/.idea/**/azureSettings.xml
### Python ###
# Byte-compiled / optimized / DLL files
**/__pycache__
**/*.py[cod]
**/*$py.class
# C extensions
**/*.so
# Distribution / packaging
**/.Python
**/build
**/develop-eggs
**/downloads
**/eggs
**/.eggs
**/lib
**/lib64
**/parts
**/sdist
**/var
**/wheels
**/share/python-wheels
**/*.egg-info
**/.installed.cfg
**/*.egg
**/MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
**/*.manifest
**/*.spec
# Installer logs
**/pip-log.txt
**/pip-delete-this-directory.txt
# Unit test / coverage reports
**/htmlcov
**/.tox
**/.nox
**/.coverage
**/.coverage.*
**/nosetests.xml
**/coverage.xml
**/*.cover
**/*.py,cover
**/.hypothesis
**/.pytest_cache
**/cover
# Translations
**/*.mo
**/*.pot
# Django stuff:
**/local_settings.py
**/db.sqlite3
**/db.sqlite3-journal
# Flask stuff:
**/instance
**/.webassets-cache
# Scrapy stuff:
**/.scrapy
# Sphinx documentation
**/docs/_build
# PyBuilder
**/.pybuilder
**/target
# Jupyter Notebook
**/.ipynb_checkpoints
# IPython
**/profile_default
**/ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
**/.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
**/__pypackages__
# Celery stuff
**/celerybeat-schedule
**/celerybeat.pid
# SageMath parsed files
**/*.sage.py
# Environments
**/.venv
**/env
**/venv
**/ENV
**/env.bak
**/venv.bak
# Spyder project settings
**/.spyderproject
**/.spyproject
# Rope project settings
**/.ropeproject
# mkdocs documentation
site
# mypy
**/.mypy_cache
**/.dmypy.json
**/dmypy.json
# Pyre type checker
**/.pyre
# pytype static type analyzer
**/.pytype
# Cython debug symbols
**/cython_debug
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
**/poetry.toml
# ruff
**/.ruff_cache
# LSP config files
**/pyrightconfig.json
### SublimeText ###
# Cache files for Sublime Text
**/*.tmlanguage.cache
**/*.tmPreferences.cache
**/*.stTheme.cache
# Workspace files are user-specific
**/*.sublime-workspace
# Project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using Sublime Text
# *.sublime-project
# SFTP configuration file
**/sftp-config.json
**/sftp-config-alt*.json
# Package control specific files
**/Package Control.last-run
**/Package Control.ca-list
**/Package Control.ca-bundle
**/Package Control.system-ca-bundle
**/Package Control.cache
**/Package Control.ca-certs
**/Package Control.merged-ca-bundle
**/Package Control.user-ca-bundle
**/oscrypto-ca-bundle.crt
**/bh_unicode_properties.cache
# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
**/GitHub.sublime-settings
### Vim ###
# Swap
**/[._]*.s[a-v][a-z]
!**/*.svg # comment out if you don't need vector files
**/[._]*.sw[a-p]
**/[._]s[a-rt-v][a-z]
**/[._]ss[a-gi-z]
**/[._]sw[a-p]
# Session
**/Session.vim
**/Sessionx.vim
# Temporary
**/.netrwhist
# Auto-generated tag files
**/tags
# Persistent undo
**/[._]*.un~
### VisualStudioCode ###
**/.vscode/*
!**/.vscode/settings.json
!**/.vscode/tasks.json
!**/.vscode/launch.json
!**/.vscode/extensions.json
!**/.vscode/*.code-snippets
# Local History for Visual Studio Code
**/.history
# Built Visual Studio Code Extensions
**/*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
**/.history
**/.ionide
### VisualStudio ###
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
# User-specific files
**/*.rsuser
**/*.suo
**/*.user
**/*.userosscache
**/*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
**/*.userprefs
# Mono auto generated files
**/mono_crash.*
# Build results
**/[Dd]ebug
**/[Dd]ebugPublic
**/[Rr]elease
**/[Rr]eleases
**/x64
**/x86
**/[Ww][Ii][Nn]32
**/[Aa][Rr][Mm]
**/[Aa][Rr][Mm]64
**/bld
**/[Bb]in
**/[Oo]bj
**/[Ll]og
**/[Ll]ogs
# Visual Studio 2015/2017 cache/options directory
**/.vs
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# Visual Studio 2017 auto generated files
**/Generated\ Files
# MSTest test Results
**/[Tt]est[Rr]esult*
**/[Bb]uild[Ll]og.*
# NUnit
**/*.VisualState.xml
**/TestResult.xml
**/nunit-*.xml
# Build Results of an ATL Project
**/[Dd]ebugPS
**/[Rr]eleasePS
**/dlldata.c
# Benchmark Results
**/BenchmarkDotNet.Artifacts
# .NET Core
**/project.lock.json
**/project.fragment.lock.json
**/artifacts
# ASP.NET Scaffolding
**/ScaffoldingReadMe.txt
# StyleCop
**/StyleCopReport.xml
# Files built by Visual Studio
**/*_i.c
**/*_p.c
**/*_h.h
**/*.ilk
**/*.meta
**/*.obj
**/*.iobj
**/*.pch
**/*.pdb
**/*.ipdb
**/*.pgc
**/*.pgd
**/*.rsp
**/*.sbr
**/*.tlb
**/*.tli
**/*.tlh
**/*.tmp
**/*.tmp_proj
**/*_wpftmp.csproj
**/*.tlog
**/*.vspscc
**/*.vssscc
**/.builds
**/*.pidb
**/*.svclog
**/*.scc
# Chutzpah Test files
**/_Chutzpah*
# Visual C++ cache files
**/ipch
**/*.aps
**/*.ncb
**/*.opendb
**/*.opensdf
**/*.cachefile
**/*.VC.db
**/*.VC.VC.opendb
# Visual Studio profiler
**/*.psess
**/*.vsp
**/*.vspx
**/*.sap
# Visual Studio Trace Files
**/*.e2e
# TFS 2012 Local Workspace
**/$tf
# Guidance Automation Toolkit
**/*.gpState
# ReSharper is a .NET coding add-in
**/_ReSharper*
**/*.[Rr]e[Ss]harper
**/*.DotSettings.user
# TeamCity is a build add-in
**/_TeamCity*
# DotCover is a Code Coverage Tool
**/*.dotCover
# AxoCover is a Code Coverage Tool
**/.axoCover/*
!**/.axoCover/settings.json
# Coverlet is a free, cross platform Code Coverage Tool
**/coverage*.json
**/coverage*.xml
**/coverage*.info
# Visual Studio code coverage results
**/*.coverage
**/*.coveragexml
# NCrunch
**/_NCrunch_*
**/.*crunch*.local.xml
**/nCrunchTemp_*
# MightyMoose
**/*.mm.*
**/AutoTest.Net
# Web workbench (sass)
**/.sass-cache
# Installshield output folder
**/[Ee]xpress
# DocProject is a documentation generator add-in
**/DocProject/buildhelp
**/DocProject/Help/*.HxT
**/DocProject/Help/*.HxC
**/DocProject/Help/*.hhc
**/DocProject/Help/*.hhk
**/DocProject/Help/*.hhp
**/DocProject/Help/Html2
**/DocProject/Help/html
# Click-Once directory
**/publish
# Publish Web Output
**/*.[Pp]ublish.xml
**/*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
**/*.pubxml
**/*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
**/PublishScripts
# NuGet Packages
**/*.nupkg
# NuGet Symbol Packages
**/*.snupkg
# The packages folder can be ignored because of Package Restore
**/**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/**/[Pp]ackages/build
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
**/*.nuget.props
**/*.nuget.targets
# Microsoft Azure Build Output
**/csx
**/*.build.csdef
# Microsoft Azure Emulator
**/ecf
**/rcf
# Windows Store app package directories and files
**/AppPackages
**/BundleArtifacts
**/Package.StoreAssociation.xml
**/_pkginfo.txt
**/*.appx
**/*.appxbundle
**/*.appxupload
# Visual Studio cache files
# files ending in .cache can be ignored
**/*.[Cc]ache
# but keep track of directories ending in .cache
!**/?*.[Cc]ache
# Others
**/ClientBin
**/~$*
**/*.dbmdl
**/*.dbproj.schemaview
**/*.jfm
**/*.pfx
**/*.publishsettings
**/orleans.codegen.cs
# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
**/Generated_Code
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
**/_UpgradeReport_Files
**/Backup*
**/UpgradeLog*.XML
**/UpgradeLog*.htm
**/ServiceFabricBackup
**/*.rptproj.bak
# SQL Server files
**/*.mdf
**/*.ldf
**/*.ndf
# Business Intelligence projects
**/*.rdl.data
**/*.bim.layout
**/*.bim_*.settings
**/*.rptproj.rsuser
**/*- [Bb]ackup.rdl
**/*- [Bb]ackup ([0-9]).rdl
**/*- [Bb]ackup ([0-9][0-9]).rdl
# Microsoft Fakes
**/FakesAssemblies
# GhostDoc plugin setting file
**/*.GhostDoc.xml
# Node.js Tools for Visual Studio
**/.ntvs_analysis.dat
# Visual Studio 6 build log
**/*.plg
# Visual Studio 6 workspace options file
**/*.opt
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
**/*.vbw
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
**/*.vbp
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
**/*.dsw
**/*.dsp
# Visual Studio 6 technical files
# Visual Studio LightSwitch build output
**/**/*.HTMLClient/GeneratedArtifacts
**/**/*.DesktopClient/GeneratedArtifacts
**/**/*.DesktopClient/ModelManifest.xml
**/**/*.Server/GeneratedArtifacts
**/**/*.Server/ModelManifest.xml
**/_Pvt_Extensions
# Paket dependency manager
**/.paket/paket.exe
**/paket-files
# FAKE - F# Make
**/.fake
# CodeRush personal settings
**/.cr/personal
# Python Tools for Visual Studio (PTVS)
**/*.pyc
# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config
# Tabs Studio
**/*.tss
# Telerik's JustMock configuration file
**/*.jmconfig
# BizTalk build output
**/*.btp.cs
**/*.btm.cs
**/*.odx.cs
**/*.xsd.cs
# OpenCover UI analysis results
**/OpenCover
# Azure Stream Analytics local run output
**/ASALocalRun
# MSBuild Binary and Structured Log
**/*.binlog
# NVidia Nsight GPU debugger configuration file
**/*.nvuser
# MFractors (Xamarin productivity tool) working folder
**/.mfractor
# Local History for Visual Studio
**/.localhistory
# Visual Studio History (VSHistory) files
**/.vshistory
# BeatPulse healthcheck temp database
**/healthchecksdb
# Backup folder for Package Reference Convert tool in Visual Studio 2017
**/MigrationBackup
# Ionide (cross platform F# VS Code tools) working folder
**/.ionide
# Fody - auto-generated XML schema
**/FodyWeavers.xsd
# VS Code files for those working on multiple tools
**/*.code-workspace
# Local History for Visual Studio Code
# Windows Installer files from build outputs
**/*.cab
**/*.msi
**/*.msix
**/*.msm
**/*.msp
# JetBrains Rider
**/*.sln.iml
### VisualStudio Patch ###
# Additional files built by Visual Studio
# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
**/database.db
**/database.db
**/database.db
**/.markata.cache
**/database.sqlite
# flyctl launch added from .pytest_cache/.gitignore
# Created by pytest automatically.
.pytest_cache/**/*
# flyctl launch added from .ruff_cache/.gitignore
.ruff_cache/**/*
fly.toml

2
.gitignore vendored
View file

@ -967,5 +967,3 @@ database.db
database.db
.markata.cache
database.sqlite
.env.dev
.env.dev.docker

View file

@ -4,9 +4,9 @@ WORKDIR /app
Copy pyproject.toml /app
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
COPY README.md /app
RUN pip3 install '.[all]'
RUN pip3 install '.[api]'
COPY . /app
RUN pip3 install '.[all]'
RUN pip3 install '.[api]'
EXPOSE 5000

View file

@ -1,6 +1,3 @@
> [!IMPORTANT]
> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
# Learn SQL Model
learning sql model

Binary file not shown.

Before

Width:  |  Height:  |  Size: 769 B

70
d3.py
View file

@ -1,70 +0,0 @@
import sqlite3
from jinja2 import Environment, FileSystemLoader
def _quote_identifier(name):
    # Double-quote an SQL identifier, escaping embedded quotes, so table
    # names with spaces or special characters don't break the PRAGMA
    # statements below (which cannot use bound parameters).
    return '"' + name.replace('"', '""') + '"'


def get_tables_and_columns(conn):
    """Return a list of dicts describing every table in *conn*.

    Each dict has keys "name", "columns" (list of column names, in
    declaration order) and "foreign_keys" (see get_foreign_keys).
    """
    cursor = conn.cursor()
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    return [
        {
            "name": row[0],
            "columns": get_columns(conn, row[0]),
            "foreign_keys": get_foreign_keys(conn, row[0]),
        }
        for row in cursor.fetchall()
    ]


def get_columns(conn, table_name):
    """Return the column names of *table_name*, in declaration order."""
    cursor = conn.cursor()
    # PRAGMA table_info rows: (cid, name, type, notnull, dflt_value, pk)
    cursor.execute(f"PRAGMA table_info({_quote_identifier(table_name)});")
    return [row[1] for row in cursor.fetchall()]


def get_foreign_keys(conn, table_name):
    """Return *table_name*'s foreign keys as dicts.

    Keys: "id" (FK index), "from" (local column), "to_table"
    (referenced table) and "to" (referenced column).
    """
    cursor = conn.cursor()
    # PRAGMA foreign_key_list rows:
    # (id, seq, table, from, to, on_update, on_delete, match)
    cursor.execute(f"PRAGMA foreign_key_list({_quote_identifier(table_name)});")
    return [
        {"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]}
        for row in cursor.fetchall()
    ]
def generate_links(tables):
    """Compute source/target pixel coordinates for each foreign-key link.

    Geometry mirrors the HTML template's layout: tables are placed
    150px apart horizontally (120px wide), the first column row starts
    40px down, and each column row is 20px tall.

    Foreign keys whose target table or referenced column is not present
    in *tables* are skipped instead of raising (the original raised
    StopIteration / ValueError on such dangling references).
    """
    links = []
    for t_index, table in enumerate(tables):
        for fk in table["foreign_keys"]:
            target_index = next(
                (i for i, target in enumerate(tables) if target["name"] == fk["to_table"]),
                None,
            )
            if target_index is None:
                continue  # referenced table not in the diagram
            try:
                source_y = 40 + table["columns"].index(fk["from"]) * 20
                target_y = 40 + tables[target_index]["columns"].index(fk["to"]) * 20
            except ValueError:
                continue  # referenced column missing from the schema dict
            links.append(
                {
                    "source": {"x": 50 + t_index * 150 + 120, "y": 50 + source_y},
                    "target": {"x": 50 + target_index * 150, "y": 50 + target_y},
                }
            )
    return links
def generate_er_diagram(database_path):
    """Render an ER diagram for the SQLite database at *database_path*.

    Introspects the schema, computes link coordinates, and writes the
    rendered templates/er_diagram.html template to index.html.
    """
    connection = sqlite3.connect(database_path)
    schema = get_tables_and_columns(connection)
    link_data = generate_links(schema)
    environment = Environment(loader=FileSystemLoader("templates"))
    template = environment.get_template("er_diagram.html")
    rendered = template.render(tables=schema, links=link_data)
    with open("index.html", "w") as output:
        output.write(rendered)
if __name__ == "__main__":
    # CLI entry point: render the diagram for the default local database.
    generate_er_diagram("database.db")

View file

@ -1,72 +0,0 @@
![ER Diagram](er_diagram.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| f48730a783a5 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| birthday | DATETIME | | |
| id | INTEGER | | |
### Records Count
The table pet contains 0 records.
---
## Table: hero
### First 5 rows
| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
|------|-------------|---|---|------|-----|-----------|--------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| secret_name | VARCHAR | | |
| x | INTEGER | | |
| y | INTEGER | | |
| size | INTEGER | | |
| age | INTEGER | | |
| shoe_size | INTEGER | | |
| pet_id | INTEGER | pet.id | |
| id | INTEGER | | |
### Records Count
The table hero contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 67 KiB

BIN
im.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 27 KiB

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<!-- ER diagram viewer: draws table boxes and foreign-key links with a
     d3 v6 force layout. The `tables` / `links` constants below are
     rendered into the page by the server-side template. -->
<html lang="en">
<head>
<meta charset="UTF-8" />
<title>ER Diagram</title>
<!-- Include d3.js -->
<script src="https://d3js.org/d3.v6.min.js"></script>
<style>
body {
margin: 0;
}
.table {
font-family: Arial, sans-serif;
font-size: 14px;
cursor: pointer;
}
.table-name {
font-weight: bold;
font-size: 16px;
}
.foreign-key {
fill: #b30000;
}
.link {
stroke: #999;
stroke-opacity: 0.6;
stroke-width: 2px;
fill: none;
}
.link-curved-path {
pointer-events: none;
}
</style>
</head>
<body>
<div id="er-diagram"></div>
<script>
// Generate ER diagram
// Schema metadata injected by the generator (see d3.py in this repo).
const tables = [{'name': 'pet', 'columns': ['name', 'birthday', 'id'], 'foreign_keys': []}, {'name': 'hero', 'columns': ['name', 'secret_name', 'x', 'y', 'size', 'age', 'shoe_size', 'pet_id', 'id'], 'foreign_keys': [{'id': 0, 'from': 'pet_id', 'to_table': 'pet', 'to': 'id'}]}];
// NOTE(review): links carry precomputed pixel coordinates, but the
// forceLink below resolves source/target via .id(d => d.name) — these
// two shapes look inconsistent; confirm which one is intended.
const links = [{'source': {'x': 320, 'y': 230}, 'target': {'x': 50, 'y': 130}}];
const width = window.innerWidth;
const height = window.innerHeight;
const tableElemWidth = 120;
// Box height: one 20px row per column plus one for the table name.
const tableElemHeight = d => 20 * (d.columns.length + 1);
let svg = d3.select("#er-diagram")
.append("svg")
.attr("width", width)
.attr("height", height);
let g = svg.append("g");
let linkGroup = g.selectAll(".link")
.data(links)
.join("path")
.attr("class", "link");
// Clicking a table toggles its "collapsed" class (hides column rows).
let tableGroup = g.selectAll(".table")
.data(tables)
.join("g")
.attr("class", "table")
.classed("collapsed", false)
.on("click", (event, d) => {
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
});
// Pan/zoom the whole diagram group.
let zoomBehavior = d3.zoom()
.scaleExtent([0.1, 4])
.on("zoom", function (event) {
g.attr("transform", event.transform);
});
svg.call(zoomBehavior);
let rect = tableGroup.append("rect")
.attr("width", tableElemWidth)
.attr("height", tableElemHeight)
.attr("fill", "#eee");
let text = tableGroup.append("text")
.attr("class", "table-name")
.attr("x", 10)
.attr("y", 20)
.text(d => d.name);
// One text row per column; foreign-key columns get the red style.
let columnText = tableGroup.selectAll(".column")
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
.join("text")
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
.attr("x", 10)
.attr("y", (d, i) => 40 + i * 20)
.text(d => d.name);
// Physics simulation and force layout
let simulation = d3.forceSimulation(tables)
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
.force("charge", d3.forceManyBody().strength(-800))
.force("x", d3.forceX(width / 2).strength(0.1))
.force("y", d3.forceY(height / 2).strength(0.1))
.on("tick", () => {
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
linkGroup.attr("d", d => {
// NOTE(review): d.source_col / d.target_col are never assigned on the
// link objects, so findIndex returns -1 (y offset 20) — presumably a
// leftover from an earlier data shape; verify before reuse.
const srcX = d.source.x + tableElemWidth;
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
const tgtX = d.target.x;
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
const deltaX = tgtX - srcX;
const deltaY = tgtY - srcY;
const curveFactor = 50;
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
});
// Hide column rows of collapsed tables on every tick.
columnText.style("display", (d, i, nodes) => {
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
});
});
</script>
</body>
</html>

View file

@ -8,13 +8,13 @@ hero_router = APIRouter()
@hero_router.on_event("startup")
async def on_startup() -> None:
def on_startup() -> None:
# SQLModel.metadata.create_all(get_config().database.engine)
...
@hero_router.get("/hero/{hero_id}")
async def get_hero(
def get_hero(
*,
session: Session = Depends(get_session),
hero_id: int,
@ -27,7 +27,7 @@ async def get_hero(
@hero_router.post("/hero/")
async def post_hero(
def post_hero(
*,
session: Session = Depends(get_session),
hero: HeroCreate,
@ -42,7 +42,7 @@ async def post_hero(
@hero_router.patch("/hero/")
async def patch_hero(
def patch_hero(
*,
session: Session = Depends(get_session),
hero: HeroUpdate,
@ -61,7 +61,7 @@ async def patch_hero(
@hero_router.delete("/hero/{hero_id}")
async def delete_hero(
def delete_hero(
*,
session: Session = Depends(get_session),
hero_id: int,
@ -77,7 +77,7 @@ async def delete_hero(
@hero_router.get("/heros/")
async def get_heros(
def get_heros(
*,
session: Session = Depends(get_session),
) -> Heros:

View file

@ -1,13 +1,13 @@
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
from fastapi.responses import HTMLResponse
from rich.console import Console
from sqlmodel import Session, select
from sqlmodel import Session
from websockets.exceptions import ConnectionClosed
from learn_sql_model.api.websocket_connection_manager import manager
from learn_sql_model.config import get_session
from learn_sql_model.console import console
from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros
from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
web_socket_router = APIRouter()
@ -46,9 +46,7 @@ async def websocket_endpoint_connect(
):
Console().log(f"Client #{id} connecting")
await manager.connect(websocket, channel)
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await websocket.send_text(heros.json())
try:
@ -85,18 +83,11 @@ async def websocket_endpoint_hero_echo(
while True:
data = await websocket.receive_text()
hero = HeroUpdate.parse_raw(data)
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
if heros != last_heros:
await manager.broadcast(heros.json(), "heros")
last_heros = heros
db_hero = session.get(Hero, hero.id)
for key, value in hero.dict(exclude_unset=True).items():
setattr(db_hero, key, value)
session.add(db_hero)
session.commit()
session.refresh(db_hero)
hero.update(session=session)
console.print(heros)
await websocket.send_text(heros.json())
@ -105,9 +96,7 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("disconnected")
except ConnectionClosed:
@ -115,8 +104,6 @@ async def websocket_endpoint_hero_echo(
HeroDelete(id=hero.id).delete(session=session)
except Exception:
...
statement = select(Hero)
heros = session.exec(statement).all()
heros = Heros(__root__=heros)
heros = Heros.list(session=session)
await manager.broadcast(heros.json(), "heros")
print("connection closed")

View file

@ -54,7 +54,7 @@ def list() -> Union[Hero, List[Hero]]:
def clear() -> Union[Hero, List[Hero]]:
"list many heros"
heros = Heros.list()
for hero in heros.__root__:
for hero in heros.heros:
HeroDelete.delete(id=hero.id)
return hero

View file

@ -1,15 +1,13 @@
from pathlib import Path
from typing import Annotated
# import copier
import alembic
from alembic.config import Config
import copier
import typer
from learn_sql_model.cli.common import verbose_callback
from learn_sql_model.config import get_config
from learn_sql_model.optional import _optional_import_
alembic = _optional_import_("alembic", group="manage")
Config = _optional_import_("alembic.config", "Config", group="manage")
model_app = typer.Typer()
@ -94,4 +92,5 @@ def populate(
callback=verbose_callback,
help="show the log messages",
),
): ...
):
...

View file

@ -4,7 +4,6 @@ from typing import TYPE_CHECKING
from fastapi import Depends
from pydantic import BaseModel, BaseSettings, validator
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session
from learn_sql_model.standard_config import load
@ -19,7 +18,6 @@ class ApiServer(BaseModel):
reload: bool = True
log_level: str = "info"
host: str = "0.0.0.0"
workers: int = 1
class ApiClient(BaseModel):
@ -27,6 +25,7 @@ class ApiClient(BaseModel):
protocol: str = "https"
url: str = f"{protocol}://{host}"
class Database:
def __init__(self, config: "Config" = None) -> None:
if config is None:
@ -41,21 +40,9 @@ class Database:
}
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
self.db_conf = {}
if 'sqlite' in self.config.database_url:
self.db_conf = {
'connect_args': {"check_same_thread": False},
'pool_recycle': 3600,
'pool_pre_ping': True,
}
self._engine = create_engine(
self.config.database_url,
**self.db_conf
)
@property
def engine(self) -> "Engine":
return self._engine
return create_engine(self.config.database_url)
@property
def session(self) -> "Session":
@ -100,14 +87,10 @@ def get_config(overrides: dict = {}) -> Config:
return config
config = get_config()
database = get_database()
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
def get_session() -> "Session":
with Session(database.engine) as session:
config = get_config()
engine = create_engine(config.database_url)
with Session(engine) as session:
yield session

View file

@ -10,6 +10,7 @@ class HeroFactory(ModelFactory[Hero]):
__model__ = Hero
__faker__ = Faker(locale="en_US")
__set_as_default_factory_for_type__ = True
id = None
pet_id = None
@classmethod

View file

@ -1,25 +0,0 @@
import pygame
class Debug:
    """F3-toggled on-screen FPS overlay for the game."""

    def __init__(self, game):
        self.game = game
        self.is_open = False
        self.debounce = False

    def handle_events(self, events):
        """Toggle the overlay on F3 keydown; debounce until F3 is released."""
        for event in events:
            if event.type == pygame.KEYDOWN and event.key == pygame.K_F3:
                if not self.debounce:
                    self.is_open = not self.is_open
                    self.debounce = True
            elif event.type == pygame.KEYUP and event.key == pygame.K_F3:
                self.debounce = False

    def render(self):
        """Draw the current FPS in the top-left corner while the overlay is open."""
        if not self.is_open:
            return
        fps_label = self.game.font.render(
            str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
        )
        self.game.screen.blit(fps_label, (20, 20))

View file

@ -5,11 +5,10 @@ from websocket import create_connection
from learn_sql_model.config import get_config
from learn_sql_model.console import console
from learn_sql_model.game.debug import Debug
from learn_sql_model.game.light import Light
from learn_sql_model.game.map import Map
from learn_sql_model.game.menu import Menu
from learn_sql_model.game.player import Player
from learn_sql_model.game.light import Light
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
@ -21,8 +20,7 @@ config = get_config()
class Client:
def __init__(self):
# self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
self.screen = pygame.display.set_mode((1280, 720))
self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
pygame.display.set_caption("Learn SQL Model")
self.clock = pygame.time.Clock()
self.running = True
@ -37,27 +35,17 @@ class Client:
self.menu = Menu(self)
self.map = Map(self)
self.light = Light(self)
self.font = pygame.font.SysFont("", 25)
self.font = pygame.font.SysFont("", 50)
self.joysticks = {}
self.darkness = pygame.Surface(
(self.screen.get_width(), self.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.debug = Debug(self)
atexit.register(self.quit)
@property
def ws(self):
def connect():
if "https" in config.api_client.url:
url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
elif "http" in config.api_client.url:
url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
else:
url = f"ws://{config.api_client.url}/wsecho"
self._ws = create_connection(url)
self._ws = create_connection(
f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
)
if not hasattr(self, "_ws"):
connect()
@ -66,13 +54,6 @@ class Client:
return self._ws
def run(self):
try:
from pyinstrument import Profiler
profiler = Profiler()
profiler.start()
except ImportError:
profiler = None
while self.running:
console.print("running")
console.print("handle_events")
@ -80,16 +61,12 @@ class Client:
console.print("update")
self.update()
console.print("render")
self.render()
time = self.clock.tick(60)
self.elapsed = time / 100
self.ticks += 1
console.print(f"time: {time}")
console.print(f"ticks: {self.ticks}")
if profiler:
profiler.stop()
print(profiler.output_text())
self.quit()
def quit(self):
@ -103,26 +80,15 @@ class Client:
self.screen.fill((0, 0, 0))
self.map.render()
self.player.render()
if self.ticks % 1 == 0 or self.ticks == 0:
light_level = 0
self.darkness.fill((light_level, light_level, light_level))
self.light.render()
self.screen.blit(
self.darkness,
(0, 0),
special_flags=pygame.BLEND_MULT,
)
self.light.render()
# update the screen
self.menu.render()
self.debug.render()
pygame.display.flip()
def handle_events(self):
self.events = pygame.event.get()
self.menu.handle_events(self.events)
self.debug.handle_events(self.events)
self.player.handle_events()
for event in self.events:
if event.type == pygame.QUIT:

View file

@ -1,219 +1,25 @@
import bisect
from PIL import Image, ImageFilter
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
def rot_center(image, angle):
"""rotate an image while keeping its center and size"""
orig_rect = image.get_rect()
rot_image = pygame.transform.rotate(image, angle)
rot_rect = orig_rect.copy()
rot_rect.center = rot_image.get_rect().center
rot_image = rot_image.subsurface(rot_rect).copy()
return rot_image
class Light:
def __init__(self, game):
self.game = game
self.surf = pygame.Surface(
(self.game.screen.get_width(), self.game.screen.get_height()),
pygame.SRCALPHA,
32,
)
self.surf.set_colorkey((0, 0, 0))
self.pre_render()
def pre_render(self):
# self.lights = {}
# for deg in range(-360, 360, 20):
# print("loading light", deg)
# self.lights[deg] = pygame.image.load(
# f"lights/light-{deg}.png"
# ).convert_alpha()
# return
light_surf = pygame.Surface(
(
self.game.player.hero.flashlight_strength * 3,
self.game.player.hero.flashlight_strength * 3,
),
pygame.SRCALPHA,
32,
)
v = pygame.math.Vector2(0, 1)
v.scale_to_length(self.game.player.hero.flashlight_strength)
for r in range(-90 - 25, -90 + 25):
_v = v.rotate(r)
pygame.draw.line(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
(
light_surf.get_width() / 2 + _v.x,
light_surf.get_height() / 2 + _v.y,
),
50,
)
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
pygame.draw.circle(
light_surf,
(255, 250, 205),
(light_surf.get_width() / 2, light_surf.get_height() / 2),
self.game.player.hero.lanturn_strength,
)
light_surf_pil = Image.frombytes(
"RGBA",
(light_surf.get_width(), light_surf.get_height()),
pygame.image.tostring(light_surf, "RGBA", False),
)
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
light_surf = pygame.image.fromstring(
light_surf_blur.tobytes(),
(light_surf.get_width(), light_surf.get_height()),
"RGBA",
).convert_alpha()
self.light_surf = light_surf
self.light_surf.set_colorkey((0, 0, 0))
self.lights = {
deg: pygame.transform.rotate(self.light_surf, deg - 90)
for deg in range(-360, 360, 20)
}
for deg, light in self.lights.items():
pygame.image.save(light, f"lights/light-{deg}.png")
def render(self):
self.surf.fill((0, 0, 0))
mx, my = pygame.mouse.get_pos()
v = pygame.math.Vector2(
mx - self.game.player.hero.x, my - self.game.player.hero.y
)
v.scale_to_length(self.game.player.hero.flashlight_strength)
self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
v.scale_to_length(1000)
for other in self.game.player.others.__root__:
if other.id == self.game.player.hero.id:
continue
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
other.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
other.x - my_light.get_width() / 2,
other.y - my_light.get_height() / 2,
),
for r in range(0, 360):
_v = v.rotate(r)
pygame.draw.line(
self.game.screen,
(255, 250, 205),
(self.game.player.hero.x, self.game.player.hero.y),
(self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
50,
)
light_index = list(self.lights.keys())[
bisect.bisect_left(
list(self.lights.keys()),
self.game.player.hero.flashlight_angle + 90,
)
]
my_light = self.lights[light_index]
self.surf.blit(
my_light,
(
self.game.player.hero.x - my_light.get_width() / 2,
self.game.player.hero.y - my_light.get_height() / 2,
),
)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
# 50,
# )
# # draw a circle
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (self.game.player.hero.x, self.game.player.hero.y),
# self.game.player.hero.lanturn_strength,
# )
# for other in self.game.player.others.__root__:
# if other.id == self.game.player.hero.id:
# continue
# v = pygame.math.Vector2(0, 1)
# v = v.rotate(-other.flashlight_angle)
# v.scale_to_length(other.flashlight_strength)
# for r in range(-25, 25):
# _v = v.rotate(r)
# pygame.draw.line(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# (other.x + _v.x, other.y + _v.y),
# 50,
# )
# pygame.draw.circle(
# self.surf,
# (255, 250, 205),
# (other.x, other.y),
# other.lanturn_strength,
# )
self.game.darkness.blit(
self.surf,
(0, 0),
)

View file

@ -1,8 +1,7 @@
from learn_sql_model.optional import _optional_import_
import pydantic
from rich.console import Console
from learn_sql_model.optional import _optional_import_
snoise2 = _optional_import_("noise", "snoise2", group="game")
pygame = _optional_import_("pygame", group="game")
@ -36,41 +35,9 @@ class Map:
self.persistence = 0.05 # Amplitude of each octave
self.lacunarity = 1.0 # Frequency of each octave
self.thresh = 125
# try to load the map from map.png
try:
self.surf = pygame.image.load("map.png").convert_alpha()
# self.surf_pil = Image.frombytes(
# "RGBA",
# (self.surf.get_width(), self.surf.get_height()),
# pygame.image.tostring(self.surf, "RGBA", False),
# )
# self.surf_blur = (
# self.surf_pil.filter(
# ImageFilter.SMOOTH_MORE(),
# )
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# .filter(ImageFilter.SMOOTH_MORE())
# # sharpen
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
# )
# self.surf = pygame.image.fromstring(
# self.surf_blur.tobytes(),
# (self.surf.get_width(), self.surf.get_height()),
# "RGBA",
# ).convert_alpha()
except FileNotFoundError:
self.pre_draw()
self.pre_draw()
def refresh_surf(self):
self.surf = pygame.Surface((self.screen_width, self.screen_height))
def get_noise(self, x, y):
@ -86,7 +53,7 @@ class Map:
def render(self):
self.game.screen.blit(
self.surf,
pygame.transform.scale(self.surf, (self.screen_width, self.screen_height)),
(0, 0),
)
@ -97,7 +64,6 @@ class Map:
def pre_draw(self):
self.refresh_surf()
for x in range(int(self.screen_width)):
for y in range(int(self.screen_height)):
if not self.point_check_collision(x, y):
@ -112,7 +78,6 @@ class Map:
),
)
pygame.image.save(self.surf, "map.png")
# av1 = (
# Image.open("rock.jpg")
# .convert("RGB")

View file

@ -1,7 +1,6 @@
from typing import Callable, Tuple
from pydantic import BaseModel
from learn_sql_model.optional import _optional_import_
pygame = _optional_import_("pygame", group="game")
@ -114,7 +113,7 @@ class Menu:
def handle_events(self, events):
self.hamburger.handle_events(self, events)
for event in events:
if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open:
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1: # Left mouse button
self.handle_click()

View file

@ -12,34 +12,19 @@ HeroFactory = _optional_import_(
class Player:
def __init__(self, game):
hero = HeroFactory().build(
size=25,
x=100,
y=100,
flashlight_strength=1000,
lanturn_strength=100,
flashlight_angle=0,
)
hero = HeroFactory().build(size=25, x=100, y=100)
self.hero = HeroCreate(**hero.dict()).post()
self.hero.size = 64
self.game = game
self.others = [] # Heros(heros=[])
self.others = Heros(heros=[])
self.width = 16
self.height = 16
self.white = (255, 255, 255)
self.x = self.game.screen.get_width() / 2
self.y = self.game.screen.get_height() / 2
self.speed = 10
self.max_speed = 10
self.image = pygame.image.load("creeper.png").convert_alpha()
self.pet_image = pygame.image.load("pet.png").convert_alpha()
self.image = pygame.transform.scale(
self.image, (self.hero.size, self.hero.size)
)
self.pet_image = pygame.transform.scale(
self.pet_image, (self.hero.size/1.5, self.hero.size/2)
)
self.speed = 5
self.max_speed = 5
self.image = pygame.image.load("player.png").convert_alpha()
self.x_last = self.x
self.y_last = self.y
self.hitbox_surface = pygame.Surface((self.width, self.height))
@ -55,23 +40,15 @@ class Player:
self.joysticks = {}
def rename_hero(self):
old_hero = self.hero
hero = HeroFactory().build(
size=self.hero.size,
x=self.hero.x,
y=self.hero.y,
id=self.hero.id,
flashlight_strength=self.hero.flashlight_strength,
lanturn_strength=self.hero.lanturn_strength,
size=self.hero.size, x=self.hero.x, y=self.hero.y, id=old_hero.id
)
self.hero = HeroUpdate(**hero.dict()).update()
self.hero = HeroCreate(**hero.dict()).post()
def quit(self):
try:
# session = get_config().database.session
# hero = session.get(Hero, self.hero.id)
# session.delete(hero)
# session.commit()
HeroDelete.delete(id=self.hero.id)
HeroDelete(id=self.hero.id).delete()
except RuntimeError:
pass
@ -181,10 +158,6 @@ class Player:
movement_vector = end_pos - start_pos
try:
movement_direction = movement_vector.normalize()
except ValueError:
end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
movement_vector = end_pos - start_pos
movement_direction = movement_vector.normalize()
except ZeroDivisionError:
end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
movement_vector = end_pos - start_pos
@ -208,7 +181,7 @@ class Player:
self.x_last = self.hero.x
self.y_last = self.hero.y
if self.game.ticks % 60 == 0 or self.game.ticks == 0:
if self.game.ticks % 5 == 0 or self.game.ticks == 0:
console.print("updating")
update = HeroUpdate(**self.hero.dict(exclude_unset=True))
console.print(update)
@ -227,35 +200,20 @@ class Player:
)
def render(self):
for other in self.others.__root__:
for other in self.others.heros:
if other.id != self.hero.id:
# put self.image on the game.screen
self.game.screen.blit(
self.image,
(other.x - other.size / 2, other.y - other.size / 2),
pygame.draw.circle(
self.game.screen, (255, 0, 0), (other.x, other.y), other.size
)
# pygame.draw.circle(
# self.game.screen, (255, 0, 0), (other.x, other.y), other.size
# )
self.game.screen.blit(
self.game.font.render(other.name, False, (255, 255, 255), 1),
(other.x - other.size / 2, other.y + other.size / 2),
(other.x, other.y),
)
self.game.screen.blit(
self.image,
(self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
)
self.game.screen.blit(
self.pet_image,
(self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
)
# pygame.draw.circle(
# self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
# )
pygame.draw.circle(
self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
)
self.game.screen.blit(
self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
(self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2),
(self.hero.x, self.hero.y),
)

View file

@ -1,12 +1,10 @@
from typing import Dict, Optional
import httpx
import pydantic
from pydantic import BaseModel
from sqlmodel import Field, SQLModel
from learn_sql_model.config import config
from learn_sql_model.optional import optional
class HeroBase(SQLModel, table=False):
@ -14,27 +12,16 @@ class HeroBase(SQLModel, table=False):
secret_name: str
x: int
y: int
size: Optional[int]
flashlight_strength: Optional[int] = 1000
flashlight_angle: Optional[int] = 0
lanturn_strength: Optional[int] = 100
# size: int
# age: Optional[int] = None
# shoe_size: Optional[int] = None
# pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
# pet: Optional[Pet] = Relationship(back_populates="hero")
@pydantic.validator("size", pre=True, always=True)
def validate_size(cls, v):
if v is None:
return 50
if v <= 0:
raise ValueError("size must be > 0")
return v
class Hero(HeroBase, table=True):
id: int = Field(default=None, primary_key=True)
id: Optional[int] = Field(default=None, primary_key=True)
class HeroCreate(HeroBase):
@ -78,18 +65,28 @@ class Heros(BaseModel):
return Heros.parse_obj({"__root__": r.json()})
@optional
class HeroUpdate(HeroBase):
class HeroUpdate(SQLModel):
# id is required to update the hero
id: int
# all other fields, must match the model, but with Optional default None
name: Optional[str] = None
secret_name: Optional[str] = None
# age: Optional[int] = None
# shoe_size: Optional[int] = None
# x: Optional[int]
# y: Optional[int]
# pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
# pet: Optional[Pet] = Relationship(back_populates="hero")
def update(self) -> Hero:
r = httpx.patch(
f"{config.api_client.url}/hero/",
json=self.dict(exclude_none=True),
json=self.dict(),
)
if r.status_code != 200:
raise RuntimeError(f"{r.status_code}:\n {r.text}")
return Hero.parse_obj(r.json())
class HeroDelete(BaseModel):

View file

@ -1,8 +1,5 @@
from typing import List, Optional
import textwrap
import inspect
from pydantic import BaseModel
def _optional_import_(
module: str,
@ -64,33 +61,3 @@ def _optional_import_(
self._failed_import()
return _failed_import()
# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
# def decorator(cls):
# def wrapper(*args, **kwargs):
# if fields is None:
# fields = cls.__fields__
# if required is None:
# required = []
#
# for field in fields:
# if field not in required:
# cls.__fields__[field].required = False
# return _cls
# return wrapper
# return decorator
#
#
def optional(*fields):
def dec(_cls):
for field in fields:
_cls.__fields__[field].required = False
return _cls
if fields and inspect.isclass(fields[0]) and issubclass(fields[0], BaseModel):
cls = fields[0]
fields = cls.__fields__
return dec(cls)
return dec

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 804 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 809 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 581 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 714 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 812 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 910 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 841 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 580 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 827 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 876 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 803 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 770 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 726 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 901 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 811 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 883 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 810 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 696 KiB

View file

@ -1,18 +0,0 @@
from locust import HttpUser, between, task
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate
class QuickstartUser(HttpUser):
wait_time = between(1, 2)
@task
def hello_world(self):
self.client.get("/hero/1")
self.client.get("/heros/")
@task(3)
def create_hero(self):
hero = HeroFactory().build()
HeroCreate(**hero.dict()).post()

View file

@ -1,58 +0,0 @@
import random
from locust import FastHttpUser, task
from learn_sql_model.config import get_config
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
config = get_config()
class QuickstartUser(FastHttpUser):
# wait_time = between(1, 2)
host = "http://localhost:5000"
# host = "https://waylonwalker.com"
def on_start(self):
self.client.verify = False
@task(6)
def get_a_hero(self):
# heros = Heros.list()
id = 1
# id = random.choice(heros.__root__).id
self.client.get(f"/hero/{id}")
# @task(2)
# def get_all_hero(self):
# self.client.get("/heros/")
@task
def create_hero(self):
hero = HeroFactory().build()
hero_create = HeroCreate(**hero.dict()).post()
self.client.post(
f"{config.api_client.url}/hero/",
json=hero_create.dict(),
)
@task(3)
def update_hero(self):
hero = HeroFactory().build()
hero_update = HeroUpdate(id=1, name=hero.name)
self.client.patch(
"/hero/",
json=hero_update.dict(exclude_none=True),
)
@task
def delete_hero(self):
heros = Heros.list()
id = random.choice(heros.__root__).id
self.client.delete(
f"/hero/{id}",
)

View file

BIN
map.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

BIN
micro

Binary file not shown.

View file

@ -79,7 +79,7 @@ def run_migrations_online() -> None:
context.configure(
connection=connection,
target_metadata=target_metadata,
render_as_batch=True,
render_as_batch=False,
version_table=f'{config.get_main_option("project")}_alembic_version',
)

View file

@ -7,26 +7,30 @@ Create Date: 2023-06-22 15:03:27.338959
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = "3555f61aaa79"
down_revision = "79972ec5f79d"
revision = '3555f61aaa79'
down_revision = '79972ec5f79d'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
# ### end Alembic commands ###
# generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
# generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("hero", "y")
op.drop_column("hero", "x")
op.drop_column('hero', 'y')
op.drop_column('hero', 'x')
# ### end Alembic commands ###

View file

@ -8,10 +8,13 @@ Create Date: 2023-06-22 15:02:20.292322
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = "79972ec5f79d"
revision = '79972ec5f79d'
down_revision = None
branch_labels = None
depends_on = None
@ -19,27 +22,25 @@ depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"hero",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
op.create_table('hero',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table(
"pet",
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("birthday", sa.DateTime(), nullable=True),
sa.Column("id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("id"),
op.create_table('pet',
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column('birthday', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
# generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
# generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("pet")
op.drop_table("hero")
op.drop_table('pet')
op.drop_table('hero')
# ### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""add hero.lighting
Revision ID: a1cd0a1947be
Revises: c79214cdc7b3
Create Date: 2023-06-28 19:43:47.108749
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'a1cd0a1947be'
down_revision = 'c79214cdc7b3'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'lanturn_strength')
op.drop_column('hero', 'flashlight_strength')
# ### end Alembic commands ###

View file

@ -1,75 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_a1cd0a1947be.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| c79214cdc7b3 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
| flashlight_strength | INTEGER | | | |
| lanturn_strength | INTEGER | | | |
### Records Count
The table hero contains 4 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -1,34 +0,0 @@
"""add hero.size
Revision ID: c79214cdc7b3
Revises: 3555f61aaa79
Create Date: 2023-06-28 11:39:02.606001
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'c79214cdc7b3'
down_revision = '3555f61aaa79'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('hero', 'size')
# ### end Alembic commands ###

View file

@ -1,74 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_c79214cdc7b3.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| 3555f61aaa79 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 records.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size |
|------|-------------|----|---|---|------|
| tight-gold | successful-health | 1 | 6430 | 6231 | None |
| hard-rope | green-research | 2 | 1395 | 2865 | None |
| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
| huge-library | adult-body | 4 | 656 | 2377 | None |
| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| secret_name | VARCHAR | | | |
| id | INTEGER | | | |
| x | INTEGER | | | |
| y | INTEGER | | | |
| size | INTEGER | | | |
### Records Count
The table hero contains 1572 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | | |
| birthday | DATETIME | | | |
| id | INTEGER | | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 41 KiB

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: d79dd8e699d1
Revises: e1af975310a1
Create Date: 2023-06-28 19:54:19.322431
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'd79dd8e699d1'
down_revision = 'e1af975310a1'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
# ### end Alembic commands ###
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """Revert the migration: drop the ``hero.flashlight_angle`` column.

    NOTE(review): unlike ``upgrade``, this does not regenerate the ER-diagram
    artifacts — confirm whether that asymmetry is intentional.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('hero', 'flashlight_angle')
    # ### end Alembic commands ###

View file

@ -1,72 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_d79dd8e699d1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| e1af975310a1 |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
|------|-------------|----|---|---|------|---------------------|------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| secret_name | VARCHAR | | |
| id | INTEGER | | |
| x | INTEGER | | |
| y | INTEGER | | |
| size | INTEGER | | |
| flashlight_strength | INTEGER | | |
| lanturn_strength | INTEGER | | |
| flashlight_angle | INTEGER | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| birthday | DATETIME | | |
| id | INTEGER | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 61 KiB

View file

@ -1,34 +0,0 @@
"""add hero.flashlight_angle
Revision ID: e1af975310a1
Revises: a1cd0a1947be
Create Date: 2023-06-28 19:53:18.068873
"""
from alembic import op
import sqlalchemy as sa
import sqlmodel
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
from learn_sql_model.config import get_config
# revision identifiers, used by Alembic.
revision = 'e1af975310a1'
down_revision = 'a1cd0a1947be'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """No-op schema upgrade; Alembic detected no changes for this revision.

    Still emits fresh ER-diagram artifacts so every revision has matching
    documentation files.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
    generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
def downgrade() -> None:
    """No-op downgrade; nothing to revert for this revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###

View file

@ -1,71 +0,0 @@
![ER Diagram](migrations/versions/er_diagram_e1af975310a1.png)
---
## Table: learn_sql_model_alembic_version
### First 5 rows
| version_num |
|-------------|
| a1cd0a1947be |
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| version_num | VARCHAR(32) | | |
### Records Count
The table learn_sql_model_alembic_version contains 1 record.
---
## Table: hero
### First 5 rows
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|------|-------------|----|---|---|------|---------------------|------------------|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| secret_name | VARCHAR | | |
| id | INTEGER | | |
| x | INTEGER | | |
| y | INTEGER | | |
| size | INTEGER | | |
| flashlight_strength | INTEGER | | |
| lanturn_strength | INTEGER | | |
### Records Count
The table hero contains 0 records.
---
## Table: pet
### First 5 rows
| name | birthday | id |
|------|----------|----|
### Columns
| Column Name | Type | Foreign Key | Example Value |
|-------------|------|-------------|---------------|
| name | VARCHAR | | |
| birthday | DATETIME | | |
| id | INTEGER | | |
### Records Count
The table pet contains 0 records.
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 55 KiB

View file

@ -1,37 +0,0 @@
# Send a test email through the Mailgun HTTP API.
#
# NOTE(review): the domain and API key below are placeholders — substitute
# real values (preferably read from environment variables rather than
# hard-coded) before running. The response is not checked; inspect the
# returned status manually if delivery matters.
# (A previous smtplib-based variant of this script was removed; see git
# history if the SMTP approach is needed again.)
import requests

requests.post(
    "https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
    auth=("api", "YOUR_API_KEY"),
    data={
        "from": "Excited User <mailgun@YOUR_DOMAIN_NAME>",
        "to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
        "subject": "Hello",
        "text": "Testing some Mailgun awesomness!",
    },
)

BIN
pet.png

Binary file not shown.

Before

Width:  |  Height:  |  Size: 738 B

View file

@ -24,14 +24,13 @@ classifiers = [
"Programming Language :: Python :: Implementation :: PyPy",
]
dependencies = [
"black",
"python-socketio[client]",
"anyconfig",
"copier",
"engorgio",
"fastapi",
"httpx",
"pydantic<2.0.0",
"pydantic[dotenv]",
"pyflyby",
"pyinstaller",
"rich",

23
rect.py
View file

@ -1,23 +0,0 @@
# Minimal pygame demo: open a 500x500 window and draw a blue square on a
# red background until the window is closed.
import pygame

pygame.init()
screen = pygame.display.set_mode((500, 500))
pygame.display.set_caption("draw a square")

running = True
while running:
    # Drain the event queue; quitting the window ends the loop.
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False

    # Draw onto an off-screen surface each frame, then blit to the window.
    # NOTE(review): indentation reconstructed — drawing is presumed to be
    # inside the main loop; confirm against the original file.
    surface = pygame.Surface((500, 500))
    surface.fill((255, 0, 0))  # red background
    color = (0, 0, 255)  # blue square
    rect = (200, 200, 100, 100)  # x, y, width, height
    pygame.draw.rect(surface, color, rect)
    screen.blit(surface, (0, 0))
    pygame.display.flip()

View file

@ -1,129 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <title>ER Diagram</title>
  <!-- Include d3.js -->
  <script src="https://d3js.org/d3.v6.min.js"></script>
  <style>
    body {
      margin: 0;
    }
    .table {
      font-family: Arial, sans-serif;
      font-size: 14px;
      cursor: pointer;
    }
    .table-name {
      font-weight: bold;
      font-size: 16px;
    }
    .foreign-key {
      fill: #b30000;
    }
    .link {
      stroke: #999;
      stroke-opacity: 0.6;
      stroke-width: 2px;
      fill: none;
    }
    .link-curved-path {
      pointer-events: none;
    }
  </style>
</head>
<body>
  <div id="er-diagram"></div>
  <script>
    // Generate ER diagram.
    // NOTE(review): {{tables}} and {{links}} are template placeholders —
    // presumably substituted server-side with JSON before serving; confirm
    // against the renderer.
    const tables = {{tables}};
    const links = {{links}};
    const width = window.innerWidth;
    const height = window.innerHeight;
    const tableElemWidth = 120;
    // One 20px row per column plus one for the table name.
    const tableElemHeight = d => 20 * (d.columns.length + 1);
    let svg = d3.select("#er-diagram")
      .append("svg")
      .attr("width", width)
      .attr("height", height);
    let g = svg.append("g");
    // One <path> per foreign-key relationship.
    let linkGroup = g.selectAll(".link")
      .data(links)
      .join("path")
      .attr("class", "link");
    // One <g> per table; clicking toggles the "collapsed" class, which hides
    // the column rows (see the display rule in the tick handler below).
    let tableGroup = g.selectAll(".table")
      .data(tables)
      .join("g")
      .attr("class", "table")
      .classed("collapsed", false)
      .on("click", (event, d) => {
        d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
      });
    // Pan/zoom over the whole diagram.
    let zoomBehavior = d3.zoom()
      .scaleExtent([0.1, 4])
      .on("zoom", function (event) {
        g.attr("transform", event.transform);
      });
    svg.call(zoomBehavior);
    let rect = tableGroup.append("rect")
      .attr("width", tableElemWidth)
      .attr("height", tableElemHeight)
      .attr("fill", "#eee");
    let text = tableGroup.append("text")
      .attr("class", "table-name")
      .attr("x", 10)
      .attr("y", 20)
      .text(d => d.name);
    // Column labels; foreign-key columns get the red .foreign-key style.
    let columnText = tableGroup.selectAll(".column")
      .data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
      .join("text")
      .attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
      .attr("x", 10)
      .attr("y", (d, i) => 40 + i * 20)
      .text(d => d.name);
    // Physics simulation and force layout: links pull related tables
    // together, charge pushes tables apart, x/y forces center the layout.
    let simulation = d3.forceSimulation(tables)
      .force("link", d3.forceLink(links).id(d => d.name).distance(200))
      .force("charge", d3.forceManyBody().strength(-800))
      .force("x", d3.forceX(width / 2).strength(0.1))
      .force("y", d3.forceY(height / 2).strength(0.1))
      .on("tick", () => {
        tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
        // Curve each link from the source column's row (right edge) to the
        // target column's row (left edge).
        linkGroup.attr("d", d => {
          const srcX = d.source.x + tableElemWidth;
          const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
          const tgtX = d.target.x;
          const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
          const deltaX = tgtX - srcX;
          const deltaY = tgtY - srcY;
          const curveFactor = 50;
          const curveY = deltaY < 0 ? -curveFactor : curveFactor;
          return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
        });
        // Hide column labels for collapsed tables.
        columnText.style("display", (d, i, nodes) => {
          return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
        });
      });
  </script>
</body>
</html>

View file

@ -12,7 +12,7 @@ class {{ modelname }}Base(SQLModel, table=False):
class {{ modelname }}({{ modelname }}Base, table=True):
id: int = Field(default=None, primary_key=True)
id: Optional[int] = Field(default=None, primary_key=True)
class {{ modelname }}Create({{ modelname }}Base):

View file

@ -1,16 +1,16 @@
from fastapi.testclient import TestClient
import pytest
from sqlalchemy import create_engine
from sqlmodel import SQLModel, Session
from sqlmodel import SQLModel, Session, select
from sqlmodel.pool import StaticPool
from typer.testing import CliRunner
from learn_sql_model.api.app import app
from learn_sql_model.cli.hero import hero_app
from learn_sql_model.config import get_session
from learn_sql_model.config import get_config, get_session
from learn_sql_model.factories.hero import HeroFactory
from learn_sql_model.models import hero as hero_models
from learn_sql_model.models.hero import Hero, HeroCreate, HeroDelete, HeroRead
from learn_sql_model.models.hero import Hero, HeroCreate, HeroDelete, HeroRead, Heros
runner = CliRunner()
client = TestClient(app)
@ -48,10 +48,11 @@ def test_api_post(client: TestClient):
assert response_hero.name == hero.name
def test_api_read_heros(session: Session, client: TestClient):
heros = HeroFactory().batch(5)
for hero in heros:
session.add(hero)
def test_api_read_heroes(session: Session, client: TestClient):
hero_1 = HeroFactory().build()
hero_2 = HeroFactory().build()
session.add(hero_1)
session.add(hero_2)
session.commit()
response = client.get("/heros/")
@ -59,31 +60,32 @@ def test_api_read_heros(session: Session, client: TestClient):
assert response.status_code == 200
assert len(data) == 5
for d in data:
api_hero = Hero.parse_obj(d)
my_hero = [hero for hero in heros if hero.id == api_hero.id][0]
for key, value in api_hero.dict(exclude_unset=True).items():
assert getattr(my_hero, key) == value
assert len(data) == 2
assert data[0]["name"] == hero_1.name
assert data[0]["secret_name"] == hero_1.secret_name
assert data[0]["id"] == hero_1.id
assert data[1]["name"] == hero_2.name
assert data[1]["secret_name"] == hero_2.secret_name
assert data[1]["id"] == hero_2.id
def test_api_read_hero(session: Session, client: TestClient):
hero = HeroFactory().build()
session.add(hero)
hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
response = client.get(f"/hero/{hero.id}")
response = client.get(f"/hero/{hero_1.id}")
data = response.json()
response_hero = Hero.parse_obj(data)
assert response.status_code == 200
for key, value in hero.dict(exclude_unset=True).items():
assert getattr(response_hero, key) == value
assert data["name"] == hero_1.name
assert data["secret_name"] == hero_1.secret_name
assert data["id"] == hero_1.id
def test_api_read_hero_404(session: Session, client: TestClient):
hero = HeroFactory().build()
session.add(hero)
hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
response = client.get(f"/hero/999")
@ -91,20 +93,17 @@ def test_api_read_hero_404(session: Session, client: TestClient):
def test_api_update_hero(session: Session, client: TestClient):
hero = HeroFactory().build()
new_hero = HeroFactory().build()
session.add(hero)
hero_1 = HeroFactory().build()
session.add(hero_1)
session.commit()
response = client.patch(
f"/hero/", json={"id": hero.id, **new_hero.dict(exclude={"id"})}
)
response = client.patch(f"/hero/", json={"name": "Deadpuddle", "id": hero_1.id})
data = response.json()
response_hero = Hero.parse_obj(data)
assert response.status_code == 200
for key, value in hero.dict(exclude_unset=True).items():
assert getattr(response_hero, key) == value
assert data["name"] == "Deadpuddle"
assert data["secret_name"] == hero_1.secret_name
assert data["id"] == hero_1.id
def test_api_update_hero_404(session: Session, client: TestClient):
@ -139,6 +138,25 @@ def test_delete_hero_404(session: Session, client: TestClient):
assert response.status_code == 404
def test_config_memory(mocker):
mocker.patch(
"learn_sql_model.config.Database.engine",
new_callable=lambda: create_engine(
"sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
),
)
config = get_config()
SQLModel.metadata.create_all(config.database.engine)
hero = HeroFactory().build()
with config.database.session as session:
session.add(hero)
session.commit()
db_hero = session.get(Hero, hero.id)
db_heroes = session.exec(select(Hero)).all()
assert db_hero.name == hero.name
assert len(db_heroes) == 1
def test_cli_get(mocker):
hero = HeroFactory().build()
hero = HeroRead(**hero.dict(exclude_none=True))
@ -149,11 +167,8 @@ def test_cli_get(mocker):
result = runner.invoke(hero_app, ["get", "1"])
assert result.exit_code == 0
for key, value in hero.dict(exclude_unset=True).items():
if type(value) == str:
assert f"{key}='{value}'" in result.stdout
elif type(value) == int:
assert f"{key}={value}" in result.stdout
assert f"name='{hero.name}'" in result.stdout
assert f"secret_name='{hero.secret_name}'" in result.stdout
assert httpx.get.call_count == 1
assert httpx.post.call_count == 0
assert httpx.delete.call_count == 0
@ -177,21 +192,20 @@ def test_cli_get_404(mocker):
def test_cli_list(mocker):
heros = HeroFactory().batch(5)
hero_1 = HeroRead(**HeroFactory().build().dict(exclude_none=True))
hero_2 = HeroRead(**HeroFactory().build().dict(exclude_none=True))
heros = Heros(__root__=[hero_1, hero_2])
httpx = mocker.patch.object(hero_models, "httpx")
httpx.get.return_value = mocker.Mock()
httpx.get.return_value.status_code = 200
httpx.get.return_value.json.return_value = heros
httpx.get.return_value.json.return_value = heros.dict()["__root__"]
result = runner.invoke(hero_app, ["list"])
assert result.exit_code == 0
for hero in heros:
for key, value in hero.dict(exclude_unset=True).items():
if type(value) == str:
assert f"{key}='{value}'" in result.stdout
elif type(value) == int:
assert f"{key}={value}" in result.stdout
assert f"name='{hero_1.name}'" in result.stdout
assert f"secret_name='{hero_1.secret_name}'" in result.stdout
assert f"name='{hero_2.name}'" in result.stdout
assert f"secret_name='{hero_2.secret_name}'" in result.stdout
def test_model_post(mocker):

20
tmp.sh
View file

@ -1,20 +0,0 @@
# Crude load generator: fire $1 curl requests per second at URL $2 forever,
# appending verbose per-request output to /tmp/perf-test.log.
#
# Usage: tmp.sh <calls-per-second> <url>
max="$1"
date
echo "url: $2
rate: $max calls / second"

START=$(date +%s);

# Fetch the URL once (silent, verbose-to-stderr), collapse CR/LF so each
# request logs as one chunk, and stamp it with the current time.
get () {
    curl -s -v "$1" 2>&1 | tr '\r\n' '\\n' | awk -v date="$(date +'%r')" '{print $0"\n-----", date}' >> /tmp/perf-test.log
}

while true
do
    # Print elapsed wall-clock time as minutes:seconds.
    echo $(($(date +%s) - START)) | awk '{print int($1/60)":"int($1%60)}'
    sleep 1
    for i in `seq 1 $max`
    do
        # Quote the URL so query strings with '&' or spaces survive
        # word-splitting (the original passed $2 unquoted).
        get "$2" &
    done
done

View file

@ -1,84 +0,0 @@
# Tiny terminal survival game: move with WASD on an ASCII map, lose food each
# day, take damage from randomly-placed enemies, survive as long as possible.
import random
import sys

# Initialize player attributes
player = {
    "name": input("Enter your character's name: "),
    "health": 100,
    "food": 100,
    "x": 5,
    "y": 5,
    "day": 1,
}

# Define game resources
# NOTE(review): this dict is never read or written below — dead state, or a
# planned feature; confirm before removing.
resources = {
    "food": 50,
    "water": 50,
}

# Define game constants
MAP_WIDTH, MAP_HEIGHT = 20, 10
PLAYER_CHAR = "(o)"
ENEMY_CHAR = "(?)"

# Game loop — one iteration per in-game day; ends when health reaches zero.
while player["health"] > 0:
    # Create the game map (fresh each day, so enemies don't persist).
    game_map = [[" " for _ in range(MAP_WIDTH)] for _ in range(MAP_HEIGHT)]
    game_map[player["y"]][player["x"]] = PLAYER_CHAR

    # Place enemies randomly on the map.
    # NOTE(review): an enemy can land on the player's current cell and
    # overwrite PLAYER_CHAR; enemies are placed before the player moves, so
    # the encounter check below compares the *new* position against this
    # day's pre-move map — confirm this is the intended mechanic.
    for _ in range(random.randint(1, 3)):
        enemy_x = random.randint(0, MAP_WIDTH - 1)
        enemy_y = random.randint(0, MAP_HEIGHT - 1)
        game_map[enemy_y][enemy_x] = ENEMY_CHAR

    # Print the game map and the player's status panel.
    for row in game_map:
        print("".join(row))
    print(f"\nDay {player['day']}")
    print(f"Name: {player['name']}")
    print(f"Health: {player['health']} HP {'*' * player['health']}")
    print(f"Food: {player['food']} Hunger {'*' * player['food']}")
    print(f"Coordinates: ({player['x']}, {player['y']})")

    # Player input for movement (case-insensitive; ignores other keys).
    move = input("Move (W/A/S/D): ").upper()

    # Update player position based on input, clamped to the map bounds.
    if move == "W" and player["y"] > 0:
        player["y"] -= 1
    elif move == "S" and player["y"] < MAP_HEIGHT - 1:
        player["y"] += 1
    elif move == "A" and player["x"] > 0:
        player["x"] -= 1
    elif move == "D" and player["x"] < MAP_WIDTH - 1:
        player["x"] += 1

    # Consume resources: daily hunger cost.
    player["food"] -= random.randint(5, 15)

    # Starvation: food is clamped at 0 and health decays instead.
    if player["food"] < 0:
        player["food"] = 0
        player["health"] -= 10

    # Check if the player encounters an enemy at the new position.
    if game_map[player["y"]][player["x"]] == ENEMY_CHAR:
        enemy_damage = random.randint(10, 30)
        player["health"] -= enemy_damage
        print(f"You encountered an enemy and took {enemy_damage} damage!")

    # Rest for the day.
    player["day"] += 1

    # Exit the game if health reaches zero.
    if player["health"] <= 0:
        print("Game Over. You did not survive.")
        break
    input("Press Enter to continue to the next day...")

sys.exit()