Compare commits
35 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 29535201ea | |||
| 6d5bfaeeda | |||
| f53879f961 | |||
| a665e1e31c | |||
| 85be664107 | |||
| 241b692584 | |||
| c3b1f40a58 | |||
| 32c78767df | |||
| f8bc84211c | |||
| 1f117ebda5 | |||
| facb3b7e18 | |||
| 51d637754f | |||
| d07d15897b | |||
| 42f16fb2b8 | |||
| f66255ea0d | |||
| d0b3712f17 | |||
| f0f1ce5018 | |||
| d747299c95 | |||
| 3490305e39 | |||
| 4ad297a291 | |||
| 28eda9e899 | |||
| e86e432102 | |||
| 8480cd3d24 | |||
| c501db800e | |||
| eb448597c8 | |||
| a7e6f2c4e5 | |||
| 7db07c7d35 | |||
|
|
c2bba92e39 | ||
| da41253583 | |||
| 70f4a89e30 | |||
|
|
78cc5ff0cd | ||
| a96b1cc5a9 | |||
| b98170651d | |||
| 8c571730fb | |||
| 956dbf546f |
978
.dockerignore
Normal file
|
|
@ -0,0 +1,978 @@
|
||||||
|
# flyctl launch added from .gitignore
|
||||||
|
# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
|
||||||
|
# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
|
||||||
|
|
||||||
|
### Data ###
|
||||||
|
**/*.csv
|
||||||
|
**/*.dat
|
||||||
|
**/*.efx
|
||||||
|
**/*.gbr
|
||||||
|
**/*.key
|
||||||
|
**/*.pps
|
||||||
|
**/*.ppt
|
||||||
|
**/*.pptx
|
||||||
|
**/*.sdf
|
||||||
|
**/*.tax2010
|
||||||
|
**/*.vcf
|
||||||
|
**/*.xml
|
||||||
|
|
||||||
|
### Emacs ###
|
||||||
|
# -*- mode: gitignore; -*-
|
||||||
|
**/*~
|
||||||
|
**/\#*\#
|
||||||
|
.emacs.desktop
|
||||||
|
.emacs.desktop.lock
|
||||||
|
**/*.elc
|
||||||
|
**/auto-save-list
|
||||||
|
**/tramp
|
||||||
|
**/.\#*
|
||||||
|
|
||||||
|
# Org-mode
|
||||||
|
**/.org-id-locations
|
||||||
|
**/*_archive
|
||||||
|
|
||||||
|
# flymake-mode
|
||||||
|
**/*_flymake.*
|
||||||
|
|
||||||
|
# eshell files
|
||||||
|
eshell/history
|
||||||
|
eshell/lastdir
|
||||||
|
|
||||||
|
# elpa packages
|
||||||
|
elpa
|
||||||
|
|
||||||
|
# reftex files
|
||||||
|
**/*.rel
|
||||||
|
|
||||||
|
# AUCTeX auto folder
|
||||||
|
auto
|
||||||
|
|
||||||
|
# cask packages
|
||||||
|
**/.cask
|
||||||
|
**/dist
|
||||||
|
|
||||||
|
# Flycheck
|
||||||
|
**/flycheck_*.el
|
||||||
|
|
||||||
|
# server auth directory
|
||||||
|
server
|
||||||
|
|
||||||
|
# projectiles files
|
||||||
|
**/.projectile
|
||||||
|
|
||||||
|
# directory configuration
|
||||||
|
**/.dir-locals.el
|
||||||
|
|
||||||
|
# network security
|
||||||
|
network-security.data
|
||||||
|
|
||||||
|
|
||||||
|
### Executable ###
|
||||||
|
**/*.app
|
||||||
|
**/*.bat
|
||||||
|
**/*.cgi
|
||||||
|
**/*.com
|
||||||
|
**/*.exe
|
||||||
|
**/*.gadget
|
||||||
|
**/*.jar
|
||||||
|
**/*.pif
|
||||||
|
**/*.vb
|
||||||
|
**/*.wsf
|
||||||
|
|
||||||
|
### Node ###
|
||||||
|
# Logs
|
||||||
|
**/logs
|
||||||
|
**/*.log
|
||||||
|
**/npm-debug.log*
|
||||||
|
**/yarn-debug.log*
|
||||||
|
**/yarn-error.log*
|
||||||
|
**/lerna-debug.log*
|
||||||
|
**/.pnpm-debug.log*
|
||||||
|
|
||||||
|
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||||
|
**/report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||||
|
|
||||||
|
# Runtime data
|
||||||
|
**/pids
|
||||||
|
**/*.pid
|
||||||
|
**/*.seed
|
||||||
|
**/*.pid.lock
|
||||||
|
|
||||||
|
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||||
|
**/lib-cov
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
**/coverage
|
||||||
|
**/*.lcov
|
||||||
|
|
||||||
|
# nyc test coverage
|
||||||
|
**/.nyc_output
|
||||||
|
|
||||||
|
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||||
|
**/.grunt
|
||||||
|
|
||||||
|
# Bower dependency directory (https://bower.io/)
|
||||||
|
**/bower_components
|
||||||
|
|
||||||
|
# node-waf configuration
|
||||||
|
**/.lock-wscript
|
||||||
|
|
||||||
|
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||||
|
**/build/Release
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
**/node_modules
|
||||||
|
**/jspm_packages
|
||||||
|
|
||||||
|
# Snowpack dependency directory (https://snowpack.dev/)
|
||||||
|
**/web_modules
|
||||||
|
|
||||||
|
# TypeScript cache
|
||||||
|
**/*.tsbuildinfo
|
||||||
|
|
||||||
|
# Optional npm cache directory
|
||||||
|
**/.npm
|
||||||
|
|
||||||
|
# Optional eslint cache
|
||||||
|
**/.eslintcache
|
||||||
|
|
||||||
|
# Optional stylelint cache
|
||||||
|
**/.stylelintcache
|
||||||
|
|
||||||
|
# Microbundle cache
|
||||||
|
**/.rpt2_cache
|
||||||
|
**/.rts2_cache_cjs
|
||||||
|
**/.rts2_cache_es
|
||||||
|
**/.rts2_cache_umd
|
||||||
|
|
||||||
|
# Optional REPL history
|
||||||
|
**/.node_repl_history
|
||||||
|
|
||||||
|
# Output of 'npm pack'
|
||||||
|
**/*.tgz
|
||||||
|
|
||||||
|
# Yarn Integrity file
|
||||||
|
**/.yarn-integrity
|
||||||
|
|
||||||
|
# dotenv environment variable files
|
||||||
|
**/.env
|
||||||
|
**/.env.development.local
|
||||||
|
**/.env.test.local
|
||||||
|
**/.env.production.local
|
||||||
|
**/.env.local
|
||||||
|
|
||||||
|
# parcel-bundler cache (https://parceljs.org/)
|
||||||
|
**/.cache
|
||||||
|
**/.parcel-cache
|
||||||
|
|
||||||
|
# Next.js build output
|
||||||
|
**/.next
|
||||||
|
**/out
|
||||||
|
|
||||||
|
# Nuxt.js build / generate output
|
||||||
|
**/.nuxt
|
||||||
|
**/dist
|
||||||
|
|
||||||
|
# Gatsby files
|
||||||
|
**/.cache
|
||||||
|
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||||
|
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||||
|
# public
|
||||||
|
|
||||||
|
# vuepress build output
|
||||||
|
**/.vuepress/dist
|
||||||
|
|
||||||
|
# vuepress v2.x temp and cache directory
|
||||||
|
**/.temp
|
||||||
|
|
||||||
|
# Docusaurus cache and generated files
|
||||||
|
**/.docusaurus
|
||||||
|
|
||||||
|
# Serverless directories
|
||||||
|
**/.serverless
|
||||||
|
|
||||||
|
# FuseBox cache
|
||||||
|
**/.fusebox
|
||||||
|
|
||||||
|
# DynamoDB Local files
|
||||||
|
**/.dynamodb
|
||||||
|
|
||||||
|
# TernJS port file
|
||||||
|
**/.tern-port
|
||||||
|
|
||||||
|
# Stores VSCode versions used for testing VSCode extensions
|
||||||
|
**/.vscode-test
|
||||||
|
|
||||||
|
# yarn v2
|
||||||
|
**/.yarn/cache
|
||||||
|
**/.yarn/unplugged
|
||||||
|
**/.yarn/build-state.yml
|
||||||
|
**/.yarn/install-state.gz
|
||||||
|
**/.pnp.*
|
||||||
|
|
||||||
|
### Node Patch ###
|
||||||
|
# Serverless Webpack directories
|
||||||
|
**/.webpack
|
||||||
|
|
||||||
|
# Optional stylelint cache
|
||||||
|
|
||||||
|
# SvelteKit build / generate output
|
||||||
|
**/.svelte-kit
|
||||||
|
|
||||||
|
### PyCharm ###
|
||||||
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
||||||
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
|
# User-specific stuff
|
||||||
|
**/.idea/**/workspace.xml
|
||||||
|
**/.idea/**/tasks.xml
|
||||||
|
**/.idea/**/usage.statistics.xml
|
||||||
|
**/.idea/**/dictionaries
|
||||||
|
**/.idea/**/shelf
|
||||||
|
|
||||||
|
# AWS User-specific
|
||||||
|
**/.idea/**/aws.xml
|
||||||
|
|
||||||
|
# Generated files
|
||||||
|
**/.idea/**/contentModel.xml
|
||||||
|
|
||||||
|
# Sensitive or high-churn files
|
||||||
|
**/.idea/**/dataSources
|
||||||
|
**/.idea/**/dataSources.ids
|
||||||
|
**/.idea/**/dataSources.local.xml
|
||||||
|
**/.idea/**/sqlDataSources.xml
|
||||||
|
**/.idea/**/dynamic.xml
|
||||||
|
**/.idea/**/uiDesigner.xml
|
||||||
|
**/.idea/**/dbnavigator.xml
|
||||||
|
|
||||||
|
# Gradle
|
||||||
|
**/.idea/**/gradle.xml
|
||||||
|
**/.idea/**/libraries
|
||||||
|
|
||||||
|
# Gradle and Maven with auto-import
|
||||||
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
|
# auto-import.
|
||||||
|
# .idea/artifacts
|
||||||
|
# .idea/compiler.xml
|
||||||
|
# .idea/jarRepositories.xml
|
||||||
|
# .idea/modules.xml
|
||||||
|
# .idea/*.iml
|
||||||
|
# .idea/modules
|
||||||
|
# *.iml
|
||||||
|
# *.ipr
|
||||||
|
|
||||||
|
# CMake
|
||||||
|
**/cmake-build-*
|
||||||
|
|
||||||
|
# Mongo Explorer plugin
|
||||||
|
**/.idea/**/mongoSettings.xml
|
||||||
|
|
||||||
|
# File-based project format
|
||||||
|
**/*.iws
|
||||||
|
|
||||||
|
# IntelliJ
|
||||||
|
**/out
|
||||||
|
|
||||||
|
# mpeltonen/sbt-idea plugin
|
||||||
|
**/.idea_modules
|
||||||
|
|
||||||
|
# JIRA plugin
|
||||||
|
**/atlassian-ide-plugin.xml
|
||||||
|
|
||||||
|
# Cursive Clojure plugin
|
||||||
|
**/.idea/replstate.xml
|
||||||
|
|
||||||
|
# SonarLint plugin
|
||||||
|
**/.idea/sonarlint
|
||||||
|
|
||||||
|
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||||
|
**/com_crashlytics_export_strings.xml
|
||||||
|
**/crashlytics.properties
|
||||||
|
**/crashlytics-build.properties
|
||||||
|
**/fabric.properties
|
||||||
|
|
||||||
|
# Editor-based Rest Client
|
||||||
|
**/.idea/httpRequests
|
||||||
|
|
||||||
|
# Android studio 3.1+ serialized cache file
|
||||||
|
**/.idea/caches/build_file_checksums.ser
|
||||||
|
|
||||||
|
### PyCharm Patch ###
|
||||||
|
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
|
||||||
|
|
||||||
|
# *.iml
|
||||||
|
# modules.xml
|
||||||
|
# .idea/misc.xml
|
||||||
|
# *.ipr
|
||||||
|
|
||||||
|
# Sonarlint plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/7973-sonarlint
|
||||||
|
**/.idea/**/sonarlint
|
||||||
|
|
||||||
|
# SonarQube Plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
|
||||||
|
**/.idea/**/sonarIssues.xml
|
||||||
|
|
||||||
|
# Markdown Navigator plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
|
||||||
|
**/.idea/**/markdown-navigator.xml
|
||||||
|
**/.idea/**/markdown-navigator-enh.xml
|
||||||
|
**/.idea/**/markdown-navigator
|
||||||
|
|
||||||
|
# Cache file creation bug
|
||||||
|
# See https://youtrack.jetbrains.com/issue/JBR-2257
|
||||||
|
**/.idea/$CACHE_FILE$
|
||||||
|
|
||||||
|
# CodeStream plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/12206-codestream
|
||||||
|
**/.idea/codestream.xml
|
||||||
|
|
||||||
|
# Azure Toolkit for IntelliJ plugin
|
||||||
|
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
|
||||||
|
**/.idea/**/azureSettings.xml
|
||||||
|
|
||||||
|
### Python ###
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
**/__pycache__
|
||||||
|
**/*.py[cod]
|
||||||
|
**/*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
**/*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
**/.Python
|
||||||
|
**/build
|
||||||
|
**/develop-eggs
|
||||||
|
**/downloads
|
||||||
|
**/eggs
|
||||||
|
**/.eggs
|
||||||
|
**/lib
|
||||||
|
**/lib64
|
||||||
|
**/parts
|
||||||
|
**/sdist
|
||||||
|
**/var
|
||||||
|
**/wheels
|
||||||
|
**/share/python-wheels
|
||||||
|
**/*.egg-info
|
||||||
|
**/.installed.cfg
|
||||||
|
**/*.egg
|
||||||
|
**/MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
**/*.manifest
|
||||||
|
**/*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
**/pip-log.txt
|
||||||
|
**/pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
**/htmlcov
|
||||||
|
**/.tox
|
||||||
|
**/.nox
|
||||||
|
**/.coverage
|
||||||
|
**/.coverage.*
|
||||||
|
**/nosetests.xml
|
||||||
|
**/coverage.xml
|
||||||
|
**/*.cover
|
||||||
|
**/*.py,cover
|
||||||
|
**/.hypothesis
|
||||||
|
**/.pytest_cache
|
||||||
|
**/cover
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
**/*.mo
|
||||||
|
**/*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
**/local_settings.py
|
||||||
|
**/db.sqlite3
|
||||||
|
**/db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
**/instance
|
||||||
|
**/.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
**/.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
**/docs/_build
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
**/.pybuilder
|
||||||
|
**/target
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
**/.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
**/profile_default
|
||||||
|
**/ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# pdm
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||||
|
#pdm.lock
|
||||||
|
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||||
|
# in version control.
|
||||||
|
# https://pdm.fming.dev/#use-with-ide
|
||||||
|
**/.pdm.toml
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||||
|
**/__pypackages__
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
**/celerybeat-schedule
|
||||||
|
**/celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
**/*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
**/.venv
|
||||||
|
**/env
|
||||||
|
**/venv
|
||||||
|
**/ENV
|
||||||
|
**/env.bak
|
||||||
|
**/venv.bak
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
**/.spyderproject
|
||||||
|
**/.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
**/.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
**/.mypy_cache
|
||||||
|
**/.dmypy.json
|
||||||
|
**/dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
**/.pyre
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
**/.pytype
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
**/cython_debug
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||||
|
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||||
|
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||||
|
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||||
|
#.idea/
|
||||||
|
|
||||||
|
### Python Patch ###
|
||||||
|
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
||||||
|
**/poetry.toml
|
||||||
|
|
||||||
|
# ruff
|
||||||
|
**/.ruff_cache
|
||||||
|
|
||||||
|
# LSP config files
|
||||||
|
**/pyrightconfig.json
|
||||||
|
|
||||||
|
### SublimeText ###
|
||||||
|
# Cache files for Sublime Text
|
||||||
|
**/*.tmlanguage.cache
|
||||||
|
**/*.tmPreferences.cache
|
||||||
|
**/*.stTheme.cache
|
||||||
|
|
||||||
|
# Workspace files are user-specific
|
||||||
|
**/*.sublime-workspace
|
||||||
|
|
||||||
|
# Project files should be checked into the repository, unless a significant
|
||||||
|
# proportion of contributors will probably not be using Sublime Text
|
||||||
|
# *.sublime-project
|
||||||
|
|
||||||
|
# SFTP configuration file
|
||||||
|
**/sftp-config.json
|
||||||
|
**/sftp-config-alt*.json
|
||||||
|
|
||||||
|
# Package control specific files
|
||||||
|
**/Package Control.last-run
|
||||||
|
**/Package Control.ca-list
|
||||||
|
**/Package Control.ca-bundle
|
||||||
|
**/Package Control.system-ca-bundle
|
||||||
|
**/Package Control.cache
|
||||||
|
**/Package Control.ca-certs
|
||||||
|
**/Package Control.merged-ca-bundle
|
||||||
|
**/Package Control.user-ca-bundle
|
||||||
|
**/oscrypto-ca-bundle.crt
|
||||||
|
**/bh_unicode_properties.cache
|
||||||
|
|
||||||
|
# Sublime-github package stores a github token in this file
|
||||||
|
# https://packagecontrol.io/packages/sublime-github
|
||||||
|
**/GitHub.sublime-settings
|
||||||
|
|
||||||
|
### Vim ###
|
||||||
|
# Swap
|
||||||
|
**/[._]*.s[a-v][a-z]
|
||||||
|
!**/*.svg # comment out if you don't need vector files
|
||||||
|
**/[._]*.sw[a-p]
|
||||||
|
**/[._]s[a-rt-v][a-z]
|
||||||
|
**/[._]ss[a-gi-z]
|
||||||
|
**/[._]sw[a-p]
|
||||||
|
|
||||||
|
# Session
|
||||||
|
**/Session.vim
|
||||||
|
**/Sessionx.vim
|
||||||
|
|
||||||
|
# Temporary
|
||||||
|
**/.netrwhist
|
||||||
|
# Auto-generated tag files
|
||||||
|
**/tags
|
||||||
|
# Persistent undo
|
||||||
|
**/[._]*.un~
|
||||||
|
|
||||||
|
### VisualStudioCode ###
|
||||||
|
**/.vscode/*
|
||||||
|
!**/.vscode/settings.json
|
||||||
|
!**/.vscode/tasks.json
|
||||||
|
!**/.vscode/launch.json
|
||||||
|
!**/.vscode/extensions.json
|
||||||
|
!**/.vscode/*.code-snippets
|
||||||
|
|
||||||
|
# Local History for Visual Studio Code
|
||||||
|
**/.history
|
||||||
|
|
||||||
|
# Built Visual Studio Code Extensions
|
||||||
|
**/*.vsix
|
||||||
|
|
||||||
|
### VisualStudioCode Patch ###
|
||||||
|
# Ignore all local history of files
|
||||||
|
**/.history
|
||||||
|
**/.ionide
|
||||||
|
|
||||||
|
### VisualStudio ###
|
||||||
|
## Ignore Visual Studio temporary files, build results, and
|
||||||
|
## files generated by popular Visual Studio add-ons.
|
||||||
|
##
|
||||||
|
## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore
|
||||||
|
|
||||||
|
# User-specific files
|
||||||
|
**/*.rsuser
|
||||||
|
**/*.suo
|
||||||
|
**/*.user
|
||||||
|
**/*.userosscache
|
||||||
|
**/*.sln.docstates
|
||||||
|
|
||||||
|
# User-specific files (MonoDevelop/Xamarin Studio)
|
||||||
|
**/*.userprefs
|
||||||
|
|
||||||
|
# Mono auto generated files
|
||||||
|
**/mono_crash.*
|
||||||
|
|
||||||
|
# Build results
|
||||||
|
**/[Dd]ebug
|
||||||
|
**/[Dd]ebugPublic
|
||||||
|
**/[Rr]elease
|
||||||
|
**/[Rr]eleases
|
||||||
|
**/x64
|
||||||
|
**/x86
|
||||||
|
**/[Ww][Ii][Nn]32
|
||||||
|
**/[Aa][Rr][Mm]
|
||||||
|
**/[Aa][Rr][Mm]64
|
||||||
|
**/bld
|
||||||
|
**/[Bb]in
|
||||||
|
**/[Oo]bj
|
||||||
|
**/[Ll]og
|
||||||
|
**/[Ll]ogs
|
||||||
|
|
||||||
|
# Visual Studio 2015/2017 cache/options directory
|
||||||
|
**/.vs
|
||||||
|
# Uncomment if you have tasks that create the project's static files in wwwroot
|
||||||
|
#wwwroot/
|
||||||
|
|
||||||
|
# Visual Studio 2017 auto generated files
|
||||||
|
**/Generated\ Files
|
||||||
|
|
||||||
|
# MSTest test Results
|
||||||
|
**/[Tt]est[Rr]esult*
|
||||||
|
**/[Bb]uild[Ll]og.*
|
||||||
|
|
||||||
|
# NUnit
|
||||||
|
**/*.VisualState.xml
|
||||||
|
**/TestResult.xml
|
||||||
|
**/nunit-*.xml
|
||||||
|
|
||||||
|
# Build Results of an ATL Project
|
||||||
|
**/[Dd]ebugPS
|
||||||
|
**/[Rr]eleasePS
|
||||||
|
**/dlldata.c
|
||||||
|
|
||||||
|
# Benchmark Results
|
||||||
|
**/BenchmarkDotNet.Artifacts
|
||||||
|
|
||||||
|
# .NET Core
|
||||||
|
**/project.lock.json
|
||||||
|
**/project.fragment.lock.json
|
||||||
|
**/artifacts
|
||||||
|
|
||||||
|
# ASP.NET Scaffolding
|
||||||
|
**/ScaffoldingReadMe.txt
|
||||||
|
|
||||||
|
# StyleCop
|
||||||
|
**/StyleCopReport.xml
|
||||||
|
|
||||||
|
# Files built by Visual Studio
|
||||||
|
**/*_i.c
|
||||||
|
**/*_p.c
|
||||||
|
**/*_h.h
|
||||||
|
**/*.ilk
|
||||||
|
**/*.meta
|
||||||
|
**/*.obj
|
||||||
|
**/*.iobj
|
||||||
|
**/*.pch
|
||||||
|
**/*.pdb
|
||||||
|
**/*.ipdb
|
||||||
|
**/*.pgc
|
||||||
|
**/*.pgd
|
||||||
|
**/*.rsp
|
||||||
|
**/*.sbr
|
||||||
|
**/*.tlb
|
||||||
|
**/*.tli
|
||||||
|
**/*.tlh
|
||||||
|
**/*.tmp
|
||||||
|
**/*.tmp_proj
|
||||||
|
**/*_wpftmp.csproj
|
||||||
|
**/*.tlog
|
||||||
|
**/*.vspscc
|
||||||
|
**/*.vssscc
|
||||||
|
**/.builds
|
||||||
|
**/*.pidb
|
||||||
|
**/*.svclog
|
||||||
|
**/*.scc
|
||||||
|
|
||||||
|
# Chutzpah Test files
|
||||||
|
**/_Chutzpah*
|
||||||
|
|
||||||
|
# Visual C++ cache files
|
||||||
|
**/ipch
|
||||||
|
**/*.aps
|
||||||
|
**/*.ncb
|
||||||
|
**/*.opendb
|
||||||
|
**/*.opensdf
|
||||||
|
**/*.cachefile
|
||||||
|
**/*.VC.db
|
||||||
|
**/*.VC.VC.opendb
|
||||||
|
|
||||||
|
# Visual Studio profiler
|
||||||
|
**/*.psess
|
||||||
|
**/*.vsp
|
||||||
|
**/*.vspx
|
||||||
|
**/*.sap
|
||||||
|
|
||||||
|
# Visual Studio Trace Files
|
||||||
|
**/*.e2e
|
||||||
|
|
||||||
|
# TFS 2012 Local Workspace
|
||||||
|
**/$tf
|
||||||
|
|
||||||
|
# Guidance Automation Toolkit
|
||||||
|
**/*.gpState
|
||||||
|
|
||||||
|
# ReSharper is a .NET coding add-in
|
||||||
|
**/_ReSharper*
|
||||||
|
**/*.[Rr]e[Ss]harper
|
||||||
|
**/*.DotSettings.user
|
||||||
|
|
||||||
|
# TeamCity is a build add-in
|
||||||
|
**/_TeamCity*
|
||||||
|
|
||||||
|
# DotCover is a Code Coverage Tool
|
||||||
|
**/*.dotCover
|
||||||
|
|
||||||
|
# AxoCover is a Code Coverage Tool
|
||||||
|
**/.axoCover/*
|
||||||
|
!**/.axoCover/settings.json
|
||||||
|
|
||||||
|
# Coverlet is a free, cross platform Code Coverage Tool
|
||||||
|
**/coverage*.json
|
||||||
|
**/coverage*.xml
|
||||||
|
**/coverage*.info
|
||||||
|
|
||||||
|
# Visual Studio code coverage results
|
||||||
|
**/*.coverage
|
||||||
|
**/*.coveragexml
|
||||||
|
|
||||||
|
# NCrunch
|
||||||
|
**/_NCrunch_*
|
||||||
|
**/.*crunch*.local.xml
|
||||||
|
**/nCrunchTemp_*
|
||||||
|
|
||||||
|
# MightyMoose
|
||||||
|
**/*.mm.*
|
||||||
|
**/AutoTest.Net
|
||||||
|
|
||||||
|
# Web workbench (sass)
|
||||||
|
**/.sass-cache
|
||||||
|
|
||||||
|
# Installshield output folder
|
||||||
|
**/[Ee]xpress
|
||||||
|
|
||||||
|
# DocProject is a documentation generator add-in
|
||||||
|
**/DocProject/buildhelp
|
||||||
|
**/DocProject/Help/*.HxT
|
||||||
|
**/DocProject/Help/*.HxC
|
||||||
|
**/DocProject/Help/*.hhc
|
||||||
|
**/DocProject/Help/*.hhk
|
||||||
|
**/DocProject/Help/*.hhp
|
||||||
|
**/DocProject/Help/Html2
|
||||||
|
**/DocProject/Help/html
|
||||||
|
|
||||||
|
# Click-Once directory
|
||||||
|
**/publish
|
||||||
|
|
||||||
|
# Publish Web Output
|
||||||
|
**/*.[Pp]ublish.xml
|
||||||
|
**/*.azurePubxml
|
||||||
|
# Note: Comment the next line if you want to checkin your web deploy settings,
|
||||||
|
# but database connection strings (with potential passwords) will be unencrypted
|
||||||
|
**/*.pubxml
|
||||||
|
**/*.publishproj
|
||||||
|
|
||||||
|
# Microsoft Azure Web App publish settings. Comment the next line if you want to
|
||||||
|
# checkin your Azure Web App publish settings, but sensitive information contained
|
||||||
|
# in these scripts will be unencrypted
|
||||||
|
**/PublishScripts
|
||||||
|
|
||||||
|
# NuGet Packages
|
||||||
|
**/*.nupkg
|
||||||
|
# NuGet Symbol Packages
|
||||||
|
**/*.snupkg
|
||||||
|
# The packages folder can be ignored because of Package Restore
|
||||||
|
**/**/[Pp]ackages/*
|
||||||
|
# except build/, which is used as an MSBuild target.
|
||||||
|
!**/**/[Pp]ackages/build
|
||||||
|
# Uncomment if necessary however generally it will be regenerated when needed
|
||||||
|
#!**/[Pp]ackages/repositories.config
|
||||||
|
# NuGet v3's project.json files produces more ignorable files
|
||||||
|
**/*.nuget.props
|
||||||
|
**/*.nuget.targets
|
||||||
|
|
||||||
|
# Microsoft Azure Build Output
|
||||||
|
**/csx
|
||||||
|
**/*.build.csdef
|
||||||
|
|
||||||
|
# Microsoft Azure Emulator
|
||||||
|
**/ecf
|
||||||
|
**/rcf
|
||||||
|
|
||||||
|
# Windows Store app package directories and files
|
||||||
|
**/AppPackages
|
||||||
|
**/BundleArtifacts
|
||||||
|
**/Package.StoreAssociation.xml
|
||||||
|
**/_pkginfo.txt
|
||||||
|
**/*.appx
|
||||||
|
**/*.appxbundle
|
||||||
|
**/*.appxupload
|
||||||
|
|
||||||
|
# Visual Studio cache files
|
||||||
|
# files ending in .cache can be ignored
|
||||||
|
**/*.[Cc]ache
|
||||||
|
# but keep track of directories ending in .cache
|
||||||
|
!**/?*.[Cc]ache
|
||||||
|
|
||||||
|
# Others
|
||||||
|
**/ClientBin
|
||||||
|
**/~$*
|
||||||
|
**/*.dbmdl
|
||||||
|
**/*.dbproj.schemaview
|
||||||
|
**/*.jfm
|
||||||
|
**/*.pfx
|
||||||
|
**/*.publishsettings
|
||||||
|
**/orleans.codegen.cs
|
||||||
|
|
||||||
|
# Including strong name files can present a security risk
|
||||||
|
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
|
||||||
|
#*.snk
|
||||||
|
|
||||||
|
# Since there are multiple workflows, uncomment next line to ignore bower_components
|
||||||
|
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
|
||||||
|
#bower_components/
|
||||||
|
|
||||||
|
# RIA/Silverlight projects
|
||||||
|
**/Generated_Code
|
||||||
|
|
||||||
|
# Backup & report files from converting an old project file
|
||||||
|
# to a newer Visual Studio version. Backup files are not needed,
|
||||||
|
# because we have git ;-)
|
||||||
|
**/_UpgradeReport_Files
|
||||||
|
**/Backup*
|
||||||
|
**/UpgradeLog*.XML
|
||||||
|
**/UpgradeLog*.htm
|
||||||
|
**/ServiceFabricBackup
|
||||||
|
**/*.rptproj.bak
|
||||||
|
|
||||||
|
# SQL Server files
|
||||||
|
**/*.mdf
|
||||||
|
**/*.ldf
|
||||||
|
**/*.ndf
|
||||||
|
|
||||||
|
# Business Intelligence projects
|
||||||
|
**/*.rdl.data
|
||||||
|
**/*.bim.layout
|
||||||
|
**/*.bim_*.settings
|
||||||
|
**/*.rptproj.rsuser
|
||||||
|
**/*- [Bb]ackup.rdl
|
||||||
|
**/*- [Bb]ackup ([0-9]).rdl
|
||||||
|
**/*- [Bb]ackup ([0-9][0-9]).rdl
|
||||||
|
|
||||||
|
# Microsoft Fakes
|
||||||
|
**/FakesAssemblies
|
||||||
|
|
||||||
|
# GhostDoc plugin setting file
|
||||||
|
**/*.GhostDoc.xml
|
||||||
|
|
||||||
|
# Node.js Tools for Visual Studio
|
||||||
|
**/.ntvs_analysis.dat
|
||||||
|
|
||||||
|
# Visual Studio 6 build log
|
||||||
|
**/*.plg
|
||||||
|
|
||||||
|
# Visual Studio 6 workspace options file
|
||||||
|
**/*.opt
|
||||||
|
|
||||||
|
# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
|
||||||
|
**/*.vbw
|
||||||
|
|
||||||
|
# Visual Studio 6 auto-generated project file (contains which files were open etc.)
|
||||||
|
**/*.vbp
|
||||||
|
|
||||||
|
# Visual Studio 6 workspace and project file (working project files containing files to include in project)
|
||||||
|
**/*.dsw
|
||||||
|
**/*.dsp
|
||||||
|
|
||||||
|
# Visual Studio 6 technical files
|
||||||
|
|
||||||
|
# Visual Studio LightSwitch build output
|
||||||
|
**/**/*.HTMLClient/GeneratedArtifacts
|
||||||
|
**/**/*.DesktopClient/GeneratedArtifacts
|
||||||
|
**/**/*.DesktopClient/ModelManifest.xml
|
||||||
|
**/**/*.Server/GeneratedArtifacts
|
||||||
|
**/**/*.Server/ModelManifest.xml
|
||||||
|
**/_Pvt_Extensions
|
||||||
|
|
||||||
|
# Paket dependency manager
|
||||||
|
**/.paket/paket.exe
|
||||||
|
**/paket-files
|
||||||
|
|
||||||
|
# FAKE - F# Make
|
||||||
|
**/.fake
|
||||||
|
|
||||||
|
# CodeRush personal settings
|
||||||
|
**/.cr/personal
|
||||||
|
|
||||||
|
# Python Tools for Visual Studio (PTVS)
|
||||||
|
**/*.pyc
|
||||||
|
|
||||||
|
# Cake - Uncomment if you are using it
|
||||||
|
# tools/**
|
||||||
|
# !tools/packages.config
|
||||||
|
|
||||||
|
# Tabs Studio
|
||||||
|
**/*.tss
|
||||||
|
|
||||||
|
# Telerik's JustMock configuration file
|
||||||
|
**/*.jmconfig
|
||||||
|
|
||||||
|
# BizTalk build output
|
||||||
|
**/*.btp.cs
|
||||||
|
**/*.btm.cs
|
||||||
|
**/*.odx.cs
|
||||||
|
**/*.xsd.cs
|
||||||
|
|
||||||
|
# OpenCover UI analysis results
|
||||||
|
**/OpenCover
|
||||||
|
|
||||||
|
# Azure Stream Analytics local run output
|
||||||
|
**/ASALocalRun
|
||||||
|
|
||||||
|
# MSBuild Binary and Structured Log
|
||||||
|
**/*.binlog
|
||||||
|
|
||||||
|
# NVidia Nsight GPU debugger configuration file
|
||||||
|
**/*.nvuser
|
||||||
|
|
||||||
|
# MFractors (Xamarin productivity tool) working folder
|
||||||
|
**/.mfractor
|
||||||
|
|
||||||
|
# Local History for Visual Studio
|
||||||
|
**/.localhistory
|
||||||
|
|
||||||
|
# Visual Studio History (VSHistory) files
|
||||||
|
**/.vshistory
|
||||||
|
|
||||||
|
# BeatPulse healthcheck temp database
|
||||||
|
**/healthchecksdb
|
||||||
|
|
||||||
|
# Backup folder for Package Reference Convert tool in Visual Studio 2017
|
||||||
|
**/MigrationBackup
|
||||||
|
|
||||||
|
# Ionide (cross platform F# VS Code tools) working folder
|
||||||
|
**/.ionide
|
||||||
|
|
||||||
|
# Fody - auto-generated XML schema
|
||||||
|
**/FodyWeavers.xsd
|
||||||
|
|
||||||
|
# VS Code files for those working on multiple tools
|
||||||
|
**/*.code-workspace
|
||||||
|
|
||||||
|
# Local History for Visual Studio Code
|
||||||
|
|
||||||
|
# Windows Installer files from build outputs
|
||||||
|
**/*.cab
|
||||||
|
**/*.msi
|
||||||
|
**/*.msix
|
||||||
|
**/*.msm
|
||||||
|
**/*.msp
|
||||||
|
|
||||||
|
# JetBrains Rider
|
||||||
|
**/*.sln.iml
|
||||||
|
|
||||||
|
### VisualStudio Patch ###
|
||||||
|
# Additional files built by Visual Studio
|
||||||
|
|
||||||
|
# End of https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode
|
||||||
|
**/database.db
|
||||||
|
**/database.db
|
||||||
|
**/database.db
|
||||||
|
**/.markata.cache
|
||||||
|
**/database.sqlite
|
||||||
|
|
||||||
|
# flyctl launch added from .pytest_cache/.gitignore
|
||||||
|
# Created by pytest automatically.
|
||||||
|
.pytest_cache/**/*
|
||||||
|
|
||||||
|
# flyctl launch added from .ruff_cache/.gitignore
|
||||||
|
.ruff_cache/**/*
|
||||||
|
fly.toml
|
||||||
10
.github/workflows/release.yml
vendored
|
|
@ -41,13 +41,15 @@ jobs:
|
||||||
with:
|
with:
|
||||||
name: lsm-${{ matrix.os }}
|
name: lsm-${{ matrix.os }}
|
||||||
path: dist
|
path: dist
|
||||||
- name: zip up dist
|
- uses: vimtor/action-zip@v1
|
||||||
run: zip -r dist.zip lsm-${{ matrix.os }}
|
with:
|
||||||
|
files: dist/
|
||||||
|
dest: lsm-${{ matrix.os }}.zip
|
||||||
- name: Upload Release Asset to versioned release
|
- name: Upload Release Asset to versioned release
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: gh release upload v${{ github.run_number }} lsm-${{ matrix.os }}
|
run: gh release upload v${{ github.run_number }} lsm-${{ matrix.os }}.zip
|
||||||
- name: Upload Release Asset to latest release
|
- name: Upload Release Asset to latest release
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
run: gh release upload latest lsm-${{ matrix.os }}
|
run: gh release upload latest lsm-${{ matrix.os }}.zip
|
||||||
|
|
|
||||||
2
.gitignore
vendored
|
|
@ -967,3 +967,5 @@ database.db
|
||||||
database.db
|
database.db
|
||||||
.markata.cache
|
.markata.cache
|
||||||
database.sqlite
|
database.sqlite
|
||||||
|
.env.dev
|
||||||
|
.env.dev.docker
|
||||||
|
|
|
||||||
2
.pyflyby
|
|
@ -1,3 +1,5 @@
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
|
||||||
from learn_sql_model.api.websocket_connection_manager import manager
|
from learn_sql_model.api.websocket_connection_manager import manager
|
||||||
from learn_sql_model.config import Config
|
from learn_sql_model.config import Config
|
||||||
|
|
|
||||||
|
|
@ -4,9 +4,9 @@ WORKDIR /app
|
||||||
Copy pyproject.toml /app
|
Copy pyproject.toml /app
|
||||||
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
|
COPY learn_sql_model/__about__.py /app/learn_sql_model/__about__.py
|
||||||
COPY README.md /app
|
COPY README.md /app
|
||||||
RUN pip3 install .
|
RUN pip3 install '.[all]'
|
||||||
COPY . /app
|
COPY . /app
|
||||||
RUN pip3 install .
|
RUN pip3 install '.[all]'
|
||||||
|
|
||||||
EXPOSE 5000
|
EXPOSE 5000
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,3 +1,6 @@
|
||||||
|
> [!IMPORTANT]
|
||||||
|
> This project has been moved to https://git.wayl.one/waylon/learn-sql-model
|
||||||
|
|
||||||
# Learn SQL Model
|
# Learn SQL Model
|
||||||
|
|
||||||
learning sql model
|
learning sql model
|
||||||
|
|
|
||||||
BIN
creeper.png
Normal file
|
After Width: | Height: | Size: 769 B |
70
d3.py
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
from jinja2 import Environment, FileSystemLoader
|
||||||
|
|
||||||
|
|
||||||
|
def get_tables_and_columns(conn):
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||||
|
tables = [
|
||||||
|
{
|
||||||
|
"name": table[0],
|
||||||
|
"columns": get_columns(conn, table[0]),
|
||||||
|
"foreign_keys": get_foreign_keys(conn, table[0]),
|
||||||
|
}
|
||||||
|
for table in cursor.fetchall()
|
||||||
|
]
|
||||||
|
return tables
|
||||||
|
|
||||||
|
|
||||||
|
def get_columns(conn, table_name):
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(f"PRAGMA table_info({table_name});")
|
||||||
|
columns = [row[1] for row in cursor.fetchall()]
|
||||||
|
return columns
|
||||||
|
|
||||||
|
|
||||||
|
def get_foreign_keys(conn, table_name):
|
||||||
|
cursor = conn.cursor()
|
||||||
|
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
|
||||||
|
foreign_keys = [
|
||||||
|
{"id": row[0], "from": row[3], "to_table": row[2], "to": row[4]}
|
||||||
|
for row in cursor.fetchall()
|
||||||
|
]
|
||||||
|
return foreign_keys
|
||||||
|
|
||||||
|
|
||||||
|
def generate_links(tables):
|
||||||
|
links = []
|
||||||
|
for t_index, table in enumerate(tables):
|
||||||
|
for fk in table["foreign_keys"]:
|
||||||
|
target_index = next(
|
||||||
|
i for i, target in enumerate(tables) if target["name"] == fk["to_table"]
|
||||||
|
)
|
||||||
|
source_y = 40 + table["columns"].index(fk["from"]) * 20
|
||||||
|
target_y = 40 + tables[target_index]["columns"].index(fk["to"]) * 20
|
||||||
|
links.append(
|
||||||
|
{
|
||||||
|
"source": {"x": 50 + t_index * 150 + 120, "y": 50 + source_y},
|
||||||
|
"target": {"x": 50 + target_index * 150, "y": 50 + target_y},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return links
|
||||||
|
|
||||||
|
|
||||||
|
def generate_er_diagram(database_path):
|
||||||
|
conn = sqlite3.connect(database_path)
|
||||||
|
tables = get_tables_and_columns(conn)
|
||||||
|
links = [] # Currently, we won't extract relationships
|
||||||
|
links = generate_links(tables)
|
||||||
|
|
||||||
|
env = Environment(loader=FileSystemLoader("templates"))
|
||||||
|
template = env.get_template("er_diagram.html")
|
||||||
|
|
||||||
|
with open("index.html", "w") as f:
|
||||||
|
f.write(template.render(tables=tables, links=links))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
db_path = "database.db"
|
||||||
|
generate_er_diagram(db_path)
|
||||||
72
database.md
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| f48730a783a5 |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | x | y | size | age | shoe_size | pet_id | id |
|
||||||
|
|------|-------------|---|---|------|-----|-----------|--------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| x | INTEGER | | | |
|
||||||
|
| y | INTEGER | | | |
|
||||||
|
| size | INTEGER | | | |
|
||||||
|
| age | INTEGER | | | |
|
||||||
|
| shoe_size | INTEGER | | | |
|
||||||
|
| pet_id | INTEGER | pet.id | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
er_diagram.png
Normal file
|
After Width: | Height: | Size: 67 KiB |
BIN
im.png
Normal file
|
After Width: | Height: | Size: 27 KiB |
129
index.html
Normal file
|
|
@ -0,0 +1,129 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<title>ER Diagram</title>
|
||||||
|
<!-- Include d3.js -->
|
||||||
|
<script src="https://d3js.org/d3.v6.min.js"></script>
|
||||||
|
<style>
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table {
|
||||||
|
font-family: Arial, sans-serif;
|
||||||
|
font-size: 14px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-name {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.foreign-key {
|
||||||
|
fill: #b30000;
|
||||||
|
}
|
||||||
|
|
||||||
|
.link {
|
||||||
|
stroke: #999;
|
||||||
|
stroke-opacity: 0.6;
|
||||||
|
stroke-width: 2px;
|
||||||
|
fill: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.link-curved-path {
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div id="er-diagram"></div>
|
||||||
|
<script>
|
||||||
|
// Generate ER diagram
|
||||||
|
const tables = [{'name': 'pet', 'columns': ['name', 'birthday', 'id'], 'foreign_keys': []}, {'name': 'hero', 'columns': ['name', 'secret_name', 'x', 'y', 'size', 'age', 'shoe_size', 'pet_id', 'id'], 'foreign_keys': [{'id': 0, 'from': 'pet_id', 'to_table': 'pet', 'to': 'id'}]}];
|
||||||
|
const links = [{'source': {'x': 320, 'y': 230}, 'target': {'x': 50, 'y': 130}}];
|
||||||
|
|
||||||
|
const width = window.innerWidth;
|
||||||
|
const height = window.innerHeight;
|
||||||
|
|
||||||
|
const tableElemWidth = 120;
|
||||||
|
const tableElemHeight = d => 20 * (d.columns.length + 1);
|
||||||
|
|
||||||
|
let svg = d3.select("#er-diagram")
|
||||||
|
.append("svg")
|
||||||
|
.attr("width", width)
|
||||||
|
.attr("height", height);
|
||||||
|
|
||||||
|
let g = svg.append("g");
|
||||||
|
|
||||||
|
let linkGroup = g.selectAll(".link")
|
||||||
|
.data(links)
|
||||||
|
.join("path")
|
||||||
|
.attr("class", "link");
|
||||||
|
|
||||||
|
let tableGroup = g.selectAll(".table")
|
||||||
|
.data(tables)
|
||||||
|
.join("g")
|
||||||
|
.attr("class", "table")
|
||||||
|
.classed("collapsed", false)
|
||||||
|
.on("click", (event, d) => {
|
||||||
|
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
|
||||||
|
});
|
||||||
|
|
||||||
|
let zoomBehavior = d3.zoom()
|
||||||
|
.scaleExtent([0.1, 4])
|
||||||
|
.on("zoom", function (event) {
|
||||||
|
g.attr("transform", event.transform);
|
||||||
|
});
|
||||||
|
|
||||||
|
svg.call(zoomBehavior);
|
||||||
|
|
||||||
|
let rect = tableGroup.append("rect")
|
||||||
|
.attr("width", tableElemWidth)
|
||||||
|
.attr("height", tableElemHeight)
|
||||||
|
.attr("fill", "#eee");
|
||||||
|
|
||||||
|
let text = tableGroup.append("text")
|
||||||
|
.attr("class", "table-name")
|
||||||
|
.attr("x", 10)
|
||||||
|
.attr("y", 20)
|
||||||
|
.text(d => d.name);
|
||||||
|
|
||||||
|
let columnText = tableGroup.selectAll(".column")
|
||||||
|
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
|
||||||
|
.join("text")
|
||||||
|
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
|
||||||
|
.attr("x", 10)
|
||||||
|
.attr("y", (d, i) => 40 + i * 20)
|
||||||
|
.text(d => d.name);
|
||||||
|
|
||||||
|
// Physics simulation and force layout
|
||||||
|
let simulation = d3.forceSimulation(tables)
|
||||||
|
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
|
||||||
|
.force("charge", d3.forceManyBody().strength(-800))
|
||||||
|
.force("x", d3.forceX(width / 2).strength(0.1))
|
||||||
|
.force("y", d3.forceY(height / 2).strength(0.1))
|
||||||
|
.on("tick", () => {
|
||||||
|
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
|
||||||
|
linkGroup.attr("d", d => {
|
||||||
|
const srcX = d.source.x + tableElemWidth;
|
||||||
|
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
|
||||||
|
const tgtX = d.target.x;
|
||||||
|
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
|
||||||
|
const deltaX = tgtX - srcX;
|
||||||
|
const deltaY = tgtY - srcY;
|
||||||
|
const curveFactor = 50;
|
||||||
|
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
|
||||||
|
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
|
||||||
|
});
|
||||||
|
columnText.style("display", (d, i, nodes) => {
|
||||||
|
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
|
||||||
|
</html>
|
||||||
|
|
@ -1,16 +1,16 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
from fastapi import APIRouter, Depends, HTTPException
|
||||||
from sqlmodel import SQLModel, Session
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
from learn_sql_model.api.websocket_connection_manager import manager
|
from learn_sql_model.config import get_session
|
||||||
from learn_sql_model.config import get_config, get_session
|
|
||||||
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
|
from learn_sql_model.models.hero import Hero, HeroCreate, HeroRead, HeroUpdate, Heros
|
||||||
|
|
||||||
hero_router = APIRouter()
|
hero_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
@hero_router.on_event("startup")
|
@hero_router.on_event("startup")
|
||||||
def on_startup() -> None:
|
async def on_startup() -> None:
|
||||||
SQLModel.metadata.create_all(get_config().database.engine)
|
# SQLModel.metadata.create_all(get_config().database.engine)
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
@hero_router.get("/hero/{hero_id}")
|
@hero_router.get("/hero/{hero_id}")
|
||||||
|
|
@ -32,12 +32,12 @@ async def post_hero(
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
hero: HeroCreate,
|
hero: HeroCreate,
|
||||||
) -> HeroRead:
|
) -> HeroRead:
|
||||||
"read all the heros"
|
"create a hero"
|
||||||
db_hero = Hero.from_orm(hero)
|
db_hero = Hero.from_orm(hero)
|
||||||
session.add(db_hero)
|
session.add(db_hero)
|
||||||
session.commit()
|
session.commit()
|
||||||
session.refresh(db_hero)
|
session.refresh(db_hero)
|
||||||
await manager.broadcast({hero.json()}, id=1)
|
# await manager.broadcast({hero.json()}, id=1)
|
||||||
return db_hero
|
return db_hero
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -47,7 +47,7 @@ async def patch_hero(
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
hero: HeroUpdate,
|
hero: HeroUpdate,
|
||||||
) -> HeroRead:
|
) -> HeroRead:
|
||||||
"read all the heros"
|
"update a hero"
|
||||||
db_hero = session.get(Hero, hero.id)
|
db_hero = session.get(Hero, hero.id)
|
||||||
if not db_hero:
|
if not db_hero:
|
||||||
raise HTTPException(status_code=404, detail="Hero not found")
|
raise HTTPException(status_code=404, detail="Hero not found")
|
||||||
|
|
@ -56,7 +56,7 @@ async def patch_hero(
|
||||||
session.add(db_hero)
|
session.add(db_hero)
|
||||||
session.commit()
|
session.commit()
|
||||||
session.refresh(db_hero)
|
session.refresh(db_hero)
|
||||||
await manager.broadcast({hero.json()}, id=1)
|
# await manager.broadcast({hero.json()}, id=1)
|
||||||
return db_hero
|
return db_hero
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -66,13 +66,13 @@ async def delete_hero(
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
hero_id: int,
|
hero_id: int,
|
||||||
):
|
):
|
||||||
"read all the heros"
|
"delete a hero"
|
||||||
hero = session.get(Hero, hero_id)
|
hero = session.get(Hero, hero_id)
|
||||||
if not hero:
|
if not hero:
|
||||||
raise HTTPException(status_code=404, detail="Hero not found")
|
raise HTTPException(status_code=404, detail="Hero not found")
|
||||||
session.delete(hero)
|
session.delete(hero)
|
||||||
session.commit()
|
session.commit()
|
||||||
await manager.broadcast(f"deleted hero {hero_id}", id=1)
|
# await manager.broadcast(f"deleted hero {hero_id}", id=1)
|
||||||
return {"ok": True}
|
return {"ok": True}
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -82,4 +82,6 @@ async def get_heros(
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
) -> Heros:
|
) -> Heros:
|
||||||
"get all heros"
|
"get all heros"
|
||||||
return Heros.list(session=session)
|
statement = select(Hero)
|
||||||
|
heros = session.exec(statement).all()
|
||||||
|
return Heros(__root__=heros)
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,13 @@
|
||||||
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
|
from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect
|
||||||
from fastapi.responses import HTMLResponse
|
from fastapi.responses import HTMLResponse
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
from sqlmodel import Session
|
from sqlmodel import Session, select
|
||||||
from websockets.exceptions import ConnectionClosed
|
from websockets.exceptions import ConnectionClosed
|
||||||
|
|
||||||
from learn_sql_model.api.websocket_connection_manager import manager
|
from learn_sql_model.api.websocket_connection_manager import manager
|
||||||
from learn_sql_model.config import get_session
|
from learn_sql_model.config import get_session
|
||||||
from learn_sql_model.console import console
|
from learn_sql_model.console import console
|
||||||
from learn_sql_model.models.hero import HeroDelete, HeroUpdate, Heros
|
from learn_sql_model.models.hero import Hero, HeroDelete, HeroUpdate, Heros
|
||||||
|
|
||||||
web_socket_router = APIRouter()
|
web_socket_router = APIRouter()
|
||||||
|
|
||||||
|
|
@ -46,7 +46,9 @@ async def websocket_endpoint_connect(
|
||||||
):
|
):
|
||||||
Console().log(f"Client #{id} connecting")
|
Console().log(f"Client #{id} connecting")
|
||||||
await manager.connect(websocket, channel)
|
await manager.connect(websocket, channel)
|
||||||
heros = Heros.list(session=session)
|
statement = select(Hero)
|
||||||
|
heros = session.exec(statement).all()
|
||||||
|
heros = Heros(__root__=heros)
|
||||||
await websocket.send_text(heros.json())
|
await websocket.send_text(heros.json())
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
@ -83,11 +85,18 @@ async def websocket_endpoint_hero_echo(
|
||||||
while True:
|
while True:
|
||||||
data = await websocket.receive_text()
|
data = await websocket.receive_text()
|
||||||
hero = HeroUpdate.parse_raw(data)
|
hero = HeroUpdate.parse_raw(data)
|
||||||
heros = Heros.list(session=session)
|
statement = select(Hero)
|
||||||
|
heros = session.exec(statement).all()
|
||||||
|
heros = Heros(__root__=heros)
|
||||||
if heros != last_heros:
|
if heros != last_heros:
|
||||||
await manager.broadcast(heros.json(), "heros")
|
await manager.broadcast(heros.json(), "heros")
|
||||||
last_heros = heros
|
last_heros = heros
|
||||||
hero.update(session=session)
|
db_hero = session.get(Hero, hero.id)
|
||||||
|
for key, value in hero.dict(exclude_unset=True).items():
|
||||||
|
setattr(db_hero, key, value)
|
||||||
|
session.add(db_hero)
|
||||||
|
session.commit()
|
||||||
|
session.refresh(db_hero)
|
||||||
console.print(heros)
|
console.print(heros)
|
||||||
await websocket.send_text(heros.json())
|
await websocket.send_text(heros.json())
|
||||||
|
|
||||||
|
|
@ -96,7 +105,9 @@ async def websocket_endpoint_hero_echo(
|
||||||
HeroDelete(id=hero.id).delete(session=session)
|
HeroDelete(id=hero.id).delete(session=session)
|
||||||
except Exception:
|
except Exception:
|
||||||
...
|
...
|
||||||
heros = Heros.list(session=session)
|
statement = select(Hero)
|
||||||
|
heros = session.exec(statement).all()
|
||||||
|
heros = Heros(__root__=heros)
|
||||||
await manager.broadcast(heros.json(), "heros")
|
await manager.broadcast(heros.json(), "heros")
|
||||||
print("disconnected")
|
print("disconnected")
|
||||||
except ConnectionClosed:
|
except ConnectionClosed:
|
||||||
|
|
@ -104,6 +115,8 @@ async def websocket_endpoint_hero_echo(
|
||||||
HeroDelete(id=hero.id).delete(session=session)
|
HeroDelete(id=hero.id).delete(session=session)
|
||||||
except Exception:
|
except Exception:
|
||||||
...
|
...
|
||||||
heros = Heros.list(session=session)
|
statement = select(Hero)
|
||||||
|
heros = session.exec(statement).all()
|
||||||
|
heros = Heros(__root__=heros)
|
||||||
await manager.broadcast(heros.json(), "heros")
|
await manager.broadcast(heros.json(), "heros")
|
||||||
print("connection closed")
|
print("connection closed")
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,12 @@
|
||||||
import httpx
|
import httpx
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
import typer
|
import typer
|
||||||
import uvicorn
|
|
||||||
|
|
||||||
from learn_sql_model.cli.common import verbose_callback
|
from learn_sql_model.cli.common import verbose_callback
|
||||||
from learn_sql_model.config import get_config
|
from learn_sql_model.config import get_config
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
uvicorn = _optional_import_("uvicorn", group="api")
|
||||||
api_app = typer.Typer()
|
api_app = typer.Typer()
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6,7 +6,6 @@ from rich.console import Console
|
||||||
import typer
|
import typer
|
||||||
|
|
||||||
from learn_sql_model.config import get_config
|
from learn_sql_model.config import get_config
|
||||||
from learn_sql_model.factories.hero import HeroFactory
|
|
||||||
from learn_sql_model.models.hero import (
|
from learn_sql_model.models.hero import (
|
||||||
Hero,
|
Hero,
|
||||||
HeroCreate,
|
HeroCreate,
|
||||||
|
|
@ -15,6 +14,13 @@ from learn_sql_model.models.hero import (
|
||||||
HeroUpdate,
|
HeroUpdate,
|
||||||
Heros,
|
Heros,
|
||||||
)
|
)
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
HeroFactory = _optional_import_(
|
||||||
|
"learn_sql_model.factories.hero",
|
||||||
|
"HeroFactory",
|
||||||
|
group="api",
|
||||||
|
)
|
||||||
|
|
||||||
hero_app = typer.Typer()
|
hero_app = typer.Typer()
|
||||||
|
|
||||||
|
|
@ -27,9 +33,8 @@ def hero():
|
||||||
|
|
||||||
|
|
||||||
@hero_app.command()
|
@hero_app.command()
|
||||||
@engorgio(typer=True)
|
|
||||||
def get(
|
def get(
|
||||||
hero_id: Optional[int] = typer.Argument(default=None),
|
hero_id: Optional[int] = typer.Argument(),
|
||||||
) -> Union[Hero, List[Hero]]:
|
) -> Union[Hero, List[Hero]]:
|
||||||
"get one hero"
|
"get one hero"
|
||||||
hero = HeroRead.get(id=hero_id)
|
hero = HeroRead.get(id=hero_id)
|
||||||
|
|
@ -38,25 +43,19 @@ def get(
|
||||||
|
|
||||||
|
|
||||||
@hero_app.command()
|
@hero_app.command()
|
||||||
@engorgio(typer=True)
|
def list() -> Union[Hero, List[Hero]]:
|
||||||
def list(
|
|
||||||
where: Optional[str] = None,
|
|
||||||
offset: int = 0,
|
|
||||||
limit: Optional[int] = None,
|
|
||||||
) -> Union[Hero, List[Hero]]:
|
|
||||||
"list many heros"
|
"list many heros"
|
||||||
heros = Heros.list(where=where, offset=offset, limit=limit)
|
heros = Heros.list()
|
||||||
Console().print(heros)
|
Console().print(heros)
|
||||||
return hero
|
return heros
|
||||||
|
|
||||||
|
|
||||||
@hero_app.command()
|
@hero_app.command()
|
||||||
def clear() -> Union[Hero, List[Hero]]:
|
def clear() -> Union[Hero, List[Hero]]:
|
||||||
"list many heros"
|
"list many heros"
|
||||||
heros = Heros.list()
|
heros = Heros.list()
|
||||||
for hero in heros.heros:
|
for hero in heros.__root__:
|
||||||
HeroDelete(id=hero.id).delete()
|
HeroDelete.delete(id=hero.id)
|
||||||
|
|
||||||
return hero
|
return hero
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -81,14 +80,15 @@ def update(
|
||||||
@hero_app.command()
|
@hero_app.command()
|
||||||
@engorgio(typer=True)
|
@engorgio(typer=True)
|
||||||
def delete(
|
def delete(
|
||||||
hero: HeroDelete,
|
hero_id: Optional[int] = typer.Argument(),
|
||||||
) -> Hero:
|
) -> Hero:
|
||||||
"delete a hero by id"
|
"delete a hero by id"
|
||||||
hero.delete()
|
hero = HeroDelete.delete(id=hero_id)
|
||||||
|
Console().print(hero)
|
||||||
|
return hero
|
||||||
|
|
||||||
|
|
||||||
@hero_app.command()
|
@hero_app.command()
|
||||||
@engorgio(typer=True)
|
|
||||||
def populate(
|
def populate(
|
||||||
n: int = 10,
|
n: int = 10,
|
||||||
) -> Hero:
|
) -> Hero:
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,15 @@
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
from typing import Annotated
|
||||||
|
|
||||||
import alembic
|
# import copier
|
||||||
from alembic.config import Config
|
|
||||||
import copier
|
|
||||||
import typer
|
import typer
|
||||||
|
|
||||||
from learn_sql_model.cli.common import verbose_callback
|
from learn_sql_model.cli.common import verbose_callback
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
alembic = _optional_import_("alembic", group="manage")
|
||||||
|
Config = _optional_import_("alembic.config", "Config", group="manage")
|
||||||
|
|
||||||
model_app = typer.Typer()
|
model_app = typer.Typer()
|
||||||
|
|
||||||
|
|
@ -40,11 +44,18 @@ def create_revision(
|
||||||
callback=verbose_callback,
|
callback=verbose_callback,
|
||||||
help="show the log messages",
|
help="show the log messages",
|
||||||
),
|
),
|
||||||
message: str = typer.Option(
|
message: Annotated[
|
||||||
prompt=True,
|
str,
|
||||||
),
|
typer.Option(
|
||||||
|
"--message",
|
||||||
|
"-m",
|
||||||
|
prompt=True,
|
||||||
|
),
|
||||||
|
] = None,
|
||||||
):
|
):
|
||||||
alembic_cfg = Config("alembic.ini")
|
alembic_cfg = Config("alembic.ini")
|
||||||
|
config = get_config()
|
||||||
|
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
|
||||||
alembic.command.revision(
|
alembic.command.revision(
|
||||||
config=alembic_cfg,
|
config=alembic_cfg,
|
||||||
message=message,
|
message=message,
|
||||||
|
|
@ -63,7 +74,17 @@ def checkout(
|
||||||
revision: str = typer.Option("head"),
|
revision: str = typer.Option("head"),
|
||||||
):
|
):
|
||||||
alembic_cfg = Config("alembic.ini")
|
alembic_cfg = Config("alembic.ini")
|
||||||
alembic.command.upgrade(config=alembic_cfg, revision="head")
|
config = get_config()
|
||||||
|
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
|
||||||
|
alembic.command.upgrade(config=alembic_cfg, revision=revision)
|
||||||
|
|
||||||
|
|
||||||
|
@model_app.command()
|
||||||
|
def status():
|
||||||
|
alembic_cfg = Config("alembic.ini")
|
||||||
|
config = get_config()
|
||||||
|
alembic_cfg.set_main_option("sqlalchemy.url", config.database_url)
|
||||||
|
alembic.command.current(config=alembic_cfg)
|
||||||
|
|
||||||
|
|
||||||
@model_app.command()
|
@model_app.command()
|
||||||
|
|
@ -73,5 +94,4 @@ def populate(
|
||||||
callback=verbose_callback,
|
callback=verbose_callback,
|
||||||
help="show the log messages",
|
help="show the log messages",
|
||||||
),
|
),
|
||||||
):
|
): ...
|
||||||
...
|
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,8 @@ from typing import TYPE_CHECKING
|
||||||
from fastapi import Depends
|
from fastapi import Depends
|
||||||
from pydantic import BaseModel, BaseSettings, validator
|
from pydantic import BaseModel, BaseSettings, validator
|
||||||
from sqlalchemy import create_engine
|
from sqlalchemy import create_engine
|
||||||
from sqlmodel import SQLModel, Session
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlmodel import Session
|
||||||
|
|
||||||
from learn_sql_model.standard_config import load
|
from learn_sql_model.standard_config import load
|
||||||
|
|
||||||
|
|
@ -18,6 +19,7 @@ class ApiServer(BaseModel):
|
||||||
reload: bool = True
|
reload: bool = True
|
||||||
log_level: str = "info"
|
log_level: str = "info"
|
||||||
host: str = "0.0.0.0"
|
host: str = "0.0.0.0"
|
||||||
|
workers: int = 1
|
||||||
|
|
||||||
|
|
||||||
class ApiClient(BaseModel):
|
class ApiClient(BaseModel):
|
||||||
|
|
@ -25,7 +27,6 @@ class ApiClient(BaseModel):
|
||||||
protocol: str = "https"
|
protocol: str = "https"
|
||||||
url: str = f"{protocol}://{host}"
|
url: str = f"{protocol}://{host}"
|
||||||
|
|
||||||
|
|
||||||
class Database:
|
class Database:
|
||||||
def __init__(self, config: "Config" = None) -> None:
|
def __init__(self, config: "Config" = None) -> None:
|
||||||
if config is None:
|
if config is None:
|
||||||
|
|
@ -40,9 +41,21 @@ class Database:
|
||||||
}
|
}
|
||||||
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
|
self.db_state = ContextVar("db_state", default=self.db_state_default.copy())
|
||||||
|
|
||||||
|
self.db_conf = {}
|
||||||
|
if 'sqlite' in self.config.database_url:
|
||||||
|
self.db_conf = {
|
||||||
|
'connect_args': {"check_same_thread": False},
|
||||||
|
'pool_recycle': 3600,
|
||||||
|
'pool_pre_ping': True,
|
||||||
|
}
|
||||||
|
self._engine = create_engine(
|
||||||
|
self.config.database_url,
|
||||||
|
**self.db_conf
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def engine(self) -> "Engine":
|
def engine(self) -> "Engine":
|
||||||
return create_engine(self.config.database_url)
|
return self._engine
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def session(self) -> "Session":
|
def session(self) -> "Session":
|
||||||
|
|
@ -71,7 +84,8 @@ class Config(BaseSettings):
|
||||||
return get_database(config=self)
|
return get_database(config=self)
|
||||||
|
|
||||||
def init(self) -> None:
|
def init(self) -> None:
|
||||||
SQLModel.metadata.create_all(self.database.engine)
|
# SQLModel.metadata.create_all(self.database.engine)
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
def get_database(config: Config = None) -> Database:
|
def get_database(config: Config = None) -> Database:
|
||||||
|
|
@ -86,9 +100,14 @@ def get_config(overrides: dict = {}) -> Config:
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
database = get_database()
|
||||||
|
|
||||||
|
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=database.engine)
|
||||||
|
|
||||||
|
|
||||||
def get_session() -> "Session":
|
def get_session() -> "Session":
|
||||||
config = get_config()
|
with Session(database.engine) as session:
|
||||||
with Session(config.database.engine) as session:
|
|
||||||
yield session
|
yield session
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
151
learn_sql_model/er_diagram.py
Normal file
|
|
@ -0,0 +1,151 @@
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
from graphviz import Digraph
|
||||||
|
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_er_diagram(output_path):
|
||||||
|
# Connect to the SQLite database
|
||||||
|
database_path = config.database_url.replace("sqlite:///", "")
|
||||||
|
conn = sqlite3.connect(database_path)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Get the table names from the database
|
||||||
|
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||||
|
tables = cursor.fetchall()
|
||||||
|
|
||||||
|
# Create a new Digraph
|
||||||
|
dot = Digraph(format="png")
|
||||||
|
dot.attr(rankdir="TD")
|
||||||
|
|
||||||
|
# Iterate over the tables
|
||||||
|
for table in tables:
|
||||||
|
table_name = table[0]
|
||||||
|
dot.node(table_name, shape="box")
|
||||||
|
cursor.execute(f"PRAGMA table_info({table_name});")
|
||||||
|
columns = cursor.fetchall()
|
||||||
|
|
||||||
|
# Add the columns to the table node
|
||||||
|
for column in columns:
|
||||||
|
column_name = column[1]
|
||||||
|
dot.node(f"{table_name}.{column_name}", label=column_name, shape="oval")
|
||||||
|
dot.edge(table_name, f"{table_name}.{column_name}")
|
||||||
|
|
||||||
|
# Check for foreign key relationships
|
||||||
|
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
|
||||||
|
foreign_keys = cursor.fetchall()
|
||||||
|
|
||||||
|
# Add dotted lines for foreign key relationships
|
||||||
|
for foreign_key in foreign_keys:
|
||||||
|
from_column = foreign_key[3]
|
||||||
|
to_table = foreign_key[2]
|
||||||
|
to_column = foreign_key[4]
|
||||||
|
dot.node(f"{to_table}.{to_column}", shape="oval")
|
||||||
|
dot.edge(
|
||||||
|
f"{table_name}.{from_column}", f"{to_table}.{to_column}", style="dotted"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Render and save the diagram
|
||||||
|
dot.render(output_path.replace(".png", ""), cleanup=True)
|
||||||
|
|
||||||
|
# Close the database connection
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_er_markdown(output_path, er_diagram_path):
|
||||||
|
# Connect to the SQLite database
|
||||||
|
database_path = config.database_url.replace("sqlite:///", "")
|
||||||
|
conn = sqlite3.connect(database_path)
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Get the table names from the database
|
||||||
|
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||||
|
tables = cursor.fetchall()
|
||||||
|
|
||||||
|
with open(output_path, "w") as f:
|
||||||
|
# Write the ER Diagram image
|
||||||
|
f.write(f"\n\n---\n\n")
|
||||||
|
|
||||||
|
# Iterate over the tables
|
||||||
|
for table in tables:
|
||||||
|
table_name = table[0]
|
||||||
|
|
||||||
|
f.write(f"## Table: {table_name}\n\n")
|
||||||
|
|
||||||
|
# Get the table columns
|
||||||
|
cursor.execute(f"PRAGMA table_info({table_name});")
|
||||||
|
columns = cursor.fetchall()
|
||||||
|
|
||||||
|
f.write("### First 5 rows\n\n")
|
||||||
|
cursor.execute(f"SELECT * FROM {table_name} LIMIT 5;")
|
||||||
|
rows = cursor.fetchall()
|
||||||
|
f.write(f'| {" | ".join([c[1] for c in columns])} |\n')
|
||||||
|
f.write("|")
|
||||||
|
for column in columns:
|
||||||
|
# ---
|
||||||
|
f.write(f'{"-"*(len(column[1]) + 2)}|')
|
||||||
|
f.write("\n")
|
||||||
|
for row in rows:
|
||||||
|
f.write(f'| {" | ".join([str(r) for r in row])} |\n')
|
||||||
|
f.write("\n")
|
||||||
|
|
||||||
|
cursor.execute(f"PRAGMA foreign_key_list({table_name});")
|
||||||
|
foreign_keys = cursor.fetchall()
|
||||||
|
|
||||||
|
# Add dotted lines for foreign key relationships
|
||||||
|
fkeys = {}
|
||||||
|
for foreign_key in foreign_keys:
|
||||||
|
from_column = foreign_key[3]
|
||||||
|
to_table = foreign_key[2]
|
||||||
|
to_column = foreign_key[4]
|
||||||
|
fkeys[from_column] = f"{to_table}.{to_column}"
|
||||||
|
|
||||||
|
# Replace 'description' with the actual column name in the table that contains the description, if applicable
|
||||||
|
try:
|
||||||
|
cursor.execute(f"SELECT description FROM {table_name} LIMIT 1;")
|
||||||
|
description = cursor.fetchone()
|
||||||
|
if description:
|
||||||
|
f.write(f"### Description\n\n{description[0]}\n\n")
|
||||||
|
except:
|
||||||
|
...
|
||||||
|
|
||||||
|
# Write the table columns
|
||||||
|
f.write("### Columns\n\n")
|
||||||
|
f.write("| Column Name | Type | Foreign Key | Example Value |\n")
|
||||||
|
f.write("|-------------|------|-------------|---------------|\n")
|
||||||
|
|
||||||
|
for column in columns:
|
||||||
|
|
||||||
|
column_name = column[1]
|
||||||
|
column_type = column[2]
|
||||||
|
fkey = ""
|
||||||
|
if column_name in fkeys:
|
||||||
|
fkey = fkeys[column_name]
|
||||||
|
f.write(f"| {column_name} | {column_type} | {fkey} | | |\n")
|
||||||
|
|
||||||
|
f.write("\n")
|
||||||
|
|
||||||
|
# Get the count of records
|
||||||
|
cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
|
||||||
|
records_count = cursor.fetchone()[0]
|
||||||
|
f.write(
|
||||||
|
f"### Records Count\n\nThe table {table_name} contains {records_count} records.\n\n---\n\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Close the database connection
|
||||||
|
cursor.close()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
# Usage example
|
||||||
|
database_path = "database.db"
|
||||||
|
md_output_path = "database.md"
|
||||||
|
er_output_path = "er_diagram.png"
|
||||||
|
|
||||||
|
generate_er_diagram(database_path, er_output_path)
|
||||||
|
generate_markdown(database_path, md_output_path, er_output_path)
|
||||||
|
|
@ -10,7 +10,6 @@ class HeroFactory(ModelFactory[Hero]):
|
||||||
__model__ = Hero
|
__model__ = Hero
|
||||||
__faker__ = Faker(locale="en_US")
|
__faker__ = Faker(locale="en_US")
|
||||||
__set_as_default_factory_for_type__ = True
|
__set_as_default_factory_for_type__ = True
|
||||||
id = None
|
|
||||||
pet_id = None
|
pet_id = None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|
|
||||||
25
learn_sql_model/game/debug.py
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
import pygame
|
||||||
|
|
||||||
|
|
||||||
|
class Debug:
|
||||||
|
def __init__(self, game):
|
||||||
|
self.game = game
|
||||||
|
self.is_open = False
|
||||||
|
self.debounce = False
|
||||||
|
|
||||||
|
def handle_events(self, events):
|
||||||
|
for event in events:
|
||||||
|
if event.type == pygame.KEYDOWN:
|
||||||
|
if event.key == pygame.K_F3 and not self.debounce:
|
||||||
|
self.is_open = not self.is_open
|
||||||
|
self.debounce = True
|
||||||
|
if event.type == pygame.KEYUP:
|
||||||
|
if event.key == pygame.K_F3:
|
||||||
|
self.debounce = False
|
||||||
|
|
||||||
|
def render(self):
|
||||||
|
if self.is_open:
|
||||||
|
text = self.game.font.render(
|
||||||
|
str(int(self.game.clock.get_fps())) + " fps", True, (255, 255, 255)
|
||||||
|
)
|
||||||
|
self.game.screen.blit(text, (20, 20))
|
||||||
|
|
@ -1,29 +1,28 @@
|
||||||
import atexit
|
import atexit
|
||||||
|
|
||||||
import pygame
|
|
||||||
from typer import Typer
|
from typer import Typer
|
||||||
from websocket import create_connection
|
from websocket import create_connection
|
||||||
|
|
||||||
from learn_sql_model.config import get_config
|
from learn_sql_model.config import get_config
|
||||||
from learn_sql_model.console import console
|
from learn_sql_model.console import console
|
||||||
from learn_sql_model.factories.hero import HeroFactory
|
from learn_sql_model.game.debug import Debug
|
||||||
from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
|
from learn_sql_model.game.light import Light
|
||||||
|
from learn_sql_model.game.map import Map
|
||||||
|
from learn_sql_model.game.menu import Menu
|
||||||
|
from learn_sql_model.game.player import Player
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
pygame = _optional_import_("pygame", group="game")
|
||||||
|
|
||||||
speed = 10
|
speed = 10
|
||||||
|
|
||||||
pygame.font.init() # you have to call this at the start,
|
|
||||||
# if you want to use this module.
|
|
||||||
my_font = pygame.font.SysFont("Comic Sans MS", 30)
|
|
||||||
|
|
||||||
config = get_config()
|
config = get_config()
|
||||||
|
|
||||||
|
|
||||||
class Client:
|
class Client:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
hero = HeroFactory().build(size=50, x=100, y=100)
|
# self.screen = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
|
||||||
self.hero = HeroCreate(**hero.dict()).post()
|
self.screen = pygame.display.set_mode((1280, 720))
|
||||||
|
|
||||||
self.screen = pygame.display.set_mode((800, 600))
|
|
||||||
pygame.display.set_caption("Learn SQL Model")
|
pygame.display.set_caption("Learn SQL Model")
|
||||||
self.clock = pygame.time.Clock()
|
self.clock = pygame.time.Clock()
|
||||||
self.running = True
|
self.running = True
|
||||||
|
|
@ -34,16 +33,31 @@ class Client:
|
||||||
self.moving_left = False
|
self.moving_left = False
|
||||||
self.moving_right = False
|
self.moving_right = False
|
||||||
self.ticks = 0
|
self.ticks = 0
|
||||||
self.others = []
|
self.player = Player(self)
|
||||||
|
self.menu = Menu(self)
|
||||||
|
self.map = Map(self)
|
||||||
|
self.light = Light(self)
|
||||||
|
self.font = pygame.font.SysFont("", 25)
|
||||||
|
self.joysticks = {}
|
||||||
|
self.darkness = pygame.Surface(
|
||||||
|
(self.screen.get_width(), self.screen.get_height()),
|
||||||
|
pygame.SRCALPHA,
|
||||||
|
32,
|
||||||
|
)
|
||||||
|
self.debug = Debug(self)
|
||||||
|
|
||||||
atexit.register(self.quit)
|
atexit.register(self.quit)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ws(self):
|
def ws(self):
|
||||||
def connect():
|
def connect():
|
||||||
self._ws = create_connection(
|
if "https" in config.api_client.url:
|
||||||
f"ws://{config.api_client.url.replace('https://', '')}/wsecho"
|
url = f"wss://{config.api_client.url.replace('https://', '')}/wsecho"
|
||||||
)
|
elif "http" in config.api_client.url:
|
||||||
|
url = f"ws://{config.api_client.url.replace('http://', '')}/wsecho"
|
||||||
|
else:
|
||||||
|
url = f"ws://{config.api_client.url}/wsecho"
|
||||||
|
self._ws = create_connection(url)
|
||||||
|
|
||||||
if not hasattr(self, "_ws"):
|
if not hasattr(self, "_ws"):
|
||||||
connect()
|
connect()
|
||||||
|
|
@ -52,6 +66,13 @@ class Client:
|
||||||
return self._ws
|
return self._ws
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
|
try:
|
||||||
|
from pyinstrument import Profiler
|
||||||
|
|
||||||
|
profiler = Profiler()
|
||||||
|
profiler.start()
|
||||||
|
except ImportError:
|
||||||
|
profiler = None
|
||||||
while self.running:
|
while self.running:
|
||||||
console.print("running")
|
console.print("running")
|
||||||
console.print("handle_events")
|
console.print("handle_events")
|
||||||
|
|
@ -59,109 +80,60 @@ class Client:
|
||||||
console.print("update")
|
console.print("update")
|
||||||
self.update()
|
self.update()
|
||||||
console.print("render")
|
console.print("render")
|
||||||
|
|
||||||
self.render()
|
self.render()
|
||||||
time = self.clock.tick(60)
|
time = self.clock.tick(60)
|
||||||
|
self.elapsed = time / 100
|
||||||
self.ticks += 1
|
self.ticks += 1
|
||||||
console.print(f"time: {time}")
|
console.print(f"time: {time}")
|
||||||
console.print(f"ticks: {self.ticks}")
|
console.print(f"ticks: {self.ticks}")
|
||||||
|
if profiler:
|
||||||
|
profiler.stop()
|
||||||
|
print(profiler.output_text())
|
||||||
self.quit()
|
self.quit()
|
||||||
|
|
||||||
def quit(self):
|
def quit(self):
|
||||||
try:
|
self.running = False
|
||||||
HeroDelete(id=self.hero.id).delete()
|
self.player.quit()
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def update(self):
|
def update(self):
|
||||||
if self.moving_up:
|
...
|
||||||
self.hero.y -= speed
|
|
||||||
if self.moving_down:
|
|
||||||
self.hero.y += speed
|
|
||||||
if self.moving_left:
|
|
||||||
self.hero.x -= speed
|
|
||||||
if self.moving_right:
|
|
||||||
self.hero.x += speed
|
|
||||||
|
|
||||||
if self.ticks % 5 == 0 or self.ticks == 0:
|
|
||||||
console.print("updating")
|
|
||||||
update = HeroUpdate(**self.hero.dict(exclude_unset=True))
|
|
||||||
console.print(update)
|
|
||||||
self.ws.send(update.json())
|
|
||||||
console.print("sent")
|
|
||||||
|
|
||||||
raw_heros = self.ws.recv()
|
|
||||||
console.print(raw_heros)
|
|
||||||
self.others = Heros.parse_raw(raw_heros)
|
|
||||||
|
|
||||||
def render(self):
|
def render(self):
|
||||||
self.screen.fill((0, 0, 0))
|
self.screen.fill((0, 0, 0))
|
||||||
|
self.map.render()
|
||||||
|
self.player.render()
|
||||||
|
|
||||||
for other in self.others.heros:
|
if self.ticks % 1 == 0 or self.ticks == 0:
|
||||||
if other.id != self.hero.id:
|
light_level = 0
|
||||||
pygame.draw.circle(
|
self.darkness.fill((light_level, light_level, light_level))
|
||||||
self.screen, (255, 0, 0), (other.x, other.y), other.size
|
self.light.render()
|
||||||
)
|
|
||||||
self.screen.blit(
|
|
||||||
my_font.render(other.name, False, (255, 255, 255), 1),
|
|
||||||
(other.x, other.y),
|
|
||||||
)
|
|
||||||
|
|
||||||
pygame.draw.circle(
|
|
||||||
self.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
|
|
||||||
)
|
|
||||||
self.screen.blit(
|
self.screen.blit(
|
||||||
my_font.render(self.hero.name, False, (255, 255, 255)),
|
self.darkness,
|
||||||
(self.hero.x, self.hero.y),
|
(0, 0),
|
||||||
|
special_flags=pygame.BLEND_MULT,
|
||||||
)
|
)
|
||||||
|
|
||||||
# update the screen
|
# update the screen
|
||||||
|
self.menu.render()
|
||||||
|
self.debug.render()
|
||||||
pygame.display.flip()
|
pygame.display.flip()
|
||||||
|
|
||||||
def handle_events(self):
|
def handle_events(self):
|
||||||
self.events = pygame.event.get()
|
self.events = pygame.event.get()
|
||||||
|
self.menu.handle_events(self.events)
|
||||||
|
self.debug.handle_events(self.events)
|
||||||
|
self.player.handle_events()
|
||||||
for event in self.events:
|
for event in self.events:
|
||||||
if event.type == pygame.QUIT:
|
if event.type == pygame.QUIT:
|
||||||
self.running = False
|
self.running = False
|
||||||
if event.type == pygame.KEYDOWN:
|
if event.type == pygame.JOYDEVICEADDED:
|
||||||
if event.key == pygame.K_ESCAPE:
|
# This event will be generated when the program starts for every
|
||||||
self.running = False
|
# joystick, filling up the list without needing to create them manually.
|
||||||
if event.key == pygame.K_LEFT:
|
joy = pygame.joystick.Joystick(event.device_index)
|
||||||
self.moving_left = True
|
self.joysticks[joy.get_instance_id()] = joy
|
||||||
if event.key == pygame.K_RIGHT:
|
if event.type == pygame.JOYDEVICEREMOVED:
|
||||||
self.moving_right = True
|
del self.joysticks[event.instance_id]
|
||||||
if event.key == pygame.K_UP:
|
|
||||||
self.moving_up = True
|
|
||||||
if event.key == pygame.K_DOWN:
|
|
||||||
self.moving_down = True
|
|
||||||
# wasd
|
|
||||||
if event.key == pygame.K_w:
|
|
||||||
self.moving_up = True
|
|
||||||
if event.key == pygame.K_s:
|
|
||||||
self.moving_down = True
|
|
||||||
if event.key == pygame.K_a:
|
|
||||||
self.moving_left = True
|
|
||||||
if event.key == pygame.K_d:
|
|
||||||
self.moving_right = True
|
|
||||||
# controller left joystick
|
|
||||||
|
|
||||||
if event.type == pygame.KEYUP:
|
|
||||||
if event.key == pygame.K_LEFT:
|
|
||||||
self.moving_left = False
|
|
||||||
if event.key == pygame.K_RIGHT:
|
|
||||||
self.moving_right = False
|
|
||||||
if event.key == pygame.K_UP:
|
|
||||||
self.moving_up = False
|
|
||||||
if event.key == pygame.K_DOWN:
|
|
||||||
self.moving_down = False
|
|
||||||
# wasd
|
|
||||||
if event.key == pygame.K_w:
|
|
||||||
self.moving_up = False
|
|
||||||
if event.key == pygame.K_s:
|
|
||||||
self.moving_down = False
|
|
||||||
if event.key == pygame.K_a:
|
|
||||||
self.moving_left = False
|
|
||||||
if event.key == pygame.K_d:
|
|
||||||
self.moving_right = False
|
|
||||||
|
|
||||||
def check_events(self):
|
def check_events(self):
|
||||||
pass
|
pass
|
||||||
|
|
|
||||||
219
learn_sql_model/game/light.py
Normal file
|
|
@ -0,0 +1,219 @@
|
||||||
|
import bisect
|
||||||
|
|
||||||
|
from PIL import Image, ImageFilter
|
||||||
|
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
pygame = _optional_import_("pygame", group="game")
|
||||||
|
|
||||||
|
|
||||||
|
def rot_center(image, angle):
|
||||||
|
"""rotate an image while keeping its center and size"""
|
||||||
|
orig_rect = image.get_rect()
|
||||||
|
rot_image = pygame.transform.rotate(image, angle)
|
||||||
|
rot_rect = orig_rect.copy()
|
||||||
|
rot_rect.center = rot_image.get_rect().center
|
||||||
|
rot_image = rot_image.subsurface(rot_rect).copy()
|
||||||
|
return rot_image
|
||||||
|
|
||||||
|
|
||||||
|
class Light:
|
||||||
|
def __init__(self, game):
|
||||||
|
self.game = game
|
||||||
|
self.surf = pygame.Surface(
|
||||||
|
(self.game.screen.get_width(), self.game.screen.get_height()),
|
||||||
|
pygame.SRCALPHA,
|
||||||
|
32,
|
||||||
|
)
|
||||||
|
self.surf.set_colorkey((0, 0, 0))
|
||||||
|
self.pre_render()
|
||||||
|
|
||||||
|
def pre_render(self):
|
||||||
|
|
||||||
|
# self.lights = {}
|
||||||
|
# for deg in range(-360, 360, 20):
|
||||||
|
# print("loading light", deg)
|
||||||
|
# self.lights[deg] = pygame.image.load(
|
||||||
|
# f"lights/light-{deg}.png"
|
||||||
|
# ).convert_alpha()
|
||||||
|
# return
|
||||||
|
|
||||||
|
light_surf = pygame.Surface(
|
||||||
|
(
|
||||||
|
self.game.player.hero.flashlight_strength * 3,
|
||||||
|
self.game.player.hero.flashlight_strength * 3,
|
||||||
|
),
|
||||||
|
pygame.SRCALPHA,
|
||||||
|
32,
|
||||||
|
)
|
||||||
|
|
||||||
|
v = pygame.math.Vector2(0, 1)
|
||||||
|
v.scale_to_length(self.game.player.hero.flashlight_strength)
|
||||||
|
for r in range(-90 - 25, -90 + 25):
|
||||||
|
_v = v.rotate(r)
|
||||||
|
pygame.draw.line(
|
||||||
|
light_surf,
|
||||||
|
(255, 250, 205),
|
||||||
|
(light_surf.get_width() / 2, light_surf.get_height() / 2),
|
||||||
|
(
|
||||||
|
light_surf.get_width() / 2 + _v.x,
|
||||||
|
light_surf.get_height() / 2 + _v.y,
|
||||||
|
),
|
||||||
|
50,
|
||||||
|
)
|
||||||
|
pygame.draw.circle(
|
||||||
|
light_surf,
|
||||||
|
(255, 250, 205),
|
||||||
|
(light_surf.get_width() / 2, light_surf.get_height() / 2),
|
||||||
|
self.game.player.hero.lanturn_strength,
|
||||||
|
)
|
||||||
|
|
||||||
|
light_surf_pil = Image.frombytes(
|
||||||
|
"RGBA",
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
pygame.image.tostring(light_surf, "RGBA", False),
|
||||||
|
)
|
||||||
|
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=100))
|
||||||
|
light_surf = pygame.image.fromstring(
|
||||||
|
light_surf_blur.tobytes(),
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
"RGBA",
|
||||||
|
).convert_alpha()
|
||||||
|
|
||||||
|
pygame.draw.circle(
|
||||||
|
light_surf,
|
||||||
|
(255, 250, 205),
|
||||||
|
(light_surf.get_width() / 2, light_surf.get_height() / 2),
|
||||||
|
self.game.player.hero.lanturn_strength,
|
||||||
|
)
|
||||||
|
|
||||||
|
light_surf_pil = Image.frombytes(
|
||||||
|
"RGBA",
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
pygame.image.tostring(light_surf, "RGBA", False),
|
||||||
|
)
|
||||||
|
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=50))
|
||||||
|
light_surf = pygame.image.fromstring(
|
||||||
|
light_surf_blur.tobytes(),
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
"RGBA",
|
||||||
|
).convert_alpha()
|
||||||
|
|
||||||
|
pygame.draw.circle(
|
||||||
|
light_surf,
|
||||||
|
(255, 250, 205),
|
||||||
|
(light_surf.get_width() / 2, light_surf.get_height() / 2),
|
||||||
|
self.game.player.hero.lanturn_strength,
|
||||||
|
)
|
||||||
|
|
||||||
|
light_surf_pil = Image.frombytes(
|
||||||
|
"RGBA",
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
pygame.image.tostring(light_surf, "RGBA", False),
|
||||||
|
)
|
||||||
|
light_surf_blur = light_surf_pil.filter(ImageFilter.GaussianBlur(radius=20))
|
||||||
|
light_surf = pygame.image.fromstring(
|
||||||
|
light_surf_blur.tobytes(),
|
||||||
|
(light_surf.get_width(), light_surf.get_height()),
|
||||||
|
"RGBA",
|
||||||
|
).convert_alpha()
|
||||||
|
|
||||||
|
self.light_surf = light_surf
|
||||||
|
self.light_surf.set_colorkey((0, 0, 0))
|
||||||
|
|
||||||
|
self.lights = {
|
||||||
|
deg: pygame.transform.rotate(self.light_surf, deg - 90)
|
||||||
|
for deg in range(-360, 360, 20)
|
||||||
|
}
|
||||||
|
|
||||||
|
for deg, light in self.lights.items():
|
||||||
|
pygame.image.save(light, f"lights/light-{deg}.png")
|
||||||
|
|
||||||
|
def render(self):
|
||||||
|
self.surf.fill((0, 0, 0))
|
||||||
|
mx, my = pygame.mouse.get_pos()
|
||||||
|
v = pygame.math.Vector2(
|
||||||
|
mx - self.game.player.hero.x, my - self.game.player.hero.y
|
||||||
|
)
|
||||||
|
v.scale_to_length(self.game.player.hero.flashlight_strength)
|
||||||
|
self.game.player.hero.flashlight_angle = v.angle_to(pygame.math.Vector2(1, 0))
|
||||||
|
|
||||||
|
for other in self.game.player.others.__root__:
|
||||||
|
if other.id == self.game.player.hero.id:
|
||||||
|
continue
|
||||||
|
|
||||||
|
light_index = list(self.lights.keys())[
|
||||||
|
bisect.bisect_left(
|
||||||
|
list(self.lights.keys()),
|
||||||
|
other.flashlight_angle + 90,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
my_light = self.lights[light_index]
|
||||||
|
self.surf.blit(
|
||||||
|
my_light,
|
||||||
|
(
|
||||||
|
other.x - my_light.get_width() / 2,
|
||||||
|
other.y - my_light.get_height() / 2,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
light_index = list(self.lights.keys())[
|
||||||
|
bisect.bisect_left(
|
||||||
|
list(self.lights.keys()),
|
||||||
|
self.game.player.hero.flashlight_angle + 90,
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
my_light = self.lights[light_index]
|
||||||
|
self.surf.blit(
|
||||||
|
my_light,
|
||||||
|
(
|
||||||
|
self.game.player.hero.x - my_light.get_width() / 2,
|
||||||
|
self.game.player.hero.y - my_light.get_height() / 2,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
# for r in range(-25, 25):
|
||||||
|
# _v = v.rotate(r)
|
||||||
|
# pygame.draw.line(
|
||||||
|
# self.surf,
|
||||||
|
# (255, 250, 205),
|
||||||
|
# (self.game.player.hero.x, self.game.player.hero.y),
|
||||||
|
# (self.game.player.hero.x + _v.x, self.game.player.hero.y + _v.y),
|
||||||
|
# 50,
|
||||||
|
# )
|
||||||
|
# # draw a circle
|
||||||
|
# pygame.draw.circle(
|
||||||
|
# self.surf,
|
||||||
|
# (255, 250, 205),
|
||||||
|
# (self.game.player.hero.x, self.game.player.hero.y),
|
||||||
|
# self.game.player.hero.lanturn_strength,
|
||||||
|
# )
|
||||||
|
|
||||||
|
# for other in self.game.player.others.__root__:
|
||||||
|
# if other.id == self.game.player.hero.id:
|
||||||
|
# continue
|
||||||
|
# v = pygame.math.Vector2(0, 1)
|
||||||
|
# v = v.rotate(-other.flashlight_angle)
|
||||||
|
# v.scale_to_length(other.flashlight_strength)
|
||||||
|
# for r in range(-25, 25):
|
||||||
|
# _v = v.rotate(r)
|
||||||
|
# pygame.draw.line(
|
||||||
|
# self.surf,
|
||||||
|
# (255, 250, 205),
|
||||||
|
# (other.x, other.y),
|
||||||
|
# (other.x + _v.x, other.y + _v.y),
|
||||||
|
# 50,
|
||||||
|
# )
|
||||||
|
# pygame.draw.circle(
|
||||||
|
# self.surf,
|
||||||
|
# (255, 250, 205),
|
||||||
|
# (other.x, other.y),
|
||||||
|
# other.lanturn_strength,
|
||||||
|
# )
|
||||||
|
|
||||||
|
self.game.darkness.blit(
|
||||||
|
self.surf,
|
||||||
|
(0, 0),
|
||||||
|
)
|
||||||
134
learn_sql_model/game/map.py
Normal file
|
|
@ -0,0 +1,134 @@
|
||||||
|
import pydantic
|
||||||
|
from rich.console import Console
|
||||||
|
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
snoise2 = _optional_import_("noise", "snoise2", group="game")
|
||||||
|
pygame = _optional_import_("pygame", group="game")
|
||||||
|
|
||||||
|
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
|
||||||
|
class Point(pydantic.BaseModel):
|
||||||
|
x: int
|
||||||
|
y: int
|
||||||
|
|
||||||
|
|
||||||
|
class Map:
|
||||||
|
def __init__(self, game):
|
||||||
|
self.game = game
|
||||||
|
# self.grass = pygame.image.load("grass.webp").convert_alpha()
|
||||||
|
# self.rock = pygame.image.load("rock.jpg").convert_alpha()
|
||||||
|
# self.dirt = pygame.image.load("dirt.jpg").convert_alpha()
|
||||||
|
self.brown = (204, 153, 102)
|
||||||
|
self.grey = (128, 128, 128)
|
||||||
|
self.green = (0, 255, 0)
|
||||||
|
self.white = (255, 255, 255)
|
||||||
|
self.resolution = 16
|
||||||
|
self.scale = 0.14 # Determines the "smoothness" of the terrain
|
||||||
|
self.scale = 0.05 # Determines the "smoothness" of the terrain
|
||||||
|
self.offset = Point(x=0, y=0)
|
||||||
|
self.last_offset = self.offset
|
||||||
|
self.screen_width = self.game.screen.get_width()
|
||||||
|
self.screen_height = self.game.screen.get_height()
|
||||||
|
self.octaves = 2 # Number of layers of noise to combine
|
||||||
|
self.persistence = 0.05 # Amplitude of each octave
|
||||||
|
self.lacunarity = 1.0 # Frequency of each octave
|
||||||
|
self.thresh = 125
|
||||||
|
# try to load the map from map.png
|
||||||
|
try:
|
||||||
|
self.surf = pygame.image.load("map.png").convert_alpha()
|
||||||
|
|
||||||
|
# self.surf_pil = Image.frombytes(
|
||||||
|
# "RGBA",
|
||||||
|
# (self.surf.get_width(), self.surf.get_height()),
|
||||||
|
# pygame.image.tostring(self.surf, "RGBA", False),
|
||||||
|
# )
|
||||||
|
# self.surf_blur = (
|
||||||
|
# self.surf_pil.filter(
|
||||||
|
# ImageFilter.SMOOTH_MORE(),
|
||||||
|
# )
|
||||||
|
# .filter(ImageFilter.SMOOTH_MORE())
|
||||||
|
# .filter(ImageFilter.SMOOTH_MORE())
|
||||||
|
# .filter(ImageFilter.SMOOTH_MORE())
|
||||||
|
# .filter(ImageFilter.SMOOTH_MORE())
|
||||||
|
# .filter(ImageFilter.SMOOTH_MORE())
|
||||||
|
# # sharpen
|
||||||
|
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
|
||||||
|
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
|
||||||
|
# .filter(ImageFilter.UnsharpMask(radius=3, percent=100, threshold=3))
|
||||||
|
# )
|
||||||
|
|
||||||
|
# self.surf = pygame.image.fromstring(
|
||||||
|
# self.surf_blur.tobytes(),
|
||||||
|
# (self.surf.get_width(), self.surf.get_height()),
|
||||||
|
# "RGBA",
|
||||||
|
# ).convert_alpha()
|
||||||
|
|
||||||
|
except FileNotFoundError:
|
||||||
|
self.pre_draw()
|
||||||
|
|
||||||
|
def refresh_surf(self):
|
||||||
|
|
||||||
|
self.surf = pygame.Surface((self.screen_width, self.screen_height))
|
||||||
|
|
||||||
|
def get_noise(self, x, y):
|
||||||
|
value = snoise2(
|
||||||
|
(x + self.offset.x) * self.scale,
|
||||||
|
(y + self.offset.y) * self.scale,
|
||||||
|
self.octaves,
|
||||||
|
self.persistence,
|
||||||
|
self.lacunarity,
|
||||||
|
)
|
||||||
|
value = (value + 1) / 2 * 255
|
||||||
|
return value
|
||||||
|
|
||||||
|
def render(self):
|
||||||
|
self.game.screen.blit(
|
||||||
|
self.surf,
|
||||||
|
(0, 0),
|
||||||
|
)
|
||||||
|
|
||||||
|
def point_check_collision(self, x, y, thresh=None):
|
||||||
|
return self.get_noise(x / self.resolution, y / self.resolution) < (
|
||||||
|
thresh or self.thresh
|
||||||
|
)
|
||||||
|
|
||||||
|
def pre_draw(self):
|
||||||
|
self.refresh_surf()
|
||||||
|
|
||||||
|
for x in range(int(self.screen_width)):
|
||||||
|
for y in range(int(self.screen_height)):
|
||||||
|
if not self.point_check_collision(x, y):
|
||||||
|
pygame.draw.rect(
|
||||||
|
self.surf,
|
||||||
|
self.white,
|
||||||
|
(
|
||||||
|
x,
|
||||||
|
y,
|
||||||
|
1,
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
pygame.image.save(self.surf, "map.png")
|
||||||
|
|
||||||
|
# av1 = (
|
||||||
|
# Image.open("rock.jpg")
|
||||||
|
# .convert("RGB")
|
||||||
|
# .resize((self.screen_width, self.screen_height))
|
||||||
|
# )
|
||||||
|
# av2 = (
|
||||||
|
# Image.open("dirt.jpg")
|
||||||
|
# .convert("RGB")
|
||||||
|
# .resize((self.screen_width, self.screen_height))
|
||||||
|
# )
|
||||||
|
# mask = (
|
||||||
|
# Image.open("map.png")
|
||||||
|
# .convert("L")
|
||||||
|
# .resize((self.screen_width, self.screen_height))
|
||||||
|
# .filter(ImageFilter.GaussianBlur(3))
|
||||||
|
# )
|
||||||
|
# Image.composite(av2, av1, mask).save("result.png")
|
||||||
|
# result = pygame.image.load("result.png")
|
||||||
|
# self.surf.blit(result, (0, 0))
|
||||||
185
learn_sql_model/game/menu.py
Normal file
|
|
@ -0,0 +1,185 @@
|
||||||
|
from typing import Callable, Tuple
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
pygame = _optional_import_("pygame", group="game")
|
||||||
|
|
||||||
|
|
||||||
|
screen_sizes = [
|
||||||
|
(480, 360), # 360p
|
||||||
|
(640, 480), # VGA
|
||||||
|
(800, 600), # SVGA
|
||||||
|
(1024, 768), # XGA
|
||||||
|
(1280, 720), # HD 720p
|
||||||
|
(1366, 768), # HD 1366x768
|
||||||
|
(1600, 900), # HD+ 1600x900
|
||||||
|
(1920, 1080), # Full HD 1080p
|
||||||
|
(2560, 1440), # 2K / QHD 1440p
|
||||||
|
(3840, 2160), # 4K / UHD 2160p
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class MenuItem(BaseModel):
|
||||||
|
display_text: str
|
||||||
|
on_click: Callable = None
|
||||||
|
text_color: Tuple[str, str, str] = (0, 0, 0)
|
||||||
|
|
||||||
|
|
||||||
|
class Menu:
|
||||||
|
def __init__(self, game):
|
||||||
|
pygame.font.init()
|
||||||
|
|
||||||
|
self.game = game
|
||||||
|
self.hamburger = Hamburger(game)
|
||||||
|
|
||||||
|
self.padding = 10
|
||||||
|
self.font_size = 50
|
||||||
|
self.line_height = 55
|
||||||
|
|
||||||
|
self.menu_width = min(
|
||||||
|
max(200, self.game.screen.get_width() * 0.8), self.game.screen.get_width()
|
||||||
|
)
|
||||||
|
self.menu_height = min(
|
||||||
|
max(200, self.game.screen.get_height() * 0.8), self.game.screen.get_height()
|
||||||
|
)
|
||||||
|
self.x = (self.game.screen.get_width() - self.menu_width) / 2
|
||||||
|
self.y = (self.game.screen.get_height() - self.menu_height) / 2
|
||||||
|
self.color = (100, 100, 100)
|
||||||
|
self.is_menu_open = False
|
||||||
|
|
||||||
|
self.surface = pygame.Surface((self.menu_width, self.menu_height))
|
||||||
|
self.font = pygame.font.SysFont("", self.font_size)
|
||||||
|
|
||||||
|
self.screen_size_index = False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def items(self) -> list[MenuItem]:
|
||||||
|
return [
|
||||||
|
MenuItem(
|
||||||
|
display_text="Menu",
|
||||||
|
on_click=lambda: print("clicked on me, the menu"),
|
||||||
|
),
|
||||||
|
MenuItem(
|
||||||
|
display_text="Screen Size",
|
||||||
|
on_click=self.next_screen_size,
|
||||||
|
),
|
||||||
|
MenuItem(
|
||||||
|
display_text=f"{self.game.screen.get_width()}x{self.game.screen.get_height()}",
|
||||||
|
color=(50, 0, 0),
|
||||||
|
on_click=self.next_screen_size,
|
||||||
|
),
|
||||||
|
MenuItem(
|
||||||
|
display_text=f"{self.game.player.hero.name}",
|
||||||
|
color=(50, 0, 0),
|
||||||
|
on_click=self.game.player.rename_hero,
|
||||||
|
),
|
||||||
|
MenuItem(
|
||||||
|
display_text="quit",
|
||||||
|
color=(50, 0, 0),
|
||||||
|
on_click=lambda: self.game.quit(),
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
def render(self):
|
||||||
|
if self.is_menu_open:
|
||||||
|
self.surface.fill(self.color)
|
||||||
|
|
||||||
|
pos = (self.padding, self.padding)
|
||||||
|
for item in self.items:
|
||||||
|
text = self.font.render(item.display_text, True, item.text_color)
|
||||||
|
self.surface.blit(text, pos)
|
||||||
|
pos = (pos[0], pos[1] + self.line_height)
|
||||||
|
|
||||||
|
self.game.screen.blit(self.surface, (self.x, self.y))
|
||||||
|
|
||||||
|
self.hamburger.render()
|
||||||
|
|
||||||
|
def next_screen_size(self):
|
||||||
|
if self.screen_size_index is False:
|
||||||
|
self.screen = pygame.display.set_mode(screen_sizes[0])
|
||||||
|
self.screen_size_index = 0
|
||||||
|
if self.screen_size_index == len(screen_sizes) - 1:
|
||||||
|
self.screen_size_index = 0
|
||||||
|
else:
|
||||||
|
self.screen_size_index += 1
|
||||||
|
self.screen = pygame.display.set_mode(screen_sizes[self.screen_size_index])
|
||||||
|
|
||||||
|
def get_mouse_pos(self):
|
||||||
|
"get mouse position relative to self.surface"
|
||||||
|
x, y = pygame.mouse.get_pos()
|
||||||
|
return x - self.x, y - self.y
|
||||||
|
|
||||||
|
def handle_events(self, events):
|
||||||
|
self.hamburger.handle_events(self, events)
|
||||||
|
for event in events:
|
||||||
|
if event.type == pygame.MOUSEBUTTONDOWN and self.is_menu_open:
|
||||||
|
if event.button == 1: # Left mouse button
|
||||||
|
self.handle_click()
|
||||||
|
|
||||||
|
def handle_click(self):
|
||||||
|
pos = self.get_mouse_pos()
|
||||||
|
pos_idx = int(pos[1] // self.line_height)
|
||||||
|
if pos_idx > len(self.items):
|
||||||
|
return
|
||||||
|
if pos_idx < 0:
|
||||||
|
return
|
||||||
|
self.items[pos_idx].on_click()
|
||||||
|
|
||||||
|
|
||||||
|
class Hamburger:
    """Three-bar "hamburger" button in the top-right corner of the screen.

    Left-clicking the button toggles the owning menu open/closed.
    """

    def __init__(self, game):
        self.game = game
        self.hamburger_width = 50
        # Each bar is a quarter of the button width tall, separated by a
        # small gap; total height is three bars plus two gaps.
        self.bar_height = self.hamburger_width / 4
        self.bar_spacing = self.hamburger_width / 20
        self.hamburger_height = self.bar_height * 3 + self.bar_spacing * 2
        # Anchored 20px in from the top-right corner.
        self.x = self.game.screen.get_width() - self.hamburger_width - 20
        self.y = 20
        self.color = (100, 100, 100)
        self.rect = pygame.Rect(
            self.x, self.y, self.hamburger_width, self.hamburger_height
        )
        self.surface = pygame.Surface((self.hamburger_width, self.hamburger_height))

    def render(self):
        """Draw the three bars onto the button surface and blit it to screen."""
        for bar_index in range(3):
            bar_top = bar_index * (self.bar_height + self.bar_spacing)
            pygame.draw.rect(
                self.surface,
                self.color,
                (0, bar_top, self.hamburger_width, self.bar_height),
            )
        self.game.screen.blit(self.surface, (self.x, self.y))

    def handle_events(self, menu: Menu, events):
        """Toggle *menu* when the button receives a left mouse-button press."""
        for event in events:
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                self.handle_click(menu)

    def handle_click(self, menu):
        """Flip the menu's open state if the cursor is over the button."""
        if self.rect.collidepoint(pygame.mouse.get_pos()):
            menu.is_menu_open = not menu.is_menu_open
|
||||||
261
learn_sql_model/game/player.py
Normal file
|
|
@ -0,0 +1,261 @@
|
||||||
|
from learn_sql_model.console import console
|
||||||
|
from learn_sql_model.models.hero import HeroCreate, HeroDelete, HeroUpdate, Heros
|
||||||
|
from learn_sql_model.optional import _optional_import_
|
||||||
|
|
||||||
|
pygame = _optional_import_("pygame", group="game")
|
||||||
|
HeroFactory = _optional_import_(
|
||||||
|
"learn_sql_model.factories.hero",
|
||||||
|
"HeroFactory",
|
||||||
|
group="game",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Player:
    """The local player: owns the networked ``hero`` record, reacts to
    keyboard/joystick input, resolves map collisions, and renders itself
    plus all other connected heroes."""

    def __init__(self, game):
        # Build a random hero locally, then POST it so the server assigns
        # an id; the returned record becomes our authoritative state.
        hero = HeroFactory().build(
            size=25,
            x=100,
            y=100,
            flashlight_strength=1000,
            lanturn_strength=100,
            flashlight_angle=0,
        )
        self.hero = HeroCreate(**hero.dict()).post()
        self.hero.size = 64

        self.game = game
        self.others = []  # Heros(heros=[])
        # Hitbox dimensions (smaller than the rendered sprite).
        self.width = 16
        self.height = 16
        self.white = (255, 255, 255)
        # Screen-space position, initially centred.
        self.x = self.game.screen.get_width() / 2
        self.y = self.game.screen.get_height() / 2
        self.speed = 10
        self.max_speed = 10
        self.image = pygame.image.load("creeper.png").convert_alpha()
        self.pet_image = pygame.image.load("pet.png").convert_alpha()
        self.image = pygame.transform.scale(
            self.image, (self.hero.size, self.hero.size)
        )
        self.pet_image = pygame.transform.scale(
            self.pet_image, (self.hero.size/1.5, self.hero.size/2)
        )
        # Last known non-colliding position, used to back out of walls.
        self.x_last = self.x
        self.y_last = self.y
        # Debug hitbox overlay; alpha 0 keeps it invisible by default.
        self.hitbox_surface = pygame.Surface((self.width, self.height))
        self.hitbox_surface.fill(self.white)
        pygame.draw.rect(
            self.hitbox_surface, (255, 0, 0), (0, 0, self.width, self.height), 1
        )
        self.hitbox_surface.set_alpha(0)
        self.moving_up = False
        self.moving_down = False
        self.moving_left = False
        self.moving_right = False
        self.joysticks = {}

    def rename_hero(self):
        """Rebuild the hero with fresh factory data (keeping id/position)
        and push the change to the server."""
        hero = HeroFactory().build(
            size=self.hero.size,
            x=self.hero.x,
            y=self.hero.y,
            id=self.hero.id,
            flashlight_strength=self.hero.flashlight_strength,
            lanturn_strength=self.hero.lanturn_strength,
        )
        self.hero = HeroUpdate(**hero.dict()).update()

    def quit(self):
        """Best-effort delete of our hero on shutdown; a failed API call
        must not block the game from exiting."""
        try:
            # session = get_config().database.session
            # hero = session.get(Hero, self.hero.id)
            # session.delete(hero)
            # session.commit()
            HeroDelete.delete(id=self.hero.id)
        except RuntimeError:
            pass

    def handle_events(self):
        """Process input, move the hero, resolve collisions, and (once per
        60 ticks) sync state with the server over the websocket."""
        # --- keyboard: arrows and WASD set/clear movement flags ---
        # NOTE(review): self.running is assigned here but never initialised
        # in __init__, and looks like it belongs to the game loop — confirm.
        for event in self.game.events:
            if event.type == pygame.QUIT:
                self.running = False
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    self.running = False
                if event.key == pygame.K_LEFT:
                    self.speed = self.max_speed
                    self.moving_left = True
                if event.key == pygame.K_RIGHT:
                    self.speed = self.max_speed
                    self.moving_right = True
                if event.key == pygame.K_UP:
                    self.speed = self.max_speed
                    self.moving_up = True
                if event.key == pygame.K_DOWN:
                    self.speed = self.max_speed
                    self.moving_down = True
                # wasd
                if event.key == pygame.K_w:
                    self.speed = self.max_speed
                    self.moving_up = True
                if event.key == pygame.K_s:
                    self.speed = self.max_speed
                    self.moving_down = True
                if event.key == pygame.K_a:
                    self.speed = self.max_speed
                    self.moving_left = True
                if event.key == pygame.K_d:
                    self.speed = self.max_speed
                    self.moving_right = True

            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT:
                    self.moving_left = False
                if event.key == pygame.K_RIGHT:
                    self.moving_right = False
                if event.key == pygame.K_UP:
                    self.moving_up = False
                if event.key == pygame.K_DOWN:
                    self.moving_down = False
                # wasd
                if event.key == pygame.K_w:
                    self.moving_up = False
                if event.key == pygame.K_s:
                    self.moving_down = False
                if event.key == pygame.K_a:
                    self.moving_left = False
                if event.key == pygame.K_d:
                    self.moving_right = False

        # --- joysticks: left stick (axes 0/1) moves, right stick (axes
        # 3/4) drives the mouse cursor.  0.2 is the dead-zone threshold.
        # NOTE(review): this moves self.x/self.y while the keyboard path
        # moves self.hero.x/self.hero.y, and self.elapsed is not set in
        # __init__ — confirm which position is authoritative.
        for joystick in self.joysticks.values():
            if abs(joystick.get_axis(0)) > 0.2:
                self.x += joystick.get_axis(0) * 10 * self.speed * self.elapsed
            if abs(joystick.get_axis(1)) > 0.2:
                self.y += joystick.get_axis(1) * 10 * self.speed * self.elapsed

            if abs(joystick.get_axis(3)) > 0.2 and abs(joystick.get_axis(4)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
                        pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
                    )
                )
            elif abs(joystick.get_axis(3)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0] + joystick.get_axis(3) * 32,
                        pygame.mouse.get_pos()[1],
                    )
                )
            elif abs(joystick.get_axis(4)) > 0.2:
                pygame.mouse.set_pos(
                    (
                        pygame.mouse.get_pos()[0],
                        pygame.mouse.get_pos()[1] + joystick.get_axis(4) * 32,
                    )
                )
        # --- apply movement flags to the hero position ---
        if self.moving_left:
            self.hero.x -= self.speed
        if self.moving_right:
            self.hero.x += self.speed
        if self.moving_up:
            self.hero.y -= self.speed
        if self.moving_down:
            self.hero.y += self.speed
        # Check for self collisions with the walls and the black tiles on the map
        if self.hero.x < 0:
            self.hero.x = 0
        if self.hero.x > self.game.screen.get_width() - self.width:
            self.hero.x = self.game.screen.get_width() - self.width
        if self.hero.y < 0:
            self.hero.y = 0
        if self.hero.y > self.game.screen.get_height() - self.height:
            self.hero.y = self.game.screen.get_height() - self.height

        self.pos = pygame.math.Vector2(self.hero.x, self.hero.y)

        # --- map collision: step back along the movement direction from
        # the last known good position until we are just outside the wall.
        if self.game.map.point_check_collision(self.pos.x, self.pos.y):
            start_pos = pygame.math.Vector2(self.x_last, self.y_last)
            end_pos = pygame.math.Vector2(self.hero.x, self.hero.y)
            movement_vector = end_pos - start_pos
            try:
                movement_direction = movement_vector.normalize()
            # normalize() fails on a zero-length vector; fall back to an
            # arbitrary diagonal so we still have a direction to probe.
            except ValueError:
                end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
                movement_vector = end_pos - start_pos
                movement_direction = movement_vector.normalize()
            except ZeroDivisionError:
                end_pos = pygame.math.Vector2(self.hero.x + 128, self.hero.y + 128)
                movement_vector = end_pos - start_pos
                movement_direction = movement_vector.normalize()
            movement_speed = 0.05

            self.hero.x = self.x_last
            self.hero.y = self.y_last

            self.pos = pygame.math.Vector2(start_pos)

            # Inch forward until we re-enter the wall is clear, then back
            # off one step so we rest just outside it.
            while self.game.map.point_check_collision(self.pos.x, self.pos.y):
                self.pos += movement_speed * movement_direction
                self.hero.x = self.pos.x
                self.hero.y = self.pos.y

            self.pos -= movement_speed * movement_direction
            self.hero.x = self.pos.x
            self.hero.y = self.pos.y

        self.x_last = self.hero.x
        self.y_last = self.hero.y

        # --- network sync once per 60 ticks: push our state, pull everyone's ---
        if self.game.ticks % 60 == 0 or self.game.ticks == 0:
            console.print("updating")
            update = HeroUpdate(**self.hero.dict(exclude_unset=True))
            console.print(update)
            self.game.ws.send(update.json())
            console.print("sent")

            raw_heros = self.game.ws.recv()
            console.print(raw_heros)
            self.others = Heros.parse_raw(raw_heros)

    def draw(self):
        """Draw a small (16x16) sprite at the map-offset screen position.

        NOTE(review): calls self.move(), which is not defined on this
        class in the visible code — confirm it exists elsewhere.
        """
        self.move()
        self.game.screen.blit(
            pygame.transform.scale(self.image, (16, 16)),
            (self.x - 8 - self.game.map.offset.x, self.y - 8 - self.game.map.offset.y),
        )

    def render(self):
        """Render every other connected hero (sprite + name label), then
        our own hero, pet, and name label."""
        for other in self.others.__root__:
            if other.id != self.hero.id:
                # put self.image on the game.screen
                self.game.screen.blit(
                    self.image,
                    (other.x - other.size / 2, other.y - other.size / 2),
                )

                # pygame.draw.circle(
                #     self.game.screen, (255, 0, 0), (other.x, other.y), other.size
                # )
                self.game.screen.blit(
                    self.game.font.render(other.name, False, (255, 255, 255), 1),
                    (other.x - other.size / 2, other.y + other.size / 2),
                )
        self.game.screen.blit(
            self.image,
            (self.hero.x - self.hero.size / 2, self.hero.y - self.hero.size / 2),
        )
        self.game.screen.blit(
            self.pet_image,
            (self.hero.x + self.hero.size / 2, self.hero.y - self.hero.size / 2),
        )

        # pygame.draw.circle(
        #     self.game.screen, (0, 0, 255), (self.hero.x, self.hero.y), self.hero.size
        # )

        self.game.screen.blit(
            self.game.font.render(self.hero.name, False, (255, 255, 255), 1),
            (self.hero.x - self.hero.size / 2, self.hero.y + self.hero.size / 2),
        )
|
||||||
|
|
@ -1,12 +1,12 @@
|
||||||
from typing import Optional
|
from typing import Dict, Optional
|
||||||
|
|
||||||
from fastapi import HTTPException
|
|
||||||
import httpx
|
import httpx
|
||||||
|
import pydantic
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from sqlmodel import Field, Relationship, SQLModel, Session, select
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
from learn_sql_model.config import config
|
from learn_sql_model.config import config
|
||||||
from learn_sql_model.models.pet import Pet
|
from learn_sql_model.optional import optional
|
||||||
|
|
||||||
|
|
||||||
class HeroBase(SQLModel, table=False):
|
class HeroBase(SQLModel, table=False):
|
||||||
|
|
@ -14,16 +14,27 @@ class HeroBase(SQLModel, table=False):
|
||||||
secret_name: str
|
secret_name: str
|
||||||
x: int
|
x: int
|
||||||
y: int
|
y: int
|
||||||
size: int
|
size: Optional[int]
|
||||||
age: Optional[int] = None
|
flashlight_strength: Optional[int] = 1000
|
||||||
shoe_size: Optional[int] = None
|
flashlight_angle: Optional[int] = 0
|
||||||
|
lanturn_strength: Optional[int] = 100
|
||||||
|
# age: Optional[int] = None
|
||||||
|
# shoe_size: Optional[int] = None
|
||||||
|
|
||||||
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
|
# pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
|
||||||
pet: Optional[Pet] = Relationship(back_populates="hero")
|
# pet: Optional[Pet] = Relationship(back_populates="hero")
|
||||||
|
|
||||||
|
@pydantic.validator("size", pre=True, always=True)
|
||||||
|
def validate_size(cls, v):
|
||||||
|
if v is None:
|
||||||
|
return 50
|
||||||
|
if v <= 0:
|
||||||
|
raise ValueError("size must be > 0")
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
class Hero(HeroBase, table=True):
|
class Hero(HeroBase, table=True):
|
||||||
id: Optional[int] = Field(default=None, primary_key=True)
|
id: int = Field(default=None, primary_key=True)
|
||||||
|
|
||||||
|
|
||||||
class HeroCreate(HeroBase):
|
class HeroCreate(HeroBase):
|
||||||
|
|
@ -48,87 +59,46 @@ class HeroRead(HeroBase):
|
||||||
cls,
|
cls,
|
||||||
id: int,
|
id: int,
|
||||||
) -> Hero:
|
) -> Hero:
|
||||||
with config.database.session as session:
|
r = httpx.get(f"{config.api_client.url}/hero/{id}")
|
||||||
hero = session.get(Hero, id)
|
if r.status_code != 200:
|
||||||
if not hero:
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
raise HTTPException(status_code=404, detail="Hero not found")
|
return HeroRead.parse_obj(r.json())
|
||||||
return hero
|
|
||||||
|
|
||||||
|
|
||||||
class Heros(BaseModel):
|
class Heros(BaseModel):
|
||||||
heros: list[Hero]
|
__root__: list[Hero]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def list(
|
def list(
|
||||||
self,
|
self,
|
||||||
where=None,
|
|
||||||
offset=0,
|
|
||||||
limit=None,
|
|
||||||
session: Session = None,
|
|
||||||
) -> Hero:
|
) -> Hero:
|
||||||
# with config.database.session as session:
|
r = httpx.get(f"{config.api_client.url}/heros/")
|
||||||
|
if r.status_code != 200:
|
||||||
def get_heros(session, where, offset, limit):
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
statement = select(Hero)
|
return Heros.parse_obj({"__root__": r.json()})
|
||||||
if where != "None" and where is not None:
|
|
||||||
from sqlmodel import text
|
|
||||||
|
|
||||||
statement = statement.where(text(where))
|
|
||||||
statement = statement.offset(offset).limit(limit)
|
|
||||||
heros = session.exec(statement).all()
|
|
||||||
return Heros(heros=heros)
|
|
||||||
|
|
||||||
if session is None:
|
|
||||||
|
|
||||||
r = httpx.get(f"{config.api_client.url}/heros/")
|
|
||||||
if r.status_code != 200:
|
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
|
||||||
return Heros.parse_obj(r.json())
|
|
||||||
|
|
||||||
return get_heros(session, where, offset, limit)
|
|
||||||
|
|
||||||
|
|
||||||
class HeroUpdate(SQLModel):
|
@optional
|
||||||
# id is required to update the hero
|
class HeroUpdate(HeroBase):
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
# all other fields, must match the model, but with Optional default None
|
def update(self) -> Hero:
|
||||||
name: Optional[str] = None
|
|
||||||
secret_name: Optional[str] = None
|
|
||||||
age: Optional[int] = None
|
|
||||||
shoe_size: Optional[int] = None
|
|
||||||
x: int
|
|
||||||
y: int
|
|
||||||
|
|
||||||
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
|
|
||||||
pet: Optional[Pet] = Relationship(back_populates="hero")
|
|
||||||
|
|
||||||
def update(self, session: Session = None) -> Hero:
|
|
||||||
if session is not None:
|
|
||||||
db_hero = session.get(Hero, self.id)
|
|
||||||
if not db_hero:
|
|
||||||
raise HTTPException(status_code=404, detail="Hero not found")
|
|
||||||
for key, value in self.dict(exclude_unset=True).items():
|
|
||||||
setattr(db_hero, key, value)
|
|
||||||
session.add(db_hero)
|
|
||||||
session.commit()
|
|
||||||
session.refresh(db_hero)
|
|
||||||
return db_hero
|
|
||||||
|
|
||||||
r = httpx.patch(
|
r = httpx.patch(
|
||||||
f"{config.api_client.url}/hero/",
|
f"{config.api_client.url}/hero/",
|
||||||
json=self.dict(),
|
json=self.dict(exclude_none=True),
|
||||||
)
|
)
|
||||||
if r.status_code != 200:
|
if r.status_code != 200:
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
|
return Hero.parse_obj(r.json())
|
||||||
|
|
||||||
|
|
||||||
class HeroDelete(BaseModel):
|
class HeroDelete(BaseModel):
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
def delete(self) -> Hero:
|
@classmethod
|
||||||
|
def delete(self, id: int) -> Dict[str, bool]:
|
||||||
r = httpx.delete(
|
r = httpx.delete(
|
||||||
f"{config.api_client.url}/hero/{self.id}",
|
f"{config.api_client.url}/hero/{id}",
|
||||||
)
|
)
|
||||||
if r.status_code != 200:
|
if r.status_code != 200:
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
|
|
|
||||||
96
learn_sql_model/optional.py
Normal file
|
|
@ -0,0 +1,96 @@
|
||||||
|
from typing import List, Optional
|
||||||
|
import textwrap
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
def _optional_import_(
|
||||||
|
module: str,
|
||||||
|
name: str = None,
|
||||||
|
group: str = None,
|
||||||
|
package="learn_sql_model",
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
lazily throws import errors only then the optional import is used, and
|
||||||
|
includes a group install command for the user to install all dependencies
|
||||||
|
for the requested feature.
|
||||||
|
"""
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
try:
|
||||||
|
module = importlib.import_module(module)
|
||||||
|
return module if name is None else getattr(module, name)
|
||||||
|
except ImportError as e:
|
||||||
|
msg = textwrap.dedent(
|
||||||
|
f"""
|
||||||
|
"pip install '{package}[{group}]'" package to make use of this feature
|
||||||
|
Alternatively "pip install '{package}[all]'" package to install all optional dependencies
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
import_error = e
|
||||||
|
|
||||||
|
class _failed_import:
|
||||||
|
"""
|
||||||
|
Lazily throw an import error. Errors should be thrown whether the
|
||||||
|
user tries to call the module, get an attubute from the module, or
|
||||||
|
getitem from the module.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _failed_import(self, *args):
|
||||||
|
raise ImportError(msg) from import_error
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
"""
|
||||||
|
Throw error if the user tries to call the module i.e
|
||||||
|
_optional_import_('dummy')()
|
||||||
|
"""
|
||||||
|
self._failed_import(*args)
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
"""
|
||||||
|
Throw error if the user tries to get an attribute from the
|
||||||
|
module i.e _optional_import_('dummy').dummy.
|
||||||
|
"""
|
||||||
|
if name == "_failed_import":
|
||||||
|
return object.__getattribute__(self, name)
|
||||||
|
self._failed_import()
|
||||||
|
|
||||||
|
def __getitem__(self, name):
|
||||||
|
"""
|
||||||
|
Throw error if the user tries to get an item from the module
|
||||||
|
i.e _optional_import_('dummy')['dummy']
|
||||||
|
"""
|
||||||
|
self._failed_import()
|
||||||
|
|
||||||
|
return _failed_import()
|
||||||
|
|
||||||
|
|
||||||
|
# def optional(fields: Optional[List[str]]=None, required: Optional[List[str]]=None):
|
||||||
|
# def decorator(cls):
|
||||||
|
# def wrapper(*args, **kwargs):
|
||||||
|
# if fields is None:
|
||||||
|
# fields = cls.__fields__
|
||||||
|
# if required is None:
|
||||||
|
# required = []
|
||||||
|
#
|
||||||
|
# for field in fields:
|
||||||
|
# if field not in required:
|
||||||
|
# cls.__fields__[field].required = False
|
||||||
|
# return _cls
|
||||||
|
# return wrapper
|
||||||
|
# return decorator
|
||||||
|
#
|
||||||
|
#
|
||||||
|
def optional(*fields):
    """Mark pydantic model fields as not required.

    Usable either bare — ``@optional`` makes every field optional — or
    with arguments — ``@optional("name", "age")`` — to target specific
    fields.
    """

    def mark_not_required(model_cls):
        for field_name in fields:
            model_cls.__fields__[field_name].required = False
        return model_cls

    # Bare usage: ``@optional`` passes the class itself as the sole
    # positional argument; widen ``fields`` to all of its fields.
    if fields and inspect.isclass(fields[0]) and issubclass(fields[0], BaseModel):
        model_cls = fields[0]
        fields = model_cls.__fields__
        return mark_not_required(model_cls)
    return mark_not_required
|
||||||
|
|
||||||
BIN
lights/light--100.png
Normal file
|
After Width: | Height: | Size: 770 KiB |
BIN
lights/light--120.png
Normal file
|
After Width: | Height: | Size: 804 KiB |
BIN
lights/light--140.png
Normal file
|
After Width: | Height: | Size: 876 KiB |
BIN
lights/light--160.png
Normal file
|
After Width: | Height: | Size: 810 KiB |
BIN
lights/light--180.png
Normal file
|
After Width: | Height: | Size: 580 KiB |
BIN
lights/light--20.png
Normal file
|
After Width: | Height: | Size: 811 KiB |
BIN
lights/light--200.png
Normal file
|
After Width: | Height: | Size: 841 KiB |
BIN
lights/light--220.png
Normal file
|
After Width: | Height: | Size: 910 KiB |
BIN
lights/light--240.png
Normal file
|
After Width: | Height: | Size: 812 KiB |
BIN
lights/light--260.png
Normal file
|
After Width: | Height: | Size: 714 KiB |
BIN
lights/light--280.png
Normal file
|
After Width: | Height: | Size: 696 KiB |
BIN
lights/light--300.png
Normal file
|
After Width: | Height: | Size: 810 KiB |
BIN
lights/light--320.png
Normal file
|
After Width: | Height: | Size: 883 KiB |
BIN
lights/light--340.png
Normal file
|
After Width: | Height: | Size: 827 KiB |
BIN
lights/light--360.png
Normal file
|
After Width: | Height: | Size: 581 KiB |
BIN
lights/light--40.png
Normal file
|
After Width: | Height: | Size: 901 KiB |
BIN
lights/light--60.png
Normal file
|
After Width: | Height: | Size: 809 KiB |
BIN
lights/light--80.png
Normal file
|
After Width: | Height: | Size: 726 KiB |
BIN
lights/light-0.png
Normal file
|
After Width: | Height: | Size: 581 KiB |
BIN
lights/light-100.png
Normal file
|
After Width: | Height: | Size: 714 KiB |
BIN
lights/light-120.png
Normal file
|
After Width: | Height: | Size: 812 KiB |
BIN
lights/light-140.png
Normal file
|
After Width: | Height: | Size: 910 KiB |
BIN
lights/light-160.png
Normal file
|
After Width: | Height: | Size: 841 KiB |
BIN
lights/light-180.png
Normal file
|
After Width: | Height: | Size: 580 KiB |
BIN
lights/light-20.png
Normal file
|
After Width: | Height: | Size: 827 KiB |
BIN
lights/light-200.png
Normal file
|
After Width: | Height: | Size: 810 KiB |
BIN
lights/light-220.png
Normal file
|
After Width: | Height: | Size: 876 KiB |
BIN
lights/light-240.png
Normal file
|
After Width: | Height: | Size: 803 KiB |
BIN
lights/light-260.png
Normal file
|
After Width: | Height: | Size: 770 KiB |
BIN
lights/light-280.png
Normal file
|
After Width: | Height: | Size: 726 KiB |
BIN
lights/light-300.png
Normal file
|
After Width: | Height: | Size: 810 KiB |
BIN
lights/light-320.png
Normal file
|
After Width: | Height: | Size: 901 KiB |
BIN
lights/light-340.png
Normal file
|
After Width: | Height: | Size: 811 KiB |
BIN
lights/light-40.png
Normal file
|
After Width: | Height: | Size: 883 KiB |
BIN
lights/light-60.png
Normal file
|
After Width: | Height: | Size: 810 KiB |
BIN
lights/light-80.png
Normal file
|
After Width: | Height: | Size: 696 KiB |
18
load_test.py
Normal file
|
|
@ -0,0 +1,18 @@
|
||||||
|
from locust import HttpUser, between, task
|
||||||
|
|
||||||
|
from learn_sql_model.factories.hero import HeroFactory
|
||||||
|
from learn_sql_model.models.hero import HeroCreate
|
||||||
|
|
||||||
|
|
||||||
|
class QuickstartUser(HttpUser):
    """Locust user that exercises the hero API: reads weighted 1x,
    creates weighted 3x."""

    # Pause 1-2 seconds between consecutive tasks.
    wait_time = between(1, 2)

    @task
    def hello_world(self):
        """Fetch a single hero and the full hero list."""
        self.client.get("/hero/1")
        self.client.get("/heros/")

    @task(3)
    def create_hero(self):
        """Create a random hero via the model's own post() helper.

        NOTE(review): HeroCreate.post() goes through the configured API
        client, not self.client, so this traffic is not captured in the
        locust stats — confirm that is intended.
        """
        hero = HeroFactory().build()
        HeroCreate(**hero.dict()).post()
|
||||||
58
locustfile.py
Normal file
|
|
@ -0,0 +1,58 @@
|
||||||
|
import random
|
||||||
|
|
||||||
|
from locust import FastHttpUser, task
|
||||||
|
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
from learn_sql_model.factories.hero import HeroFactory
|
||||||
|
from learn_sql_model.models.hero import HeroCreate, HeroUpdate, Heros
|
||||||
|
|
||||||
|
config = get_config()
|
||||||
|
|
||||||
|
|
||||||
|
class QuickstartUser(FastHttpUser):
    """Locust user covering the full hero CRUD cycle:
    reads (6x), creates (1x), updates (3x), deletes (1x)."""

    # wait_time = between(1, 2)
    host = "http://localhost:5000"
    # host = "https://waylonwalker.com"

    def on_start(self):
        # Skip TLS certificate verification (local/self-signed target).
        self.client.verify = False

    @task(6)
    def get_a_hero(self):
        """Fetch one hero by id (fixed at 1 to avoid a list round-trip)."""
        # heros = Heros.list()
        id = 1
        # id = random.choice(heros.__root__).id

        self.client.get(f"/hero/{id}")

    # @task(2)
    # def get_all_hero(self):
    #     self.client.get("/heros/")

    @task
    def create_hero(self):
        """Create a random hero.

        NOTE(review): HeroCreate.post() already creates the hero through
        the API client, so the self.client.post below creates a second
        one — confirm the double-create is intended.
        """
        hero = HeroFactory().build()
        hero_create = HeroCreate(**hero.dict()).post()

        self.client.post(
            f"{config.api_client.url}/hero/",
            json=hero_create.dict(),
        )

    @task(3)
    def update_hero(self):
        """Rename hero id=1 with a freshly generated name."""
        hero = HeroFactory().build()
        hero_update = HeroUpdate(id=1, name=hero.name)

        self.client.patch(
            "/hero/",
            json=hero_update.dict(exclude_none=True),
        )

    @task
    def delete_hero(self):
        """Delete a randomly chosen existing hero."""
        heros = Heros.list()
        id = random.choice(heros.__root__).id
        self.client.delete(
            f"/hero/{id}",
        )
|
||||||
0
map.pkl
Normal file
BIN
map.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
micro
Normal file
|
|
@ -79,7 +79,7 @@ def run_migrations_online() -> None:
|
||||||
context.configure(
|
context.configure(
|
||||||
connection=connection,
|
connection=connection,
|
||||||
target_metadata=target_metadata,
|
target_metadata=target_metadata,
|
||||||
render_as_batch=False,
|
render_as_batch=True,
|
||||||
version_table=f'{config.get_main_option("project")}_alembic_version',
|
version_table=f'{config.get_main_option("project")}_alembic_version',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -8,6 +8,9 @@ Create Date: ${create_date}
|
||||||
from alembic import op
|
from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlmodel
|
import sqlmodel
|
||||||
|
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
${imports if imports else ""}
|
${imports if imports else ""}
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
|
|
@ -19,6 +22,8 @@ depends_on = ${repr(depends_on)}
|
||||||
|
|
||||||
def upgrade() -> None:
|
def upgrade() -> None:
|
||||||
${upgrades if upgrades else "pass"}
|
${upgrades if upgrades else "pass"}
|
||||||
|
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
def downgrade() -> None:
|
||||||
|
|
|
||||||
32
migrations/versions/3555f61aaa79_add_x_and_y.py
Normal file
|
|
@ -0,0 +1,32 @@
|
||||||
|
"""add x and y
|
||||||
|
|
||||||
|
Revision ID: 3555f61aaa79
|
||||||
|
Revises: 79972ec5f79d
|
||||||
|
Create Date: 2023-06-22 15:03:27.338959
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "3555f61aaa79"
|
||||||
|
down_revision = "79972ec5f79d"
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
    """Apply: add non-nullable integer ``x``/``y`` position columns to ``hero``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column("hero", sa.Column("x", sa.Integer(), nullable=False))
    op.add_column("hero", sa.Column("y", sa.Integer(), nullable=False))
    # ### end Alembic commands ###
    # generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
    # generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Revert: drop the ``x``/``y`` position columns from ``hero``."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column("hero", "y")
    op.drop_column("hero", "x")
    # ### end Alembic commands ###
|
||||||
68
migrations/versions/3555f61aaa79_er_diagram.md
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| 79972ec5f79d |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 record.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id | x | y |
|
||||||
|
|------|-------------|----|---|---|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | |
|
||||||
|
| secret_name | VARCHAR | | |
|
||||||
|
| id | INTEGER | | |
|
||||||
|
| x | INTEGER | | |
|
||||||
|
| y | INTEGER | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | |
|
||||||
|
| birthday | DATETIME | | |
|
||||||
|
| id | INTEGER | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/3555f61aaa79_er_diagram.png
Normal file
|
After Width: | Height: | Size: 39 KiB |
65
migrations/versions/79972ec5f79d_er_diagram.md
Normal file
|
|
@ -0,0 +1,65 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id |
|
||||||
|
|------|-------------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/79972ec5f79d_er_diagram.png
Normal file
|
After Width: | Height: | Size: 34 KiB |
45
migrations/versions/79972ec5f79d_int.py
Normal file
|
|
@ -0,0 +1,45 @@
|
||||||
|
"""int
|
||||||
|
|
||||||
|
Revision ID: 79972ec5f79d
|
||||||
|
Revises:
|
||||||
|
Create Date: 2023-06-22 15:02:20.292322
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlmodel
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = "79972ec5f79d"
|
||||||
|
down_revision = None
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
"hero",
|
||||||
|
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||||
|
sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
op.create_table(
|
||||||
|
"pet",
|
||||||
|
sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
||||||
|
sa.Column("birthday", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
# generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
# generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_table("pet")
|
||||||
|
op.drop_table("hero")
|
||||||
|
# ### end Alembic commands ###
|
||||||
36
migrations/versions/a1cd0a1947be_add_hero_lighting.py
Normal file
|
|
@ -0,0 +1,36 @@
|
||||||
|
"""add hero.lighting
|
||||||
|
|
||||||
|
Revision ID: a1cd0a1947be
|
||||||
|
Revises: c79214cdc7b3
|
||||||
|
Create Date: 2023-06-28 19:43:47.108749
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlmodel
|
||||||
|
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'a1cd0a1947be'
|
||||||
|
down_revision = 'c79214cdc7b3'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('hero', sa.Column('flashlight_strength', sa.Integer(), nullable=True))
|
||||||
|
op.add_column('hero', sa.Column('lanturn_strength', sa.Integer(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column('hero', 'lanturn_strength')
|
||||||
|
op.drop_column('hero', 'flashlight_strength')
|
||||||
|
# ### end Alembic commands ###
|
||||||
75
migrations/versions/a1cd0a1947be_er_diagram.md
Normal file
|
|
@ -0,0 +1,75 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| c79214cdc7b3 |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|
||||||
|
|------|-------------|----|---|---|------|---------------------|------------------|
|
||||||
|
| deep-insect | unusual-inspection | 1 | 100 | 100 | 25 | None | None |
|
||||||
|
| flat-foundation | personal-incident | 2 | 100 | 100 | 25 | None | None |
|
||||||
|
| formal-cap | mental-substance | 3 | 100 | 100 | 25 | None | None |
|
||||||
|
| political-routine | low-engineer | 4 | 100 | 100 | 25 | None | None |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
| x | INTEGER | | | |
|
||||||
|
| y | INTEGER | | | |
|
||||||
|
| size | INTEGER | | | |
|
||||||
|
| flashlight_strength | INTEGER | | | |
|
||||||
|
| lanturn_strength | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 4 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/a1cd0a1947be_er_diagram.png
Normal file
|
After Width: | Height: | Size: 55 KiB |
|
|
@ -1,29 +0,0 @@
|
||||||
"""add birthday
|
|
||||||
|
|
||||||
Revision ID: a9bb6625c57b
|
|
||||||
Revises: c8516c888495
|
|
||||||
Create Date: 2023-05-25 19:00:58.137464
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
import sqlmodel
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = 'a9bb6625c57b'
|
|
||||||
down_revision = 'c8516c888495'
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.add_column('pet', sa.Column('birthday', sa.DateTime(), nullable=True))
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_column('pet', 'birthday')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
@ -1,33 +1,34 @@
|
||||||
"""add x, y, size
|
"""add hero.size
|
||||||
|
|
||||||
Revision ID: e26398d96dd0
|
Revision ID: c79214cdc7b3
|
||||||
Revises: a9bb6625c57b
|
Revises: 3555f61aaa79
|
||||||
Create Date: 2023-06-10 18:37:04.751553
|
Create Date: 2023-06-28 11:39:02.606001
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from alembic import op
|
from alembic import op
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
import sqlmodel
|
import sqlmodel
|
||||||
|
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
# revision identifiers, used by Alembic.
|
||||||
revision = 'e26398d96dd0'
|
revision = 'c79214cdc7b3'
|
||||||
down_revision = 'a9bb6625c57b'
|
down_revision = '3555f61aaa79'
|
||||||
branch_labels = None
|
branch_labels = None
|
||||||
depends_on = None
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
def upgrade() -> None:
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
op.add_column('hero', sa.Column('x', sa.Integer(), nullable=False))
|
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=True))
|
||||||
op.add_column('hero', sa.Column('y', sa.Integer(), nullable=False))
|
|
||||||
op.add_column('hero', sa.Column('size', sa.Integer(), nullable=False))
|
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
|
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
def downgrade() -> None:
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
op.drop_column('hero', 'size')
|
op.drop_column('hero', 'size')
|
||||||
op.drop_column('hero', 'y')
|
|
||||||
op.drop_column('hero', 'x')
|
|
||||||
# ### end Alembic commands ###
|
# ### end Alembic commands ###
|
||||||
74
migrations/versions/c79214cdc7b3_er_diagram.md
Normal file
|
|
@ -0,0 +1,74 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| 3555f61aaa79 |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id | x | y | size |
|
||||||
|
|------|-------------|----|---|---|------|
|
||||||
|
| tight-gold | successful-health | 1 | 6430 | 6231 | None |
|
||||||
|
| hard-rope | green-research | 2 | 1395 | 2865 | None |
|
||||||
|
| sure-priority | pretty-series | 3 | 2770 | 7835 | None |
|
||||||
|
| huge-library | adult-body | 4 | 656 | 2377 | None |
|
||||||
|
| specific-courage | suspicious-delivery | 5 | 4193 | 9011 | None |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
| x | INTEGER | | | |
|
||||||
|
| y | INTEGER | | | |
|
||||||
|
| size | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 1572 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/c79214cdc7b3_er_diagram.png
Normal file
|
After Width: | Height: | Size: 41 KiB |
|
|
@ -1,44 +0,0 @@
|
||||||
"""init
|
|
||||||
|
|
||||||
Revision ID: c8516c888495
|
|
||||||
Revises:
|
|
||||||
Create Date: 2023-05-25 18:42:37.057225
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
import sqlmodel
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = 'c8516c888495'
|
|
||||||
down_revision = None
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade() -> None:
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.create_table('pet',
|
|
||||||
sa.Column('id', sa.Integer(), nullable=False),
|
|
||||||
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
op.create_table('hero',
|
|
||||||
sa.Column('id', sa.Integer(), nullable=False),
|
|
||||||
sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
|
||||||
sa.Column('secret_name', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
|
|
||||||
sa.Column('age', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('shoe_size', sa.Integer(), nullable=True),
|
|
||||||
sa.Column('pet_id', sa.Integer(), nullable=True),
|
|
||||||
sa.ForeignKeyConstraint(['pet_id'], ['pet.id'], ),
|
|
||||||
sa.PrimaryKeyConstraint('id')
|
|
||||||
)
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade() -> None:
|
|
||||||
# ### commands auto generated by Alembic - please adjust! ###
|
|
||||||
op.drop_table('hero')
|
|
||||||
op.drop_table('pet')
|
|
||||||
# ### end Alembic commands ###
|
|
||||||
|
|
@ -0,0 +1,34 @@
|
||||||
|
"""add hero.flashlight_angle
|
||||||
|
|
||||||
|
Revision ID: d79dd8e699d1
|
||||||
|
Revises: e1af975310a1
|
||||||
|
Create Date: 2023-06-28 19:54:19.322431
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlmodel
|
||||||
|
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'd79dd8e699d1'
|
||||||
|
down_revision = 'e1af975310a1'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.add_column('hero', sa.Column('flashlight_angle', sa.Integer(), nullable=True))
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.drop_column('hero', 'flashlight_angle')
|
||||||
|
# ### end Alembic commands ###
|
||||||
72
migrations/versions/d79dd8e699d1_er_diagram.md
Normal file
|
|
@ -0,0 +1,72 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| e1af975310a1 |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength | flashlight_angle |
|
||||||
|
|------|-------------|----|---|---|------|---------------------|------------------|------------------|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
| x | INTEGER | | | |
|
||||||
|
| y | INTEGER | | | |
|
||||||
|
| size | INTEGER | | | |
|
||||||
|
| flashlight_strength | INTEGER | | | |
|
||||||
|
| lanturn_strength | INTEGER | | | |
|
||||||
|
| flashlight_angle | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/d79dd8e699d1_er_diagram.png
Normal file
|
After Width: | Height: | Size: 61 KiB |
|
|
@ -0,0 +1,34 @@
|
||||||
|
"""add hero.flashlight_angle
|
||||||
|
|
||||||
|
Revision ID: e1af975310a1
|
||||||
|
Revises: a1cd0a1947be
|
||||||
|
Create Date: 2023-06-28 19:53:18.068873
|
||||||
|
|
||||||
|
"""
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
import sqlmodel
|
||||||
|
from learn_sql_model.er_diagram import generate_er_diagram, generate_er_markdown
|
||||||
|
from learn_sql_model.config import get_config
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision = 'e1af975310a1'
|
||||||
|
down_revision = 'a1cd0a1947be'
|
||||||
|
branch_labels = None
|
||||||
|
depends_on = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
pass
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
generate_er_diagram(f'migrations/versions/{revision}_er_diagram.png')
|
||||||
|
generate_er_markdown(f'migrations/versions/{revision}_er_diagram.md', f'migrations/versions/er_diagram_{revision}.png')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
pass
|
||||||
|
# ### end Alembic commands ###
|
||||||
71
migrations/versions/e1af975310a1_er_diagram.md
Normal file
|
|
@ -0,0 +1,71 @@
|
||||||
|

|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: learn_sql_model_alembic_version
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| version_num |
|
||||||
|
|-------------|
|
||||||
|
| a1cd0a1947be |
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| version_num | VARCHAR(32) | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table learn_sql_model_alembic_version contains 1 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: hero
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | secret_name | id | x | y | size | flashlight_strength | lanturn_strength |
|
||||||
|
|------|-------------|----|---|---|------|---------------------|------------------|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| secret_name | VARCHAR | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
| x | INTEGER | | | |
|
||||||
|
| y | INTEGER | | | |
|
||||||
|
| size | INTEGER | | | |
|
||||||
|
| flashlight_strength | INTEGER | | | |
|
||||||
|
| lanturn_strength | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table hero contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Table: pet
|
||||||
|
|
||||||
|
### First 5 rows
|
||||||
|
|
||||||
|
| name | birthday | id |
|
||||||
|
|------|----------|----|
|
||||||
|
|
||||||
|
### Columns
|
||||||
|
|
||||||
|
| Column Name | Type | Foreign Key | Example Value |
|
||||||
|
|-------------|------|-------------|---------------|
|
||||||
|
| name | VARCHAR | | | |
|
||||||
|
| birthday | DATETIME | | | |
|
||||||
|
| id | INTEGER | | | |
|
||||||
|
|
||||||
|
### Records Count
|
||||||
|
|
||||||
|
The table pet contains 0 records.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
BIN
migrations/versions/e1af975310a1_er_diagram.png
Normal file
|
After Width: | Height: | Size: 55 KiB |
37
notify.py
Normal file
|
|
@ -0,0 +1,37 @@
|
||||||
|
# # Import smtplib for the actual sending function
|
||||||
|
# import smtplib
|
||||||
|
# # Import the email modules we'll need
|
||||||
|
# from email.mime.text import MIMEText
|
||||||
|
|
||||||
|
# # Open a plain text file for reading. For this example, assume that
|
||||||
|
# # the text file contains only ASCII characters.
|
||||||
|
# # with open(textfile, 'rb') as fp:
|
||||||
|
# # # Create a text/plain message
|
||||||
|
# # msg = MIMEText(fp.read())
|
||||||
|
# msg = MIMEText("hello there", "plain", "utf-8")
|
||||||
|
|
||||||
|
# # me == the sender's email address
|
||||||
|
# # you == the recipient's email address
|
||||||
|
# me = "waylon@waylonwalker.com"
|
||||||
|
# you = "3195728809@msg.fi.google.com"
|
||||||
|
# msg["Subject"] = "Python SMTP test"
|
||||||
|
# msg["From"] = me
|
||||||
|
# msg["To"] = you
|
||||||
|
|
||||||
|
# # Send the message via our own SMTP server, but don't include the
|
||||||
|
# # envelope header.
|
||||||
|
# s = smtplib.SMTP("localhost")
|
||||||
|
# s.sendmail(me, [you], msg.as_string())
|
||||||
|
# s.quit()
|
||||||
|
import requests
|
||||||
|
|
||||||
|
requests.post(
|
||||||
|
"https://api.mailgun.net/v3/YOUR_DOMAIN_NAME/messages",
|
||||||
|
auth=("api", "YOUR_API_KEY"),
|
||||||
|
data={
|
||||||
|
"from": "Excited User <mailgun@YOUR_DOMAIN_NAME>",
|
||||||
|
"to": ["bar@example.com", "YOU@YOUR_DOMAIN_NAME"],
|
||||||
|
"subject": "Hello",
|
||||||
|
"text": "Testing some Mailgun awesomness!",
|
||||||
|
},
|
||||||
|
)
|
||||||
BIN
pet.png
Normal file
|
After Width: | Height: | Size: 738 B |
BIN
player.png
Normal file
|
After Width: | Height: | Size: 1.2 KiB |
|
|
@ -24,36 +24,52 @@ classifiers = [
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
]
|
]
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"python-socketio[client]",
|
"black",
|
||||||
"fastapi-socketio",
|
"python-socketio[client]",
|
||||||
"psycopg2-binary",
|
|
||||||
'pygame',
|
|
||||||
'black',
|
|
||||||
'alembic',
|
|
||||||
'pygame',
|
|
||||||
'pyinstaller',
|
|
||||||
"pyflyby",
|
|
||||||
"anyconfig",
|
"anyconfig",
|
||||||
"copier",
|
"copier",
|
||||||
"engorgio",
|
"engorgio",
|
||||||
"fastapi",
|
"fastapi",
|
||||||
"httpx",
|
"httpx",
|
||||||
"passlib[bcrypt]",
|
"pydantic<2.0.0",
|
||||||
"polyfactory",
|
"pyflyby",
|
||||||
"psycopg2",
|
"pyinstaller",
|
||||||
"python-jose[cryptography]",
|
|
||||||
"python-multipart",
|
|
||||||
"rich",
|
"rich",
|
||||||
"sqlmodel",
|
"sqlmodel",
|
||||||
"textual",
|
"textual",
|
||||||
"toml",
|
"toml",
|
||||||
"trogon",
|
"trogon",
|
||||||
"typer",
|
"typer",
|
||||||
"uvicorn[standard]",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
dynamic = ["version"]
|
dynamic = ["version"]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
game = [
|
||||||
|
"noise",
|
||||||
|
"pygame",
|
||||||
|
"polyfactory",
|
||||||
|
"faker",
|
||||||
|
]
|
||||||
|
api = [
|
||||||
|
"fastapi-socketio",
|
||||||
|
"passlib[bcrypt]",
|
||||||
|
"psycopg2",
|
||||||
|
"psycopg2-binary",
|
||||||
|
"python-jose[cryptography]",
|
||||||
|
"python-multipart",
|
||||||
|
"uvicorn[standard]",
|
||||||
|
]
|
||||||
|
manage = [
|
||||||
|
"alembic",
|
||||||
|
"polyfactory",
|
||||||
|
"faker",
|
||||||
|
]
|
||||||
|
all = [
|
||||||
|
"learn_sql_model[game, api, manage]",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
Documentation = "https://github.com/waylonwalker/learn-sql-model#readme"
|
Documentation = "https://github.com/waylonwalker/learn-sql-model#readme"
|
||||||
Issues = "https://github.com/waylonwalker/learn-sql-model/issues"
|
Issues = "https://github.com/waylonwalker/learn-sql-model/issues"
|
||||||
|
|
|
||||||
23
rect.py
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
import pygame
|
||||||
|
|
||||||
|
pygame.init()
|
||||||
|
|
||||||
|
screen = pygame.display.set_mode((500, 500))
|
||||||
|
pygame.display.set_caption("draw a square")
|
||||||
|
|
||||||
|
running = True
|
||||||
|
|
||||||
|
while running:
|
||||||
|
for event in pygame.event.get():
|
||||||
|
if event.type == pygame.QUIT:
|
||||||
|
running = False
|
||||||
|
|
||||||
|
surface = pygame.Surface((500, 500))
|
||||||
|
surface.fill((255, 0, 0))
|
||||||
|
|
||||||
|
color = (0, 0, 255)
|
||||||
|
rect = (200, 200, 100, 100)
|
||||||
|
pygame.draw.rect(surface, color, rect)
|
||||||
|
|
||||||
|
screen.blit(surface, (0, 0))
|
||||||
|
pygame.display.flip()
|
||||||
129
templates/er_diagram.html
Normal file
|
|
@ -0,0 +1,129 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8" />
|
||||||
|
<title>ER Diagram</title>
|
||||||
|
<!-- Include d3.js -->
|
||||||
|
<script src="https://d3js.org/d3.v6.min.js"></script>
|
||||||
|
<style>
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table {
|
||||||
|
font-family: Arial, sans-serif;
|
||||||
|
font-size: 14px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
.table-name {
|
||||||
|
font-weight: bold;
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.foreign-key {
|
||||||
|
fill: #b30000;
|
||||||
|
}
|
||||||
|
|
||||||
|
.link {
|
||||||
|
stroke: #999;
|
||||||
|
stroke-opacity: 0.6;
|
||||||
|
stroke-width: 2px;
|
||||||
|
fill: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.link-curved-path {
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div id="er-diagram"></div>
|
||||||
|
<script>
|
||||||
|
// Generate ER diagram
|
||||||
|
const tables = {{tables}};
|
||||||
|
const links = {{links}};
|
||||||
|
|
||||||
|
const width = window.innerWidth;
|
||||||
|
const height = window.innerHeight;
|
||||||
|
|
||||||
|
const tableElemWidth = 120;
|
||||||
|
const tableElemHeight = d => 20 * (d.columns.length + 1);
|
||||||
|
|
||||||
|
let svg = d3.select("#er-diagram")
|
||||||
|
.append("svg")
|
||||||
|
.attr("width", width)
|
||||||
|
.attr("height", height);
|
||||||
|
|
||||||
|
let g = svg.append("g");
|
||||||
|
|
||||||
|
let linkGroup = g.selectAll(".link")
|
||||||
|
.data(links)
|
||||||
|
.join("path")
|
||||||
|
.attr("class", "link");
|
||||||
|
|
||||||
|
let tableGroup = g.selectAll(".table")
|
||||||
|
.data(tables)
|
||||||
|
.join("g")
|
||||||
|
.attr("class", "table")
|
||||||
|
.classed("collapsed", false)
|
||||||
|
.on("click", (event, d) => {
|
||||||
|
d3.select(event.currentTarget).classed("collapsed", !d3.select(event.currentTarget).classed("collapsed"));
|
||||||
|
});
|
||||||
|
|
||||||
|
let zoomBehavior = d3.zoom()
|
||||||
|
.scaleExtent([0.1, 4])
|
||||||
|
.on("zoom", function (event) {
|
||||||
|
g.attr("transform", event.transform);
|
||||||
|
});
|
||||||
|
|
||||||
|
svg.call(zoomBehavior);
|
||||||
|
|
||||||
|
let rect = tableGroup.append("rect")
|
||||||
|
.attr("width", tableElemWidth)
|
||||||
|
.attr("height", tableElemHeight)
|
||||||
|
.attr("fill", "#eee");
|
||||||
|
|
||||||
|
let text = tableGroup.append("text")
|
||||||
|
.attr("class", "table-name")
|
||||||
|
.attr("x", 10)
|
||||||
|
.attr("y", 20)
|
||||||
|
.text(d => d.name);
|
||||||
|
|
||||||
|
let columnText = tableGroup.selectAll(".column")
|
||||||
|
.data(d => d.columns.map(col => ({name: col, is_foreign_key: d.foreign_keys.some(fk => fk.from === col)})))
|
||||||
|
.join("text")
|
||||||
|
.attr("class", d => d.is_foreign_key ? "column foreign-key" : "column")
|
||||||
|
.attr("x", 10)
|
||||||
|
.attr("y", (d, i) => 40 + i * 20)
|
||||||
|
.text(d => d.name);
|
||||||
|
|
||||||
|
// Physics simulation and force layout
|
||||||
|
let simulation = d3.forceSimulation(tables)
|
||||||
|
.force("link", d3.forceLink(links).id(d => d.name).distance(200))
|
||||||
|
.force("charge", d3.forceManyBody().strength(-800))
|
||||||
|
.force("x", d3.forceX(width / 2).strength(0.1))
|
||||||
|
.force("y", d3.forceY(height / 2).strength(0.1))
|
||||||
|
.on("tick", () => {
|
||||||
|
tableGroup.attr("transform", d => `translate(${d.x}, ${d.y})`);
|
||||||
|
linkGroup.attr("d", d => {
|
||||||
|
const srcX = d.source.x + tableElemWidth;
|
||||||
|
const srcY = d.source.y + 40 + d.source.columns.findIndex(c => c === d.source_col) * 20;
|
||||||
|
const tgtX = d.target.x;
|
||||||
|
const tgtY = d.target.y + 40 + d.target.columns.findIndex(c => c === d.target_col) * 20;
|
||||||
|
const deltaX = tgtX - srcX;
|
||||||
|
const deltaY = tgtY - srcY;
|
||||||
|
const curveFactor = 50;
|
||||||
|
const curveY = deltaY < 0 ? -curveFactor : curveFactor;
|
||||||
|
return `M${srcX},${srcY}C${srcX + deltaX / 2},${srcY + curveY} ${tgtX - deltaX / 2},${tgtY - curveY} ${tgtX},${tgtY}`;
|
||||||
|
});
|
||||||
|
columnText.style("display", (d, i, nodes) => {
|
||||||
|
return d3.select(nodes[i].parentNode).classed("collapsed") ? "none" : null;
|
||||||
|
});
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
|
||||||
|
</html>
|
||||||
|
|
@ -1,86 +1,89 @@
|
||||||
from fastapi import APIRouter, Depends, HTTPException
|
from fastapi import APIRouter, Depends, HTTPException
|
||||||
from sqlmodel import SQLModel, Session
|
from sqlmodel import Session, select
|
||||||
|
|
||||||
from learn_sql_model.api.websocket_connection_manager import manager
|
from learn_sql_model.api.websocket_connection_manager import manager
|
||||||
from learn_sql_model.config import get_config, get_session
|
from learn_sql_model.config import get_session
|
||||||
from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}, {{modelname}}Create, {{modelname}}Read, {{modelname}}Update
|
from learn_sql_model.models.{{ modelname }} import {{ modelname }}, {{ modelname }}Create, {{ modelname }}Read, {{ modelname }}Update, {{ modelname }}s
|
||||||
|
|
||||||
{{modelname.lower()}}_router = APIRouter()
|
{{ modelname }}_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.on_event("startup")
|
@{{ modelname }}_router.on_event("startup")
|
||||||
def on_startup() -> None:
|
def on_startup() -> None:
|
||||||
SQLModel.metadata.create_all(get_config().database.engine)
|
# SQLModel.metadata.create_all(get_config().database.engine)
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.get("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
|
@{{ modelname }}_router.get("/{{ modelname }}/{{{ modelname }}_id}")
|
||||||
async def get_{{modelname.lower()}}(
|
def get_{{ modelname }}(
|
||||||
*,
|
*,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
{{modelname.lower()}}_id: int,
|
{{ modelname }}_id: int,
|
||||||
) -> {{modelname}}Read:
|
) -> {{ modelname }}Read:
|
||||||
"get one {{modelname.lower()}}"
|
"get one {{ modelname }}"
|
||||||
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
|
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
|
||||||
if not {{modelname.lower()}}:
|
if not {{ modelname }}:
|
||||||
raise HTTPException(status_code=404, detail="{{modelname}} not found")
|
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
|
||||||
return {{modelname.lower()}}
|
return {{ modelname }}
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.post("/{{modelname.lower()}}/")
|
@{{ modelname }}_router.post("/{{ modelname }}/")
|
||||||
async def post_{{modelname.lower()}}(
|
def post_{{ modelname }}(
|
||||||
*,
|
*,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
{{modelname.lower()}}: {{modelname}}Create,
|
{{ modelname }}: {{ modelname }}Create,
|
||||||
) -> {{modelname}}Read:
|
) -> {{ modelname }}Read:
|
||||||
"read all the {{modelname.lower()}}s"
|
"create a {{ modelname }}"
|
||||||
db_{{modelname.lower()}} = {{modelname}}.from_orm({{modelname.lower()}})
|
db_{{ modelname }} = {{ modelname }}.from_orm({{ modelname }})
|
||||||
session.add(db_{{modelname.lower()}})
|
session.add(db_{{ modelname }})
|
||||||
session.commit()
|
session.commit()
|
||||||
session.refresh(db_{{modelname.lower()}})
|
session.refresh(db_{{ modelname }})
|
||||||
await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
|
await manager.broadcast({{{ modelname }}.json()}, id=1)
|
||||||
return db_{{modelname.lower()}}
|
return db_{{ modelname }}
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.patch("/{{modelname.lower()}}/")
|
@{{ modelname }}_router.patch("/{{ modelname }}/")
|
||||||
async def patch_{{modelname.lower()}}(
|
def patch_{{ modelname }}(
|
||||||
*,
|
*,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
{{modelname.lower()}}: {{modelname}}Update,
|
{{ modelname }}: {{ modelname }}Update,
|
||||||
) -> {{modelname}}Read:
|
) -> {{ modelname }}Read:
|
||||||
"read all the {{modelname.lower()}}s"
|
"update a {{ modelname }}"
|
||||||
db_{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}.id)
|
db_{{ modelname }} = session.get({{ modelname }}, {{ modelname }}.id)
|
||||||
if not db_{{modelname.lower()}}:
|
if not db_{{ modelname }}:
|
||||||
raise HTTPException(status_code=404, detail="{{modelname}} not found")
|
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
|
||||||
for key, value in {{modelname.lower()}}.dict(exclude_unset=True).items():
|
for key, value in {{ modelname }}.dict(exclude_unset=True).items():
|
||||||
setattr(db_{{modelname.lower()}}, key, value)
|
setattr(db_{{ modelname }}, key, value)
|
||||||
session.add(db_{{modelname.lower()}})
|
session.add(db_{{ modelname }})
|
||||||
session.commit()
|
session.commit()
|
||||||
session.refresh(db_{{modelname.lower()}})
|
session.refresh(db_{{ modelname }})
|
||||||
await manager.broadcast({{{modelname.lower()}}.json()}, id=1)
|
await manager.broadcast({{{ modelname }}.json()}, id=1)
|
||||||
return db_{{modelname.lower()}}
|
return db_{{ modelname }}
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.delete("/{{modelname.lower()}}/{{{modelname.lower()}}_id}")
|
@{{ modelname }}_router.delete("/{{ modelname }}/{{{ modelname }}_id}")
|
||||||
async def delete_{{modelname.lower()}}(
|
def delete_{{ modelname }}(
|
||||||
*,
|
*,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
{{modelname.lower()}}_id: int,
|
{{ modelname }}_id: int,
|
||||||
):
|
):
|
||||||
"read all the {{modelname.lower()}}s"
|
"delete a {{ modelname }}"
|
||||||
{{modelname.lower()}} = session.get({{modelname}}, {{modelname.lower()}}_id)
|
{{ modelname }} = session.get({{ modelname }}, {{ modelname }}_id)
|
||||||
if not {{modelname.lower()}}:
|
if not {{ modelname }}:
|
||||||
raise HTTPException(status_code=404, detail="{{modelname}} not found")
|
raise HTTPException(status_code=404, detail="{{ modelname }} not found")
|
||||||
session.delete({{modelname.lower()}})
|
session.delete({{ modelname }})
|
||||||
session.commit()
|
session.commit()
|
||||||
await manager.broadcast(f"deleted {{modelname.lower()}} {{{modelname.lower()}}_id}", id=1)
|
await manager.broadcast(f"deleted {{ modelname }} {{{ modelname }}_id}", id=1)
|
||||||
return {"ok": True}
|
return {"ok": True}
|
||||||
|
|
||||||
|
|
||||||
@{{modelname.lower()}}_router.get("/{{modelname.lower()}}s/")
|
@{{ modelname }}_router.get("/{{ modelname }}s/")
|
||||||
async def get_{{modelname.lower()}}s(
|
def get_{{ modelname }}s(
|
||||||
*,
|
*,
|
||||||
session: Session = Depends(get_session),
|
session: Session = Depends(get_session),
|
||||||
) -> list[{{modelname}}]:
|
) -> {{ modelname }}s:
|
||||||
"get all {{modelname.lower()}}s"
|
"get all {{ modelname }}s"
|
||||||
return {{modelname}}Read.list(session=session)
|
statement = select({{ modelname }})
|
||||||
|
{{ modelname }}s = session.exec(statement).all()
|
||||||
|
return {{ modelname }}s(__root__={{ modelname }}s)
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,14 +1,12 @@
|
||||||
from faker import Faker
|
from faker import Faker
|
||||||
from polyfactory.factories.pydantic_factory import ModelFactory
|
from polyfactory.factories.pydantic_factory import ModelFactory
|
||||||
|
|
||||||
from learn_sql_model.models.{{modelname.lower()}} import {{modelname}}
|
from learn_sql_model.factories.pet import PetFactory
|
||||||
|
from learn_sql_model.models.{{ modelname }} import {{ modelname }}
|
||||||
|
from learn_sql_model.models.pet import Pet
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}Factory(ModelFactory[{{modelname.lower()}}]):
|
class {{ modelname }}Factory(ModelFactory[{{ modelname }}]):
|
||||||
__model__ = {{modelname}}
|
__model__ = {{ modelname }}
|
||||||
__faker__ = Faker(locale="en_US")
|
__faker__ = Faker(locale="en_US")
|
||||||
__set_as_default_factory_for_type__ = True
|
__set_as_default_factory_for_type__ = True
|
||||||
id = None
|
|
||||||
|
|
||||||
__random_seed__ = 10
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,93 +1,81 @@
|
||||||
from typing import Optional
|
from typing import Dict, Optional
|
||||||
|
|
||||||
from fastapi import Depends, HTTPException
|
|
||||||
import httpx
|
import httpx
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from sqlmodel import Field, Relationship, SQLModel, Session, select
|
from sqlmodel import Field, SQLModel
|
||||||
|
|
||||||
from learn_sql_model.config import config, get_config
|
from learn_sql_model.config import config
|
||||||
from learn_sql_model.models.pet import Pet
|
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}Base(SQLModel, table=False):
|
class {{ modelname }}Base(SQLModel, table=False):
|
||||||
|
# put model attributes here
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}({{modelname}}Base, table=True):
|
class {{ modelname }}({{ modelname }}Base, table=True):
|
||||||
id: Optional[int] = Field(default=None, primary_key=True)
|
id: int = Field(default=None, primary_key=True)
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}Create({{modelname}}Base):
|
class {{ modelname }}Create({{ modelname }}Base):
|
||||||
...
|
...
|
||||||
|
|
||||||
def post(self) -> {{modelname}}:
|
def post(self) -> {{ modelname }}:
|
||||||
r = httpx.post(
|
r = httpx.post(
|
||||||
f"{config.api_client.url}/{{modelname.lower()}}/",
|
f"{config.api_client.url}/{{ modelname }}/",
|
||||||
json=self.dict(),
|
json=self.dict(),
|
||||||
)
|
)
|
||||||
if r.status_code != 200:
|
if r.status_code != 200:
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
|
|
||||||
|
return {{ modelname }}.parse_obj(r.json())
|
||||||
|
|
||||||
class {{modelname}}Read({{modelname}}Base):
|
|
||||||
|
class {{ modelname }}Read({{ modelname }}Base):
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get(
|
def get(
|
||||||
cls,
|
cls,
|
||||||
id: int,
|
id: int,
|
||||||
) -> {{modelname}}:
|
) -> {{ modelname }}:
|
||||||
with config.database.session as session:
|
r = httpx.get(f"{config.api_client.url}/{{ modelname }}/{id}")
|
||||||
{{modelname.lower()}} = session.get({{modelname}}, id)
|
if r.status_code != 200:
|
||||||
if not {{modelname.lower()}}:
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
raise HTTPException(status_code=404, detail="{{modelname}} not found")
|
return {{ modelname }}Read.parse_obj(r.json())
|
||||||
return {{modelname.lower()}}
|
|
||||||
|
|
||||||
|
class {{ modelname }}s(BaseModel):
|
||||||
|
__root__: list[{{ modelname }}]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def list(
|
def list(
|
||||||
self,
|
self,
|
||||||
where=None,
|
) -> {{ modelname }}:
|
||||||
offset=0,
|
r = httpx.get(f"{config.api_client.url}/{{ modelname }}s/")
|
||||||
limit=None,
|
if r.status_code != 200:
|
||||||
session: Session = None,
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
) -> {{modelname}}:
|
return {{ modelname }}s.parse_obj({"__root__": r.json()})
|
||||||
|
|
||||||
if session is None:
|
|
||||||
session = get_config().database.session
|
|
||||||
|
|
||||||
statement = select({{modelname}})
|
|
||||||
if where != "None" and where is not None:
|
|
||||||
from sqlmodel import text
|
|
||||||
|
|
||||||
statement = statement.where(text(where))
|
|
||||||
statement = statement.offset(offset).limit(limit)
|
|
||||||
{{modelname.lower()}}es = session.exec(statement).all()
|
|
||||||
return {{modelname.lower()}}es
|
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}Update(SQLModel):
|
class {{ modelname }}Update(SQLModel):
|
||||||
# id is required to update the {{modelname.lower()}}
|
# id is required to update the {{ modelname }}
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
# all other fields, must match the model, but with Optional default None
|
def update(self) -> {{ modelname }}:
|
||||||
|
|
||||||
pet_id: Optional[int] = Field(default=None, foreign_key="pet.id")
|
|
||||||
pet: Optional[Pet] = Relationship(back_populates="{{modelname.lower()}}")
|
|
||||||
|
|
||||||
def update(self) -> {{modelname}}:
|
|
||||||
r = httpx.patch(
|
r = httpx.patch(
|
||||||
f"{config.api_client.url}/{{modelname.lower()}}/",
|
f"{config.api_client.url}/{{ modelname }}/",
|
||||||
json=self.dict(),
|
json=self.dict(),
|
||||||
)
|
)
|
||||||
if r.status_code != 200:
|
if r.status_code != 200:
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
|
|
||||||
|
|
||||||
class {{modelname}}Delete(BaseModel):
|
class {{ modelname }}Delete(BaseModel):
|
||||||
id: int
|
id: int
|
||||||
|
|
||||||
def delete(self) -> {{modelname}}:
|
@classmethod
|
||||||
|
def delete(self, id: int) -> Dict[str, bool]:
|
||||||
r = httpx.delete(
|
r = httpx.delete(
|
||||||
f"{config.api_client.url}/{{modelname.lower()}}/{self.id}",
|
f"{config.api_client.url}/{{ modelname }}/{id}",
|
||||||
)
|
)
|
||||||
if r.status_code != 200:
|
if r.status_code != 200:
|
||||||
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
raise RuntimeError(f"{r.status_code}:\n {r.text}")
|
||||||
|
|
|
||||||