commit 4be274d9e2dc0914aae68d1db6ddb9ab61cd55e7 Author: Waylon S. Walker Date: Fri May 19 08:35:16 2023 -0500 init diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e1a3186 --- /dev/null +++ b/.gitignore @@ -0,0 +1,964 @@ +# Created by https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode +# Edit at https://www.toptal.com/developers/gitignore?templates=vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode + +### Data ### +*.csv +*.dat +*.efx +*.gbr +*.key +*.pps +*.ppt +*.pptx +*.sdf +*.tax2010 +*.vcf +*.xml + +### Emacs ### +# -*- mode: gitignore; -*- +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +*.elc +auto-save-list +tramp +.\#* + +# Org-mode +.org-id-locations +*_archive + +# flymake-mode +*_flymake.* + +# eshell files +/eshell/history +/eshell/lastdir + +# elpa packages +/elpa/ + +# reftex files +*.rel + +# AUCTeX auto folder +/auto/ + +# cask packages +.cask/ +dist/ + +# Flycheck +flycheck_*.el + +# server auth directory +/server/ + +# projectiles files +.projectile + +# directory configuration +.dir-locals.el + +# network security +/network-security.data + + +### Executable ### +*.app +*.bat +*.cgi +*.com +*.exe +*.gadget +*.jar +*.pif +*.vb +*.wsf + +### Node ### +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# 
Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next +out + +# Nuxt.js build / generate output +.nuxt +dist + +# Gatsby files +.cache/ +# Comment in the public line in if your project uses Gatsby and not Next.js +# https://nextjs.org/blog/next-9-1#public-directory-support +# public + +# vuepress build output +.vuepress/dist + +# vuepress v2.x temp and cache directory +.temp + +# Docusaurus cache and generated files +.docusaurus + +# Serverless directories +.serverless/ + +# FuseBox cache +.fusebox/ + +# DynamoDB Local files +.dynamodb/ + +# TernJS port file +.tern-port + +# Stores VSCode versions used for testing VSCode extensions +.vscode-test + +# yarn v2 +.yarn/cache +.yarn/unplugged +.yarn/build-state.yml +.yarn/install-state.gz +.pnp.* + +### Node Patch ### +# Serverless Webpack directories +.webpack/ + +# Optional stylelint cache + +# SvelteKit build / generate output +.svelte-kit + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries 
+.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See 
https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. 
+#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +### Python Patch ### +# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration +poetry.toml + +# ruff +.ruff_cache/ + +# LSP config files +pyrightconfig.json + +### SublimeText ### +# Cache files for Sublime Text +*.tmlanguage.cache +*.tmPreferences.cache +*.stTheme.cache + +# Workspace files are user-specific +*.sublime-workspace + +# Project files should be checked into the repository, unless a significant +# proportion of contributors will probably not be using Sublime Text +# *.sublime-project + +# SFTP configuration file +sftp-config.json +sftp-config-alt*.json + +# Package control specific files +Package Control.last-run +Package Control.ca-list +Package Control.ca-bundle +Package Control.system-ca-bundle +Package Control.cache/ +Package Control.ca-certs/ +Package Control.merged-ca-bundle +Package Control.user-ca-bundle +oscrypto-ca-bundle.crt +bh_unicode_properties.cache + +# Sublime-github package stores a github token in this file +# https://packagecontrol.io/packages/sublime-github +GitHub.sublime-settings + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### VisualStudio ### +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json 
+coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +*.code-workspace + +# Local History for Visual Studio Code + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml + +### VisualStudio Patch ### +# Additional files built by Visual Studio + +# End of 
https://www.toptal.com/developers/gitignore/api/vim,node,data,emacs,python,pycharm,executable,sublimetext,visualstudio,visualstudiocode diff --git a/.learn-sql-model-copier-answers.yml b/.learn-sql-model-copier-answers.yml new file mode 100644 index 0000000..59fe3aa --- /dev/null +++ b/.learn-sql-model-copier-answers.yml @@ -0,0 +1,11 @@ +# Changes here will be overwritten by Copier; NEVER EDIT MANUALLY +_commit: 992464f +_src_path: /home/waylon/git/pytool-template +author_github: waylonwalker +author_name: Waylon Walker +description: learning sql model +package_name: learn-sql-model +package_title: Learn Sql Model +package_title_input: Learn SQL Model +python_package: learn_sql_model + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..eb014a6 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +## 0.0.0 + +init diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..9cde24d --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) 2023-present Waylon S. Walker + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..969cb55 --- /dev/null +++ b/README.md @@ -0,0 +1,66 @@ +# Learn SQL Model + +learning sql model + +## Development + +```console +pip install hatch +hatch shell +``` + +## Start the Server + +```console +learn-sql-model api run +``` + +## Use the cli to manage Heros + +```console +learn-sql-model hero create + +# show them +learn-sql-model hero get + +# show one +learn-sql-model hero get --id 0 +``` + +## Use python to manage Heros + +```python +from learn_sql_model.models import Hero +# create a hero +bruce = Hero(name="Batman", secret_name="Bruce Wayne") +bruce.post() +# list all heros +Hero.get() +# get one hero +Hero.get(0) +``` + +## Use api to create hero + +```console +# create a curl POST request to create hero +curl -X POST \ + -H "Content-Type: application/json" \ + -d '{"name": "Batman", "secret_name": "Bruce Wayne"}' \ + http://localhost:5000/heros + +# list all heros +curl http://localhost:5000/heros +curl -X 'GET' \ + 'http://localhost:5000/heros/' \ + -H 'accept: application/json' + +# get one hero +curl -X 'GET' \ + 'http://localhost:5000/hero/9' \ + -H 'accept: application/json' +``` + +## License + +`learn-sql-model` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license. diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..77a55b7 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,110 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. 
+# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = sqlite:///database.db + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/database.db b/database.db new file mode 100644 index 0000000..7934602 Binary files /dev/null and b/database.db differ diff --git a/learn_sql_model/__about__.py b/learn_sql_model/__about__.py new file mode 100644 index 0000000..90da1db --- /dev/null +++ b/learn_sql_model/__about__.py @@ -0,0 +1,4 @@ +# SPDX-FileCopyrightText: 2023-present Waylon S. 
Walker +# +# SPDX-License-Identifier: MIT +__version__ = "0.0.0.dev1" diff --git a/learn_sql_model/__init__.py b/learn_sql_model/__init__.py new file mode 100644 index 0000000..2a9f0e4 --- /dev/null +++ b/learn_sql_model/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: 2023-present Waylon S. Walker +# +# SPDX-License-Identifier: MIT diff --git a/learn_sql_model/__main__.py b/learn_sql_model/__main__.py new file mode 100644 index 0000000..04b645e --- /dev/null +++ b/learn_sql_model/__main__.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: 2023-present Waylon S. Walker +# +# SPDX-License-Identifier: MIT +import sys + +if __name__ == '__main__': + from .cli import {{python_package}} + + sys.exit({{python_package}}()) diff --git a/learn_sql_model/api.py b/learn_sql_model/api.py new file mode 100644 index 0000000..4ea7eb1 --- /dev/null +++ b/learn_sql_model/api.py @@ -0,0 +1,64 @@ +from typing import Union + +from fastapi import FastAPI + +import httpx +from learn_sql_model.console import console +from learn_sql_model.models import Hero, Pet + +models = Union[Hero, Pet] + +# from learn_sql_model.config import config +# from learn_sql_model.models import Hero + +app = FastAPI() + + +app.post("/heroes/") + + +def post(self: models) -> None: + + try: + httpx.post("http://localhost:5000/heroes/", json=self.dict()) + except httpx.ConnectError: + console.log("local failover") + post_local(self) + + +def post_local(self: models) -> None: + from learn_sql_model.config import config + + with config.session as session: + session.add(self) + session.commit() + + +def get(self: models, instance: models = None) -> list[models]: + "read all the heros" + from learn_sql_model.config import config + + with config.session as session: + if instance is None: + heroes = session.exec(select(self)).all() + return heroes + else: + hero = session.exec(select(self).where(self.id == instance.id)).all().one() + return hero + + +@app.post("/heroes/") +def create_hero(hero: Hero): + post(hero) + 
+ +@app.get("/heroes/") +def read_heroes() -> list[Hero]: + "read all the heros" + return get(Hero) + + +@app.get("/hero/") +def read_heroes(hero: Hero) -> list[Hero]: + "read all the heros" + return get(Hero, hero) diff --git a/learn_sql_model/api/__init__.py b/learn_sql_model/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/learn_sql_model/api/app.py b/learn_sql_model/api/app.py new file mode 100644 index 0000000..8b64875 --- /dev/null +++ b/learn_sql_model/api/app.py @@ -0,0 +1,9 @@ +from fastapi import FastAPI + +from learn_sql_model.api.hero import hero_router +from learn_sql_model.api.user import user_router + +app = FastAPI() + +app.include_router(hero_router) +app.include_router(user_router) diff --git a/learn_sql_model/api/hero.py b/learn_sql_model/api/hero.py new file mode 100644 index 0000000..2f05bfb --- /dev/null +++ b/learn_sql_model/api/hero.py @@ -0,0 +1,31 @@ +from typing import Annotated + +from fastapi import APIRouter, Depends + +from learn_sql_model.api.user import oauth2_scheme +from learn_sql_model.models import Hero + +hero_router = APIRouter() + + +@hero_router.get("/items/") +async def read_items(token: Annotated[str, Depends(oauth2_scheme)]): + return {"token": token} + + +@hero_router.get("/hero/{id}") +def get_hero(id: int) -> Hero: + "get one hero" + return Hero.get(item_id=id) + + +@hero_router.post("/hero/") +def post_hero(hero: Hero) -> Hero: + "read all the heros" + return hero.post() + + +@hero_router.get("/heros/") +def get_heros() -> list[Hero]: + "get all heros" + return Hero.get() diff --git a/learn_sql_model/api/user.py b/learn_sql_model/api/user.py new file mode 100644 index 0000000..0ede6db --- /dev/null +++ b/learn_sql_model/api/user.py @@ -0,0 +1,146 @@ +from datetime import datetime, timedelta +from typing import Annotated + +from fastapi import APIRouter, Depends, HTTPException, status +from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm +from jose import JWTError, jwt 
+from passlib.context import CryptContext +from pydantic import BaseModel + +# to get a string like this run: +# openssl rand -hex 32 +SECRET_KEY = "09d25e094faa6ca2556c818166b7a9563b93f7099f6f0f4caa6cf63b88e8d3e7" +ALGORITHM = "HS256" +ACCESS_TOKEN_EXPIRE_MINUTES = 30 + +user_router = APIRouter() + + +fake_users_db = { + "johndoe": { + "username": "johndoe", + "full_name": "John Doe", + "email": "johndoe@example.com", + "hashed_password": "$2b$12$EixZaYVK1fsbw1ZfbX3OXePaWxn96p36WQoeG6Lruj3vjPGga31lW", + "disabled": False, + } +} + + +class Token(BaseModel): + access_token: str + token_type: str + + +class TokenData(BaseModel): + username: str | None = None + + +class User(BaseModel): + username: str + email: str | None = None + full_name: str | None = None + disabled: bool | None = None + + +class UserInDB(User): + hashed_password: str + + +pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") + +oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token") + + +def verify_password(plain_password, hashed_password): + return pwd_context.verify(plain_password, hashed_password) + + +def get_password_hash(password): + return pwd_context.hash(password) + + +def get_user(db, username: str): + if username in db: + user_dict = db[username] + return UserInDB(**user_dict) + + +def authenticate_user(fake_db, username: str, password: str): + user = get_user(fake_db, username) + if not user: + return False + if not verify_password(password, user.hashed_password): + return False + return user + + +def create_access_token(data: dict, expires_delta: timedelta | None = None): + to_encode = data.copy() + if expires_delta: + expire = datetime.utcnow() + expires_delta + else: + expire = datetime.utcnow() + timedelta(minutes=15) + to_encode.update({"exp": expire}) + encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM) + return encoded_jwt + + +async def get_current_user(token: Annotated[str, Depends(oauth2_scheme)]): + credentials_exception = HTTPException( + 
status_code=status.HTTP_401_UNAUTHORIZED, + detail="Could not validate credentials", + headers={"WWW-Authenticate": "Bearer"}, + ) + try: + payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM]) + username: str = payload.get("sub") + if username is None: + raise credentials_exception + token_data = TokenData(username=username) + except JWTError: + raise credentials_exception + user = get_user(fake_users_db, username=token_data.username) + if user is None: + raise credentials_exception + return user + + +async def get_current_active_user( + current_user: Annotated[User, Depends(get_current_user)] +): + if current_user.disabled: + raise HTTPException(status_code=400, detail="Inactive user") + return current_user + + +@user_router.post("/token", response_model=Token) +async def login_for_access_token( + form_data: Annotated[OAuth2PasswordRequestForm, Depends()] +): + user = authenticate_user(fake_users_db, form_data.username, form_data.password) + if not user: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect username or password", + headers={"WWW-Authenticate": "Bearer"}, + ) + access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES) + access_token = create_access_token( + data={"sub": user.username}, expires_delta=access_token_expires + ) + return {"access_token": access_token, "token_type": "bearer"} + + +@user_router.get("/users/me/", response_model=User) +async def read_users_me( + current_user: Annotated[User, Depends(get_current_active_user)] +): + return current_user + + +@user_router.get("/users/me/items/") +async def read_own_items( + current_user: Annotated[User, Depends(get_current_active_user)] +): + return [{"item_id": "Foo", "owner": current_user.username}] diff --git a/learn_sql_model/cli/__init__.py b/learn_sql_model/cli/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/learn_sql_model/cli/api.py b/learn_sql_model/cli/api.py new file mode 100644 index 0000000..1ca18c2 --- 
@api_app.command()
def run(
    verbose: bool = typer.Option(
        False,
        callback=verbose_callback,
        help="show the log messages",
    ),
    # Generalized: the port was hard-coded to 5000; expose it as an option
    # with the same default so existing invocations are unchanged.
    port: int = typer.Option(
        5000,
        help="port to bind the API server to",
    ),
):
    """Serve the FastAPI app with uvicorn (blocking until interrupted)."""
    uvicorn.run("learn_sql_model.api.app:app", port=port, log_level="info")
def verbose_callback(value: bool) -> None:
    """Typer option callback for ``--verbose``.

    When the flag is truthy, un-mute the shared rich console so log
    messages become visible; otherwise leave the console untouched.
    """
    if not value:
        return
    console.quiet = False
@model_app.command()
def show(
    verbose: bool = typer.Option(
        False,
        callback=verbose_callback,
        help="show the log messages",
    ),
):
    """Print every Hero row in the database to the terminal."""
    # BUG FIX: this module imports only SQLModel and Session from sqlmodel,
    # so the original `select(Hero)` raised NameError at call time.
    from sqlmodel import select

    SQLModel.metadata.create_all(config.engine)
    with Session(config.engine) as session:
        heros = session.exec(select(Hero)).all()
        Console().print(heros)
@model_app.command()
def populate(
    verbose: bool = typer.Option(
        False,
        callback=verbose_callback,
        help="show the log messages",
    ),
):
    """Seed the database with a few example Hero and Pet rows."""
    # BUG FIX: the Hero<->Pet relationship is commented out in
    # learn_sql_model.models, so the original `pets=[...]` / `pet=...`
    # keyword arguments were silently dropped and no Pet rows were ever
    # persisted. Persist the pets explicitly instead.
    heroes = [
        Hero(name="Deadpond", secret_name="Dive Wilson"),
        Hero(name="Spider-Boy", secret_name="Pedro Parqueador"),
        Hero(name="Rusty-Man", secret_name="Tommy Sharp", age=48),
    ]
    pets = [
        Pet(name="Deadpond-Dog"),
        Pet(name="Spider-Boy-Dog"),
        Pet(name="Rusty-Man-Dog"),
    ]

    SQLModel.metadata.create_all(config.engine)

    with Session(config.engine) as session:
        session.add_all([*heroes, *pets])
        session.commit()
class FastModel(SQLModel):
    """SQLModel base adding tiny active-record style persistence helpers.

    Subclasses (``Hero``, ``Pet``) gain ``instance.post()`` to save
    themselves and ``Model.get()`` to fetch rows, both using the session
    supplied by the application config.
    """

    def post(self) -> "FastModel":
        """Insert this instance into the database and commit.

        Returns:
            The persisted instance, refreshed so database-generated fields
            (e.g. the primary key) are populated. BUG FIX: the original
            returned ``None``, which broke callers such as the hero CLI's
            ``hero = hero.post()`` (it then printed ``None``).
        """
        # Imported lazily to avoid the config <-> models circular import.
        from learn_sql_model.config import config

        with config.session as session:
            session.add(self)
            session.commit()
            session.refresh(self)
        return self

    @classmethod
    def get(cls, item_id: Optional[int] = None):
        """Fetch rows of this model.

        Args:
            item_id: primary key to look up; when ``None`` every row is
                returned.

        Returns:
            A single instance for a specific ``item_id``, otherwise a list
            of all instances.
        """
        # BUG FIX: the classmethod's first parameter was named ``self``,
        # which is misleading — it receives the class. Also ``item_id`` is
        # optional and is now annotated as such.
        from learn_sql_model.config import config

        with config.session as session:
            if item_id is None:
                return session.exec(select(cls)).all()
            return session.exec(select(cls).where(cls.id == item_id)).one()
foreign_key="hero.id") +# hero: Optional[Hero] = Relationship(back_populates="pets") diff --git a/learn_sql_model/standard_config.py b/learn_sql_model/standard_config.py new file mode 100644 index 0000000..0f99499 --- /dev/null +++ b/learn_sql_model/standard_config.py @@ -0,0 +1,239 @@ +"""Standard Config. +A module to load tooling config from a users project space. + +Inspired from frustrations that some tools have a tool.ini, .tool.ini, +setup.cfg, or pyproject.toml. Some allow for global configs, some don't. Some +properly follow the users home directory, others end up in a weird temp +directory. Windows home directory is only more confusing. Some will even +respect the users `$XDG_HOME` directory. + + +This file is for any project that can be configured in plain text such as `ini` +or `toml` and not requiring a .py file. Just name your tool and let users put +config where it makes sense to them, no need to figure out resolution order. + +## Usage: + +``` python +from standard_config import load + +# Retrieve any overrides from the user +overrides = {'setting': True} +config = load('my_tool', overrides) +``` + +## Resolution Order + +* First global file with a tool key +* First local file with a tool key +* Environment variables prefixed with `TOOL` +* Overrides + +### Tool Specific Ini files + +Ini file formats must include a `` key. 
# Loose alias; a precise spec would be List[Dict[str, Union[Path, str, List[str]]]]
path_spec_type = List


def _get_global_path_specs(tool: str) -> path_spec_type:
    """
    Generate a list of standard pathspecs for global config files.

    Args:
        tool (str): name of the tool to configure
    """
    # Respect $XDG_HOME when set, falling back to the user's home directory.
    home = Path(os.environ.get("XDG_HOME", Path.home()))

    # Candidate global config locations, in resolution order.
    candidates = [
        home / f"{tool}.ini",
        home / f".{tool}",
        home / f".{tool}.ini",
        home / ".config" / f"{tool}.ini",
        home / ".config" / f".{tool}",
        home / ".config" / f".{tool}.ini",
    ]
    return [
        {"path_specs": candidate, "ac_parser": "ini", "keys": [tool]}
        for candidate in candidates
    ]
def _load_env(tool: str) -> Dict:
    """Load config from environment variables.

    Variables named ``<TOOL>...`` (upper-cased tool name) are collected;
    the prefix plus any joining ``_``/``-`` characters are removed and the
    remainder lower-cased to form the config key.

    Args:
        tool (str): name of the tool to configure
    """
    prefix = tool.upper()
    config = {}
    for var, value in os.environ.items():
        if not var.startswith(prefix):
            continue
        # BUG FIX: ``str.strip(tool.lower())`` strips a *character set*,
        # not the prefix string, so keys sharing letters with the tool name
        # were mangled (e.g. MYTOOL_DATABASE_URL became "database_ur").
        # Slice the prefix off instead.
        config[var[len(prefix):].strip("_-").lower()] = value
    return config


def load(
    tool: str,
    project_home: Union[Path, str] = ".",
    overrides: Union[Dict, None] = None,
) -> Dict:
    """Load tool config from standard config files.

    Resolution Order

    * First global file with a tool key
    * First local file with a tool key
    * Environment variables prefixed with `TOOL`
    * Overrides

    Args:
        tool (str): name of the tool to configure
        project_home: directory searched for local config files
        overrides: highest-precedence values supplied by the caller
            (BUG FIX: was a mutable ``{}`` default, a shared-state hazard)

    Returns:
        The merged configuration dict; later sources win.
    """
    global_config = _load_files(_get_global_path_specs(tool))
    local_config = _load_files(_get_local_path_specs(tool, project_home))
    env_config = _load_env(tool)
    # Merge order encodes precedence: global < local < env < overrides.
    return {**global_config, **local_config, **env_config, **(overrides or {})}
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

from learn_sql_model.models import Hero, Pet
from sqlmodel import SQLModel

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# BUG FIX: the original configured logging a second time *without* the
# None guard, which duplicated handlers and crashed when alembic is driven
# programmatically with no ini file. Configure exactly once, guarded.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Hero and Pet must be imported so their tables are registered on
# SQLModel.metadata before autogenerate inspects it; __all__ keeps the
# imports from being flagged/stripped as unused.
__all__ = ["Hero", "Pet"]

# Target metadata for 'autogenerate' support.
target_metadata = SQLModel.metadata


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine, though
    an Engine is acceptable here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/20da26039edf_name_the_pet.py b/migrations/versions/20da26039edf_name_the_pet.py new file mode 100644 index 0000000..168f8ce --- /dev/null +++ b/migrations/versions/20da26039edf_name_the_pet.py @@ -0,0 +1,31 @@ +"""name the pet + +Revision ID: 20da26039edf +Revises: 44fbe5f1a689 +Create Date: 2023-05-18 11:18:35.748333 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision = '20da26039edf' +down_revision = '44fbe5f1a689' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('hero', 'new_attribute') + op.add_column('pet', sa.Column('name', sqlmodel.sql.sqltypes.AutoString(), nullable=False)) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('pet', 'name') + op.add_column('hero', sa.Column('new_attribute', sa.VARCHAR(), nullable=True)) + # ### end Alembic commands ### diff --git a/migrations/versions/29ab77485a4d_new_attribute.py b/migrations/versions/29ab77485a4d_new_attribute.py new file mode 100644 index 0000000..fda8f79 --- /dev/null +++ b/migrations/versions/29ab77485a4d_new_attribute.py @@ -0,0 +1,29 @@ +"""New Attribute + +Revision ID: 29ab77485a4d +Revises: fe841f5746e5 +Create Date: 2023-05-18 11:09:53.112601 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. 
+revision = '29ab77485a4d' +down_revision = 'fe841f5746e5' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('hero', sa.Column('new_attribute', sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('hero', 'new_attribute') + # ### end Alembic commands ### diff --git a/migrations/versions/44fbe5f1a689_new.py b/migrations/versions/44fbe5f1a689_new.py new file mode 100644 index 0000000..fc337dd --- /dev/null +++ b/migrations/versions/44fbe5f1a689_new.py @@ -0,0 +1,29 @@ +"""new + +Revision ID: 44fbe5f1a689 +Revises: 29ab77485a4d +Create Date: 2023-05-18 11:17:08.071058 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision = '44fbe5f1a689' +down_revision = '29ab77485a4d' +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/migrations/versions/fe841f5746e5_initial_migration.py b/migrations/versions/fe841f5746e5_initial_migration.py new file mode 100644 index 0000000..4323ae9 --- /dev/null +++ b/migrations/versions/fe841f5746e5_initial_migration.py @@ -0,0 +1,41 @@ +"""Initial Migration + +Revision ID: fe841f5746e5 +Revises: +Create Date: 2023-05-18 11:07:41.700265 + +""" +from alembic import op +import sqlalchemy as sa +import sqlmodel + +# revision identifiers, used by Alembic. +revision = "fe841f5746e5" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "hero", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("secret_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("age", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "pet", + sa.Column("id", sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("pet") + op.drop_table("hero") + # ### end Alembic commands ### diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..6393bc1 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,167 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "learn-sql-model" +description = 'learning sql model' +readme = "README.md" +requires-python = ">=3.7" +license = "MIT" +keywords = [] +authors = [ + { name = "Waylon S. 
Walker", email = "waylon@waylonwalker.com" }, +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] +dependencies = [ + "rich", + "textual", + "typer", + "anyconfig", + "sqlmodel", + "fastapi", + "uvicorn[standard]", + "httpx", + "python-jose[cryptography]", + "passlib[bcrypt]", + "python-multipart", +] + +dynamic = ["version"] + +[project.urls] +Documentation = "https://github.com/waylonwalker/learn-sql-model#readme" +Issues = "https://github.com/waylonwalker/learn-sql-model/issues" +Source = "https://github.com/waylonwalker/learn-sql-model" +Changelog = "https://github.com/waylonwalker/learn-sql-model" + +[project.scripts] +learn-sql-model = "learn_sql_model.cli.app:app" + +[tool.hatch.version] +path = "learn_sql_model/__about__.py" + +[tool.hatch.envs.default] +dependencies = [ + "ipython", + "mypy", + "pyflyby", + "pytest", + "pytest-cov", + "pytest-mock", + "pytest-rich", + "ruff", + "black", +] +[tool.hatch.envs.default.scripts] +test = "coverage run -m pytest" +cov = "coverage-rich" +lint = "ruff learn_sql_model" +format = "black learn_sql_model" +format-check = "black --check learn_sql_model" +build-docs = "markata build" +lint-test = [ + "lint", + "format-check", + "test", + "cov", +] +test-lint = "lint-test" + +[[tool.hatch.envs.test.matrix]] +python = ["37", "38", "39", "310", "311"] + +[tool.coverage.run] +branch = true +parallel = true +omit = [ + "learn_sql_model/__about__.py", +] + +[tool.coverage.report] +exclude_lines = [ + "no cov", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", +] + +[tool.pytest.ini_options] +addopts = "-ra -q --rich" +asyncio_mode = "auto" 
from learn_sql_model.api.app import app
from learn_sql_model.models import Hero


@app.post("/hero/")
def create_hero(hero: Hero) -> Hero:
    """Persist a new hero and echo it back."""
    # BUG FIX: the original called an undefined module-level `post(hero)`;
    # persistence is exposed as a method on the model instance.
    hero.post()
    return hero


@app.get("/hero/")
def read_heroes() -> list[Hero]:
    "read all the heros"
    # BUG FIX: the original GET endpoint required a Hero request body and
    # called `hero.post()` (a write) — returning its None result. Reading
    # must delegate to the classmethod that returns every row.
    return Hero.get()


@app.get("/heros/")
def read_heros() -> list[Hero]:
    "read all the heros"
    return Hero.get()
b/tests/__init__.py new file mode 100644 index 0000000..2a9f0e4 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,3 @@ +# SPDX-FileCopyrightText: 2023-present Waylon S. Walker +# +# SPDX-License-Identifier: MIT