diff --git a/.gitignore b/.gitignore
old mode 100644
new mode 100755
diff --git a/copy-db.sh b/copy-db.sh
index 16d04df..6fe1523 100755
--- a/copy-db.sh
+++ b/copy-db.sh
@@ -1,5 +1,6 @@
#!/bin/bash
-scp ~/Documents/Code/lazywiki/bflw/lazy_wiki.sqlite3 vps:
+scp ~/Documents/Code/lazywiki/lazy_wiki.sqlite3 vps:
+# TODO: also copy the templates and start.sh
ssh vps docker cp lazy_wiki.sqlite3 lazywiki:/db/lazy_wiki.sqlite3
ssh vps docker restart lazywiki
diff --git a/lazywiki/build/lib/lazy_wiki/__main__.py b/lazywiki/build/lib/lazy_wiki/__main__.py
old mode 100755
new mode 100644
index f42c908..985e47b
--- a/lazywiki/build/lib/lazy_wiki/__main__.py
+++ b/lazywiki/build/lib/lazy_wiki/__main__.py
@@ -1,9 +1,9 @@
from . import web
+import sys
def main():
- web.app.run(host = '0.0.0.0', port = 8080)
+ web.app.run(host = '0.0.0.0', port = int(sys.argv[2]))
if __name__ == '__main__':
- print("444444")
main()
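The hunk above changes main() to read the listen port from sys.argv[2]; start.sh (added later in this diff) supplies the matching positional arguments, e.g. `lazywiki ~/Documents/Code/lazywiki/ 8080`. A minimal sketch of that argument handling, with a hypothetical fallback for a missing or malformed port (not part of the committed code):

    import sys

    def parse_port(argv, default=8080):
        # argv is expected to look like ['lazywiki', '<data-dir>', '8080'];
        # fall back to a default instead of raising IndexError/ValueError
        # when the port argument is missing or malformed (hypothetical helper).
        try:
            return int(argv[2])
        except (IndexError, ValueError):
            return default

    if __name__ == '__main__':
        print(parse_port(sys.argv))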
diff --git a/lazywiki/build/lib/lazy_wiki/db.py b/lazywiki/build/lib/lazy_wiki/db.py
old mode 100755
new mode 100644
index c749f07..7d2a473
--- a/lazywiki/build/lib/lazy_wiki/db.py
+++ b/lazywiki/build/lib/lazy_wiki/db.py
@@ -3,7 +3,6 @@ from sqlalchemy.sql import select, update, insert
from sqlalchemy.sql.expression import literal
from .schema import metadata, articles
from markdown import markdown
-from string import whitespace, punctuation
import re
import os
import sys
@@ -58,7 +57,7 @@ def select_formatted_article(keyword):
while raw:
# if the remaining raw content starts with a keyword
word = select_longest_keyword_string_starts_with(raw)
- if word and raw[len(word['title'])] in punctuation + whitespace:
+ if word:
# use original capitalization for hyperlink text
original = raw[:len(word['title'])]
# create a Markdown hyperlink
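The hunk above drops the boundary check `raw[len(word['title'])] in punctuation + whitespace` (and the matching `string` import), so a matched keyword is now linked regardless of the character that follows it. One edge case the old expression had, whether or not it motivated the change: when the keyword reaches the very end of the remaining text, the index falls one past the end of `raw` and raises IndexError. A small illustration with made-up values:

    from string import punctuation, whitespace

    raw = "apple"        # remaining article text
    title = "apple"      # longest keyword matching the start of raw

    # raw[len(title)] is raw[5] on a 5-character string, so the old
    # condition raised IndexError for a keyword at the end of the text.
    try:
        boundary_ok = raw[len(title)] in punctuation + whitespace
    except IndexError:
        boundary_ok = False

    print(boundary_ok)   # False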
diff --git a/lazywiki/build/lib/lazy_wiki/views/edit.tpl b/lazywiki/build/lib/lazy_wiki/views/edit.tpl
index a0504f3..067e48e 100755
--- a/lazywiki/build/lib/lazy_wiki/views/edit.tpl
+++ b/lazywiki/build/lib/lazy_wiki/views/edit.tpl
@@ -11,11 +11,11 @@
diff --git a/lazywiki/build/lib/lazy_wiki/web.py b/lazywiki/build/lib/lazy_wiki/web.py
old mode 100755
new mode 100644
diff --git a/start.sh b/start.sh
new file mode 100755
index 0000000..9199934
--- /dev/null
+++ b/start.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+python3 -m venv venv/
+source venv/bin/activate
+pip install ./lazywiki
+lazywiki ~/Documents/Code/lazywiki/ 8080
diff --git a/venv/bin/Activate.ps1 b/venv/bin/Activate.ps1
new file mode 100644
index 0000000..b49d77b
--- /dev/null
+++ b/venv/bin/Activate.ps1
@@ -0,0 +1,247 @@
+<#
+.Synopsis
+Activate a Python virtual environment for the current PowerShell session.
+
+.Description
+Pushes the python executable for a virtual environment to the front of the
+$Env:PATH environment variable and sets the prompt to signify that you are
+in a Python virtual environment. Makes use of the command line switches as
+well as the `pyvenv.cfg` file values present in the virtual environment.
+
+.Parameter VenvDir
+Path to the directory that contains the virtual environment to activate. The
+default value for this is the parent of the directory that the Activate.ps1
+script is located within.
+
+.Parameter Prompt
+The prompt prefix to display when this virtual environment is activated. By
+default, this prompt is the name of the virtual environment folder (VenvDir)
+surrounded by parentheses and followed by a single space (ie. '(.venv) ').
+
+.Example
+Activate.ps1
+Activates the Python virtual environment that contains the Activate.ps1 script.
+
+.Example
+Activate.ps1 -Verbose
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and shows extra information about the activation as it executes.
+
+.Example
+Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
+Activates the Python virtual environment located in the specified location.
+
+.Example
+Activate.ps1 -Prompt "MyPython"
+Activates the Python virtual environment that contains the Activate.ps1 script,
+and prefixes the current prompt with the specified string (surrounded in
+parentheses) while the virtual environment is active.
+
+.Notes
+On Windows, it may be required to enable this Activate.ps1 script by setting the
+execution policy for the user. You can do this by issuing the following PowerShell
+command:
+
+PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
+
+For more information on Execution Policies:
+https://go.microsoft.com/fwlink/?LinkID=135170
+
+#>
+Param(
+ [Parameter(Mandatory = $false)]
+ [String]
+ $VenvDir,
+ [Parameter(Mandatory = $false)]
+ [String]
+ $Prompt
+)
+
+<# Function declarations --------------------------------------------------- #>
+
+<#
+.Synopsis
+Remove all shell session elements added by the Activate script, including the
+addition of the virtual environment's Python executable from the beginning of
+the PATH variable.
+
+.Parameter NonDestructive
+If present, do not remove this function from the global namespace for the
+session.
+
+#>
+function global:deactivate ([switch]$NonDestructive) {
+ # Revert to original values
+
+ # The prior prompt:
+ if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
+ Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
+ Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
+ }
+
+ # The prior PYTHONHOME:
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
+ Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
+ Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
+ }
+
+ # The prior PATH:
+ if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
+ Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
+ Remove-Item -Path Env:_OLD_VIRTUAL_PATH
+ }
+
+ # Just remove the VIRTUAL_ENV altogether:
+ if (Test-Path -Path Env:VIRTUAL_ENV) {
+ Remove-Item -Path env:VIRTUAL_ENV
+ }
+
+ # Just remove VIRTUAL_ENV_PROMPT altogether.
+ if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
+ Remove-Item -Path env:VIRTUAL_ENV_PROMPT
+ }
+
+ # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
+ if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
+ Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
+ }
+
+ # Leave deactivate function in the global namespace if requested:
+ if (-not $NonDestructive) {
+ Remove-Item -Path function:deactivate
+ }
+}
+
+<#
+.Description
+Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
+given folder, and returns them in a map.
+
+For each line in the pyvenv.cfg file, if that line can be parsed into exactly
+two strings separated by `=` (with any amount of whitespace surrounding the =)
+then it is considered a `key = value` line. The left hand string is the key,
+the right hand is the value.
+
+If the value starts with a `'` or a `"` then the first and last character is
+stripped from the value before being captured.
+
+.Parameter ConfigDir
+Path to the directory that contains the `pyvenv.cfg` file.
+#>
+function Get-PyVenvConfig(
+ [String]
+ $ConfigDir
+) {
+ Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
+
+ # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
+ $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
+
+ # An empty map will be returned if no config file is found.
+ $pyvenvConfig = @{ }
+
+ if ($pyvenvConfigPath) {
+
+ Write-Verbose "File exists, parse `key = value` lines"
+ $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
+
+ $pyvenvConfigContent | ForEach-Object {
+ $keyval = $PSItem -split "\s*=\s*", 2
+ if ($keyval[0] -and $keyval[1]) {
+ $val = $keyval[1]
+
+ # Remove extraneous quotations around a string value.
+ if ("'""".Contains($val.Substring(0, 1))) {
+ $val = $val.Substring(1, $val.Length - 2)
+ }
+
+ $pyvenvConfig[$keyval[0]] = $val
+ Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
+ }
+ }
+ }
+ return $pyvenvConfig
+}
+
+
+<# Begin Activate script --------------------------------------------------- #>
+
+# Determine the containing directory of this script
+$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
+$VenvExecDir = Get-Item -Path $VenvExecPath
+
+Write-Verbose "Activation script is located in path: '$VenvExecPath'"
+Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
+Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
+
+# Set values required in priority: CmdLine, ConfigFile, Default
+# First, get the location of the virtual environment, it might not be
+# VenvExecDir if specified on the command line.
+if ($VenvDir) {
+ Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
+}
+else {
+ Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
+ $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
+ Write-Verbose "VenvDir=$VenvDir"
+}
+
+# Next, read the `pyvenv.cfg` file to determine any required value such
+# as `prompt`.
+$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
+
+# Next, set the prompt from the command line, or the config file, or
+# just use the name of the virtual environment folder.
+if ($Prompt) {
+ Write-Verbose "Prompt specified as argument, using '$Prompt'"
+}
+else {
+ Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
+ if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
+ Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
+ $Prompt = $pyvenvCfg['prompt'];
+ }
+ else {
+ Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
+ Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
+ $Prompt = Split-Path -Path $venvDir -Leaf
+ }
+}
+
+Write-Verbose "Prompt = '$Prompt'"
+Write-Verbose "VenvDir='$VenvDir'"
+
+# Deactivate any currently active virtual environment, but leave the
+# deactivate function in place.
+deactivate -nondestructive
+
+# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
+# that there is an activated venv.
+$env:VIRTUAL_ENV = $VenvDir
+
+if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
+
+ Write-Verbose "Setting prompt to '$Prompt'"
+
+ # Set the prompt to include the env name
+ # Make sure _OLD_VIRTUAL_PROMPT is global
+ function global:_OLD_VIRTUAL_PROMPT { "" }
+ Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
+ New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
+
+ function global:prompt {
+ Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
+ _OLD_VIRTUAL_PROMPT
+ }
+ $env:VIRTUAL_ENV_PROMPT = $Prompt
+}
+
+# Clear PYTHONHOME
+if (Test-Path -Path Env:PYTHONHOME) {
+ Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
+ Remove-Item -Path Env:PYTHONHOME
+}
+
+# Add the venv to the PATH
+Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
+$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
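In the generated Activate.ps1 above, Get-PyVenvConfig reads pyvenv.cfg as `key = value` lines and strips one pair of surrounding quotes from a value. A rough Python rendering of that parsing, for illustration only (the sample file contents are made up):

    def parse_pyvenv_cfg(text):
        # Mirror of the parsing described in the Get-PyVenvConfig help text:
        # split each line on the first '=', trim whitespace, and strip one
        # pair of surrounding quotes from the value.
        config = {}
        for line in text.splitlines():
            key, sep, value = (part.strip() for part in line.partition('='))
            if sep and key and value:
                if value[0] in ('"', "'"):
                    value = value[1:-1]
                config[key] = value
        return config

    sample = "home = /usr/bin\ninclude-system-site-packages = false\nprompt = 'venv'\n"
    print(parse_pyvenv_cfg(sample))
    # {'home': '/usr/bin', 'include-system-site-packages': 'false', 'prompt': 'venv'}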
diff --git a/venv/bin/__pycache__/bottle.cpython-311.pyc b/venv/bin/__pycache__/bottle.cpython-311.pyc
new file mode 100644
index 0000000..a9eda63
Binary files /dev/null and b/venv/bin/__pycache__/bottle.cpython-311.pyc differ
diff --git a/venv/bin/activate b/venv/bin/activate
new file mode 100644
index 0000000..895f5fd
--- /dev/null
+++ b/venv/bin/activate
@@ -0,0 +1,69 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+ # reset old environment variables
+ if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
+ PATH="${_OLD_VIRTUAL_PATH:-}"
+ export PATH
+ unset _OLD_VIRTUAL_PATH
+ fi
+ if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
+ PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
+ export PYTHONHOME
+ unset _OLD_VIRTUAL_PYTHONHOME
+ fi
+
+ # This should detect bash and zsh, which have a hash command that must
+ # be called to get it to forget past commands. Without forgetting
+ # past commands the $PATH changes we made may not be respected
+ if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+ hash -r 2> /dev/null
+ fi
+
+ if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
+ PS1="${_OLD_VIRTUAL_PS1:-}"
+ export PS1
+ unset _OLD_VIRTUAL_PS1
+ fi
+
+ unset VIRTUAL_ENV
+ unset VIRTUAL_ENV_PROMPT
+ if [ ! "${1:-}" = "nondestructive" ] ; then
+ # Self destruct!
+ unset -f deactivate
+ fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/grid/Documents/Code/lazywiki/venv"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "${PYTHONHOME:-}" ] ; then
+ _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
+ unset PYTHONHOME
+fi
+
+if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
+ _OLD_VIRTUAL_PS1="${PS1:-}"
+ PS1="(venv) ${PS1:-}"
+ export PS1
+ VIRTUAL_ENV_PROMPT="(venv) "
+ export VIRTUAL_ENV_PROMPT
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands. Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
+ hash -r 2> /dev/null
+fi
diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh
new file mode 100644
index 0000000..a7ecfde
--- /dev/null
+++ b/venv/bin/activate.csh
@@ -0,0 +1,26 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/grid/Documents/Code/lazywiki/venv"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+ set prompt = "(venv) $prompt"
+ setenv VIRTUAL_ENV_PROMPT "(venv) "
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish
new file mode 100644
index 0000000..19df370
--- /dev/null
+++ b/venv/bin/activate.fish
@@ -0,0 +1,69 @@
+# This file must be used with "source <venv>/bin/activate.fish" *from fish*
+# (https://fishshell.com/); you cannot run it directly.
+
+function deactivate -d "Exit virtual environment and return to normal shell environment"
+ # reset old environment variables
+ if test -n "$_OLD_VIRTUAL_PATH"
+ set -gx PATH $_OLD_VIRTUAL_PATH
+ set -e _OLD_VIRTUAL_PATH
+ end
+ if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+ set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+ set -e _OLD_VIRTUAL_PYTHONHOME
+ end
+
+ if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+ set -e _OLD_FISH_PROMPT_OVERRIDE
+ # prevents error when using nested fish instances (Issue #93858)
+ if functions -q _old_fish_prompt
+ functions -e fish_prompt
+ functions -c _old_fish_prompt fish_prompt
+ functions -e _old_fish_prompt
+ end
+ end
+
+ set -e VIRTUAL_ENV
+ set -e VIRTUAL_ENV_PROMPT
+ if test "$argv[1]" != "nondestructive"
+ # Self-destruct!
+ functions -e deactivate
+ end
+end
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/grid/Documents/Code/lazywiki/venv"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# Unset PYTHONHOME if set.
+if set -q PYTHONHOME
+ set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+ set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+ # fish uses a function instead of an env var to generate the prompt.
+
+ # Save the current fish_prompt function as the function _old_fish_prompt.
+ functions -c fish_prompt _old_fish_prompt
+
+ # With the original prompt function renamed, we can override with our own.
+ function fish_prompt
+ # Save the return status of the last command.
+ set -l old_status $status
+
+ # Output the venv prompt; color taken from the blue of the Python logo.
+ printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)
+
+ # Restore the return status of the previous command.
+ echo "exit $old_status" | .
+ # Output the original/"old" prompt.
+ _old_fish_prompt
+ end
+
+ set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
+ set -gx VIRTUAL_ENV_PROMPT "(venv) "
+end
diff --git a/venv/bin/bottle.py b/venv/bin/bottle.py
new file mode 100755
index 0000000..8e67bd9
--- /dev/null
+++ b/venv/bin/bottle.py
@@ -0,0 +1,3809 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# -*- coding: utf-8 -*-
+"""
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottlepy.org/
+
+Copyright (c) 2016, Marcel Hellkamp.
+License: MIT (see LICENSE for details)
+"""
+
+from __future__ import with_statement
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.12.25'
+__license__ = 'MIT'
+
+# The gevent server adapter needs to patch some modules before they are imported
+# This is why we parse the commandline parameters here but handle them later
+if __name__ == '__main__':
+ from optparse import OptionParser
+ _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
+ _opt = _cmd_parser.add_option
+ _opt("--version", action="store_true", help="show version number.")
+ _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
+ _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
+ _opt("-p", "--plugin", action="append", help="install additional plugin/s.")
+ _opt("--debug", action="store_true", help="start server in debug mode.")
+ _opt("--reload", action="store_true", help="auto-reload on file changes.")
+ _cmd_options, _cmd_args = _cmd_parser.parse_args()
+ if _cmd_options.server and _cmd_options.server.startswith('gevent'):
+ import gevent.monkey; gevent.monkey.patch_all()
+
+import base64, cgi, email.utils, functools, hmac, itertools, mimetypes,\
+ os, re, subprocess, sys, tempfile, threading, time, warnings, hashlib
+
+from datetime import date as datedate, datetime, timedelta
+from tempfile import TemporaryFile
+from traceback import format_exc, print_exc
+from unicodedata import normalize
+
+
+try: from simplejson import dumps as json_dumps, loads as json_lds
+except ImportError: # pragma: no cover
+ try: from json import dumps as json_dumps, loads as json_lds
+ except ImportError:
+ try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
+ except ImportError:
+ def json_dumps(data):
+ raise ImportError("JSON support requires Python 2.6 or simplejson.")
+ json_lds = json_dumps
+
+
+
+# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
+# It ain't pretty but it works... Sorry for the mess.
+
+py = sys.version_info
+py3k = py >= (3, 0, 0)
+py25 = py < (2, 6, 0)
+py31 = (3, 1, 0) <= py < (3, 2, 0)
+
+# Workaround for the missing "as" keyword in py3k.
+def _e(): return sys.exc_info()[1]
+
+# Workaround for the "print is a keyword/function" Python 2/3 dilemma
+# and a fallback for mod_wsgi (resticts stdout/err attribute access)
+try:
+ _stdout, _stderr = sys.stdout.write, sys.stderr.write
+except IOError:
+ _stdout = lambda x: sys.stdout.write(x)
+ _stderr = lambda x: sys.stderr.write(x)
+
+# Lots of stdlib and builtin differences.
+if py3k:
+ import http.client as httplib
+ import _thread as thread
+ from urllib.parse import urljoin, SplitResult as UrlSplitResult
+ from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
+ urlunquote = functools.partial(urlunquote, encoding='latin1')
+ from http.cookies import SimpleCookie
+ if py >= (3, 3, 0):
+ from collections.abc import MutableMapping as DictMixin
+ from types import ModuleType as new_module
+ else:
+ from collections import MutableMapping as DictMixin
+ from imp import new_module
+ import pickle
+ from io import BytesIO
+ from configparser import ConfigParser
+ from inspect import getfullargspec
+ def getargspec(func):
+ spec = getfullargspec(func)
+ kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
+ return kwargs, spec[1], spec[2], spec[3]
+
+ basestring = str
+ unicode = str
+ json_loads = lambda s: json_lds(touni(s))
+ callable = lambda x: hasattr(x, '__call__')
+ imap = map
+ def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
+else: # 2.x
+ import httplib
+ import thread
+ from urlparse import urljoin, SplitResult as UrlSplitResult
+ from urllib import urlencode, quote as urlquote, unquote as urlunquote
+ from Cookie import SimpleCookie
+ from itertools import imap
+ import cPickle as pickle
+ from imp import new_module
+ from StringIO import StringIO as BytesIO
+ from ConfigParser import SafeConfigParser as ConfigParser
+ from inspect import getargspec
+ if py25:
+ msg = "Python 2.5 support may be dropped in future versions of Bottle."
+ warnings.warn(msg, DeprecationWarning)
+ from UserDict import DictMixin
+ def next(it): return it.next()
+ bytes = str
+ else: # 2.6, 2.7
+ from collections import MutableMapping as DictMixin
+ unicode = unicode
+ json_loads = json_lds
+ eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '<py3fix>', 'exec'))
+
+# Some helpers for string/byte handling
+def tob(s, enc='utf8'):
+ return s.encode(enc) if isinstance(s, unicode) else bytes(s)
+def touni(s, enc='utf8', err='strict'):
+ return s.decode(enc, err) if isinstance(s, bytes) else unicode(s)
+tonat = touni if py3k else tob
+
+# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
+# 3.1 needs a workaround.
+if py31:
+ from io import TextIOWrapper
+ class NCTextIOWrapper(TextIOWrapper):
+ def close(self): pass # Keep wrapped buffer open.
+
+
+# A bug in functools causes it to break if the wrapper is an instance method
+def update_wrapper(wrapper, wrapped, *a, **ka):
+ try: functools.update_wrapper(wrapper, wrapped, *a, **ka)
+ except AttributeError: pass
+
+
+
+# These helpers are used at module level and need to be defined first.
+# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
+
+def depr(message, hard=False):
+ warnings.warn(message, DeprecationWarning, stacklevel=3)
+
+def makelist(data): # This is just to handy
+ if isinstance(data, (tuple, list, set, dict)): return list(data)
+ elif data: return [data]
+ else: return []
+
+
+class DictProperty(object):
+ ''' Property that maps to a key in a local dict-like attribute. '''
+ def __init__(self, attr, key=None, read_only=False):
+ self.attr, self.key, self.read_only = attr, key, read_only
+
+ def __call__(self, func):
+ functools.update_wrapper(self, func, updated=[])
+ self.getter, self.key = func, self.key or func.__name__
+ return self
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ key, storage = self.key, getattr(obj, self.attr)
+ if key not in storage: storage[key] = self.getter(obj)
+ return storage[key]
+
+ def __set__(self, obj, value):
+ if self.read_only: raise AttributeError("Read-Only property.")
+ getattr(obj, self.attr)[self.key] = value
+
+ def __delete__(self, obj):
+ if self.read_only: raise AttributeError("Read-Only property.")
+ del getattr(obj, self.attr)[self.key]
+
+
+class cached_property(object):
+ ''' A property that is only computed once per instance and then replaces
+ itself with an ordinary attribute. Deleting the attribute resets the
+ property. '''
+
+ def __init__(self, func):
+ self.__doc__ = getattr(func, '__doc__')
+ self.func = func
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ value = obj.__dict__[self.func.__name__] = self.func(obj)
+ return value
+
+
+class lazy_attribute(object):
+ ''' A property that caches itself to the class object. '''
+ def __init__(self, func):
+ functools.update_wrapper(self, func, updated=[])
+ self.getter = func
+
+ def __get__(self, obj, cls):
+ value = self.getter(cls)
+ setattr(cls, self.__name__, value)
+ return value
+
+
+
+
+
+
+###############################################################################
+# Exceptions and Events ########################################################
+###############################################################################
+
+
+class BottleException(Exception):
+ """ A base class for exceptions used by bottle. """
+ pass
+
+
+
+
+
+
+###############################################################################
+# Routing ######################################################################
+###############################################################################
+
+
+class RouteError(BottleException):
+ """ This is a base class for all routing related exceptions """
+
+
+class RouteReset(BottleException):
+ """ If raised by a plugin or request handler, the route is reset and all
+ plugins are re-applied. """
+
+class RouterUnknownModeError(RouteError): pass
+
+
+class RouteSyntaxError(RouteError):
+ """ The route parser found something not supported by this router. """
+
+
+class RouteBuildError(RouteError):
+ """ The route could not be built. """
+
+
+def _re_flatten(p):
+ ''' Turn all capturing groups in a regular expression pattern into
+ non-capturing groups. '''
+ if '(' not in p: return p
+ return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
+ lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
+
+
+class Router(object):
+ ''' A Router is an ordered collection of route->target pairs. It is used to
+ efficiently match WSGI requests against a number of routes and return
+ the first target that satisfies the request. The target may be anything,
+ usually a string, ID or callable object. A route consists of a path-rule
+ and a HTTP method.
+
+ The path-rule is either a static path (e.g. `/contact`) or a dynamic
+ path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
+ and details on the matching order are described in docs:`routing`.
+ '''
+
+ default_pattern = '[^/]+'
+ default_filter = 're'
+
+ #: The current CPython regexp implementation does not allow more
+ #: than 99 matching groups per regular expression.
+ _MAX_GROUPS_PER_PATTERN = 99
+
+ def __init__(self, strict=False):
+ self.rules = [] # All rules in order
+ self._groups = {} # index of regexes to find them in dyna_routes
+ self.builder = {} # Data structure for the url builder
+ self.static = {} # Search structure for static routes
+ self.dyna_routes = {}
+ self.dyna_regexes = {} # Search structure for dynamic routes
+ #: If true, static routes are no longer checked first.
+ self.strict_order = strict
+ self.filters = {
+ 're': lambda conf:
+ (_re_flatten(conf or self.default_pattern), None, None),
+ 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
+ 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
+ 'path': lambda conf: (r'.+?', None, None)}
+
+ def add_filter(self, name, func):
+ ''' Add a filter. The provided function is called with the configuration
+ string as parameter and must return a (regexp, to_python, to_url) tuple.
+ The first element is a string, the last two are callables or None. '''
+ self.filters[name] = func
+
+ rule_syntax = re.compile('(\\\\*)'\
+ '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
+ '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
+ '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))')
+
+ def _itertokens(self, rule):
+ offset, prefix = 0, ''
+ for match in self.rule_syntax.finditer(rule):
+ prefix += rule[offset:match.start()]
+ g = match.groups()
+ if len(g[0])%2: # Escaped wildcard
+ prefix += match.group(0)[len(g[0]):]
+ offset = match.end()
+ continue
+ if prefix:
+ yield prefix, None, None
+ name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
+ yield name, filtr or 'default', conf or None
+ offset, prefix = match.end(), ''
+ if offset <= len(rule) or prefix:
+ yield prefix+rule[offset:], None, None
+
+ def add(self, rule, method, target, name=None):
+ ''' Add a new rule or replace the target for an existing rule. '''
+ anons = 0 # Number of anonymous wildcards found
+ keys = [] # Names of keys
+ pattern = '' # Regular expression pattern with named groups
+ filters = [] # Lists of wildcard input filters
+ builder = [] # Data structure for the URL builder
+ is_static = True
+
+ for key, mode, conf in self._itertokens(rule):
+ if mode:
+ is_static = False
+ if mode == 'default': mode = self.default_filter
+ mask, in_filter, out_filter = self.filters[mode](conf)
+ if not key:
+ pattern += '(?:%s)' % mask
+ key = 'anon%d' % anons
+ anons += 1
+ else:
+ pattern += '(?P<%s>%s)' % (key, mask)
+ keys.append(key)
+ if in_filter: filters.append((key, in_filter))
+ builder.append((key, out_filter or str))
+ elif key:
+ pattern += re.escape(key)
+ builder.append((None, key))
+
+ self.builder[rule] = builder
+ if name: self.builder[name] = builder
+
+ if is_static and not self.strict_order:
+ self.static.setdefault(method, {})
+ self.static[method][self.build(rule)] = (target, None)
+ return
+
+ try:
+ re_pattern = re.compile('^(%s)$' % pattern)
+ re_match = re_pattern.match
+ except re.error:
+ raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
+
+ if filters:
+ def getargs(path):
+ url_args = re_match(path).groupdict()
+ for name, wildcard_filter in filters:
+ try:
+ url_args[name] = wildcard_filter(url_args[name])
+ except ValueError:
+ raise HTTPError(400, 'Path has wrong format.')
+ return url_args
+ elif re_pattern.groupindex:
+ def getargs(path):
+ return re_match(path).groupdict()
+ else:
+ getargs = None
+
+ flatpat = _re_flatten(pattern)
+ whole_rule = (rule, flatpat, target, getargs)
+
+ if (flatpat, method) in self._groups:
+ if DEBUG:
+ msg = 'Route <%s %s> overwrites a previously defined route'
+ warnings.warn(msg % (method, rule), RuntimeWarning)
+ self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
+ else:
+ self.dyna_routes.setdefault(method, []).append(whole_rule)
+ self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
+
+ self._compile(method)
+
+ def _compile(self, method):
+ all_rules = self.dyna_routes[method]
+ comborules = self.dyna_regexes[method] = []
+ maxgroups = self._MAX_GROUPS_PER_PATTERN
+ for x in range(0, len(all_rules), maxgroups):
+ some = all_rules[x:x+maxgroups]
+ combined = (flatpat for (_, flatpat, _, _) in some)
+ combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
+ combined = re.compile(combined).match
+ rules = [(target, getargs) for (_, _, target, getargs) in some]
+ comborules.append((combined, rules))
+
+ def build(self, _name, *anons, **query):
+ ''' Build an URL by filling the wildcards in a rule. '''
+ builder = self.builder.get(_name)
+ if not builder: raise RouteBuildError("No route with that name.", _name)
+ try:
+ for i, value in enumerate(anons): query['anon%d'%i] = value
+ url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
+ return url if not query else url+'?'+urlencode(query)
+ except KeyError:
+ raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
+
+ def match(self, environ):
+ ''' Return a (target, url_agrs) tuple or raise HTTPError(400/404/405). '''
+ verb = environ['REQUEST_METHOD'].upper()
+ path = environ['PATH_INFO'] or '/'
+ target = None
+ if verb == 'HEAD':
+ methods = ['PROXY', verb, 'GET', 'ANY']
+ else:
+ methods = ['PROXY', verb, 'ANY']
+
+ for method in methods:
+ if method in self.static and path in self.static[method]:
+ target, getargs = self.static[method][path]
+ return target, getargs(path) if getargs else {}
+ elif method in self.dyna_regexes:
+ for combined, rules in self.dyna_regexes[method]:
+ match = combined(path)
+ if match:
+ target, getargs = rules[match.lastindex - 1]
+ return target, getargs(path) if getargs else {}
+
+ # No matching route found. Collect alternative methods for 405 response
+ allowed = set([])
+ nocheck = set(methods)
+ for method in set(self.static) - nocheck:
+ if path in self.static[method]:
+ allowed.add(method)
+ for method in set(self.dyna_regexes) - allowed - nocheck:
+ for combined, rules in self.dyna_regexes[method]:
+ match = combined(path)
+ if match:
+ allowed.add(method)
+ if allowed:
+ allow_header = ",".join(sorted(allowed))
+ raise HTTPError(405, "Method not allowed.", Allow=allow_header)
+
+ # No matching route and no alternative method found. We give up
+ raise HTTPError(404, "Not found: " + repr(path))
+
+
+
+
+
+
+class Route(object):
+ ''' This class wraps a route callback along with route specific metadata and
+ configuration and applies Plugins on demand. It is also responsible for
+ turing an URL path rule into a regular expression usable by the Router.
+ '''
+
+ def __init__(self, app, rule, method, callback, name=None,
+ plugins=None, skiplist=None, **config):
+ #: The application this route is installed to.
+ self.app = app
+ #: The path-rule string (e.g. ``/wiki/:page``).
+ self.rule = rule
+ #: The HTTP method as a string (e.g. ``GET``).
+ self.method = method
+ #: The original callback with no plugins applied. Useful for introspection.
+ self.callback = callback
+ #: The name of the route (if specified) or ``None``.
+ self.name = name or None
+ #: A list of route-specific plugins (see :meth:`Bottle.route`).
+ self.plugins = plugins or []
+ #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
+ self.skiplist = skiplist or []
+ #: Additional keyword arguments passed to the :meth:`Bottle.route`
+ #: decorator are stored in this dictionary. Used for route-specific
+ #: plugin configuration and meta-data.
+ self.config = ConfigDict().load_dict(config, make_namespaces=True)
+
+ def __call__(self, *a, **ka):
+ depr("Some APIs changed to return Route() instances instead of"\
+ " callables. Make sure to use the Route.call method and not to"\
+ " call Route instances directly.") #0.12
+ return self.call(*a, **ka)
+
+ @cached_property
+ def call(self):
+ ''' The route callback with all plugins applied. This property is
+ created on demand and then cached to speed up subsequent requests.'''
+ return self._make_callback()
+
+ def reset(self):
+ ''' Forget any cached values. The next time :attr:`call` is accessed,
+ all plugins are re-applied. '''
+ self.__dict__.pop('call', None)
+
+ def prepare(self):
+ ''' Do all on-demand work immediately (useful for debugging).'''
+ self.call
+
+ @property
+ def _context(self):
+ depr('Switch to Plugin API v2 and access the Route object directly.') #0.12
+ return dict(rule=self.rule, method=self.method, callback=self.callback,
+ name=self.name, app=self.app, config=self.config,
+ apply=self.plugins, skip=self.skiplist)
+
+ def all_plugins(self):
+ ''' Yield all Plugins affecting this route. '''
+ unique = set()
+ for p in reversed(self.app.plugins + self.plugins):
+ if True in self.skiplist: break
+ name = getattr(p, 'name', False)
+ if name and (name in self.skiplist or name in unique): continue
+ if p in self.skiplist or type(p) in self.skiplist: continue
+ if name: unique.add(name)
+ yield p
+
+ def _make_callback(self):
+ callback = self.callback
+ for plugin in self.all_plugins():
+ try:
+ if hasattr(plugin, 'apply'):
+ api = getattr(plugin, 'api', 1)
+ context = self if api > 1 else self._context
+ callback = plugin.apply(callback, context)
+ else:
+ callback = plugin(callback)
+ except RouteReset: # Try again with changed configuration.
+ return self._make_callback()
+ if not callback is self.callback:
+ update_wrapper(callback, self.callback)
+ return callback
+
+ def get_undecorated_callback(self):
+ ''' Return the callback. If the callback is a decorated function, try to
+ recover the original function. '''
+ func = self.callback
+ func = getattr(func, '__func__' if py3k else 'im_func', func)
+ closure_attr = '__closure__' if py3k else 'func_closure'
+ while hasattr(func, closure_attr) and getattr(func, closure_attr):
+ func = getattr(func, closure_attr)[0].cell_contents
+ return func
+
+ def get_callback_args(self):
+ ''' Return a list of argument names the callback (most likely) accepts
+ as keyword arguments. If the callback is a decorated function, try
+ to recover the original function before inspection. '''
+ return getargspec(self.get_undecorated_callback())[0]
+
+ def get_config(self, key, default=None):
+ ''' Lookup a config field and return its value, first checking the
+ route.config, then route.app.config.'''
+ for conf in (self.config, self.app.config):
+ if key in conf: return conf[key]
+ return default
+
+ def __repr__(self):
+ cb = self.get_undecorated_callback()
+ return '<%s %r %r>' % (self.method, self.rule, cb)
+
+
+
+
+
+
+###############################################################################
+# Application Object ###########################################################
+###############################################################################
+
+
+class Bottle(object):
+ """ Each Bottle object represents a single, distinct web application and
+ consists of routes, callbacks, plugins, resources and configuration.
+ Instances are callable WSGI applications.
+
+ :param catchall: If true (default), handle all exceptions. Turn off to
+ let debugging middleware handle exceptions.
+ """
+
+ def __init__(self, catchall=True, autojson=True):
+
+ #: A :class:`ConfigDict` for app specific configuration.
+ self.config = ConfigDict()
+ self.config._on_change = functools.partial(self.trigger_hook, 'config')
+ self.config.meta_set('autojson', 'validate', bool)
+ self.config.meta_set('catchall', 'validate', bool)
+ self.config['catchall'] = catchall
+ self.config['autojson'] = autojson
+
+ #: A :class:`ResourceManager` for application files
+ self.resources = ResourceManager()
+
+ self.routes = [] # List of installed :class:`Route` instances.
+ self.router = Router() # Maps requests to :class:`Route` instances.
+ self.error_handler = {}
+
+ # Core plugins
+ self.plugins = [] # List of installed plugins.
+ if self.config['autojson']:
+ self.install(JSONPlugin())
+ self.install(TemplatePlugin())
+
+ #: If true, most exceptions are caught and returned as :exc:`HTTPError`
+ catchall = DictProperty('config', 'catchall')
+
+ __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
+ __hook_reversed = 'after_request'
+
+ @cached_property
+ def _hooks(self):
+ return dict((name, []) for name in self.__hook_names)
+
+ def add_hook(self, name, func):
+ ''' Attach a callback to a hook. Three hooks are currently implemented:
+
+ before_request
+ Executed once before each request. The request context is
+ available, but no routing has happened yet.
+ after_request
+ Executed once after each request regardless of its outcome.
+ app_reset
+ Called whenever :meth:`Bottle.reset` is called.
+ '''
+ if name in self.__hook_reversed:
+ self._hooks[name].insert(0, func)
+ else:
+ self._hooks[name].append(func)
+
+ def remove_hook(self, name, func):
+ ''' Remove a callback from a hook. '''
+ if name in self._hooks and func in self._hooks[name]:
+ self._hooks[name].remove(func)
+ return True
+
+ def trigger_hook(self, __name, *args, **kwargs):
+ ''' Trigger a hook and return a list of results. '''
+ return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
+
+ def hook(self, name):
+ """ Return a decorator that attaches a callback to a hook. See
+ :meth:`add_hook` for details."""
+ def decorator(func):
+ self.add_hook(name, func)
+ return func
+ return decorator
+
+ def mount(self, prefix, app, **options):
+ ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
+ URL prefix. Example::
+
+ root_app.mount('/admin/', admin_app)
+
+ :param prefix: path prefix or `mount-point`. If it ends in a slash,
+ that slash is mandatory.
+ :param app: an instance of :class:`Bottle` or a WSGI application.
+
+ All other parameters are passed to the underlying :meth:`route` call.
+ '''
+ if isinstance(app, basestring):
+ depr('Parameter order of Bottle.mount() changed.', True) # 0.10
+
+ segments = [p for p in prefix.split('/') if p]
+ if not segments: raise ValueError('Empty path prefix.')
+ path_depth = len(segments)
+
+ def mountpoint_wrapper():
+ try:
+ request.path_shift(path_depth)
+ rs = HTTPResponse([])
+ def start_response(status, headerlist, exc_info=None):
+ if exc_info:
+ try:
+ _raise(*exc_info)
+ finally:
+ exc_info = None
+ rs.status = status
+ for name, value in headerlist: rs.add_header(name, value)
+ return rs.body.append
+ body = app(request.environ, start_response)
+ if body and rs.body: body = itertools.chain(rs.body, body)
+ rs.body = body or rs.body
+ return rs
+ finally:
+ request.path_shift(-path_depth)
+
+ options.setdefault('skip', True)
+ options.setdefault('method', 'PROXY')
+ options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
+ options['callback'] = mountpoint_wrapper
+
+ self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
+ if not prefix.endswith('/'):
+ self.route('/' + '/'.join(segments), **options)
+
+ def merge(self, routes):
+ ''' Merge the routes of another :class:`Bottle` application or a list of
+ :class:`Route` objects into this application. The routes keep their
+ 'owner', meaning that the :data:`Route.app` attribute is not
+ changed. '''
+ if isinstance(routes, Bottle):
+ routes = routes.routes
+ for route in routes:
+ self.add_route(route)
+
+ def install(self, plugin):
+ ''' Add a plugin to the list of plugins and prepare it for being
+ applied to all routes of this application. A plugin may be a simple
+ decorator or an object that implements the :class:`Plugin` API.
+ '''
+ if hasattr(plugin, 'setup'): plugin.setup(self)
+ if not callable(plugin) and not hasattr(plugin, 'apply'):
+ raise TypeError("Plugins must be callable or implement .apply()")
+ self.plugins.append(plugin)
+ self.reset()
+ return plugin
+
+ def uninstall(self, plugin):
+ ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type
+ object to remove all plugins that match that type, a string to remove
+ all plugins with a matching ``name`` attribute or ``True`` to remove all
+ plugins. Return the list of removed plugins. '''
+ removed, remove = [], plugin
+ for i, plugin in list(enumerate(self.plugins))[::-1]:
+ if remove is True or remove is plugin or remove is type(plugin) \
+ or getattr(plugin, 'name', True) == remove:
+ removed.append(plugin)
+ del self.plugins[i]
+ if hasattr(plugin, 'close'): plugin.close()
+ if removed: self.reset()
+ return removed
+
+ def reset(self, route=None):
+ ''' Reset all routes (force plugins to be re-applied) and clear all
+ caches. If an ID or route object is given, only that specific route
+ is affected. '''
+ if route is None: routes = self.routes
+ elif isinstance(route, Route): routes = [route]
+ else: routes = [self.routes[route]]
+ for route in routes: route.reset()
+ if DEBUG:
+ for route in routes: route.prepare()
+ self.trigger_hook('app_reset')
+
+ def close(self):
+ ''' Close the application and all installed plugins. '''
+ for plugin in self.plugins:
+ if hasattr(plugin, 'close'): plugin.close()
+ self.stopped = True
+
+ def run(self, **kwargs):
+ ''' Calls :func:`run` with the same parameters. '''
+ run(self, **kwargs)
+
+ def match(self, environ):
+ """ Search for a matching route and return a (:class:`Route` , urlargs)
+ tuple. The second value is a dictionary with parameters extracted
+ from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
+ return self.router.match(environ)
+
+ def get_url(self, routename, **kargs):
+ """ Return a string that matches a named route """
+ scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
+ location = self.router.build(routename, **kargs).lstrip('/')
+ return urljoin(urljoin('/', scriptname), location)
+
+ def add_route(self, route):
+ ''' Add a route object, but do not change the :data:`Route.app`
+ attribute.'''
+ self.routes.append(route)
+ self.router.add(route.rule, route.method, route, name=route.name)
+ if DEBUG: route.prepare()
+
+ def route(self, path=None, method='GET', callback=None, name=None,
+ apply=None, skip=None, **config):
+ """ A decorator to bind a function to a request URL. Example::
+
+ @app.route('/hello/:name')
+ def hello(name):
+ return 'Hello %s' % name
+
+ The ``:name`` part is a wildcard. See :class:`Router` for syntax
+ details.
+
+ :param path: Request path or a list of paths to listen to. If no
+ path is specified, it is automatically generated from the
+ signature of the function.
+ :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
+ methods to listen to. (default: `GET`)
+ :param callback: An optional shortcut to avoid the decorator
+ syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
+ :param name: The name for this route. (default: None)
+ :param apply: A decorator or plugin or a list of plugins. These are
+ applied to the route callback in addition to installed plugins.
+ :param skip: A list of plugins, plugin classes or names. Matching
+ plugins are not installed to this route. ``True`` skips all.
+
+ Any additional keyword arguments are stored as route-specific
+ configuration and passed to plugins (see :meth:`Plugin.apply`).
+ """
+ if callable(path): path, callback = None, path
+ plugins = makelist(apply)
+ skiplist = makelist(skip)
+ def decorator(callback):
+ # TODO: Documentation and tests
+ if isinstance(callback, basestring): callback = load(callback)
+ for rule in makelist(path) or yieldroutes(callback):
+ for verb in makelist(method):
+ verb = verb.upper()
+ route = Route(self, rule, verb, callback, name=name,
+ plugins=plugins, skiplist=skiplist, **config)
+ self.add_route(route)
+ return callback
+ return decorator(callback) if callback else decorator
+
+ def get(self, path=None, method='GET', **options):
+ """ Equals :meth:`route`. """
+ return self.route(path, method, **options)
+
+ def post(self, path=None, method='POST', **options):
+ """ Equals :meth:`route` with a ``POST`` method parameter. """
+ return self.route(path, method, **options)
+
+ def put(self, path=None, method='PUT', **options):
+ """ Equals :meth:`route` with a ``PUT`` method parameter. """
+ return self.route(path, method, **options)
+
+ def delete(self, path=None, method='DELETE', **options):
+ """ Equals :meth:`route` with a ``DELETE`` method parameter. """
+ return self.route(path, method, **options)
+
+ def error(self, code=500):
+ """ Decorator: Register an output handler for a HTTP error code"""
+ def wrapper(handler):
+ self.error_handler[int(code)] = handler
+ return handler
+ return wrapper
+
+ def default_error_handler(self, res):
+ return tob(template(ERROR_PAGE_TEMPLATE, e=res))
+
+ def _handle(self, environ):
+ try:
+
+ environ['bottle.app'] = self
+ request.bind(environ)
+ response.bind()
+
+ path = environ['bottle.raw_path'] = environ['PATH_INFO']
+ if py3k:
+ try:
+ environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
+ except UnicodeError:
+ return HTTPError(400, 'Invalid path string. Expected UTF-8')
+
+ try:
+ self.trigger_hook('before_request')
+ route, args = self.router.match(environ)
+ environ['route.handle'] = route
+ environ['bottle.route'] = route
+ environ['route.url_args'] = args
+ return route.call(**args)
+ finally:
+ self.trigger_hook('after_request')
+
+ except HTTPResponse:
+ return _e()
+ except RouteReset:
+ route.reset()
+ return self._handle(environ)
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+ stacktrace = format_exc()
+ environ['wsgi.errors'].write(stacktrace)
+ return HTTPError(500, "Internal Server Error", _e(), stacktrace)
+
+ def _cast(self, out, peek=None):
+ """ Try to convert the parameter into something WSGI compatible and set
+ correct HTTP headers when possible.
+ Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
+ iterable of strings and iterable of unicodes
+ """
+
+ # Empty output is done here
+ if not out:
+ if 'Content-Length' not in response:
+ response['Content-Length'] = 0
+ return []
+ # Join lists of byte or unicode strings. Mixed lists are NOT supported
+ if isinstance(out, (tuple, list))\
+ and isinstance(out[0], (bytes, unicode)):
+ out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
+ # Encode unicode strings
+ if isinstance(out, unicode):
+ out = out.encode(response.charset)
+ # Byte Strings are just returned
+ if isinstance(out, bytes):
+ if 'Content-Length' not in response:
+ response['Content-Length'] = len(out)
+ return [out]
+ # HTTPError or HTTPException (recursive, because they may wrap anything)
+ # TODO: Handle these explicitly in handle() or make them iterable.
+ if isinstance(out, HTTPError):
+ out.apply(response)
+ out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
+ return self._cast(out)
+ if isinstance(out, HTTPResponse):
+ out.apply(response)
+ return self._cast(out.body)
+
+ # File-like objects.
+ if hasattr(out, 'read'):
+ if 'wsgi.file_wrapper' in request.environ:
+ return request.environ['wsgi.file_wrapper'](out)
+ elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
+ return WSGIFileWrapper(out)
+
+ # Handle Iterables. We peek into them to detect their inner type.
+ try:
+ iout = iter(out)
+ first = next(iout)
+ while not first:
+ first = next(iout)
+ except StopIteration:
+ return self._cast('')
+ except HTTPResponse:
+ first = _e()
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+ first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
+
+ # These are the inner types allowed in iterator or generator objects.
+ if isinstance(first, HTTPResponse):
+ return self._cast(first)
+ elif isinstance(first, bytes):
+ new_iter = itertools.chain([first], iout)
+ elif isinstance(first, unicode):
+ encoder = lambda x: x.encode(response.charset)
+ new_iter = imap(encoder, itertools.chain([first], iout))
+ else:
+ msg = 'Unsupported response type: %s' % type(first)
+ return self._cast(HTTPError(500, msg))
+ if hasattr(out, 'close'):
+ new_iter = _closeiter(new_iter, out.close)
+ return new_iter
+
+ def wsgi(self, environ, start_response):
+ """ The bottle WSGI-interface. """
+ try:
+ out = self._cast(self._handle(environ))
+ # rfc2616 section 4.3
+ if response._status_code in (100, 101, 204, 304)\
+ or environ['REQUEST_METHOD'] == 'HEAD':
+ if hasattr(out, 'close'): out.close()
+ out = []
+ start_response(response._status_line, response.headerlist)
+ return out
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+ err = '<h1>Critical error while processing request: %s</h1>' \
+ % html_escape(environ.get('PATH_INFO', '/'))
+ if DEBUG:
+ err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
+ '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
+ % (html_escape(repr(_e())), html_escape(format_exc()))
+ environ['wsgi.errors'].write(err)
+ headers = [('Content-Type', 'text/html; charset=UTF-8')]
+ start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
+ return [tob(err)]
+
+ def __call__(self, environ, start_response):
+ ''' Each instance of :class:'Bottle' is a WSGI application. '''
+ return self.wsgi(environ, start_response)
+
+
+
+
+
+
+###############################################################################
+# HTTP and WSGI Tools ##########################################################
+###############################################################################
+
+class BaseRequest(object):
+ """ A wrapper for WSGI environment dictionaries that adds a lot of
+ convenient access methods and properties. Most of them are read-only.
+
+ Adding new attributes to a request actually adds them to the environ
+ dictionary (as 'bottle.request.ext.<name>'). This is the recommended
+ way to store and access request-specific data.
+ """
+
+ __slots__ = ('environ')
+
+ #: Maximum size of memory buffer for :attr:`body` in bytes.
+ MEMFILE_MAX = 102400
+
+ def __init__(self, environ=None):
+ """ Wrap a WSGI environ dictionary. """
+ #: The wrapped WSGI environ dictionary. This is the only real attribute.
+ #: All other attributes actually are read-only properties.
+ self.environ = {} if environ is None else environ
+ self.environ['bottle.request'] = self
+
+ @DictProperty('environ', 'bottle.app', read_only=True)
+ def app(self):
+ ''' Bottle application handling this request. '''
+ raise RuntimeError('This request is not connected to an application.')
+
+ @DictProperty('environ', 'bottle.route', read_only=True)
+ def route(self):
+ """ The bottle :class:`Route` object that matches this request. """
+ raise RuntimeError('This request is not connected to a route.')
+
+ @DictProperty('environ', 'route.url_args', read_only=True)
+ def url_args(self):
+ """ The arguments extracted from the URL. """
+ raise RuntimeError('This request is not connected to a route.')
+
+ @property
+ def path(self):
+ ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
+ broken clients and avoid the "empty path" edge case). '''
+ return '/' + self.environ.get('PATH_INFO','').lstrip('/')
+
+ @property
+ def method(self):
+ ''' The ``REQUEST_METHOD`` value as an uppercase string. '''
+ return self.environ.get('REQUEST_METHOD', 'GET').upper()
+
+ @DictProperty('environ', 'bottle.request.headers', read_only=True)
+ def headers(self):
+ ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to
+ HTTP request headers. '''
+ return WSGIHeaderDict(self.environ)
+
+ def get_header(self, name, default=None):
+ ''' Return the value of a request header, or a given default value. '''
+ return self.headers.get(name, default)
+
+ @DictProperty('environ', 'bottle.request.cookies', read_only=True)
+ def cookies(self):
+ """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
+ decoded. Use :meth:`get_cookie` if you expect signed cookies. """
+ cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
+ return FormsDict((c.key, c.value) for c in cookies)
+
+ def get_cookie(self, key, default=None, secret=None):
+ """ Return the content of a cookie. To read a `Signed Cookie`, the
+ `secret` must match the one used to create the cookie (see
+ :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
+ cookie or wrong signature), return a default value. """
+ value = self.cookies.get(key)
+ if secret and value:
+ dec = cookie_decode(value, secret) # (key, value) tuple or None
+ return dec[1] if dec and dec[0] == key else default
+ return value or default
+
+ @DictProperty('environ', 'bottle.request.query', read_only=True)
+ def query(self):
+ ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These
+ values are sometimes called "URL arguments" or "GET parameters", but
+ not to be confused with "URL wildcards" as they are provided by the
+ :class:`Router`. '''
+ get = self.environ['bottle.get'] = FormsDict()
+ pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
+ for key, value in pairs:
+ get[key] = value
+ return get
+
+ @DictProperty('environ', 'bottle.request.forms', read_only=True)
+ def forms(self):
+ """ Form values parsed from an `url-encoded` or `multipart/form-data`
+ encoded POST or PUT request body. The result is returned as a
+ :class:`FormsDict`. All keys and values are strings. File uploads
+ are stored separately in :attr:`files`. """
+ forms = FormsDict()
+ forms.recode_unicode = self.POST.recode_unicode
+ for name, item in self.POST.allitems():
+ if not isinstance(item, FileUpload):
+ forms[name] = item
+ return forms
+
+ @DictProperty('environ', 'bottle.request.params', read_only=True)
+ def params(self):
+ """ A :class:`FormsDict` with the combined values of :attr:`query` and
+ :attr:`forms`. File uploads are stored in :attr:`files`. """
+ params = FormsDict()
+ for key, value in self.query.allitems():
+ params[key] = value
+ for key, value in self.forms.allitems():
+ params[key] = value
+ return params
+
+ @DictProperty('environ', 'bottle.request.files', read_only=True)
+ def files(self):
+ """ File uploads parsed from `multipart/form-data` encoded POST or PUT
+ request body. The values are instances of :class:`FileUpload`.
+
+ """
+ files = FormsDict()
+ files.recode_unicode = self.POST.recode_unicode
+ for name, item in self.POST.allitems():
+ if isinstance(item, FileUpload):
+ files[name] = item
+ return files
+
+ @DictProperty('environ', 'bottle.request.json', read_only=True)
+ def json(self):
+ ''' If the ``Content-Type`` header is ``application/json``, this
+ property holds the parsed content of the request body. Only requests
+ smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
+ exhaustion. '''
+ ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
+ if ctype == 'application/json':
+ b = self._get_body_string()
+ if not b:
+ return None
+ return json_loads(b)
+ return None
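+ # Illustrative note (not part of the upstream bottle source): for a POST with
+ # 'Content-Type: application/json' and body '{"title": "Home"}', request.json
+ # evaluates to {'title': 'Home'}; for any other content type it stays None.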
+
+ def _iter_body(self, read, bufsize):
+ maxread = max(0, self.content_length)
+ while maxread:
+ part = read(min(maxread, bufsize))
+ if not part: break
+ yield part
+ maxread -= len(part)
+
+ def _iter_chunked(self, read, bufsize):
+ err = HTTPError(400, 'Error while parsing chunked transfer body.')
+ rn, sem, bs = tob('\r\n'), tob(';'), tob('')
+ while True:
+ header = read(1)
+ while header[-2:] != rn:
+ c = read(1)
+ header += c
+ if not c: raise err
+ if len(header) > bufsize: raise err
+ size, _, _ = header.partition(sem)
+ try:
+ maxread = int(tonat(size.strip()), 16)
+ except ValueError:
+ raise err
+ if maxread == 0: break
+ buff = bs
+ while maxread > 0:
+ if not buff:
+ buff = read(min(maxread, bufsize))
+ part, buff = buff[:maxread], buff[maxread:]
+ if not part: raise err
+ yield part
+ maxread -= len(part)
+ if read(2) != rn:
+ raise err
+
+ @DictProperty('environ', 'bottle.request.body', read_only=True)
+ def _body(self):
+ body_iter = self._iter_chunked if self.chunked else self._iter_body
+ read_func = self.environ['wsgi.input'].read
+ body, body_size, is_temp_file = BytesIO(), 0, False
+ for part in body_iter(read_func, self.MEMFILE_MAX):
+ body.write(part)
+ body_size += len(part)
+ if not is_temp_file and body_size > self.MEMFILE_MAX:
+ body, tmp = TemporaryFile(mode='w+b'), body
+ body.write(tmp.getvalue())
+ del tmp
+ is_temp_file = True
+ self.environ['wsgi.input'] = body
+ body.seek(0)
+ return body
+
+ def _get_body_string(self):
+ ''' Read the body until content-length or MEMFILE_MAX into a string. Raise
+ HTTPError(413) on requests that are too large. '''
+ clen = self.content_length
+ if clen > self.MEMFILE_MAX:
+ raise HTTPError(413, 'Request too large')
+ if clen < 0: clen = self.MEMFILE_MAX + 1
+ data = self.body.read(clen)
+ if len(data) > self.MEMFILE_MAX: # Fail fast
+ raise HTTPError(413, 'Request too large')
+ return data
+
+ @property
+ def body(self):
+ """ The HTTP request body as a seek-able file-like object. Depending on
+ :attr:`MEMFILE_MAX`, this is either a temporary file or a
+ :class:`io.BytesIO` instance. Accessing this property for the first
+ time reads and replaces the ``wsgi.input`` environ variable.
+ Subsequent accesses just do a `seek(0)` on the file object. """
+ self._body.seek(0)
+ return self._body
+
+ @property
+ def chunked(self):
+ ''' True if Chunked transfer encoding was used. '''
+ return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
+
+ #: An alias for :attr:`query`.
+ GET = query
+
+ @DictProperty('environ', 'bottle.request.post', read_only=True)
+ def POST(self):
+ """ The values of :attr:`forms` and :attr:`files` combined into a single
+ :class:`FormsDict`. Values are either strings (form values) or
+ instances of :class:`FileUpload` (file uploads).
+ """
+ post = FormsDict()
+ # We default to application/x-www-form-urlencoded for everything that
+ # is not multipart and take the fast path (also: 3.1 workaround)
+ if not self.content_type.startswith('multipart/'):
+ pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
+ for key, value in pairs:
+ post[key] = value
+ return post
+
+ safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
+ for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
+ if key in self.environ: safe_env[key] = self.environ[key]
+ args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
+ if py31:
+ args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
+ newline='\n')
+ elif py3k:
+ args['encoding'] = 'utf8'
+ post.recode_unicode = False
+ data = cgi.FieldStorage(**args)
+ self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958
+ data = data.list or []
+ for item in data:
+ if item.filename is None:
+ post[item.name] = item.value
+ else:
+ post[item.name] = FileUpload(item.file, item.name,
+ item.filename, item.headers)
+ return post
+
+ @property
+ def url(self):
+ """ The full request URI including hostname and scheme. If your app
+ lives behind a reverse proxy or load balancer and you get confusing
+ results, make sure that the ``X-Forwarded-Host`` header is set
+ correctly. """
+ return self.urlparts.geturl()
+
+ @DictProperty('environ', 'bottle.request.urlparts', read_only=True)
+ def urlparts(self):
+ ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
+ The tuple contains (scheme, host, path, query_string and fragment),
+ but the fragment is always empty because it is not visible to the
+ server. '''
+ env = self.environ
+ http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
+ host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
+ if not host:
+ # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
+ host = env.get('SERVER_NAME', '127.0.0.1')
+ port = env.get('SERVER_PORT')
+ if port and port != ('80' if http == 'http' else '443'):
+ host += ':' + port
+ path = urlquote(self.fullpath)
+ return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
+
+ @property
+ def fullpath(self):
+ """ Request path including :attr:`script_name` (if present). """
+ return urljoin(self.script_name, self.path.lstrip('/'))
+
+ @property
+ def query_string(self):
+ """ The raw :attr:`query` part of the URL (everything in between ``?``
+ and ``#``) as a string. """
+ return self.environ.get('QUERY_STRING', '')
+
+ @property
+ def script_name(self):
+ ''' The initial portion of the URL's `path` that was removed by a higher
+ level (server or routing middleware) before the application was
+ called. This script path is returned with leading and trailing
+ slashes. '''
+ script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
+ return '/' + script_name + '/' if script_name else '/'
+
+ def path_shift(self, shift=1):
+ ''' Shift path segments from :attr:`path` to :attr:`script_name` and
+ vice versa.
+
+ :param shift: The number of path segments to shift. May be negative
+ to change the shift direction. (default: 1)
+ '''
+ script = self.environ.get('SCRIPT_NAME','/')
+ self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
+
+ @property
+ def content_length(self):
+ ''' The request body length as an integer. The client is responsible for
+ setting this header. Otherwise, the real length of the body is unknown
+ and -1 is returned. In this case, :attr:`body` will be empty. '''
+ return int(self.environ.get('CONTENT_LENGTH') or -1)
+
+ @property
+ def content_type(self):
+ ''' The Content-Type header as a lowercase-string (default: empty). '''
+ return self.environ.get('CONTENT_TYPE', '').lower()
+
+ @property
+ def is_xhr(self):
+ ''' True if the request was triggered by an XMLHttpRequest. This only
+ works with JavaScript libraries that support the `X-Requested-With`
+ header (most of the popular libraries do). '''
+ requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
+ return requested_with.lower() == 'xmlhttprequest'
+
+ @property
+ def is_ajax(self):
+ ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. '''
+ return self.is_xhr
+
+ @property
+ def auth(self):
+ """ HTTP authentication data as a (user, password) tuple. This
+ implementation currently supports basic (not digest) authentication
+ only. If the authentication happened at a higher level (e.g. in the
+ front web-server or a middleware), the password field is None, but
+ the user field is looked up from the ``REMOTE_USER`` environ
+ variable. On any errors, None is returned. """
+ basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
+ if basic: return basic
+ ruser = self.environ.get('REMOTE_USER')
+ if ruser: return (ruser, None)
+ return None
+
+ @property
+ def remote_route(self):
+ """ A list of all IPs that were involved in this request, starting with
+ the client IP and followed by zero or more proxies. This only works
+ if all proxies support the ``X-Forwarded-For`` header. Note
+ that this information can be forged by malicious clients. """
+ proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
+ if proxy: return [ip.strip() for ip in proxy.split(',')]
+ remote = self.environ.get('REMOTE_ADDR')
+ return [remote] if remote else []
+
+ @property
+ def remote_addr(self):
+ """ The client IP as a string. Note that this information can be forged
+ by malicious clients. """
+ route = self.remote_route
+ return route[0] if route else None
+
+ def copy(self):
+ """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
+ return Request(self.environ.copy())
+
+ def get(self, value, default=None): return self.environ.get(value, default)
+ def __getitem__(self, key): return self.environ[key]
+ def __delitem__(self, key): self[key] = ""; del(self.environ[key])
+ def __iter__(self): return iter(self.environ)
+ def __len__(self): return len(self.environ)
+ def keys(self): return self.environ.keys()
+ def __setitem__(self, key, value):
+ """ Change an environ value and clear all caches that depend on it. """
+
+ if self.environ.get('bottle.request.readonly'):
+ raise KeyError('The environ dictionary is read-only.')
+
+ self.environ[key] = value
+ todelete = ()
+
+ if key == 'wsgi.input':
+ todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
+ elif key == 'QUERY_STRING':
+ todelete = ('query', 'params')
+ elif key.startswith('HTTP_'):
+ todelete = ('headers', 'cookies')
+
+ for key in todelete:
+ self.environ.pop('bottle.request.'+key, None)
+
+ def __repr__(self):
+ return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
+
+ def __getattr__(self, name):
+ ''' Search in self.environ for additional user defined attributes. '''
+ try:
+ var = self.environ['bottle.request.ext.%s'%name]
+ return var.__get__(self) if hasattr(var, '__get__') else var
+ except KeyError:
+ raise AttributeError('Attribute %r not defined.' % name)
+
+ def __setattr__(self, name, value):
+ if name == 'environ': return object.__setattr__(self, name, value)
+ self.environ['bottle.request.ext.%s'%name] = value
+
+
+def _hkey(key):
+ if '\n' in key or '\r' in key or '\0' in key:
+ raise ValueError("Header names must not contain control characters: %r" % key)
+ return key.title().replace('_', '-')
+
+
+def _hval(value):
+ value = tonat(value)
+ if '\n' in value or '\r' in value or '\0' in value:
+ raise ValueError("Header value must not contain control characters: %r" % value)
+ return value
+
+
+
+class HeaderProperty(object):
+ def __init__(self, name, reader=None, writer=None, default=''):
+ self.name, self.default = name, default
+ self.reader, self.writer = reader, writer
+ self.__doc__ = 'Current value of the %r header.' % name.title()
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ value = obj.get_header(self.name, self.default)
+ return self.reader(value) if self.reader else value
+
+ def __set__(self, obj, value):
+ obj[self.name] = self.writer(value) if self.writer else value
+
+ def __delete__(self, obj):
+ del obj[self.name]
+
+
+class BaseResponse(object):
+ """ Storage class for a response body as well as headers and cookies.
+
+ This class does support dict-like case-insensitive item-access to
+ headers, but is NOT a dict. Most notably, iterating over a response
+ yields parts of the body and not the headers.
+
+ :param body: The response body as one of the supported types.
+ :param status: Either an HTTP status code (e.g. 200) or a status line
+ including the reason phrase (e.g. '200 OK').
+ :param headers: A dictionary or a list of name-value pairs.
+
+ Additional keyword arguments are added to the list of headers.
+ Underscores in the header name are replaced with dashes.
+ """
+
+ default_status = 200
+ default_content_type = 'text/html; charset=UTF-8'
+
+ # Header blacklist for specific response codes
+ # (rfc2616 section 10.2.3 and 10.3.5)
+ bad_headers = {
+ 204: set(('Content-Type',)),
+ 304: set(('Allow', 'Content-Encoding', 'Content-Language',
+ 'Content-Length', 'Content-Range', 'Content-Type',
+ 'Content-Md5', 'Last-Modified'))}
+
+ def __init__(self, body='', status=None, headers=None, **more_headers):
+ self._cookies = None
+ self._headers = {}
+ self.body = body
+ self.status = status or self.default_status
+ if headers:
+ if isinstance(headers, dict):
+ headers = headers.items()
+ for name, value in headers:
+ self.add_header(name, value)
+ if more_headers:
+ for name, value in more_headers.items():
+ self.add_header(name, value)
+
+ def copy(self, cls=None):
+ ''' Returns a copy of self. '''
+ cls = cls or BaseResponse
+ assert issubclass(cls, BaseResponse)
+ copy = cls()
+ copy.status = self.status
+ copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
+ if self._cookies:
+ copy._cookies = SimpleCookie()
+ copy._cookies.load(self._cookies.output(header=''))
+ return copy
+
+ def __iter__(self):
+ return iter(self.body)
+
+ def close(self):
+ if hasattr(self.body, 'close'):
+ self.body.close()
+
+ @property
+ def status_line(self):
+ ''' The HTTP status line as a string (e.g. ``404 Not Found``).'''
+ return self._status_line
+
+ @property
+ def status_code(self):
+ ''' The HTTP status code as an integer (e.g. 404).'''
+ return self._status_code
+
+ def _set_status(self, status):
+ if isinstance(status, int):
+ code, status = status, _HTTP_STATUS_LINES.get(status)
+ elif ' ' in status:
+ status = status.strip()
+ code = int(status.split()[0])
+ else:
+ raise ValueError('String status line without a reason phrase.')
+ if not 100 <= code <= 999: raise ValueError('Status code out of range.')
+ self._status_code = code
+ self._status_line = str(status or ('%d Unknown' % code))
+
+ def _get_status(self):
+ return self._status_line
+
+ status = property(_get_status, _set_status, None,
+ ''' A writeable property to change the HTTP response status. It accepts
+ either a numeric code (100-999) or a string with a custom reason
+ phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
+ :data:`status_code` are updated accordingly. The return value is
+ always a status string. ''')
+ del _get_status, _set_status
+
+ @property
+ def headers(self):
+ ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
+ view on the response headers. '''
+ hdict = HeaderDict()
+ hdict.dict = self._headers
+ return hdict
+
+ def __contains__(self, name): return _hkey(name) in self._headers
+ def __delitem__(self, name): del self._headers[_hkey(name)]
+ def __getitem__(self, name): return self._headers[_hkey(name)][-1]
+ def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)]
+
+ def get_header(self, name, default=None):
+ ''' Return the value of a previously defined header. If there is no
+ header with that name, return a default value. '''
+ return self._headers.get(_hkey(name), [default])[-1]
+
+ def set_header(self, name, value):
+ ''' Create a new response header, replacing any previously defined
+ headers with the same name. '''
+ self._headers[_hkey(name)] = [_hval(value)]
+
+ def add_header(self, name, value):
+ ''' Add an additional response header, not removing duplicates. '''
+ self._headers.setdefault(_hkey(name), []).append(_hval(value))
+
+ def iter_headers(self):
+ ''' Yield (header, value) tuples, skipping headers that are not
+ allowed with the current response status code. '''
+ return self.headerlist
+
+ @property
+ def headerlist(self):
+ """ WSGI conform list of (header, value) tuples. """
+ out = []
+ headers = list(self._headers.items())
+ if 'Content-Type' not in self._headers:
+ headers.append(('Content-Type', [self.default_content_type]))
+ if self._status_code in self.bad_headers:
+ bad_headers = self.bad_headers[self._status_code]
+ headers = [h for h in headers if h[0] not in bad_headers]
+ out += [(name, val) for (name, vals) in headers for val in vals]
+ if self._cookies:
+ for c in self._cookies.values():
+ out.append(('Set-Cookie', _hval(c.OutputString())))
+ if py3k:
+ out = [(k, v.encode('utf8').decode('latin1')) for (k, v) in out]
+ return out
+
+ content_type = HeaderProperty('Content-Type')
+ content_length = HeaderProperty('Content-Length', reader=int)
+ expires = HeaderProperty('Expires',
+ reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
+ writer=lambda x: http_date(x))
+
+ @property
+ def charset(self, default='UTF-8'):
+ """ Return the charset specified in the content-type header (default: utf8). """
+ if 'charset=' in self.content_type:
+ return self.content_type.split('charset=')[-1].split(';')[0].strip()
+ return default
+
+ def set_cookie(self, name, value, secret=None, **options):
+ ''' Create a new cookie or replace an old one. If the `secret` parameter is
+ set, create a `Signed Cookie` (described below).
+
+ :param name: the name of the cookie.
+ :param value: the value of the cookie.
+ :param secret: a signature key required for signed cookies.
+
+ Additionally, this method accepts all RFC 2109 attributes that are
+ supported by :class:`cookie.Morsel`, including:
+
+ :param max_age: maximum age in seconds. (default: None)
+ :param expires: a datetime object or UNIX timestamp. (default: None)
+ :param domain: the domain that is allowed to read the cookie.
+ (default: current domain)
+ :param path: limits the cookie to a given path (default: current path)
+ :param secure: limit the cookie to HTTPS connections (default: off).
+ :param httponly: prevents client-side JavaScript from reading this cookie
+ (default: off, requires Python 2.6 or newer).
+
+ If neither `expires` nor `max_age` is set (default), the cookie will
+ expire at the end of the browser session (as soon as the browser
+ window is closed).
+
+ Signed cookies may store any pickle-able object and are
+ cryptographically signed to prevent manipulation. Keep in mind that
+ cookies are limited to 4kb in most browsers.
+
+ Warning: Signed cookies are not encrypted (the client can still see
+ the content) and not copy-protected (the client can restore an old
+ cookie). The main intention is to make pickling and unpickling
+ safe, not to store secret information on the client side.
+ '''
+ if not self._cookies:
+ self._cookies = SimpleCookie()
+
+ if secret:
+ value = touni(cookie_encode((name, value), secret))
+ elif not isinstance(value, basestring):
+ raise TypeError('Secret key missing for non-string Cookie.')
+
+ if len(value) > 4096: raise ValueError('Cookie value too long.')
+ self._cookies[name] = value
+
+ for key, value in options.items():
+ if key == 'max_age':
+ if isinstance(value, timedelta):
+ value = value.seconds + value.days * 24 * 3600
+ if key == 'expires':
+ if isinstance(value, (datedate, datetime)):
+ value = value.timetuple()
+ elif isinstance(value, (int, float)):
+ value = time.gmtime(value)
+ value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
+ self._cookies[name][key.replace('_', '-')] = value
+
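+ # Illustrative note (not part of the upstream bottle source): a sketch of
+ # typical usage, assuming a handler that remembers a visitor for one hour:
+ # response.set_cookie('visited', 'yes', max_age=3600, path='/', httponly=True)
+ # response.delete_cookie('visited', path='/') # must reuse the same path/domain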
+ def delete_cookie(self, key, **kwargs):
+ ''' Delete a cookie. Be sure to use the same `domain` and `path`
+ settings as used to create the cookie. '''
+ kwargs['max_age'] = -1
+ kwargs['expires'] = 0
+ self.set_cookie(key, '', **kwargs)
+
+ def __repr__(self):
+ out = ''
+ for name, value in self.headerlist:
+ out += '%s: %s\n' % (name.title(), value.strip())
+ return out
+
+
+def local_property(name=None):
+ if name: depr('local_property() is deprecated and will be removed.') #0.12
+ ls = threading.local()
+ def fget(self):
+ try: return ls.var
+ except AttributeError:
+ raise RuntimeError("Request context not initialized.")
+ def fset(self, value): ls.var = value
+ def fdel(self): del ls.var
+ return property(fget, fset, fdel, 'Thread-local property')
+
+
+class LocalRequest(BaseRequest):
+ ''' A thread-local subclass of :class:`BaseRequest` with a different
+ set of attributes for each thread. There is usually only one global
+ instance of this class (:data:`request`). If accessed during a
+ request/response cycle, this instance always refers to the *current*
+ request (even on a multithreaded server). '''
+ bind = BaseRequest.__init__
+ environ = local_property()
+
+
+class LocalResponse(BaseResponse):
+ ''' A thread-local subclass of :class:`BaseResponse` with a different
+ set of attributes for each thread. There is usually only one global
+ instance of this class (:data:`response`). Its attributes are used
+ to build the HTTP response at the end of the request/response cycle.
+ '''
+ bind = BaseResponse.__init__
+ _status_line = local_property()
+ _status_code = local_property()
+ _cookies = local_property()
+ _headers = local_property()
+ body = local_property()
+
+
+Request = BaseRequest
+Response = BaseResponse
+
+
+class HTTPResponse(Response, BottleException):
+ def __init__(self, body='', status=None, headers=None, **more_headers):
+ super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
+
+ def apply(self, response):
+ response._status_code = self._status_code
+ response._status_line = self._status_line
+ response._headers = self._headers
+ response._cookies = self._cookies
+ response.body = self.body
+
+
+class HTTPError(HTTPResponse):
+ default_status = 500
+ def __init__(self, status=None, body=None, exception=None, traceback=None,
+ **options):
+ self.exception = exception
+ self.traceback = traceback
+ super(HTTPError, self).__init__(body, status, **options)
+
+
+
+
+
+###############################################################################
+# Plugins ######################################################################
+###############################################################################
+
+class PluginError(BottleException): pass
+
+
+class JSONPlugin(object):
+ name = 'json'
+ api = 2
+
+ def __init__(self, json_dumps=json_dumps):
+ self.json_dumps = json_dumps
+
+ def apply(self, callback, route):
+ dumps = self.json_dumps
+ if not dumps: return callback
+ def wrapper(*a, **ka):
+ try:
+ rv = callback(*a, **ka)
+ except HTTPResponse:
+ rv = _e()
+
+ if isinstance(rv, dict):
+ #Attempt to serialize, raises exception on failure
+ json_response = dumps(rv)
+ # Set content type only if serialization was successful
+ response.content_type = 'application/json'
+ return json_response
+ elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
+ rv.body = dumps(rv.body)
+ rv.content_type = 'application/json'
+ return rv
+
+ return wrapper
+
+
+class TemplatePlugin(object):
+ ''' This plugin applies the :func:`view` decorator to all routes with a
+ `template` config parameter. If the parameter is a tuple, the second
+ element must be a dict with additional options (e.g. `template_engine`)
+ or default variables for the template. '''
+ name = 'template'
+ api = 2
+
+ def apply(self, callback, route):
+ conf = route.config.get('template')
+ if isinstance(conf, (tuple, list)) and len(conf) == 2:
+ return view(conf[0], **conf[1])(callback)
+ elif isinstance(conf, str):
+ return view(conf)(callback)
+ else:
+ return callback
+
+
+#: Not a plugin, but part of the plugin API. TODO: Find a better place.
+class _ImportRedirect(object):
+ def __init__(self, name, impmask):
+ ''' Create a virtual package that redirects imports (see PEP 302). '''
+ self.name = name
+ self.impmask = impmask
+ self.module = sys.modules.setdefault(name, new_module(name))
+ self.module.__dict__.update({'__file__': __file__, '__path__': [],
+ '__all__': [], '__loader__': self})
+ sys.meta_path.append(self)
+
+ def find_module(self, fullname, path=None):
+ if '.' not in fullname: return
+ packname = fullname.rsplit('.', 1)[0]
+ if packname != self.name: return
+ return self
+
+ def load_module(self, fullname):
+ if fullname in sys.modules: return sys.modules[fullname]
+ modname = fullname.rsplit('.', 1)[1]
+ realname = self.impmask % modname
+ __import__(realname)
+ module = sys.modules[fullname] = sys.modules[realname]
+ setattr(self.module, modname, module)
+ module.__loader__ = self
+ return module
+
+
+
+
+
+
+###############################################################################
+# Common Utilities #############################################################
+###############################################################################
+
+
+class MultiDict(DictMixin):
+ """ This dict stores multiple values per key, but behaves exactly like a
+ normal dict in that it returns only the newest value for any given key.
+ There are special methods available to access the full list of values.
+ """
+
+ def __init__(self, *a, **k):
+ self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())
+
+ def __len__(self): return len(self.dict)
+ def __iter__(self): return iter(self.dict)
+ def __contains__(self, key): return key in self.dict
+ def __delitem__(self, key): del self.dict[key]
+ def __getitem__(self, key): return self.dict[key][-1]
+ def __setitem__(self, key, value): self.append(key, value)
+ def keys(self): return self.dict.keys()
+
+ if py3k:
+ def values(self): return (v[-1] for v in self.dict.values())
+ def items(self): return ((k, v[-1]) for k, v in self.dict.items())
+ def allitems(self):
+ return ((k, v) for k, vl in self.dict.items() for v in vl)
+ iterkeys = keys
+ itervalues = values
+ iteritems = items
+ iterallitems = allitems
+
+ else:
+ def values(self): return [v[-1] for v in self.dict.values()]
+ def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
+ def iterkeys(self): return self.dict.iterkeys()
+ def itervalues(self): return (v[-1] for v in self.dict.itervalues())
+ def iteritems(self):
+ return ((k, v[-1]) for k, v in self.dict.iteritems())
+ def iterallitems(self):
+ return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
+ def allitems(self):
+ return [(k, v) for k, vl in self.dict.iteritems() for v in vl]
+
+ def get(self, key, default=None, index=-1, type=None):
+ ''' Return the most recent value for a key.
+
+ :param default: The default value to be returned if the key is not
+ present or the type conversion fails.
+ :param index: An index for the list of available values.
+ :param type: If defined, this callable is used to cast the value
+ into a specific type. Exceptions are suppressed and result in
+ the default value being returned.
+ '''
+ try:
+ val = self.dict[key][index]
+ return type(val) if type else val
+ except Exception:
+ pass
+ return default
+
+ def append(self, key, value):
+ ''' Add a new value to the list of values for this key. '''
+ self.dict.setdefault(key, []).append(value)
+
+ def replace(self, key, value):
+ ''' Replace the list of values with a single value. '''
+ self.dict[key] = [value]
+
+ def getall(self, key):
+ ''' Return a (possibly empty) list of values for a key. '''
+ return self.dict.get(key) or []
+
+ #: Aliases for WTForms to mimic other multi-dict APIs (Django)
+ getone = get
+ getlist = getall
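+ # Illustrative note (not part of the upstream bottle source): a sketch of the
+ # multi-value behaviour described above:
+ # md = MultiDict()
+ # md['lang'] = 'python'; md['lang'] = 'ruby'
+ # md['lang'] # -> 'ruby' (newest value)
+ # md.getall('lang') # -> ['python', 'ruby']
+ # md.get('lang', index=0) # -> 'python'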
+
+
+class FormsDict(MultiDict):
+ ''' This :class:`MultiDict` subclass is used to store request form data.
+ Additionally to the normal dict-like item access methods (which return
+ unmodified data as native strings), this container also supports
+ attribute-like access to its values. Attributes are automatically de-
+ or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
+ attributes default to an empty string. '''
+
+ #: Encoding used for attribute values.
+ input_encoding = 'utf8'
+ #: If true (default), unicode strings are first encoded with `latin1`
+ #: and then decoded to match :attr:`input_encoding`.
+ recode_unicode = True
+
+ def _fix(self, s, encoding=None):
+ if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
+ return s.encode('latin1').decode(encoding or self.input_encoding)
+ elif isinstance(s, bytes): # Python 2 WSGI
+ return s.decode(encoding or self.input_encoding)
+ else:
+ return s
+
+ def decode(self, encoding=None):
+ ''' Returns a copy with all keys and values de- or recoded to match
+ :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
+ unicode dictionary. '''
+ copy = FormsDict()
+ enc = copy.input_encoding = encoding or self.input_encoding
+ copy.recode_unicode = False
+ for key, value in self.allitems():
+ copy.append(self._fix(key, enc), self._fix(value, enc))
+ return copy
+
+ def getunicode(self, name, default=None, encoding=None):
+ ''' Return the value as a unicode string, or the default. '''
+ try:
+ return self._fix(self[name], encoding)
+ except (UnicodeError, KeyError):
+ return default
+
+ def __getattr__(self, name, default=unicode()):
+ # Without this guard, pickle generates a cryptic TypeError:
+ if name.startswith('__') and name.endswith('__'):
+ return super(FormsDict, self).__getattr__(name)
+ return self.getunicode(name, default=default)
+
+class HeaderDict(MultiDict):
+ """ A case-insensitive version of :class:`MultiDict` that defaults to
+ replacing the old value instead of appending to it. """
+
+ def __init__(self, *a, **ka):
+ self.dict = {}
+ if a or ka: self.update(*a, **ka)
+
+ def __contains__(self, key): return _hkey(key) in self.dict
+ def __delitem__(self, key): del self.dict[_hkey(key)]
+ def __getitem__(self, key): return self.dict[_hkey(key)][-1]
+ def __setitem__(self, key, value): self.dict[_hkey(key)] = [_hval(value)]
+ def append(self, key, value): self.dict.setdefault(_hkey(key), []).append(_hval(value))
+ def replace(self, key, value): self.dict[_hkey(key)] = [_hval(value)]
+ def getall(self, key): return self.dict.get(_hkey(key)) or []
+ def get(self, key, default=None, index=-1):
+ return MultiDict.get(self, _hkey(key), default, index)
+ def filter(self, names):
+ for name in (_hkey(n) for n in names):
+ if name in self.dict:
+ del self.dict[name]
+
+
+class WSGIHeaderDict(DictMixin):
+ ''' This dict-like class wraps a WSGI environ dict and provides convenient
+ access to HTTP_* fields. Keys and values are native strings
+ (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
+ environment contains non-native string values, these are de- or encoded
+ using a lossless 'latin1' character set.
+
+ The API will remain stable even on changes to the relevant PEPs.
+ Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
+ that uses non-native strings.)
+ '''
+ #: List of keys that do not have a ``HTTP_`` prefix.
+ cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')
+
+ def __init__(self, environ):
+ self.environ = environ
+
+ def _ekey(self, key):
+ ''' Translate header field name to CGI/WSGI environ key. '''
+ key = key.replace('-','_').upper()
+ if key in self.cgikeys:
+ return key
+ return 'HTTP_' + key
+
+ def raw(self, key, default=None):
+ ''' Return the header value as is (may be bytes or unicode). '''
+ return self.environ.get(self._ekey(key), default)
+
+ def __getitem__(self, key):
+ return tonat(self.environ[self._ekey(key)], 'latin1')
+
+ def __setitem__(self, key, value):
+ raise TypeError("%s is read-only." % self.__class__)
+
+ def __delitem__(self, key):
+ raise TypeError("%s is read-only." % self.__class__)
+
+ def __iter__(self):
+ for key in self.environ:
+ if key[:5] == 'HTTP_':
+ yield key[5:].replace('_', '-').title()
+ elif key in self.cgikeys:
+ yield key.replace('_', '-').title()
+
+ def keys(self): return [x for x in self]
+ def __len__(self): return len(self.keys())
+ def __contains__(self, key): return self._ekey(key) in self.environ
+
+
+
+class ConfigDict(dict):
+ ''' A dict-like configuration storage with additional support for
+ namespaces, validators, meta-data, on_change listeners and more.
+
+ This storage is optimized for fast read access. Retrieving a key
+ or using non-altering dict methods (e.g. `dict.get()`) has no overhead
+ compared to a native dict.
+ '''
+ __slots__ = ('_meta', '_on_change')
+
+ class Namespace(DictMixin):
+
+ def __init__(self, config, namespace):
+ self._config = config
+ self._prefix = namespace
+
+ def __getitem__(self, key):
+ depr('Accessing namespaces as dicts is discouraged. '
+ 'Only use flat item access: '
+ 'cfg["names"]["pace"]["key"] -> cfg["name.space.key"]') #0.12
+ return self._config[self._prefix + '.' + key]
+
+ def __setitem__(self, key, value):
+ self._config[self._prefix + '.' + key] = value
+
+ def __delitem__(self, key):
+ del self._config[self._prefix + '.' + key]
+
+ def __iter__(self):
+ ns_prefix = self._prefix + '.'
+ for key in self._config:
+ ns, dot, name = key.rpartition('.')
+ if ns == self._prefix and name:
+ yield name
+
+ def keys(self): return [x for x in self]
+ def __len__(self): return len(self.keys())
+ def __contains__(self, key): return self._prefix + '.' + key in self._config
+ def __repr__(self): return '<Config.Namespace %s.*>' % self._prefix
+ def __str__(self): return '<Config.Namespace %s.*>' % self._prefix
+
+ # Deprecated ConfigDict features
+ def __getattr__(self, key):
+ depr('Attribute access is deprecated.') #0.12
+ if key not in self and key[0].isupper():
+ self[key] = ConfigDict.Namespace(self._config, self._prefix + '.' + key)
+ if key not in self and key.startswith('__'):
+ raise AttributeError(key)
+ return self.get(key)
+
+ def __setattr__(self, key, value):
+ if key in ('_config', '_prefix'):
+ self.__dict__[key] = value
+ return
+ depr('Attribute assignment is deprecated.') #0.12
+ if hasattr(DictMixin, key):
+ raise AttributeError('Read-only attribute.')
+ if key in self and self[key] and isinstance(self[key], self.__class__):
+ raise AttributeError('Non-empty namespace attribute.')
+ self[key] = value
+
+ def __delattr__(self, key):
+ if key in self:
+ val = self.pop(key)
+ if isinstance(val, self.__class__):
+ prefix = key + '.'
+ for key in self:
+ if key.startswith(prefix):
+ del self[prefix+key]
+
+ def __call__(self, *a, **ka):
+ depr('Calling ConfDict is deprecated. Use the update() method.') #0.12
+ self.update(*a, **ka)
+ return self
+
+ def __init__(self, *a, **ka):
+ self._meta = {}
+ self._on_change = lambda name, value: None
+ if a or ka:
+ depr('Constructor does no longer accept parameters.') #0.12
+ self.update(*a, **ka)
+
+ def load_config(self, filename):
+ ''' Load values from an *.ini style config file.
+
+ If the config file contains sections, their names are used as
+ namespaces for the values within. The two special sections
+ ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
+ '''
+ conf = ConfigParser()
+ conf.read(filename)
+ for section in conf.sections():
+ for key, value in conf.items(section):
+ if section not in ('DEFAULT', 'bottle'):
+ key = section + '.' + key
+ self[key] = value
+ return self
+
+ def load_dict(self, source, namespace='', make_namespaces=False):
+ ''' Import values from a dictionary structure. Nesting can be used to
+ represent namespaces.
+
+ >>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}})
+ {'name.space.key': 'value'}
+ '''
+ stack = [(namespace, source)]
+ while stack:
+ prefix, source = stack.pop()
+ if not isinstance(source, dict):
+ raise TypeError('Source is not a dict (%r)' % type(source))
+ for key, value in source.items():
+ if not isinstance(key, basestring):
+ raise TypeError('Key is not a string (%r)' % type(key))
+ full_key = prefix + '.' + key if prefix else key
+ if isinstance(value, dict):
+ stack.append((full_key, value))
+ if make_namespaces:
+ self[full_key] = self.Namespace(self, full_key)
+ else:
+ self[full_key] = value
+ return self
+
+ def update(self, *a, **ka):
+ ''' If the first parameter is a string, all keys are prefixed with this
+ namespace. Apart from that it works just as the usual dict.update().
+ Example: ``update('some.namespace', key='value')`` '''
+ prefix = ''
+ if a and isinstance(a[0], basestring):
+ prefix = a[0].strip('.') + '.'
+ a = a[1:]
+ for key, value in dict(*a, **ka).items():
+ self[prefix+key] = value
+
+ def setdefault(self, key, value):
+ if key not in self:
+ self[key] = value
+ return self[key]
+
+ def __setitem__(self, key, value):
+ if not isinstance(key, basestring):
+ raise TypeError('Key has type %r (not a string)' % type(key))
+
+ value = self.meta_get(key, 'filter', lambda x: x)(value)
+ if key in self and self[key] is value:
+ return
+ self._on_change(key, value)
+ dict.__setitem__(self, key, value)
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+
+ def clear(self):
+ for key in self:
+ del self[key]
+
+ def meta_get(self, key, metafield, default=None):
+ ''' Return the value of a meta field for a key. '''
+ return self._meta.get(key, {}).get(metafield, default)
+
+ def meta_set(self, key, metafield, value):
+ ''' Set the meta field for a key to a new value. This triggers the
+ on-change handler for existing keys. '''
+ self._meta.setdefault(key, {})[metafield] = value
+ if key in self:
+ self[key] = self[key]
+
+ def meta_list(self, key):
+ ''' Return an iterable of meta field names defined for a key. '''
+ return self._meta.get(key, {}).keys()
+
+ # Deprecated ConfigDict features
+ def __getattr__(self, key):
+ depr('Attribute access is deprecated.') #0.12
+ if key not in self and key[0].isupper():
+ self[key] = self.Namespace(self, key)
+ if key not in self and key.startswith('__'):
+ raise AttributeError(key)
+ return self.get(key)
+
+ def __setattr__(self, key, value):
+ if key in self.__slots__:
+ return dict.__setattr__(self, key, value)
+ depr('Attribute assignment is deprecated.') #0.12
+ if hasattr(dict, key):
+ raise AttributeError('Read-only attribute.')
+ if key in self and self[key] and isinstance(self[key], self.Namespace):
+ raise AttributeError('Non-empty namespace attribute.')
+ self[key] = value
+
+ def __delattr__(self, key):
+ if key in self:
+ val = self.pop(key)
+ if isinstance(val, self.Namespace):
+ prefix = key + '.'
+ for key in self:
+ if key.startswith(prefix):
+ del self[prefix+key]
+
+ def __call__(self, *a, **ka):
+ depr('Calling ConfDict is deprecated. Use the update() method.') #0.12
+ self.update(*a, **ka)
+ return self
+
+
+
+class AppStack(list):
+ """ A stack-like list. Calling it returns the head of the stack. """
+
+ def __call__(self):
+ """ Return the current default application. """
+ return self[-1]
+
+ def push(self, value=None):
+ """ Add a new :class:`Bottle` instance to the stack """
+ if not isinstance(value, Bottle):
+ value = Bottle()
+ self.append(value)
+ return value
+
+
+class WSGIFileWrapper(object):
+
+ def __init__(self, fp, buffer_size=1024*64):
+ self.fp, self.buffer_size = fp, buffer_size
+ for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
+ if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+ def __iter__(self):
+ buff, read = self.buffer_size, self.read
+ while True:
+ part = read(buff)
+ if not part: return
+ yield part
+
+
+class _closeiter(object):
+ ''' This only exists to be able to attach a .close method to iterators that
+ do not support attribute assignment (most of itertools). '''
+
+ def __init__(self, iterator, close=None):
+ self.iterator = iterator
+ self.close_callbacks = makelist(close)
+
+ def __iter__(self):
+ return iter(self.iterator)
+
+ def close(self):
+ for func in self.close_callbacks:
+ func()
+
+
+class ResourceManager(object):
+ ''' This class manages a list of search paths and helps to find and open
+ application-bound resources (files).
+
+ :param base: default value for :meth:`add_path` calls.
+ :param opener: callable used to open resources.
+ :param cachemode: controls which lookups are cached. One of 'all',
+ 'found' or 'none'.
+ '''
+
+ def __init__(self, base='./', opener=open, cachemode='all'):
+ self.opener = opener
+ self.base = base
+ self.cachemode = cachemode
+
+ #: A list of search paths. See :meth:`add_path` for details.
+ self.path = []
+ #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
+ self.cache = {}
+
+ def add_path(self, path, base=None, index=None, create=False):
+ ''' Add a new path to the list of search paths. Return False if the
+ path does not exist.
+
+ :param path: The new search path. Relative paths are turned into
+ an absolute and normalized form. If the path looks like a file
+ (not ending in `/`), the filename is stripped off.
+ :param base: Path used to absolutize relative search paths.
+ Defaults to :attr:`base` which defaults to ``os.getcwd()``.
+ :param index: Position within the list of search paths. Defaults
+ to last index (appends to the list).
+
+ The `base` parameter makes it easy to reference files installed
+ along with a python module or package::
+
+ res.add_path('./resources/', __file__)
+ '''
+ base = os.path.abspath(os.path.dirname(base or self.base))
+ path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
+ path += os.sep
+ if path in self.path:
+ self.path.remove(path)
+ if create and not os.path.isdir(path):
+ os.makedirs(path)
+ if index is None:
+ self.path.append(path)
+ else:
+ self.path.insert(index, path)
+ self.cache.clear()
+ return os.path.exists(path)
+
+ def __iter__(self):
+ ''' Iterate over all existing files in all registered paths. '''
+ search = self.path[:]
+ while search:
+ path = search.pop()
+ if not os.path.isdir(path): continue
+ for name in os.listdir(path):
+ full = os.path.join(path, name)
+ if os.path.isdir(full): search.append(full)
+ else: yield full
+
+ def lookup(self, name):
+ ''' Search for a resource and return an absolute file path, or `None`.
+
+ The :attr:`path` list is searched in order. The first match is
+ returned. Symlinks are followed. The result is cached to speed up
+ future lookups. '''
+ if name not in self.cache or DEBUG:
+ for path in self.path:
+ fpath = os.path.join(path, name)
+ if os.path.isfile(fpath):
+ if self.cachemode in ('all', 'found'):
+ self.cache[name] = fpath
+ return fpath
+ if self.cachemode == 'all':
+ self.cache[name] = None
+ return self.cache[name]
+
+ def open(self, name, mode='r', *args, **kwargs):
+ ''' Find a resource and return a file object, or raise IOError. '''
+ fname = self.lookup(name)
+ if not fname: raise IOError("Resource %r not found." % name)
+ return self.opener(fname, mode=mode, *args, **kwargs)
+
+
+class FileUpload(object):
+
+ def __init__(self, fileobj, name, filename, headers=None):
+ ''' Wrapper for file uploads. '''
+ #: Open file(-like) object (BytesIO buffer or temporary file)
+ self.file = fileobj
+ #: Name of the upload form field
+ self.name = name
+ #: Raw filename as sent by the client (may contain unsafe characters)
+ self.raw_filename = filename
+ #: A :class:`HeaderDict` with additional headers (e.g. content-type)
+ self.headers = HeaderDict(headers) if headers else HeaderDict()
+
+ content_type = HeaderProperty('Content-Type')
+ content_length = HeaderProperty('Content-Length', reader=int, default=-1)
+
+ def get_header(self, name, default=None):
+ """ Return the value of a header within the mulripart part. """
+ return self.headers.get(name, default)
+
+ @cached_property
+ def filename(self):
+ ''' Name of the file on the client file system, but normalized to ensure
+ file system compatibility. An empty filename is returned as 'empty'.
+
+ Only ASCII letters, digits, dashes, underscores and dots are
+ allowed in the final filename. Accents are removed, if possible.
+ Whitespace is replaced by a single dash. Leading or trailing dots
+ or dashes are removed. The filename is limited to 255 characters.
+ '''
+ fname = self.raw_filename
+ if not isinstance(fname, unicode):
+ fname = fname.decode('utf8', 'ignore')
+ fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
+ fname = os.path.basename(fname.replace('\\', os.path.sep))
+ fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
+ fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
+ return fname[:255] or 'empty'
+
+ def _copy_file(self, fp, chunk_size=2**16):
+ read, write, offset = self.file.read, fp.write, self.file.tell()
+ while 1:
+ buf = read(chunk_size)
+ if not buf: break
+ write(buf)
+ self.file.seek(offset)
+
+ def save(self, destination, overwrite=False, chunk_size=2**16):
+ ''' Save file to disk or copy its content to an open file(-like) object.
+ If *destination* is a directory, :attr:`filename` is added to the
+ path. Existing files are not overwritten by default (IOError).
+
+ :param destination: File path, directory or file(-like) object.
+ :param overwrite: If True, replace existing files. (default: False)
+ :param chunk_size: Bytes to read at a time. (default: 64kb)
+ '''
+ if isinstance(destination, basestring): # Except file-likes here
+ if os.path.isdir(destination):
+ destination = os.path.join(destination, self.filename)
+ if not overwrite and os.path.exists(destination):
+ raise IOError('File exists.')
+ with open(destination, 'wb') as fp:
+ self._copy_file(fp, chunk_size)
+ else:
+ self._copy_file(destination, chunk_size)
+
+
+
+
+
+
+###############################################################################
+# Application Helper ###########################################################
+###############################################################################
+
+
+def abort(code=500, text='Unknown Error.'):
+ """ Aborts execution and causes a HTTP error. """
+ raise HTTPError(code, text)
+
+
+def redirect(url, code=None):
+ """ Aborts execution and causes a 303 or 302 redirect, depending on
+ the HTTP protocol version. """
+ if not code:
+ code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
+ res = response.copy(cls=HTTPResponse)
+ res.status = code
+ res.body = ""
+ res.set_header('Location', urljoin(request.url, url))
+ raise res
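+# Illustrative note (not part of the upstream bottle source): typical usage
+# inside a route callback; the '/old' and '/new' paths are assumptions:
+# @route('/old')
+# def old_page():
+# redirect('/new') # raises an HTTPResponse carrying the Location header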
+
+
+def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
+ ''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
+ fp.seek(offset)
+ while bytes > 0:
+ part = fp.read(min(bytes, maxread))
+ if not part: break
+ bytes -= len(part)
+ yield part
+
+
+def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
+ """ Open a file in a safe way and return :exc:`HTTPResponse` with status
+ code 200, 304, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
+ ``Content-Length`` and ``Last-Modified`` headers are set if possible.
+ Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
+ requests.
+
+ :param filename: Name or path of the file to send.
+ :param root: Root path for file lookups. Should be an absolute directory
+ path.
+ :param mimetype: Defines the content-type header (default: guess from
+ file extension)
+ :param download: If True, ask the browser to open a `Save as...` dialog
+ instead of opening the file with the associated program. You can
+ specify a custom filename as a string. If not specified, the
+ original filename is used (default: False).
+ :param charset: The charset to use for files with a ``text/*``
+ mime-type. (default: UTF-8)
+ """
+
+ root = os.path.abspath(root) + os.sep
+ filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
+ headers = dict()
+
+ if not filename.startswith(root):
+ return HTTPError(403, "Access denied.")
+ if not os.path.exists(filename) or not os.path.isfile(filename):
+ return HTTPError(404, "File does not exist.")
+ if not os.access(filename, os.R_OK):
+ return HTTPError(403, "You do not have permission to access this file.")
+
+ if mimetype == 'auto':
+ mimetype, encoding = mimetypes.guess_type(filename)
+ if encoding: headers['Content-Encoding'] = encoding
+
+ if mimetype:
+ if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
+ mimetype += '; charset=%s' % charset
+ headers['Content-Type'] = mimetype
+
+ if download:
+ download = os.path.basename(filename if download == True else download)
+ headers['Content-Disposition'] = 'attachment; filename="%s"' % download
+
+ stats = os.stat(filename)
+ headers['Content-Length'] = clen = stats.st_size
+ lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
+ headers['Last-Modified'] = lm
+
+ ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
+ if ims:
+ ims = parse_date(ims.split(";")[0].strip())
+ if ims is not None and ims >= int(stats.st_mtime):
+ headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+ return HTTPResponse(status=304, **headers)
+
+ body = '' if request.method == 'HEAD' else open(filename, 'rb')
+
+ headers["Accept-Ranges"] = "bytes"
+ ranges = request.environ.get('HTTP_RANGE')
+ if 'HTTP_RANGE' in request.environ:
+ ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
+ if not ranges:
+ return HTTPError(416, "Requested Range Not Satisfiable")
+ offset, end = ranges[0]
+ headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
+ headers["Content-Length"] = str(end-offset)
+ if body: body = _file_iter_range(body, offset, end-offset)
+ return HTTPResponse(body, status=206, **headers)
+ return HTTPResponse(body, **headers)
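+# Illustrative note (not part of the upstream bottle source): a common pattern
+# is a dedicated asset route; the route path and root directory are assumptions:
+# @route('/static/<filepath:path>')
+# def serve_static(filepath):
+# return static_file(filepath, root='/var/www/static')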
+
+
+
+
+
+
+###############################################################################
+# HTTP Utilities and MISC (TODO) ###############################################
+###############################################################################
+
+
+def debug(mode=True):
+ """ Change the debug level.
+ There is only one debug level supported at the moment."""
+ global DEBUG
+ if mode: warnings.simplefilter('default')
+ DEBUG = bool(mode)
+
+def http_date(value):
+ if isinstance(value, (datedate, datetime)):
+ value = value.utctimetuple()
+ elif isinstance(value, (int, float)):
+ value = time.gmtime(value)
+ if not isinstance(value, basestring):
+ value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
+ return value
+
+def parse_date(ims):
+ """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
+ try:
+ ts = email.utils.parsedate_tz(ims)
+ return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
+ except (TypeError, ValueError, IndexError, OverflowError):
+ return None
+
+def parse_auth(header):
+ """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
+ try:
+ method, data = header.split(None, 1)
+ if method.lower() == 'basic':
+ user, pwd = touni(base64.b64decode(tob(data))).split(':',1)
+ return user, pwd
+ except (KeyError, ValueError):
+ return None
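+# Illustrative note (not part of the upstream bottle source): 'dXNlcjpwYXNz'
+# is base64 of 'user:pass', so:
+# parse_auth('Basic dXNlcjpwYXNz') # -> ('user', 'pass')
+# parse_auth('Digest abc') # -> None (only basic auth is supported)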
+
+def parse_range_header(header, maxlen=0):
+ ''' Yield (start, end) ranges parsed from an HTTP Range header. Skip
+ unsatisfiable ranges. The end index is non-inclusive.'''
+ if not header or header[:6] != 'bytes=': return
+ ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
+ for start, end in ranges:
+ try:
+ if not start: # bytes=-100 -> last 100 bytes
+ start, end = max(0, maxlen-int(end)), maxlen
+ elif not end: # bytes=100- -> all but the first 100 bytes
+ start, end = int(start), maxlen
+ else: # bytes=100-200 -> bytes 100-200 (inclusive)
+ start, end = int(start), min(int(end)+1, maxlen)
+ if 0 <= start < end <= maxlen:
+ yield start, end
+ except ValueError:
+ pass
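+# Illustrative note (not part of the upstream bottle source): for a 200-byte
+# resource the generator above yields non-inclusive end indices, e.g.:
+# list(parse_range_header('bytes=0-99,-50', 200)) # -> [(0, 100), (150, 200)]
+# list(parse_range_header('bytes=500-', 200)) # -> [] (unsatisfiable, skipped)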
+
+def _parse_qsl(qs):
+ r = []
+ for pair in qs.split('&'):
+ if not pair: continue
+ nv = pair.split('=', 1)
+ if len(nv) != 2: nv.append('')
+ key = urlunquote(nv[0].replace('+', ' '))
+ value = urlunquote(nv[1].replace('+', ' '))
+ r.append((key, value))
+ return r
+
+def _lscmp(a, b):
+ ''' Compares two strings in a cryptographically safe way:
+ Runtime is not affected by length of common prefix. '''
+ return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
+
+
+def cookie_encode(data, key):
+ ''' Encode and sign a pickle-able object. Return a (byte) string '''
+ msg = base64.b64encode(pickle.dumps(data, -1))
+ sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())
+ return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+ ''' Verify and decode an encoded string. Return an object or None.'''
+ data = tob(data)
+ if cookie_is_encoded(data):
+ sig, msg = data.split(tob('?'), 1)
+ if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())):
+ return pickle.loads(base64.b64decode(msg))
+ return None
+
+
+def cookie_is_encoded(data):
+ ''' Return True if the argument looks like an encoded cookie.'''
+ return bool(data.startswith(tob('!')) and tob('?') in data)
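+# Illustrative note (not part of the upstream bottle source): the three helpers
+# above form the signed-cookie round trip:
+# token = cookie_encode(('user', 'alice'), 'secret') # b'!<sig>?<payload>'
+# cookie_is_encoded(token) # -> True
+# cookie_decode(token, 'secret') # -> ('user', 'alice')
+# cookie_decode(token, 'wrong-secret') # -> None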
+
+
+def html_escape(string):
+ ''' Escape HTML special characters ``&<>`` and quotes ``'"``. '''
+ return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
+ .replace('"','&quot;').replace("'",'&#039;')
+
+
+def html_quote(string):
+ ''' Escape and quote a string to be used as an HTTP attribute.'''
+ return '"%s"' % html_escape(string).replace('\n','&#10;')\
+ .replace('\r','&#13;').replace('\t','&#9;')
+
+
+def yieldroutes(func):
+ """ Return a generator for routes that match the signature (name, args)
+ of the func parameter. This may yield more than one route if the function
+ takes optional keyword arguments. The output is best described by example::
+
+ a() -> '/a'
+ b(x, y) -> '/b/<x>/<y>'
+ c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>'
+ d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
+ """
+ path = '/' + func.__name__.replace('__','/').lstrip('/')
+ spec = getargspec(func)
+ argc = len(spec[0]) - len(spec[3] or [])
+ path += ('/<%s>' * argc) % tuple(spec[0][:argc])
+ yield path
+ for arg in spec[0][argc:]:
+ path += '/<%s>' % arg
+ yield path
+
+
+def path_shift(script_name, path_info, shift=1):
+ ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+ :return: The modified paths.
+ :param script_name: The SCRIPT_NAME path.
+ :param path_info: The PATH_INFO path.
+ :param shift: The number of path fragments to shift. May be negative to
+ change the shift direction. (default: 1)
+ '''
+ if shift == 0: return script_name, path_info
+ pathlist = path_info.strip('/').split('/')
+ scriptlist = script_name.strip('/').split('/')
+ if pathlist and pathlist[0] == '': pathlist = []
+ if scriptlist and scriptlist[0] == '': scriptlist = []
+ if shift > 0 and shift <= len(pathlist):
+ moved = pathlist[:shift]
+ scriptlist = scriptlist + moved
+ pathlist = pathlist[shift:]
+ elif shift < 0 and shift >= -len(scriptlist):
+ moved = scriptlist[shift:]
+ pathlist = moved + pathlist
+ scriptlist = scriptlist[:shift]
+ else:
+ empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
+ raise AssertionError("Cannot shift. Nothing left from %s" % empty)
+ new_script_name = '/' + '/'.join(scriptlist)
+ new_path_info = '/' + '/'.join(pathlist)
+ if path_info.endswith('/') and pathlist: new_path_info += '/'
+ return new_script_name, new_path_info
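+# Illustrative note (not part of the upstream bottle source): a sketch of how
+# segments move between SCRIPT_NAME and PATH_INFO:
+# path_shift('/app', '/admin/users', 1) # -> ('/app/admin', '/users')
+# path_shift('/app/admin', '/users', -1) # -> ('/app', '/admin/users')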
+
+
+def auth_basic(check, realm="private", text="Access denied"):
+ ''' Callback decorator to require HTTP auth (basic).
+ TODO: Add route(check_auth=...) parameter. '''
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*a, **ka):
+ user, password = request.auth or (None, None)
+ if user is None or not check(user, password):
+ err = HTTPError(401, text)
+ err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
+ return err
+ return func(*a, **ka)
+ return wrapper
+ return decorator
+
+
+# Shortcuts for common Bottle methods.
+# They all refer to the current default application.
+
+def make_default_app_wrapper(name):
+ ''' Return a callable that relays calls to the current default app. '''
+ @functools.wraps(getattr(Bottle, name))
+ def wrapper(*a, **ka):
+ return getattr(app(), name)(*a, **ka)
+ return wrapper
+
+route = make_default_app_wrapper('route')
+get = make_default_app_wrapper('get')
+post = make_default_app_wrapper('post')
+put = make_default_app_wrapper('put')
+delete = make_default_app_wrapper('delete')
+error = make_default_app_wrapper('error')
+mount = make_default_app_wrapper('mount')
+hook = make_default_app_wrapper('hook')
+install = make_default_app_wrapper('install')
+uninstall = make_default_app_wrapper('uninstall')
+url = make_default_app_wrapper('get_url')
+
+
+
+
+
+
+
+###############################################################################
+# Server Adapter ###############################################################
+###############################################################################
+
+
+class ServerAdapter(object):
+ quiet = False
+ def __init__(self, host='127.0.0.1', port=8080, **options):
+ self.options = options
+ self.host = host
+ self.port = int(port)
+
+ def run(self, handler): # pragma: no cover
+ pass
+
+ def __repr__(self):
+ args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
+ return "%s(%s)" % (self.__class__.__name__, args)
+
+
+class CGIServer(ServerAdapter):
+ quiet = True
+ def run(self, handler): # pragma: no cover
+ from wsgiref.handlers import CGIHandler
+ def fixed_environ(environ, start_response):
+ environ.setdefault('PATH_INFO', '')
+ return handler(environ, start_response)
+ CGIHandler().run(fixed_environ)
+
+
+class FlupFCGIServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ import flup.server.fcgi
+ self.options.setdefault('bindAddress', (self.host, self.port))
+ flup.server.fcgi.WSGIServer(handler, **self.options).run()
+
+
+class WSGIRefServer(ServerAdapter):
+ def run(self, app): # pragma: no cover
+ from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
+ from wsgiref.simple_server import make_server
+ import socket
+
+ class FixedHandler(WSGIRequestHandler):
+ def address_string(self): # Prevent reverse DNS lookups please.
+ return self.client_address[0]
+ def log_request(*args, **kw):
+ if not self.quiet:
+ return WSGIRequestHandler.log_request(*args, **kw)
+
+ handler_cls = self.options.get('handler_class', FixedHandler)
+ server_cls = self.options.get('server_class', WSGIServer)
+
+ if ':' in self.host: # Fix wsgiref for IPv6 addresses.
+ if getattr(server_cls, 'address_family') == socket.AF_INET:
+ class server_cls(server_cls):
+ address_family = socket.AF_INET6
+
+ srv = make_server(self.host, self.port, app, server_cls, handler_cls)
+ srv.serve_forever()
+
+
+class CherryPyServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ depr("The wsgi server part of cherrypy was split into a new "
+ "project called 'cheroot'. Use the 'cheroot' server "
+ "adapter instead of cherrypy.")
+ from cherrypy import wsgiserver # This will fail for CherryPy >= 9
+
+ self.options['bind_addr'] = (self.host, self.port)
+ self.options['wsgi_app'] = handler
+
+ certfile = self.options.get('certfile')
+ if certfile:
+ del self.options['certfile']
+ keyfile = self.options.get('keyfile')
+ if keyfile:
+ del self.options['keyfile']
+
+ server = wsgiserver.CherryPyWSGIServer(**self.options)
+ if certfile:
+ server.ssl_certificate = certfile
+ if keyfile:
+ server.ssl_private_key = keyfile
+
+ try:
+ server.start()
+ finally:
+ server.stop()
+
+
+class CherootServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from cheroot import wsgi
+ from cheroot.ssl import builtin
+ self.options['bind_addr'] = (self.host, self.port)
+ self.options['wsgi_app'] = handler
+ certfile = self.options.pop('certfile', None)
+ keyfile = self.options.pop('keyfile', None)
+ chainfile = self.options.pop('chainfile', None)
+ server = wsgi.Server(**self.options)
+ if certfile and keyfile:
+ server.ssl_adapter = builtin.BuiltinSSLAdapter(
+ certfile, keyfile, chainfile)
+ try:
+ server.start()
+ finally:
+ server.stop()
+
+
+class WaitressServer(ServerAdapter):
+ def run(self, handler):
+ from waitress import serve
+ serve(handler, host=self.host, port=self.port)
+
+
+class PasteServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from paste import httpserver
+ from paste.translogger import TransLogger
+ handler = TransLogger(handler, setup_console_handler=(not self.quiet))
+ httpserver.serve(handler, host=self.host, port=str(self.port),
+ **self.options)
+
+
+class MeinheldServer(ServerAdapter):
+ def run(self, handler):
+ from meinheld import server
+ server.listen((self.host, self.port))
+ server.run(handler)
+
+
+class FapwsServer(ServerAdapter):
+ """ Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3 """
+ def run(self, handler): # pragma: no cover
+ import fapws._evwsgi as evwsgi
+ from fapws import base, config
+ port = self.port
+ if float(config.SERVER_IDENT[-2:]) > 0.4:
+ # fapws3 silently changed its API in 0.5
+ port = str(port)
+ evwsgi.start(self.host, port)
+ # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
+ if 'BOTTLE_CHILD' in os.environ and not self.quiet:
+ _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
+ _stderr(" (Fapws3 breaks python thread support)\n")
+ evwsgi.set_base_module(base)
+ def app(environ, start_response):
+ environ['wsgi.multiprocess'] = False
+ return handler(environ, start_response)
+ evwsgi.wsgi_cb(('', app))
+ evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+ """ The super hyped asynchronous server by facebook. Untested. """
+ def run(self, handler): # pragma: no cover
+ import tornado.wsgi, tornado.httpserver, tornado.ioloop
+ container = tornado.wsgi.WSGIContainer(handler)
+ server = tornado.httpserver.HTTPServer(container)
+ server.listen(port=self.port,address=self.host)
+ tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+ """ Adapter for Google App Engine. """
+ quiet = True
+ def run(self, handler):
+ from google.appengine.ext.webapp import util
+ # A main() function in the handler script enables 'App Caching'.
+ # Let's make sure it is there. This _really_ improves performance.
+ module = sys.modules.get('__main__')
+ if module and not hasattr(module, 'main'):
+ module.main = lambda: util.run_wsgi_app(handler)
+ util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from twisted.web import server, wsgi
+ from twisted.python.threadpool import ThreadPool
+ from twisted.internet import reactor
+ thread_pool = ThreadPool()
+ thread_pool.start()
+ reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+ factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+ reactor.listenTCP(self.port, factory, interface=self.host)
+ reactor.run()
+
+
+class DieselServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from diesel.protocols.wsgi import WSGIApplication
+ app = WSGIApplication(handler, port=self.port)
+ app.run()
+
+
+class GeventServer(ServerAdapter):
+ """ Untested. Options:
+
+ * `fast` (default: False) uses libevent's http server, but has some
+ issues: No streaming, no pipelining, no SSL.
+ * See gevent.wsgi.WSGIServer() documentation for more options.
+ """
+ def run(self, handler):
+ from gevent import pywsgi, local
+ if not isinstance(threading.local(), local.local):
+ msg = "Bottle requires gevent.monkey.patch_all() (before import)"
+ raise RuntimeError(msg)
+ if self.options.pop('fast', None):
+ depr('The "fast" option has been deprecated and removed by Gevent.')
+ if self.quiet:
+ self.options['log'] = None
+ address = (self.host, self.port)
+ server = pywsgi.WSGIServer(address, handler, **self.options)
+ if 'BOTTLE_CHILD' in os.environ:
+ import signal
+ signal.signal(signal.SIGINT, lambda s, f: server.stop())
+ server.serve_forever()
+
+
+class GeventSocketIOServer(ServerAdapter):
+ def run(self,handler):
+ from socketio import server
+ address = (self.host, self.port)
+ server.SocketIOServer(address, handler, **self.options).serve_forever()
+
+
+class GunicornServer(ServerAdapter):
+ """ Untested. See http://gunicorn.org/configure.html for options. """
+ def run(self, handler):
+ from gunicorn.app.base import Application
+
+ config = {'bind': "%s:%d" % (self.host, int(self.port))}
+ config.update(self.options)
+
+ class GunicornApplication(Application):
+ def init(self, parser, opts, args):
+ return config
+
+ def load(self):
+ return handler
+
+ GunicornApplication().run()
+
+
+class EventletServer(ServerAdapter):
+ """ Untested """
+ def run(self, handler):
+ from eventlet import wsgi, listen
+ try:
+ wsgi.server(listen((self.host, self.port)), handler,
+ log_output=(not self.quiet))
+ except TypeError:
+ # Fallback, if we have old version of eventlet
+ wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from rocket import Rocket
+ server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
+ server.start()
+
+
+class BjoernServer(ServerAdapter):
+ """ Fast server written in C: https://github.com/jonashaag/bjoern """
+ def run(self, handler):
+ from bjoern import run
+ run(handler, self.host, self.port)
+
+
+class AutoServer(ServerAdapter):
+ """ Untested. """
+ adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer,
+ CherootServer, WSGIRefServer]
+
+ def run(self, handler):
+ for sa in self.adapters:
+ try:
+ return sa(self.host, self.port, **self.options).run(handler)
+ except ImportError:
+ pass
+
+server_names = {
+ 'cgi': CGIServer,
+ 'flup': FlupFCGIServer,
+ 'wsgiref': WSGIRefServer,
+ 'waitress': WaitressServer,
+ 'cherrypy': CherryPyServer,
+ 'cheroot': CherootServer,
+ 'paste': PasteServer,
+ 'fapws3': FapwsServer,
+ 'tornado': TornadoServer,
+ 'gae': AppEngineServer,
+ 'twisted': TwistedServer,
+ 'diesel': DieselServer,
+ 'meinheld': MeinheldServer,
+ 'gunicorn': GunicornServer,
+ 'eventlet': EventletServer,
+ 'gevent': GeventServer,
+ 'geventSocketIO':GeventSocketIOServer,
+ 'rocket': RocketServer,
+ 'bjoern' : BjoernServer,
+ 'auto': AutoServer,
+}
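+
+# Illustrative note (added comment, not part of upstream Bottle): the keys
+# above are the names accepted by run(server=...). A minimal sketch, assuming
+# the waitress package is installed:
+#
+#     run(app, server='waitress', host='0.0.0.0', port=8080)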
+
+
+
+
+
+
+###############################################################################
+# Application Control ##########################################################
+###############################################################################
+
+
+def load(target, **namespace):
+ """ Import a module or fetch an object from a module.
+
+ * ``package.module`` returns `module` as a module object.
+ * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
+ * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.
+
+ The last form accepts not only function calls, but any type of
+ expression. Keyword arguments passed to this function are available as
+ local variables. Example: ``load('re:compile(x)', x='[a-z]')``
+ """
+ module, target = target.split(":", 1) if ':' in target else (target, None)
+ if module not in sys.modules: __import__(module)
+ if not target: return sys.modules[module]
+ if target.isalnum(): return getattr(sys.modules[module], target)
+ package_name = module.split('.')[0]
+ namespace[package_name] = sys.modules[package_name]
+ return eval('%s.%s' % (module, target), namespace)
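+
+# Illustrative examples of the three target forms (added comment, not part of
+# upstream Bottle; the last line assumes a POSIX path separator):
+#
+#     load('os.path')                  # the os.path module object
+#     load('os.path:sep')              # the os.path.sep attribute
+#     load('os.path:join("a", "b")')   # evaluates the call -> 'a/b'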
+
+
+def load_app(target):
+ """ Load a bottle application from a module and make sure that the import
+ does not affect the current default application, but returns a separate
+ application object. See :func:`load` for the target parameter. """
+ global NORUN; NORUN, nr_old = True, NORUN
+ try:
+ tmp = default_app.push() # Create a new "default application"
+ rv = load(target) # Import the target module
+ return rv if callable(rv) else tmp
+ finally:
+ default_app.remove(tmp) # Remove the temporary added default application
+ NORUN = nr_old
+
+_debug = debug
+def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
+ interval=1, reloader=False, quiet=False, plugins=None,
+ debug=None, **kargs):
+ """ Start a server instance. This method blocks until the server terminates.
+
+ :param app: WSGI application or target string supported by
+ :func:`load_app`. (default: :func:`default_app`)
+ :param server: Server adapter to use. See :data:`server_names` keys
+ for valid names or pass a :class:`ServerAdapter` subclass.
+ (default: `wsgiref`)
+ :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
+ all interfaces, including the external one. (default: 127.0.0.1)
+ :param port: Server port to bind to. Values below 1024 require root
+ privileges. (default: 8080)
+ :param reloader: Start auto-reloading server? (default: False)
+ :param interval: Auto-reloader interval in seconds (default: 1)
+ :param quiet: Suppress output to stdout and stderr? (default: False)
+ :param options: Options passed to the server adapter.
+ """
+ if NORUN: return
+ if reloader and not os.environ.get('BOTTLE_CHILD'):
+ try:
+ lockfile = None
+ fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
+ os.close(fd) # We only need this file to exist. We never write to it
+ while os.path.exists(lockfile):
+ args = [sys.executable] + sys.argv
+ environ = os.environ.copy()
+ environ['BOTTLE_CHILD'] = 'true'
+ environ['BOTTLE_LOCKFILE'] = lockfile
+ p = subprocess.Popen(args, env=environ)
+ while p.poll() is None: # Busy wait...
+ os.utime(lockfile, None) # I am alive!
+ time.sleep(interval)
+ if p.poll() != 3:
+ if os.path.exists(lockfile): os.unlink(lockfile)
+ sys.exit(p.poll())
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if os.path.exists(lockfile):
+ os.unlink(lockfile)
+ return
+
+ try:
+ if debug is not None: _debug(debug)
+ app = app or default_app()
+ if isinstance(app, basestring):
+ app = load_app(app)
+ if not callable(app):
+ raise ValueError("Application is not callable: %r" % app)
+
+ for plugin in plugins or []:
+ app.install(plugin)
+
+ if server in server_names:
+ server = server_names.get(server)
+ if isinstance(server, basestring):
+ server = load(server)
+ if isinstance(server, type):
+ server = server(host=host, port=port, **kargs)
+ if not isinstance(server, ServerAdapter):
+ raise ValueError("Unknown or unsupported server: %r" % server)
+
+ server.quiet = server.quiet or quiet
+ if not server.quiet:
+ _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
+ _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
+ _stderr("Hit Ctrl-C to quit.\n\n")
+
+ if reloader:
+ lockfile = os.environ.get('BOTTLE_LOCKFILE')
+ bgcheck = FileCheckerThread(lockfile, interval)
+ with bgcheck:
+ server.run(app)
+ if bgcheck.status == 'reload':
+ sys.exit(3)
+ else:
+ server.run(app)
+ except KeyboardInterrupt:
+ pass
+ except (SystemExit, MemoryError):
+ raise
+ except:
+ if not reloader: raise
+ if not getattr(server, 'quiet', quiet):
+ print_exc()
+ time.sleep(interval)
+ sys.exit(3)
+
+
+
+class FileCheckerThread(threading.Thread):
+ ''' Interrupt the main thread as soon as a changed module file is detected,
+ the lockfile gets deleted, or the lockfile gets too old. '''
+
+ def __init__(self, lockfile, interval):
+ threading.Thread.__init__(self)
+ self.lockfile, self.interval = lockfile, interval
+ #: Is one of 'reload', 'error' or 'exit'
+ self.status = None
+
+ def run(self):
+ exists = os.path.exists
+ mtime = lambda path: os.stat(path).st_mtime
+ files = dict()
+
+ for module in list(sys.modules.values()):
+ path = getattr(module, '__file__', '') or ''
+ if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
+ if path and exists(path): files[path] = mtime(path)
+
+ while not self.status:
+ if not exists(self.lockfile)\
+ or mtime(self.lockfile) < time.time() - self.interval - 5:
+ self.status = 'error'
+ thread.interrupt_main()
+ for path, lmtime in list(files.items()):
+ if not exists(path) or mtime(path) > lmtime:
+ self.status = 'reload'
+ thread.interrupt_main()
+ break
+ time.sleep(self.interval)
+
+ def __enter__(self):
+ self.start()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not self.status: self.status = 'exit' # silent exit
+ self.join()
+ return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
+
+
+
+
+
+###############################################################################
+# Template Adapters ############################################################
+###############################################################################
+
+
+class TemplateError(HTTPError):
+ def __init__(self, message):
+ HTTPError.__init__(self, 500, message)
+
+
+class BaseTemplate(object):
+ """ Base class and minimal API for template adapters """
+ extensions = ['tpl','html','thtml','stpl']
+ settings = {} #used in prepare()
+ defaults = {} #used in render()
+
+ def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
+ """ Create a new template.
+ If the source parameter (str or buffer) is missing, the name argument
+ is used to guess a template filename. Subclasses can assume that
+ self.source and/or self.filename are set. Both are strings.
+ The lookup, encoding and settings parameters are stored as instance
+ variables.
+ The lookup parameter stores a list containing directory paths.
+ The encoding parameter should be used to decode byte strings or files.
+ The settings parameter contains a dict for engine-specific settings.
+ """
+ self.name = name
+ self.source = source.read() if hasattr(source, 'read') else source
+ self.filename = source.filename if hasattr(source, 'filename') else None
+ self.lookup = [os.path.abspath(x) for x in lookup]
+ self.encoding = encoding
+ self.settings = self.settings.copy() # Copy from class variable
+ self.settings.update(settings) # Apply
+ if not self.source and self.name:
+ self.filename = self.search(self.name, self.lookup)
+ if not self.filename:
+ raise TemplateError('Template %s not found.' % repr(name))
+ if not self.source and not self.filename:
+ raise TemplateError('No template specified.')
+ self.prepare(**self.settings)
+
+ @classmethod
+ def search(cls, name, lookup=[]):
+ """ Search name in all directories specified in lookup.
+ First without, then with common extensions. Return first hit. """
+ if not lookup:
+ depr('The template lookup path list should not be empty.') #0.12
+ lookup = ['.']
+
+ if os.path.isabs(name) and os.path.isfile(name):
+ depr('Absolute template path names are deprecated.') #0.12
+ return os.path.abspath(name)
+
+ for spath in lookup:
+ spath = os.path.abspath(spath) + os.sep
+ fname = os.path.abspath(os.path.join(spath, name))
+ if not fname.startswith(spath): continue
+ if os.path.isfile(fname): return fname
+ for ext in cls.extensions:
+ if os.path.isfile('%s.%s' % (fname, ext)):
+ return '%s.%s' % (fname, ext)
+
+ @classmethod
+ def global_config(cls, key, *args):
+ ''' This reads or sets the global settings stored in class.settings. '''
+ if args:
+ cls.settings = cls.settings.copy() # Make settings local to class
+ cls.settings[key] = args[0]
+ else:
+ return cls.settings[key]
+
+ def prepare(self, **options):
+ """ Run preparations (parsing, caching, ...).
+ It should be possible to call this again to refresh a template or to
+ update settings.
+ """
+ raise NotImplementedError
+
+ def render(self, *args, **kwargs):
+ """ Render the template with the specified local variables and return
+ a single byte or unicode string. If it is a byte string, the encoding
+ must match self.encoding. This method must be thread-safe!
+ Local variables may be provided in dictionaries (args)
+ or directly, as keywords (kwargs).
+ """
+ raise NotImplementedError
+
+
+class MakoTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from mako.template import Template
+ from mako.lookup import TemplateLookup
+ options.update({'input_encoding':self.encoding})
+ options.setdefault('format_exceptions', bool(DEBUG))
+ lookup = TemplateLookup(directories=self.lookup, **options)
+ if self.source:
+ self.tpl = Template(self.source, lookup=lookup, **options)
+ else:
+ self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ _defaults = self.defaults.copy()
+ _defaults.update(kwargs)
+ return self.tpl.render(**_defaults)
+
+
+class CheetahTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from Cheetah.Template import Template
+ self.context = threading.local()
+ self.context.vars = {}
+ options['searchList'] = [self.context.vars]
+ if self.source:
+ self.tpl = Template(source=self.source, **options)
+ else:
+ self.tpl = Template(file=self.filename, **options)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ self.context.vars.update(self.defaults)
+ self.context.vars.update(kwargs)
+ out = str(self.tpl)
+ self.context.vars.clear()
+ return out
+
+
+class Jinja2Template(BaseTemplate):
+ def prepare(self, filters=None, tests=None, globals={}, **kwargs):
+ from jinja2 import Environment, FunctionLoader
+ if 'prefix' in kwargs: # TODO: to be removed after a while
+ raise RuntimeError('The keyword argument `prefix` has been removed. '
+ 'Use the full jinja2 environment name line_statement_prefix instead.')
+ self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
+ if filters: self.env.filters.update(filters)
+ if tests: self.env.tests.update(tests)
+ if globals: self.env.globals.update(globals)
+ if self.source:
+ self.tpl = self.env.from_string(self.source)
+ else:
+ self.tpl = self.env.get_template(self.filename)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ _defaults = self.defaults.copy()
+ _defaults.update(kwargs)
+ return self.tpl.render(**_defaults)
+
+ def loader(self, name):
+ fname = self.search(name, self.lookup)
+ if not fname: return
+ with open(fname, "rb") as f:
+ return f.read().decode(self.encoding)
+
+
+class SimpleTemplate(BaseTemplate):
+
+ def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
+ self.cache = {}
+ enc = self.encoding
+ self._str = lambda x: touni(x, enc)
+ self._escape = lambda x: escape_func(touni(x, enc))
+ self.syntax = syntax
+ if noescape:
+ self._str, self._escape = self._escape, self._str
+
+ @cached_property
+ def co(self):
+ return compile(self.code, self.filename or '', 'exec')
+
+ @cached_property
+ def code(self):
+ source = self.source
+ if not source:
+ with open(self.filename, 'rb') as f:
+ source = f.read()
+ try:
+ source, encoding = touni(source), 'utf8'
+ except UnicodeError:
+ depr('Template encodings other than utf8 are no longer supported.') #0.11
+ source, encoding = touni(source, 'latin1'), 'latin1'
+ parser = StplParser(source, encoding=encoding, syntax=self.syntax)
+ code = parser.translate()
+ self.encoding = parser.encoding
+ return code
+
+ def _rebase(self, _env, _name=None, **kwargs):
+ if _name is None:
+ depr('Rebase function called without arguments.'
+ ' You were probably looking for {{base}}?', True) #0.12
+ _env['_rebase'] = (_name, kwargs)
+
+ def _include(self, _env, _name=None, **kwargs):
+ if _name is None:
+ depr('Include function called without arguments.'
+ ' You were probably looking for {{base}}?', True) #0.12
+ env = _env.copy()
+ env.update(kwargs)
+ if _name not in self.cache:
+ self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
+ return self.cache[_name].execute(env['_stdout'], env)
+
+ def execute(self, _stdout, kwargs):
+ env = self.defaults.copy()
+ env.update(kwargs)
+ env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
+ 'include': functools.partial(self._include, env),
+ 'rebase': functools.partial(self._rebase, env), '_rebase': None,
+ '_str': self._str, '_escape': self._escape, 'get': env.get,
+ 'setdefault': env.setdefault, 'defined': env.__contains__ })
+ eval(self.co, env)
+ if env.get('_rebase'):
+ subtpl, rargs = env.pop('_rebase')
+ rargs['base'] = ''.join(_stdout) #copy stdout
+ del _stdout[:] # clear stdout
+ return self._include(env, subtpl, **rargs)
+ return env
+
+ def render(self, *args, **kwargs):
+ """ Render the template using keyword arguments as local variables. """
+ env = {}; stdout = []
+ for dictarg in args: env.update(dictarg)
+ env.update(kwargs)
+ self.execute(stdout, env)
+ return ''.join(stdout)
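+
+# Illustrative usage sketch (added comment, not part of upstream Bottle):
+# rendering an inline stpl template string with SimpleTemplate:
+#
+#     tpl = SimpleTemplate('Hello {{name}}!')
+#     tpl.render(name='World')        # -> 'Hello World!'
+#     tpl.render({'name': '<Bob>'})   # {{...}} values are HTML-escaped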
+
+
+class StplSyntaxError(TemplateError): pass
+
+
+class StplParser(object):
+ ''' Parser for stpl templates. '''
+ _re_cache = {} #: Cache for compiled re patterns
+ # This huge pile of voodoo magic splits python code into 8 different tokens.
+ # 1: All kinds of python strings (trust me, it works)
+ _re_tok = '([urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
+ '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
+ '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
+ '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
+ _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
+ # 2: Comments (until end of line, but not the newline itself)
+ _re_tok += '|(#.*)'
+ # 3,4: Open and close grouping tokens
+ _re_tok += '|([\\[\\{\\(])'
+ _re_tok += '|([\\]\\}\\)])'
+ # 5,6: Keywords that start or continue a python block (only start of line)
+ _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
+ '|^([ \\t]*(?:elif|else|except|finally)\\b)'
+ # 7: Our special 'end' keyword (but only if it stands alone)
+ _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
+ # 8: A customizable end-of-code-block template token (only end of line)
+ _re_tok += '|(%(block_close)s[ \\t]*(?=\\r?$))'
+ # 9: And finally, a single newline. The 10th token is 'everything else'
+ _re_tok += '|(\\r?\\n)'
+
+ # Match the start tokens of code areas in a template
+ _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))(%%?)'
+ # Match inline statements (may contain python strings)
+ _re_inl = '(?m)%%(inline_start)s((?:%s|[^\'"\n])*?)%%(inline_end)s' % _re_inl
+ _re_tok = '(?m)' + _re_tok
+
+ default_syntax = '<% %> % {{ }}'
+
+ def __init__(self, source, syntax=None, encoding='utf8'):
+ self.source, self.encoding = touni(source, encoding), encoding
+ self.set_syntax(syntax or self.default_syntax)
+ self.code_buffer, self.text_buffer = [], []
+ self.lineno, self.offset = 1, 0
+ self.indent, self.indent_mod = 0, 0
+ self.paren_depth = 0
+
+ def get_syntax(self):
+ ''' Tokens as a space separated string (default: <% %> % {{ }}) '''
+ return self._syntax
+
+ def set_syntax(self, syntax):
+ self._syntax = syntax
+ self._tokens = syntax.split()
+ if not syntax in self._re_cache:
+ names = 'block_start block_close line_start inline_start inline_end'
+ etokens = map(re.escape, self._tokens)
+ pattern_vars = dict(zip(names.split(), etokens))
+ patterns = (self._re_split, self._re_tok, self._re_inl)
+ patterns = [re.compile(p%pattern_vars) for p in patterns]
+ self._re_cache[syntax] = patterns
+ self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
+
+ syntax = property(get_syntax, set_syntax)
+
+ def translate(self):
+ if self.offset: raise RuntimeError('Parser is a one time instance.')
+ while True:
+ m = self.re_split.search(self.source[self.offset:])
+ if m:
+ text = self.source[self.offset:self.offset+m.start()]
+ self.text_buffer.append(text)
+ self.offset += m.end()
+ if m.group(1): # New escape syntax
+ line, sep, _ = self.source[self.offset:].partition('\n')
+ self.text_buffer.append(m.group(2)+m.group(5)+line+sep)
+ self.offset += len(line+sep)+1
+ continue
+ elif m.group(5): # Old escape syntax
+ depr('Escape code lines with a backslash.') #0.12
+ line, sep, _ = self.source[self.offset:].partition('\n')
+ self.text_buffer.append(m.group(2)+line+sep)
+ self.offset += len(line+sep)+1
+ continue
+ self.flush_text()
+ self.read_code(multiline=bool(m.group(4)))
+ else: break
+ self.text_buffer.append(self.source[self.offset:])
+ self.flush_text()
+ return ''.join(self.code_buffer)
+
+ def read_code(self, multiline):
+ code_line, comment = '', ''
+ while True:
+ m = self.re_tok.search(self.source[self.offset:])
+ if not m:
+ code_line += self.source[self.offset:]
+ self.offset = len(self.source)
+ self.write_code(code_line.strip(), comment)
+ return
+ code_line += self.source[self.offset:self.offset+m.start()]
+ self.offset += m.end()
+ _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups()
+ if (code_line or self.paren_depth > 0) and (_blk1 or _blk2): # a if b else c
+ code_line += _blk1 or _blk2
+ continue
+ if _str: # Python string
+ code_line += _str
+ elif _com: # Python comment (up to EOL)
+ comment = _com
+ if multiline and _com.strip().endswith(self._tokens[1]):
+ multiline = False # Allow end-of-block in comments
+ elif _po: # open parenthesis
+ self.paren_depth += 1
+ code_line += _po
+ elif _pc: # close parenthesis
+ if self.paren_depth > 0:
+ # we could check for matching parentheses here, but it's
+ # easier to leave that to python - just check counts
+ self.paren_depth -= 1
+ code_line += _pc
+ elif _blk1: # Start-block keyword (if/for/while/def/try/...)
+ code_line, self.indent_mod = _blk1, -1
+ self.indent += 1
+ elif _blk2: # Continue-block keyword (else/elif/except/...)
+ code_line, self.indent_mod = _blk2, -1
+ elif _end: # The non-standard 'end'-keyword (ends a block)
+ self.indent -= 1
+ elif _cend: # The end-code-block template token (usually '%>')
+ if multiline: multiline = False
+ else: code_line += _cend
+ else: # \n
+ self.write_code(code_line.strip(), comment)
+ self.lineno += 1
+ code_line, comment, self.indent_mod = '', '', 0
+ if not multiline:
+ break
+
+ def flush_text(self):
+ text = ''.join(self.text_buffer)
+ del self.text_buffer[:]
+ if not text: return
+ parts, pos, nl = [], 0, '\\\n'+' '*self.indent
+ for m in self.re_inl.finditer(text):
+ prefix, pos = text[pos:m.start()], m.end()
+ if prefix:
+ parts.append(nl.join(map(repr, prefix.splitlines(True))))
+ if prefix.endswith('\n'): parts[-1] += nl
+ parts.append(self.process_inline(m.group(1).strip()))
+ if pos < len(text):
+ prefix = text[pos:]
+ lines = prefix.splitlines(True)
+ if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
+ elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
+ parts.append(nl.join(map(repr, lines)))
+ code = '_printlist((%s,))' % ', '.join(parts)
+ self.lineno += code.count('\n')+1
+ self.write_code(code)
+
+ def process_inline(self, chunk):
+ if chunk[0] == '!': return '_str(%s)' % chunk[1:]
+ return '_escape(%s)' % chunk
+
+ def write_code(self, line, comment=''):
+ line, comment = self.fix_backward_compatibility(line, comment)
+ code = ' ' * (self.indent+self.indent_mod)
+ code += line.lstrip() + comment + '\n'
+ self.code_buffer.append(code)
+
+ def fix_backward_compatibility(self, line, comment):
+ parts = line.strip().split(None, 2)
+ if parts and parts[0] in ('include', 'rebase'):
+ depr('The include and rebase keywords are functions now.') #0.12
+ if len(parts) == 1: return "_printlist([base])", comment
+ elif len(parts) == 2: return "_=%s(%r)" % tuple(parts), comment
+ else: return "_=%s(%r, %s)" % tuple(parts), comment
+ if self.lineno <= 2 and not line.strip() and 'coding' in comment:
+ m = re.match(r"#.*coding[:=]\s*([-\w.]+)", comment)
+ if m:
+ depr('PEP263 encoding strings in templates are deprecated.') #0.12
+ enc = m.group(1)
+ self.source = self.source.encode(self.encoding).decode(enc)
+ self.encoding = enc
+ return line, comment.replace('coding','coding*')
+ return line, comment
+
+
+def template(*args, **kwargs):
+ '''
+ Get a rendered template as a string iterator.
+ You can use a name, a filename or a template string as first parameter.
+ Template rendering arguments can be passed as dictionaries
+ or directly (as keyword arguments).
+ '''
+ tpl = args[0] if args else None
+ adapter = kwargs.pop('template_adapter', SimpleTemplate)
+ lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+ tplid = (id(lookup), tpl)
+ if tplid not in TEMPLATES or DEBUG:
+ settings = kwargs.pop('template_settings', {})
+ if isinstance(tpl, adapter):
+ TEMPLATES[tplid] = tpl
+ if settings: TEMPLATES[tplid].prepare(**settings)
+ elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+ TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
+ else:
+ TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
+ if not TEMPLATES[tplid]:
+ abort(500, 'Template (%s) not found' % tpl)
+ for dictarg in args[1:]: kwargs.update(dictarg)
+ return TEMPLATES[tplid].render(kwargs)
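+
+# Illustrative usage sketch (added comment, not part of upstream Bottle):
+# template() accepts either a template name looked up on TEMPLATE_PATH or an
+# inline template string; 'hello_view' is a hypothetical ./views/hello_view.tpl:
+#
+#     template('hello_view', name='World')
+#     template('Hello {{name}}!', name='World')   # -> 'Hello World!'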
+
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
+
+
+def view(tpl_name, **defaults):
+ ''' Decorator: renders a template for a handler.
+ The handler can control its behavior like that:
+
+ - return a dict of template vars to fill out the template
+ - return something other than a dict and the view decorator will not
+ process the template, but return the handler result as is.
+ This includes returning a HTTPResponse(dict) to get,
+ for instance, JSON with autojson or other castfilters.
+ '''
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ if isinstance(result, (dict, DictMixin)):
+ tplvars = defaults.copy()
+ tplvars.update(result)
+ return template(tpl_name, **tplvars)
+ elif result is None:
+ return template(tpl_name, **defaults)
+ return result
+ return wrapper
+ return decorator
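+
+# Illustrative usage sketch (added comment, not part of upstream Bottle):
+# combining @route and @view; the handler returns a dict of template
+# variables and 'wiki_page' is a hypothetical template on TEMPLATE_PATH:
+#
+#     @route('/wiki/<page>')
+#     @view('wiki_page')
+#     def show_page(page):
+#         return dict(title=page)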
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
+
+
+
+
+
+
+###############################################################################
+# Constants and Globals ########################################################
+###############################################################################
+
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+NORUN = False # If set, run() does nothing. Used by load_app()
+
+#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
+HTTP_CODES = httplib.responses
+HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+HTTP_CODES[422] = "Unprocessable Entity" # RFC 4918
+HTTP_CODES[428] = "Precondition Required"
+HTTP_CODES[429] = "Too Many Requests"
+HTTP_CODES[431] = "Request Header Fields Too Large"
+HTTP_CODES[511] = "Network Authentication Required"
+_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
+
+#: The default template used for error pages. Override with @error()
+ERROR_PAGE_TEMPLATE = """
+%%try:
+ %%from %s import DEBUG, HTTP_CODES, request, touni
+
+
+
+ Error: {{e.status}}
+
+
+
+ Error: {{e.status}}
+ Sorry, the requested URL {{repr(request.url)}}
+ caused an error:
+ {{e.body}}
+ %%if DEBUG and e.exception:
+ Exception:
+ {{repr(e.exception)}}
+ %%end
+ %%if DEBUG and e.traceback:
+ Traceback:
+ {{e.traceback}}
+ %%end
+
+
+%%except ImportError:
+ ImportError: Could not generate the error page. Please add bottle to
+ the import path.
+%%end
+""" % __name__
+
+#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
+#: request callback, this instance always refers to the *current* request
+#: (even on a multithreaded server).
+request = LocalRequest()
+
+#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
+#: HTTP response for the *current* request.
+response = LocalResponse()
+
+#: A thread-safe namespace. Not used by Bottle.
+local = threading.local()
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()
+
+#: A virtual package that redirects import statements.
+#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
+ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
+
+if __name__ == '__main__':
+ opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
+ if opt.version:
+ _stdout('Bottle %s\n'%__version__)
+ sys.exit(0)
+ if not args:
+ parser.print_help()
+ _stderr('\nError: No application specified.\n')
+ sys.exit(1)
+
+ sys.path.insert(0, '.')
+ sys.modules.setdefault('bottle', sys.modules['__main__'])
+
+ host, port = (opt.bind or 'localhost'), 8080
+ if ':' in host and host.rfind(']') < host.rfind(':'):
+ host, port = host.rsplit(':', 1)
+ host = host.strip('[]')
+
+ run(args[0], host=host, port=int(port), server=opt.server,
+ reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
+
+
+
+
+# THE END
diff --git a/venv/bin/lazywiki b/venv/bin/lazywiki
new file mode 100755
index 0000000..f8b1e39
--- /dev/null
+++ b/venv/bin/lazywiki
@@ -0,0 +1,33 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# EASY-INSTALL-ENTRY-SCRIPT: 'Lazy-Wiki==0.0.2','console_scripts','lazywiki'
+import re
+import sys
+
+# for compatibility with easy_install; see #2198
+__requires__ = 'Lazy-Wiki==0.0.2'
+
+try:
+ from importlib.metadata import distribution
+except ImportError:
+ try:
+ from importlib_metadata import distribution
+ except ImportError:
+ from pkg_resources import load_entry_point
+
+
+def importlib_load_entry_point(spec, group, name):
+ dist_name, _, _ = spec.partition('==')
+ matches = (
+ entry_point
+ for entry_point in distribution(dist_name).entry_points
+ if entry_point.group == group and entry_point.name == name
+ )
+ return next(matches).load()
+
+
+globals().setdefault('load_entry_point', importlib_load_entry_point)
+
+
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(load_entry_point('Lazy-Wiki==0.0.2', 'console_scripts', 'lazywiki')())
diff --git a/venv/bin/markdown_py b/venv/bin/markdown_py
new file mode 100755
index 0000000..3f07b91
--- /dev/null
+++ b/venv/bin/markdown_py
@@ -0,0 +1,8 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from markdown.__main__ import run
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(run())
diff --git a/venv/bin/pip b/venv/bin/pip
new file mode 100755
index 0000000..b92fa38
--- /dev/null
+++ b/venv/bin/pip
@@ -0,0 +1,8 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/venv/bin/pip3 b/venv/bin/pip3
new file mode 100755
index 0000000..b92fa38
--- /dev/null
+++ b/venv/bin/pip3
@@ -0,0 +1,8 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/venv/bin/pip3.11 b/venv/bin/pip3.11
new file mode 100755
index 0000000..b92fa38
--- /dev/null
+++ b/venv/bin/pip3.11
@@ -0,0 +1,8 @@
+#!/home/grid/Documents/Code/lazywiki/venv/bin/python3
+# -*- coding: utf-8 -*-
+import re
+import sys
+from pip._internal.cli.main import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/venv/bin/python b/venv/bin/python
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/bin/python3 b/venv/bin/python3
new file mode 120000
index 0000000..ae65fda
--- /dev/null
+++ b/venv/bin/python3
@@ -0,0 +1 @@
+/usr/bin/python3
\ No newline at end of file
diff --git a/venv/bin/python3.11 b/venv/bin/python3.11
new file mode 120000
index 0000000..b8a0adb
--- /dev/null
+++ b/venv/bin/python3.11
@@ -0,0 +1 @@
+python3
\ No newline at end of file
diff --git a/venv/include/site/python3.11/greenlet/greenlet.h b/venv/include/site/python3.11/greenlet/greenlet.h
new file mode 100644
index 0000000..d02a16e
--- /dev/null
+++ b/venv/include/site/python3.11/greenlet/greenlet.h
@@ -0,0 +1,164 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+
+/* Greenlet object interface */
+
+#ifndef Py_GREENLETOBJECT_H
+#define Py_GREENLETOBJECT_H
+
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is deprecated and undocumented. It does not change. */
+#define GREENLET_VERSION "1.0.0"
+
+#ifndef GREENLET_MODULE
+#define implementation_ptr_t void*
+#endif
+
+typedef struct _greenlet {
+ PyObject_HEAD
+ PyObject* weakreflist;
+ PyObject* dict;
+ implementation_ptr_t pimpl;
+} PyGreenlet;
+
+#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
+
+
+/* C API functions */
+
+/* Total number of symbols that are exported */
+#define PyGreenlet_API_pointers 12
+
+#define PyGreenlet_Type_NUM 0
+#define PyExc_GreenletError_NUM 1
+#define PyExc_GreenletExit_NUM 2
+
+#define PyGreenlet_New_NUM 3
+#define PyGreenlet_GetCurrent_NUM 4
+#define PyGreenlet_Throw_NUM 5
+#define PyGreenlet_Switch_NUM 6
+#define PyGreenlet_SetParent_NUM 7
+
+#define PyGreenlet_MAIN_NUM 8
+#define PyGreenlet_STARTED_NUM 9
+#define PyGreenlet_ACTIVE_NUM 10
+#define PyGreenlet_GET_PARENT_NUM 11
+
+#ifndef GREENLET_MODULE
+/* This section is used by modules that use the greenlet C API */
+static void** _PyGreenlet_API = NULL;
+
+# define PyGreenlet_Type \
+ (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
+
+# define PyExc_GreenletError \
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
+
+# define PyExc_GreenletExit \
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
+
+/*
+ * PyGreenlet_New(PyObject *args)
+ *
+ * greenlet.greenlet(run, parent=None)
+ */
+# define PyGreenlet_New \
+ (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
+ _PyGreenlet_API[PyGreenlet_New_NUM])
+
+/*
+ * PyGreenlet_GetCurrent(void)
+ *
+ * greenlet.getcurrent()
+ */
+# define PyGreenlet_GetCurrent \
+ (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
+
+/*
+ * PyGreenlet_Throw(
+ * PyGreenlet *greenlet,
+ * PyObject *typ,
+ * PyObject *val,
+ * PyObject *tb)
+ *
+ * g.throw(...)
+ */
+# define PyGreenlet_Throw \
+ (*(PyObject * (*)(PyGreenlet * self, \
+ PyObject * typ, \
+ PyObject * val, \
+ PyObject * tb)) \
+ _PyGreenlet_API[PyGreenlet_Throw_NUM])
+
+/*
+ * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
+ *
+ * g.switch(*args, **kwargs)
+ */
+# define PyGreenlet_Switch \
+ (*(PyObject * \
+ (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
+ _PyGreenlet_API[PyGreenlet_Switch_NUM])
+
+/*
+ * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
+ *
+ * g.parent = new_parent
+ */
+# define PyGreenlet_SetParent \
+ (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
+ _PyGreenlet_API[PyGreenlet_SetParent_NUM])
+
+/*
+ * PyGreenlet_GetParent(PyObject* greenlet)
+ *
+ * return greenlet.parent;
+ *
+ * This could return NULL even if there is no exception active.
+ * If it does not return NULL, you are responsible for decrementing the
+ * reference count.
+ */
+# define PyGreenlet_GetParent \
+ (*(PyGreenlet* (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
+
+/*
+ * deprecated, undocumented alias.
+ */
+# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
+
+# define PyGreenlet_MAIN \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_MAIN_NUM])
+
+# define PyGreenlet_STARTED \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_STARTED_NUM])
+
+# define PyGreenlet_ACTIVE \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
+
+
+
+
+/* Macro that imports greenlet and initializes C API */
+/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
+ keep the older definition to be sure older code that might have a copy of
+ the header still works. */
+# define PyGreenlet_Import() \
+ { \
+ _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
+ }
+
+#endif /* GREENLET_MODULE */
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GREENLETOBJECT_H */
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/PKG-INFO b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/PKG-INFO
new file mode 100755
index 0000000..c800e14
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/PKG-INFO
@@ -0,0 +1,3 @@
+Metadata-Version: 2.1
+Name: Lazy-Wiki
+Version: 0.0.2
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/SOURCES.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/SOURCES.txt
new file mode 100755
index 0000000..3844b7d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/SOURCES.txt
@@ -0,0 +1,17 @@
+MANIFEST.in
+setup.py
+Lazy_Wiki.egg-info/PKG-INFO
+Lazy_Wiki.egg-info/SOURCES.txt
+Lazy_Wiki.egg-info/dependency_links.txt
+Lazy_Wiki.egg-info/entry_points.txt
+Lazy_Wiki.egg-info/requires.txt
+Lazy_Wiki.egg-info/top_level.txt
+lazy_wiki/__init__.py
+lazy_wiki/__main__.py
+lazy_wiki/db.py
+lazy_wiki/schema.py
+lazy_wiki/web.py
+lazy_wiki/views/delete.tpl
+lazy_wiki/views/edit.tpl
+lazy_wiki/views/head.tpl
+lazy_wiki/views/view.tpl
\ No newline at end of file
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/dependency_links.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/dependency_links.txt
new file mode 100755
index 0000000..8b13789
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/entry_points.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/entry_points.txt
new file mode 100755
index 0000000..81f7064
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+lazywiki = lazy_wiki.__main__:main
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/installed-files.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/installed-files.txt
new file mode 100644
index 0000000..5036b08
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/installed-files.txt
@@ -0,0 +1,21 @@
+../../../../bin/lazywiki
+../lazy_wiki/__init__.py
+../lazy_wiki/__main__.py
+../lazy_wiki/__pycache__/__init__.cpython-311.pyc
+../lazy_wiki/__pycache__/__main__.cpython-311.pyc
+../lazy_wiki/__pycache__/db.cpython-311.pyc
+../lazy_wiki/__pycache__/schema.cpython-311.pyc
+../lazy_wiki/__pycache__/web.cpython-311.pyc
+../lazy_wiki/db.py
+../lazy_wiki/schema.py
+../lazy_wiki/views/delete.tpl
+../lazy_wiki/views/edit.tpl
+../lazy_wiki/views/head.tpl
+../lazy_wiki/views/view.tpl
+../lazy_wiki/web.py
+PKG-INFO
+SOURCES.txt
+dependency_links.txt
+entry_points.txt
+requires.txt
+top_level.txt
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/requires.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/requires.txt
new file mode 100755
index 0000000..034bdeb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/requires.txt
@@ -0,0 +1,3 @@
+bottle
+markdown
+sqlalchemy==1.4.42
diff --git a/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/top_level.txt b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/top_level.txt
new file mode 100755
index 0000000..f4b8db5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Lazy_Wiki-0.0.2.egg-info/top_level.txt
@@ -0,0 +1 @@
+lazy_wiki
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/LICENSE.md b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/LICENSE.md
new file mode 100644
index 0000000..6249d60
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/LICENSE.md
@@ -0,0 +1,30 @@
+BSD 3-Clause License
+
+Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
+Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
+Copyright 2004 Manfred Stienstra (the original version)
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/METADATA b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/METADATA
new file mode 100644
index 0000000..516d18d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/METADATA
@@ -0,0 +1,146 @@
+Metadata-Version: 2.1
+Name: Markdown
+Version: 3.6
+Summary: Python implementation of John Gruber's Markdown.
+Author: Manfred Stienstra, Yuri Takhteyev
+Author-email: Waylan limberg
+Maintainer: Isaac Muse
+Maintainer-email: Waylan Limberg
+License: BSD 3-Clause License
+
+ Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
+ Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
+ Copyright 2004 Manfred Stienstra (the original version)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Project-URL: Homepage, https://Python-Markdown.github.io/
+Project-URL: Documentation, https://Python-Markdown.github.io/
+Project-URL: Repository, https://github.com/Python-Markdown/markdown
+Project-URL: Issue Tracker, https://github.com/Python-Markdown/markdown/issues
+Project-URL: Changelog, https://python-markdown.github.io/changelog/
+Keywords: markdown,markdown-parser,python-markdown,markdown-to-html
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Communications :: Email :: Filters
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
+Classifier: Topic :: Internet :: WWW/HTTP :: Site Management
+Classifier: Topic :: Software Development :: Documentation
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Filters
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Classifier: Topic :: Text Processing :: Markup :: Markdown
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE.md
+Requires-Dist: importlib-metadata >=4.4 ; python_version < "3.10"
+Provides-Extra: docs
+Requires-Dist: mkdocs >=1.5 ; extra == 'docs'
+Requires-Dist: mkdocs-nature >=0.6 ; extra == 'docs'
+Requires-Dist: mdx-gh-links >=0.2 ; extra == 'docs'
+Requires-Dist: mkdocstrings[python] ; extra == 'docs'
+Requires-Dist: mkdocs-gen-files ; extra == 'docs'
+Requires-Dist: mkdocs-section-index ; extra == 'docs'
+Requires-Dist: mkdocs-literate-nav ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: coverage ; extra == 'testing'
+Requires-Dist: pyyaml ; extra == 'testing'
+
+[Python-Markdown][]
+===================
+
+[![Build Status][build-button]][build]
+[![Coverage Status][codecov-button]][codecov]
+[![Latest Version][mdversion-button]][md-pypi]
+[![Python Versions][pyversion-button]][md-pypi]
+[![BSD License][bsdlicense-button]][bsdlicense]
+[![Code of Conduct][codeofconduct-button]][Code of Conduct]
+
+[build-button]: https://github.com/Python-Markdown/markdown/workflows/CI/badge.svg?event=push
+[build]: https://github.com/Python-Markdown/markdown/actions?query=workflow%3ACI+event%3Apush
+[codecov-button]: https://codecov.io/gh/Python-Markdown/markdown/branch/master/graph/badge.svg
+[codecov]: https://codecov.io/gh/Python-Markdown/markdown
+[mdversion-button]: https://img.shields.io/pypi/v/Markdown.svg
+[md-pypi]: https://pypi.org/project/Markdown/
+[pyversion-button]: https://img.shields.io/pypi/pyversions/Markdown.svg
+[bsdlicense-button]: https://img.shields.io/badge/license-BSD-yellow.svg
+[bsdlicense]: https://opensource.org/licenses/BSD-3-Clause
+[codeofconduct-button]: https://img.shields.io/badge/code%20of%20conduct-contributor%20covenant-green.svg?style=flat-square
+[Code of Conduct]: https://github.com/Python-Markdown/markdown/blob/master/CODE_OF_CONDUCT.md
+
+This is a Python implementation of John Gruber's [Markdown][].
+It is almost completely compliant with the reference implementation,
+though there are a few known issues. See [Features][] for information
+on what exactly is supported and what is not. Additional features are
+supported by the [Available Extensions][].
+
+[Python-Markdown]: https://Python-Markdown.github.io/
+[Markdown]: https://daringfireball.net/projects/markdown/
+[Features]: https://Python-Markdown.github.io#Features
+[Available Extensions]: https://Python-Markdown.github.io/extensions
+
+Documentation
+-------------
+
+```bash
+pip install markdown
+```
+```python
+import markdown
+html = markdown.markdown(your_text_string)
+```
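+
+As a further illustration (not part of the upstream README), bundled
+extensions such as `fenced_code` (see the extension docs linked above) can be
+enabled by name:
+
+```python
+import markdown
+html = markdown.markdown(your_text_string, extensions=['fenced_code'])
+```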
+
+For more advanced [installation] and [usage] documentation, see the `docs/` directory
+of the distribution or the project website at <https://Python-Markdown.github.io/>.
+
+[installation]: https://python-markdown.github.io/install/
+[usage]: https://python-markdown.github.io/reference/
+
+See the change log at <https://python-markdown.github.io/changelog/>.
+
+Support
+-------
+
+You may report bugs, ask for help, and discuss various other issues on the [bug tracker][].
+
+[bug tracker]: https://github.com/Python-Markdown/markdown/issues
+
+Code of Conduct
+---------------
+
+Everyone interacting in the Python-Markdown project's code bases, issue trackers,
+and mailing lists is expected to follow the [Code of Conduct].
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/RECORD b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/RECORD
new file mode 100644
index 0000000..a381b10
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/RECORD
@@ -0,0 +1,74 @@
+../../../bin/markdown_py,sha256=H_j-tI6aArIE37IVvg5JrI2CpQCOcF2g8qIgeiXC1VQ,249
+Markdown-3.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Markdown-3.6.dist-info/LICENSE.md,sha256=e6TrbRCzKy0R3OE4ITQDUc27swuozMZ4Qdsv_Ybnmso,1650
+Markdown-3.6.dist-info/METADATA,sha256=8_ETqzTxcOemQXj7ujUabMFcDBDGtsRrccFDr1-XWvc,7040
+Markdown-3.6.dist-info/RECORD,,
+Markdown-3.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+Markdown-3.6.dist-info/entry_points.txt,sha256=lMEyiiA_ZZyfPCBlDviBl-SiU0cfoeuEKpwxw361sKQ,1102
+Markdown-3.6.dist-info/top_level.txt,sha256=IAxs8x618RXoH1uCqeLLxXsDefJvE_mIibr_M4sOlyk,9
+markdown/__init__.py,sha256=dfzwwdpG9L8QLEPBpLFPIHx_BN056aZXp9xZifTxYIU,1777
+markdown/__main__.py,sha256=innFBxRqwPBNxG1zhKktJji4bnRKtVyYYd30ID13Tcw,5859
+markdown/__meta__.py,sha256=DqtqnYYLznrkvI1G4JalBc4WpgOp48naNoG9zlMWZas,1712
+markdown/__pycache__/__init__.cpython-311.pyc,,
+markdown/__pycache__/__main__.cpython-311.pyc,,
+markdown/__pycache__/__meta__.cpython-311.pyc,,
+markdown/__pycache__/blockparser.cpython-311.pyc,,
+markdown/__pycache__/blockprocessors.cpython-311.pyc,,
+markdown/__pycache__/core.cpython-311.pyc,,
+markdown/__pycache__/htmlparser.cpython-311.pyc,,
+markdown/__pycache__/inlinepatterns.cpython-311.pyc,,
+markdown/__pycache__/postprocessors.cpython-311.pyc,,
+markdown/__pycache__/preprocessors.cpython-311.pyc,,
+markdown/__pycache__/serializers.cpython-311.pyc,,
+markdown/__pycache__/test_tools.cpython-311.pyc,,
+markdown/__pycache__/treeprocessors.cpython-311.pyc,,
+markdown/__pycache__/util.cpython-311.pyc,,
+markdown/blockparser.py,sha256=j4CQImVpiq7g9pz8wCxvzT61X_T2iSAjXupHJk8P3eA,5728
+markdown/blockprocessors.py,sha256=koY5rq8DixzBCHcquvZJp6x2JYyBGjrwxMWNZhd6D2U,27013
+markdown/core.py,sha256=DyyzDsmd-KcuEp8ZWUKJAeUCt7B7G3J3NeqZqp3LphI,21335
+markdown/extensions/__init__.py,sha256=9z1khsdKCVrmrJ_2GfxtPAdjD3FyMe5vhC7wmM4O9m0,4822
+markdown/extensions/__pycache__/__init__.cpython-311.pyc,,
+markdown/extensions/__pycache__/abbr.cpython-311.pyc,,
+markdown/extensions/__pycache__/admonition.cpython-311.pyc,,
+markdown/extensions/__pycache__/attr_list.cpython-311.pyc,,
+markdown/extensions/__pycache__/codehilite.cpython-311.pyc,,
+markdown/extensions/__pycache__/def_list.cpython-311.pyc,,
+markdown/extensions/__pycache__/extra.cpython-311.pyc,,
+markdown/extensions/__pycache__/fenced_code.cpython-311.pyc,,
+markdown/extensions/__pycache__/footnotes.cpython-311.pyc,,
+markdown/extensions/__pycache__/legacy_attrs.cpython-311.pyc,,
+markdown/extensions/__pycache__/legacy_em.cpython-311.pyc,,
+markdown/extensions/__pycache__/md_in_html.cpython-311.pyc,,
+markdown/extensions/__pycache__/meta.cpython-311.pyc,,
+markdown/extensions/__pycache__/nl2br.cpython-311.pyc,,
+markdown/extensions/__pycache__/sane_lists.cpython-311.pyc,,
+markdown/extensions/__pycache__/smarty.cpython-311.pyc,,
+markdown/extensions/__pycache__/tables.cpython-311.pyc,,
+markdown/extensions/__pycache__/toc.cpython-311.pyc,,
+markdown/extensions/__pycache__/wikilinks.cpython-311.pyc,,
+markdown/extensions/abbr.py,sha256=JqFOfU7JlhIFY06-nZnSU0wDqneFKKWMe95eXB-iLtc,3250
+markdown/extensions/admonition.py,sha256=Hqcn3I8JG0i-OPWdoqI189TmlQRgH6bs5PmpCANyLlg,6547
+markdown/extensions/attr_list.py,sha256=t3PrgAr5Ebldnq3nJNbteBt79bN0ccXS5RemmQfUZ9g,7820
+markdown/extensions/codehilite.py,sha256=ChlmpM6S--j-UK7t82859UpYjm8EftdiLqmgDnknyes,13503
+markdown/extensions/def_list.py,sha256=J3NVa6CllfZPsboJCEycPyRhtjBHnOn8ET6omEvVlDo,4029
+markdown/extensions/extra.py,sha256=1vleT284kued4HQBtF83IjSumJVo0q3ng6MjTkVNfNQ,2163
+markdown/extensions/fenced_code.py,sha256=-fYSmRZ9DTYQ8HO9b_78i47kVyVu6mcVJlqVTMdzvo4,8300
+markdown/extensions/footnotes.py,sha256=bRFlmIBOKDI5efG1jZfDkMoV2osfqWip1rN1j2P-mMg,16710
+markdown/extensions/legacy_attrs.py,sha256=oWcyNrfP0F6zsBoBOaD5NiwrJyy4kCpgQLl12HA7JGU,2788
+markdown/extensions/legacy_em.py,sha256=-Z_w4PEGSS-Xg-2-BtGAnXwwy5g5GDgv2tngASnPgxg,1693
+markdown/extensions/md_in_html.py,sha256=y4HEWEnkvfih22fojcaJeAmjx1AtF8N-a_jb6IDFfts,16546
+markdown/extensions/meta.py,sha256=v_4Uq7nbcQ76V1YAvqVPiNLbRLIQHJsnfsk-tN70RmY,2600
+markdown/extensions/nl2br.py,sha256=9KKcrPs62c3ENNnmOJZs0rrXXqUtTCfd43j1_OPpmgU,1090
+markdown/extensions/sane_lists.py,sha256=ogAKcm7gEpcXV7fSTf8JZH5YdKAssPCEOUzdGM3C9Tw,2150
+markdown/extensions/smarty.py,sha256=yqT0OiE2AqYrqqZtcUFFmp2eJsQHomiKzgyG2JFb9rI,11048
+markdown/extensions/tables.py,sha256=oTDvGD1qp9xjVWPGYNgDBWe9NqsX5gS6UU5wUsQ1bC8,8741
+markdown/extensions/toc.py,sha256=PGg-EqbBubm3n0b633r8Xa9kc6JIdbo20HGAOZ6GEl8,18322
+markdown/extensions/wikilinks.py,sha256=j7D2sozica6sqXOUa_GuAXqIzxp-7Hi60bfXymiuma8,3285
+markdown/htmlparser.py,sha256=dEr6IE7i9b6Tc1gdCLZGeWw6g6-E-jK1Z4KPj8yGk8Q,14332
+markdown/inlinepatterns.py,sha256=7_HF5nTOyQag_CyBgU4wwmuI6aMjtadvGadyS9IP21w,38256
+markdown/postprocessors.py,sha256=eYi6eW0mGudmWpmsW45hduLwX66Zr8Bf44WyU9vKp-I,4807
+markdown/preprocessors.py,sha256=pq5NnHKkOSVQeIo-ajC-Yt44kvyMV97D04FBOQXctJM,3224
+markdown/serializers.py,sha256=YtAFYQoOdp_TAmYGow6nBo0eB6I-Sl4PTLdLDfQJHwQ,7174
+markdown/test_tools.py,sha256=MtN4cf3ZPDtb83wXLTol-3q3aIGRIkJ2zWr6fd-RgVE,8662
+markdown/treeprocessors.py,sha256=o4dnoZZsIeVV8qR45Njr8XgwKleWYDS5pv8dKQhJvv8,17651
+markdown/util.py,sha256=vJ1E0xjMzDAlTqLUSJWgdEvxdQfLXDEYUssOQMw9kPQ,13929
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/WHEEL b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/WHEEL
new file mode 100644
index 0000000..bab98d6
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.43.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/entry_points.txt b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/entry_points.txt
new file mode 100644
index 0000000..be3bd8f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/entry_points.txt
@@ -0,0 +1,22 @@
+[console_scripts]
+markdown_py = markdown.__main__:run
+
+[markdown.extensions]
+abbr = markdown.extensions.abbr:AbbrExtension
+admonition = markdown.extensions.admonition:AdmonitionExtension
+attr_list = markdown.extensions.attr_list:AttrListExtension
+codehilite = markdown.extensions.codehilite:CodeHiliteExtension
+def_list = markdown.extensions.def_list:DefListExtension
+extra = markdown.extensions.extra:ExtraExtension
+fenced_code = markdown.extensions.fenced_code:FencedCodeExtension
+footnotes = markdown.extensions.footnotes:FootnoteExtension
+legacy_attrs = markdown.extensions.legacy_attrs:LegacyAttrExtension
+legacy_em = markdown.extensions.legacy_em:LegacyEmExtension
+md_in_html = markdown.extensions.md_in_html:MarkdownInHtmlExtension
+meta = markdown.extensions.meta:MetaExtension
+nl2br = markdown.extensions.nl2br:Nl2BrExtension
+sane_lists = markdown.extensions.sane_lists:SaneListExtension
+smarty = markdown.extensions.smarty:SmartyExtension
+tables = markdown.extensions.tables:TableExtension
+toc = markdown.extensions.toc:TocExtension
+wikilinks = markdown.extensions.wikilinks:WikiLinkExtension
diff --git a/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/top_level.txt
new file mode 100644
index 0000000..0918c97
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/Markdown-3.6.dist-info/top_level.txt
@@ -0,0 +1 @@
+markdown
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/LICENSE b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/LICENSE
new file mode 100644
index 0000000..c933e4b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright 2005-2022 SQLAlchemy authors and contributors <see AUTHORS file>.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/METADATA b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/METADATA
new file mode 100644
index 0000000..fb90601
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/METADATA
@@ -0,0 +1,238 @@
+Metadata-Version: 2.1
+Name: SQLAlchemy
+Version: 1.4.42
+Summary: Database Abstraction Library
+Home-page: https://www.sqlalchemy.org
+Author: Mike Bayer
+Author-email: mike_mp@zzzcomputing.com
+License: MIT
+Project-URL: Documentation, https://docs.sqlalchemy.org
+Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Database :: Front-Ends
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: importlib-metadata ; python_version < "3.8"
+Requires-Dist: greenlet (!=0.4.17) ; python_version >= "3" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32"))))))
+Provides-Extra: aiomysql
+Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiomysql'
+Requires-Dist: aiomysql ; (python_version >= "3") and extra == 'aiomysql'
+Provides-Extra: aiosqlite
+Requires-Dist: typing-extensions (!=3.10.0.1) ; extra == 'aiosqlite'
+Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'aiosqlite'
+Requires-Dist: aiosqlite ; (python_version >= "3") and extra == 'aiosqlite'
+Provides-Extra: asyncio
+Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncio'
+Provides-Extra: asyncmy
+Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'asyncmy'
+Requires-Dist: asyncmy (!=0.2.4,>=0.2.3) ; (python_version >= "3") and extra == 'asyncmy'
+Provides-Extra: mariadb_connector
+Requires-Dist: mariadb (!=1.1.2,>=1.0.1) ; (python_version >= "3") and extra == 'mariadb_connector'
+Provides-Extra: mssql
+Requires-Dist: pyodbc ; extra == 'mssql'
+Provides-Extra: mssql_pymssql
+Requires-Dist: pymssql ; extra == 'mssql_pymssql'
+Provides-Extra: mssql_pyodbc
+Requires-Dist: pyodbc ; extra == 'mssql_pyodbc'
+Provides-Extra: mypy
+Requires-Dist: sqlalchemy2-stubs ; extra == 'mypy'
+Requires-Dist: mypy (>=0.910) ; (python_version >= "3") and extra == 'mypy'
+Provides-Extra: mysql
+Requires-Dist: mysqlclient (<2,>=1.4.0) ; (python_version < "3") and extra == 'mysql'
+Requires-Dist: mysqlclient (>=1.4.0) ; (python_version >= "3") and extra == 'mysql'
+Provides-Extra: mysql_connector
+Requires-Dist: mysql-connector-python ; extra == 'mysql_connector'
+Provides-Extra: oracle
+Requires-Dist: cx-oracle (<8,>=7) ; (python_version < "3") and extra == 'oracle'
+Requires-Dist: cx-oracle (>=7) ; (python_version >= "3") and extra == 'oracle'
+Provides-Extra: postgresql
+Requires-Dist: psycopg2 (>=2.7) ; extra == 'postgresql'
+Provides-Extra: postgresql_asyncpg
+Requires-Dist: greenlet (!=0.4.17) ; (python_version >= "3") and extra == 'postgresql_asyncpg'
+Requires-Dist: asyncpg ; (python_version >= "3") and extra == 'postgresql_asyncpg'
+Provides-Extra: postgresql_pg8000
+Requires-Dist: pg8000 (!=1.29.0,>=1.16.6) ; extra == 'postgresql_pg8000'
+Provides-Extra: postgresql_psycopg2binary
+Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary'
+Provides-Extra: postgresql_psycopg2cffi
+Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi'
+Provides-Extra: pymysql
+Requires-Dist: pymysql (<1) ; (python_version < "3") and extra == 'pymysql'
+Requires-Dist: pymysql ; (python_version >= "3") and extra == 'pymysql'
+Provides-Extra: sqlcipher
+Requires-Dist: sqlcipher3-binary ; (python_version >= "3") and extra == 'sqlcipher'
+
+SQLAlchemy
+==========
+
+|PyPI| |Python| |Downloads|
+
+.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy
+ :target: https://pypi.org/project/sqlalchemy
+ :alt: PyPI
+
+.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy
+ :target: https://pypi.org/project/sqlalchemy
+ :alt: PyPI - Python Version
+
+.. |Downloads| image:: https://img.shields.io/pypi/dm/sqlalchemy
+ :target: https://pypi.org/project/sqlalchemy
+ :alt: PyPI - Downloads
+
+
+The Python SQL Toolkit and Object Relational Mapper
+
+Introduction
+-------------
+
+SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
+that gives application developers the full power and
+flexibility of SQL. SQLAlchemy provides a full suite
+of well known enterprise-level persistence patterns,
+designed for efficient and high-performing database
+access, adapted into a simple and Pythonic domain
+language.
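+
+A minimal sketch of that domain language, using the ORM against an in-memory
+SQLite database (the ``User`` class, its columns, and the sample row are
+invented for this illustration)::
+
+    from sqlalchemy import Column, Integer, String, create_engine
+    from sqlalchemy.orm import Session, declarative_base
+
+    Base = declarative_base()
+
+    class User(Base):
+        __tablename__ = "user_account"
+        id = Column(Integer, primary_key=True)
+        name = Column(String(50))
+
+    engine = create_engine("sqlite:///:memory:")
+    Base.metadata.create_all(engine)   # emit CREATE TABLE from the mapped class
+
+    with Session(engine) as session:
+        session.add(User(name="spongebob"))
+        session.commit()
+        print(session.query(User).count())  # 1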
+
+Major SQLAlchemy features include:
+
+* An industrial strength ORM, built
+ from the core on the identity map, unit of work,
+ and data mapper patterns. These patterns
+ allow transparent persistence of objects
+ using a declarative configuration system.
+ Domain models
+ can be constructed and manipulated naturally,
+ and changes are synchronized with the
+ current transaction automatically.
+* A relationally-oriented query system, exposing
+ the full range of SQL's capabilities
+ explicitly, including joins, subqueries,
+ correlation, and most everything else,
+ in terms of the object model.
+ Writing queries with the ORM uses the same
+ techniques of relational composition you use
+ when writing SQL. While you can drop into
+ literal SQL at any time, it's virtually never
+ needed.
+* A comprehensive and flexible system
+ of eager loading for related collections and objects.
+ Collections are cached within a session,
+ and can be loaded on individual access, all
+ at once using joins, or by query per collection
+ across the full result set.
+* A Core SQL construction system and DBAPI
+ interaction layer. The SQLAlchemy Core is
+ separate from the ORM and is a full database
+ abstraction layer in its own right, and includes
+ an extensible Python-based SQL expression
+ language, schema metadata, connection pooling,
+ type coercion, and custom types.
+* All primary and foreign key constraints are
+ assumed to be composite and natural. Surrogate
+ integer primary keys are of course still the
+ norm, but SQLAlchemy never assumes or hardcodes
+ to this model.
+* Database introspection and generation. Database
+ schemas can be "reflected" in one step into
+ Python structures representing database metadata;
+ those same structures can then generate
+ CREATE statements right back out - all within
+ the Core, independent of the ORM.
+
+SQLAlchemy's philosophy:
+
+* SQL databases behave less and less like object
+ collections the more size and performance start to
+ matter; object collections behave less and less like
+ tables and rows the more abstraction starts to matter.
+ SQLAlchemy aims to accommodate both of these
+ principles.
+* An ORM doesn't need to hide the "R". A relational
+ database provides rich, set-based functionality
+ that should be fully exposed. SQLAlchemy's
+ ORM provides an open-ended set of patterns
+ that allow a developer to construct a custom
+ mediation layer between a domain model and
+ a relational schema, turning the so-called
+ "object relational impedance" issue into
+ a distant memory.
+* The developer, in all cases, makes all decisions
+ regarding the design, structure, and naming conventions
+ of both the object model as well as the relational
+ schema. SQLAlchemy only provides the means
+ to automate the execution of these decisions.
+* With SQLAlchemy, there's no such thing as
+ "the ORM generated a bad query" - you
+ retain full control over the structure of
+ queries, including how joins are organized,
+ how subqueries and correlation is used, what
+ columns are requested. Everything SQLAlchemy
+ does is ultimately the result of a developer-
+ initiated decision.
+* Don't use an ORM if the problem doesn't need one.
+ SQLAlchemy consists of a Core and separate ORM
+ component. The Core offers a full SQL expression
+ language that allows Pythonic construction
+ of SQL constructs that render directly to SQL
+ strings for a target database, returning
+ result sets that are essentially enhanced DBAPI
+ cursors.
+* Transactions should be the norm. With SQLAlchemy's
+ ORM, nothing goes to permanent storage until
+ commit() is called. SQLAlchemy encourages applications
+ to create a consistent means of delineating
+ the start and end of a series of operations.
+* Never render a literal value in a SQL statement.
+ Bound parameters are used to the greatest degree
+ possible, allowing query optimizers to cache
+ query plans effectively and making SQL injection
+ attacks a non-issue.
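+
+A tiny sketch of that last point, using Core ``text()`` against an in-memory
+SQLite database (the arithmetic query is invented purely to show parameter
+binding)::
+
+    from sqlalchemy import create_engine, text
+
+    engine = create_engine("sqlite:///:memory:")
+    with engine.connect() as conn:
+        # the values travel to the driver as bound parameters, not inline literals
+        total = conn.execute(text("SELECT :x + :y"), {"x": 2, "y": 3}).scalar()
+        print(total)  # 5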
+
+Documentation
+-------------
+
+Latest documentation is at:
+
+https://www.sqlalchemy.org/docs/
+
+Installation / Requirements
+---------------------------
+
+Full documentation for installation is at
+`Installation <https://docs.sqlalchemy.org/en/14/intro.html#installation>`_.
+
+Getting Help / Development / Bug reporting
+------------------------------------------
+
+Please refer to the `SQLAlchemy Community Guide <https://www.sqlalchemy.org/support.html>`_.
+
+Code of Conduct
+---------------
+
+Above all, SQLAlchemy places great emphasis on polite, thoughtful, and
+constructive communication between users and developers.
+Please see our current Code of Conduct at
+`Code of Conduct <https://www.sqlalchemy.org/codeofconduct.html>`_.
+
+License
+-------
+
+SQLAlchemy is distributed under the `MIT license
+<https://www.opensource.org/licenses/mit-license.php>`_.
+
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/RECORD b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/RECORD
new file mode 100644
index 0000000..792cddb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/RECORD
@@ -0,0 +1,485 @@
+SQLAlchemy-1.4.42.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+SQLAlchemy-1.4.42.dist-info/LICENSE,sha256=hZ3tJdo0wetz5uc230xfjOPtLtUpBmMXbwbncg2cmiA,1100
+SQLAlchemy-1.4.42.dist-info/METADATA,sha256=kfixyeTkluIm-oaTTIeytK0bb1dOSaXlwK3d9XXym_o,10023
+SQLAlchemy-1.4.42.dist-info/RECORD,,
+SQLAlchemy-1.4.42.dist-info/WHEEL,sha256=lVPXYH8LMHYHuLy0p0zNneWNEw-dpoJ5k5Tb3b38QMM,225
+SQLAlchemy-1.4.42.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11
+sqlalchemy/__init__.py,sha256=syDTdTFdJQFS7HqzjGJ4l8CLd8LWzyvG5F3MkJD0cCk,4114
+sqlalchemy/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/__pycache__/exc.cpython-311.pyc,,
+sqlalchemy/__pycache__/inspection.cpython-311.pyc,,
+sqlalchemy/__pycache__/log.cpython-311.pyc,,
+sqlalchemy/__pycache__/processors.cpython-311.pyc,,
+sqlalchemy/__pycache__/schema.cpython-311.pyc,,
+sqlalchemy/__pycache__/types.cpython-311.pyc,,
+sqlalchemy/cimmutabledict.cpython-311-x86_64-linux-gnu.so,sha256=_YcUdFdTaUvjX_BjfmVZRvPXyd42e0upDP6LVH2eMpI,49104
+sqlalchemy/connectors/__init__.py,sha256=2m_LPZFkNExkoaTw14fRActQCcyFl7W81WeYj2O10lM,279
+sqlalchemy/connectors/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/connectors/__pycache__/mxodbc.cpython-311.pyc,,
+sqlalchemy/connectors/__pycache__/pyodbc.cpython-311.pyc,,
+sqlalchemy/connectors/mxodbc.py,sha256=CApFVkPEL8amXL5HKcG83jU9RbbVg0EQSyxceLWh260,5784
+sqlalchemy/connectors/pyodbc.py,sha256=003bqMmK-Hpy-kZYa4vy2CNRz73Fvvj2zUsyhFQnkUc,6855
+sqlalchemy/cprocessors.cpython-311-x86_64-linux-gnu.so,sha256=-yT-JdHigN_GzZmvHh2HyzRlys3GlnIqEPjZI8y3tQc,52616
+sqlalchemy/cresultproxy.cpython-311-x86_64-linux-gnu.so,sha256=EUTYgyblNDrJzGnsnn__7IE2nr0N9_SQd-aF-R3vNQA,97328
+sqlalchemy/databases/__init__.py,sha256=LAm4NHQgjg4sdCED02wUiZj9_0fKBEkStYtqvLWHArk,1010
+sqlalchemy/databases/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/__init__.py,sha256=52RcDU2JGS1nW2OHx2nIJ1B_IBI4puWFx09th8Hg-D0,2085
+sqlalchemy/dialects/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/firebird/__init__.py,sha256=iZH9WTMjUcsAf6Rl6-64CkcoLOixitP45TSZVSBQYL4,1153
+sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-311.pyc,,
+sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-311.pyc,,
+sqlalchemy/dialects/firebird/base.py,sha256=P0ycKcsMKJyglm6uikAVDSc_7UV0NPSIU7hL58HQaog,31171
+sqlalchemy/dialects/firebird/fdb.py,sha256=lQhO8S1P8PjUeEW3NXCC1vqNp1DGzBQIUN2eIi-fCC0,4116
+sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=2_RZGXSw12FCEeZW0cXxbaR2Bl7GfMd7gGg5pgUiFzg,6479
+sqlalchemy/dialects/mssql/__init__.py,sha256=fvIR7jRTPH_4HellLg2kjwYIA3HM_jpNWSw9De0JciE,1788
+sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/json.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-311.pyc,,
+sqlalchemy/dialects/mssql/base.py,sha256=P6CsAKmEjdENyLoeWPnhRpKXEHXW5oxpVF9-GqT2MIc,116347
+sqlalchemy/dialects/mssql/information_schema.py,sha256=R0xpK7xppti2ToGahDksb9jHy9R9MyHTwCfgeNvw3BQ,7584
+sqlalchemy/dialects/mssql/json.py,sha256=K1RqVl5bslYyVMtk5CWGjRV_I4K1sszXjx2F_nbCVWI,4558
+sqlalchemy/dialects/mssql/mxodbc.py,sha256=HPIxqFtSUY9Ugz-ebNb2T_sLoLp4rQi7qrmezsIYIsM,4808
+sqlalchemy/dialects/mssql/provision.py,sha256=m7ofLZYZinDS91Vgs42fK7dhJNnH-J_Bw2x_tP59tCc,4255
+sqlalchemy/dialects/mssql/pymssql.py,sha256=Zo4lyJQD77NKCg_RG5hCmaPVgjrZLMjk-zZbYVYRDR8,3863
+sqlalchemy/dialects/mssql/pyodbc.py,sha256=T__b7XXLrPAp0eo80ykgelUZQvncF9GcxccPDz_zOgw,24432
+sqlalchemy/dialects/mysql/__init__.py,sha256=4C8GY2nAGQOrdGj3CseZqF4NR-CkhVZ_CgXFoskGAJs,2190
+sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/dml.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/expression.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/json.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/__pycache__/types.cpython-311.pyc,,
+sqlalchemy/dialects/mysql/aiomysql.py,sha256=Xqfr0SjvUu-qQZgrDLBnxo4dRQF9ZrI6tpc4HgiXENE,9609
+sqlalchemy/dialects/mysql/asyncmy.py,sha256=D8slHiFP3hOvwxf8zMY_-72V1owEhnpO0LmQdkz4n4M,9885
+sqlalchemy/dialects/mysql/base.py,sha256=cdE1wUMRADc0vxHpFCEB7lusbOIW7ihXPBOmRY6tpn0,115204
+sqlalchemy/dialects/mysql/cymysql.py,sha256=zaVxpSLTg8rvIrI6BtlK0815BCLuLKp2ILHLs57thVA,2271
+sqlalchemy/dialects/mysql/dml.py,sha256=EXTHGjiXeNxGyt-jbRH5ZNIkRjTja25gQXAthTCCw8g,6226
+sqlalchemy/dialects/mysql/enumerated.py,sha256=Dv5BAF8DxCqfVXIkXt5kzGG-BxNygpdnXrZjyyzKyqM,9364
+sqlalchemy/dialects/mysql/expression.py,sha256=HJ4IO3LPJk4cQYIL-O-jN2vLWxVGCqem_K3h8kKNWzE,3741
+sqlalchemy/dialects/mysql/json.py,sha256=DMQnyo3PQ_XSPvDl8jt26Ya-fyMEaIJDXQBdLVmsdjE,2313
+sqlalchemy/dialects/mysql/mariadb.py,sha256=OBwN9RMQLP-xqLbNMAe5uoz7PEtqa68ln2HwwA6KUn8,585
+sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=vLhoFmC9OFh30bHGRFBwWHv3ou3wTZ8WPZOamgmUuWs,7563
+sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=CT4bFb2WaFHwBDfRSqK3ieltrkulTYwsX0kgbWPrRao,7690
+sqlalchemy/dialects/mysql/mysqldb.py,sha256=qvea9Iuf6SUqb4QSHeCEcbUf3c3FSckjT4jfQSTMlyw,10437
+sqlalchemy/dialects/mysql/oursql.py,sha256=fWWMyvhNZ6ywBGvvwJ8DqtBec8cUtziiIjYopBn2WVg,8523
+sqlalchemy/dialects/mysql/provision.py,sha256=P5ma4Xy5eSOFIcMjIe_zAwu_6ncSXSLVZYYSMS5Io9c,2649
+sqlalchemy/dialects/mysql/pymysql.py,sha256=D106c8jEME1O0wOMV7ZgSuwin7Pv61kKLWYFEEKPpUY,2770
+sqlalchemy/dialects/mysql/pyodbc.py,sha256=31587UnRrSQhep_NXt7ii0-3xkAVDJgCGQXSDCpDDuY,4290
+sqlalchemy/dialects/mysql/reflection.py,sha256=ZyCxf4PlVqLgpHO8AZbXEadmvqInEwthaNJRiMziMoU,18710
+sqlalchemy/dialects/mysql/reserved_words.py,sha256=vvAyUvobiAB46Lpd7DhyWPgp3cWdFaVu9_5P39TEXMM,9104
+sqlalchemy/dialects/mysql/types.py,sha256=MrMLGeFo-zJJfGMn39smAfxy5fPvQrgXv49cIrm6Img,24589
+sqlalchemy/dialects/oracle/__init__.py,sha256=POVn6bB3yD-b4ZT7CSYQlmNpxDRIRpfuJ8CTTYgphPM,1229
+sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-311.pyc,,
+sqlalchemy/dialects/oracle/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/dialects/oracle/base.py,sha256=8jixA3aDMW-cyclxBOFIGnpFCVJuixy1raBhmkoaau4,87563
+sqlalchemy/dialects/oracle/cx_oracle.py,sha256=78Igd2RmfFXNGSMllfhMPRu-AUbBVGKZ3_VI6a9ouh4,53202
+sqlalchemy/dialects/oracle/provision.py,sha256=GtHrw1rtW0bzPSa9dUE-IjDFGaElyJyw4rwHAK3QDVY,5806
+sqlalchemy/dialects/postgresql/__init__.py,sha256=thvDDu6Vp68lXdF78wagnnOTq7sFBCDwT5X9x8Mygn8,2509
+sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/array.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/json.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pygresql.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-311.pyc,,
+sqlalchemy/dialects/postgresql/array.py,sha256=oYxNNsFs09isqbdym5WGRPQkUVSF4UqeRDeudis-1bI,14197
+sqlalchemy/dialects/postgresql/asyncpg.py,sha256=-vHGMkxXBg7gWRunr-WlmzYgJDfS4RJotNeyzbwRF8Q,35445
+sqlalchemy/dialects/postgresql/base.py,sha256=MyxOUhYQFvOiKfX207ZlKR5ap5TbWUrwcGyE_IJF1T0,159101
+sqlalchemy/dialects/postgresql/dml.py,sha256=O7GBPR4liaOBBJWGlEU86vrfuzLMy3d3LIbeRZ-nSvc,9582
+sqlalchemy/dialects/postgresql/ext.py,sha256=xk8e0iT5L7bPwjpSlw5eI3lwli_LwmdugB7GhnDOtMo,8652
+sqlalchemy/dialects/postgresql/hstore.py,sha256=8V6JhPYHtwctKlD3PA_FrGNejxz_YUCVhwYUkaSj0WA,12873
+sqlalchemy/dialects/postgresql/json.py,sha256=cIABYehcW9j7ctBCAYXhZFGFQeHgLkisVQB1k2ftnT4,10556
+sqlalchemy/dialects/postgresql/pg8000.py,sha256=_UztntjUclGLtty8nvVwlcNtCEFz_9lsQrf-HR7EpLE,17044
+sqlalchemy/dialects/postgresql/provision.py,sha256=ZDFEIOvtpBIgCnj1Q1R3-WDWx7lFnE6kdEGNTDFpzAw,4319
+sqlalchemy/dialects/postgresql/psycopg2.py,sha256=yUbR7QwBtu46n1TssONOtcF7ci6W2YERDZlyIRzVckI,40340
+sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=pBRHxI6KgVePwPO_FAFaE7Nces43qPIviDwbtchi8f8,1691
+sqlalchemy/dialects/postgresql/pygresql.py,sha256=oZ847ZkhqqzPeo1BiQnIP7slX7SIbXdoo1OyC5ehChY,8585
+sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=_Kw2eXUEAefflJVA1dZJ7aCGt2Lown3PW3i2ab2Eat0,3693
+sqlalchemy/dialects/postgresql/ranges.py,sha256=AP3ODSZoH9Yf9CeAPy_GpVVLMtK-4rdebmHWYjgKFug,4763
+sqlalchemy/dialects/sqlite/__init__.py,sha256=GwL23FcaoQOso1Sa1RlaF3i5SezqEVjfijvbp8hzRg0,1198
+sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/json.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-311.pyc,,
+sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=VY9IAHargDb13k5QKtrodhJcXQ8nErgl4fEj7o39o0Y,10223
+sqlalchemy/dialects/sqlite/base.py,sha256=UZrriowzuSoAbQagvqKyC9HTCV0UjWuqIxB0SBmO07E,88435
+sqlalchemy/dialects/sqlite/dml.py,sha256=hFloxZoqsrew4tlzS0DSMyzdKJ9-HU0z-dLKWVgR5ns,6865
+sqlalchemy/dialects/sqlite/json.py,sha256=oFw4Rt8xw-tkD3IMlm3TDEGe1RqrTyvIuqjABsxn8EI,2518
+sqlalchemy/dialects/sqlite/provision.py,sha256=AQILXN5PBUSM05c-SFSFFhPdFqcQDwdoKtUnvLDac14,4676
+sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=1MmhAlAaUTnzm7guppjDzGXQ6_OxFtuGzchSiJ0PeRA,5605
+sqlalchemy/dialects/sqlite/pysqlite.py,sha256=hIvCxLaxe-HSYmLBnvwmzayqxo2OMJMr35mlFGxeNd8,24453
+sqlalchemy/dialects/sybase/__init__.py,sha256=STn2xh97yskErTEYZAyrptb5vYOqPamvb9-QnYd3aG4,1364
+sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-311.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-311.pyc,,
+sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-311.pyc,,
+sqlalchemy/dialects/sybase/base.py,sha256=rOfZ2sN3BEtwIDo9nvIWe5VpgxVvjjLt4gSxFb9VyC0,32421
+sqlalchemy/dialects/sybase/mxodbc.py,sha256=7U4-Y4mf_o6qzFQraQ7XklDTB0PDddF8u6hFIpuAsCE,939
+sqlalchemy/dialects/sybase/pyodbc.py,sha256=bTbAjgvx2LRlhY94DYl_NXRkbVJAd71_LbIvRCtDPX0,2230
+sqlalchemy/dialects/sybase/pysybase.py,sha256=-i6vGx7UIVX2arQE9_9GM_YcqeiRCawqxcXnngjvRAY,3370
+sqlalchemy/engine/__init__.py,sha256=T44Oyjf2yPp77vDWs8g54h9XVt3FbGRZagKxGxu9XwU,2108
+sqlalchemy/engine/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/characteristics.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/create.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/cursor.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/default.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/interfaces.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/mock.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/reflection.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/result.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/row.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/strategies.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/url.cpython-311.pyc,,
+sqlalchemy/engine/__pycache__/util.cpython-311.pyc,,
+sqlalchemy/engine/base.py,sha256=Iv9_Fcju-spBWw_E-KAwaPzNXhFM5EE8XOnBUKLqHt4,124586
+sqlalchemy/engine/characteristics.py,sha256=qvd3T8HW470kIxN-x6OzycfjCFdnmbzcaFQeds7KHOw,1817
+sqlalchemy/engine/create.py,sha256=q47BzZWgZVxWAaex60SIbFxkfvDFHkDUH5RU0_WnwdA,30797
+sqlalchemy/engine/cursor.py,sha256=VSuEZzk6G6NrXjtt0wxMAQwiLz3fPsfOyj1dwJHY0jM,68765
+sqlalchemy/engine/default.py,sha256=ndcnktOnj3Tmu-PUnaPtVfL1xpykzJrQmDhGj-5Ox0U,67023
+sqlalchemy/engine/events.py,sha256=_qeDo_mMNXXnpQBSAnaRkE1gg6c-r7P5IT78r0aBUuc,33422
+sqlalchemy/engine/interfaces.py,sha256=hXZVQUVaXkwsE3oI_1f4xJ9dMGKmO_s3dCFwlaMC7A4,58972
+sqlalchemy/engine/mock.py,sha256=wJIFZbvkHwAoi7jCupeyZzuE-J9lqyzhJ6VdrAyMNkw,3626
+sqlalchemy/engine/reflection.py,sha256=w0ix23go8S41ye3kM-UOLGVs-UiLUnS8oJqrWI-z9ow,38930
+sqlalchemy/engine/result.py,sha256=HwRxVtgpu62MdUxOdlv79HbZx4UKJJoN_uqoe1dQ2WA,58992
+sqlalchemy/engine/row.py,sha256=eFw7PtgqNkRSNwMTZPFxKNOBbwZ4V6_eOP8YpYAwRPE,18690
+sqlalchemy/engine/strategies.py,sha256=RzejZkLGzWq6QWWJ6a6fyYDdQac4VWCmORCTYEOZwCM,414
+sqlalchemy/engine/url.py,sha256=nUMnXWrRX98_1WYH39JObBqw8WvUTke5fzyRI9cO9Ek,26686
+sqlalchemy/engine/util.py,sha256=drzyg95MX5NzC10bSQsqQ-dc3k4N4p009JhQuLUS8r0,8442
+sqlalchemy/event/__init__.py,sha256=I3Y3cjTy0wC_f-pJRX7B-9UizYje3nh3lIHOlL0Xf00,517
+sqlalchemy/event/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/event/__pycache__/api.cpython-311.pyc,,
+sqlalchemy/event/__pycache__/attr.cpython-311.pyc,,
+sqlalchemy/event/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/event/__pycache__/legacy.cpython-311.pyc,,
+sqlalchemy/event/__pycache__/registry.cpython-311.pyc,,
+sqlalchemy/event/api.py,sha256=yTMDO4cZp-CioTgeDfYGR0O4_zxfFZ-EFdNqM-dOw8E,8043
+sqlalchemy/event/attr.py,sha256=j_JWiTWNGvnb3fVrYvUfoiFpn8wB-gWjiF0wyAenOxw,14109
+sqlalchemy/event/base.py,sha256=FCifBVGLxkNkpr4mN608ZRcAraML8bcS5IU8_vAJjRQ,10936
+sqlalchemy/event/legacy.py,sha256=C09AtrcACXF2gL5c8adk2nLUo1oBfnhFHDkBpv3znUg,6270
+sqlalchemy/event/registry.py,sha256=5FuO494J1n2dUYImM9Yz1kl7C8NmO4c4GtKbk_l-S6k,8486
+sqlalchemy/events.py,sha256=VrZuUXHgwyx4kMKEielctzyTWqDlm2gvzMcc38jedoE,467
+sqlalchemy/exc.py,sha256=x9Z-nIkMQ1r3dqdNmVK5cHQq0zVFrdI6oKkXMw_QB3s,21116
+sqlalchemy/ext/__init__.py,sha256=4-X49d1TiOPC-T8JSpaFiMMVNP8JL9bDoBW19wBmXRY,322
+sqlalchemy/ext/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/associationproxy.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/automap.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/baked.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/compiler.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/horizontal_shard.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/hybrid.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/indexable.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/instrumentation.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/mutable.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/orderinglist.cpython-311.pyc,,
+sqlalchemy/ext/__pycache__/serializer.cpython-311.pyc,,
+sqlalchemy/ext/associationproxy.py,sha256=-687A1ZZMgToO6emMUy8kDOQb-GE8OqfM01xNkh3QtQ,51139
+sqlalchemy/ext/asyncio/__init__.py,sha256=XKCzBrSBP_LlqaCKpiMeSPUzwNdQFXUg9GL57EOM9-8,823
+sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/engine.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/exc.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/result.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/__pycache__/session.cpython-311.pyc,,
+sqlalchemy/ext/asyncio/base.py,sha256=UM_GgnHC7VqT6TTgLXj1eQXtUQa1gVL8--wYcTTeotM,2520
+sqlalchemy/ext/asyncio/engine.py,sha256=h6vGb79ZP4AF5OSsZggxcCHF-dU-zwXso5YCKyga8pk,26655
+sqlalchemy/ext/asyncio/events.py,sha256=_rh2nSAD_6ZqoIRJihiCKUgzSMLBMxBuZ_gUWLpfbHg,1423
+sqlalchemy/ext/asyncio/exc.py,sha256=3tcIXQPCJROB3P_TkoHmkzy6o_dIIuMcnnu4tJB__ck,639
+sqlalchemy/ext/asyncio/result.py,sha256=OPsKEHnMNP80BJI8kLExY8OQovff_2Wj8Kvxd4t3Ht0,21238
+sqlalchemy/ext/asyncio/scoping.py,sha256=fckFlTcwgGjgurVnp69En-4IFwWRqgUV6ukGgPklDJ4,2960
+sqlalchemy/ext/asyncio/session.py,sha256=cbXZVkWO_aO0_r2uHC1GC092LMvF7QPAV0_WE9SCDVM,24140
+sqlalchemy/ext/automap.py,sha256=-x_Ls5a-opmgYwpjDGjmtrR1hqSy7AvKfUthK5UHD2A,45782
+sqlalchemy/ext/baked.py,sha256=DI4hcMk-poznDtAB6S38S7kvo5DXuvrt1CIAT8t5QPw,19969
+sqlalchemy/ext/compiler.py,sha256=Q3Dkj-viLi_1_OFL1EUKsz3RJ8aQk6bYwIflx6tbZR0,22629
+sqlalchemy/ext/declarative/__init__.py,sha256=NS6-oy4iI6AiMaGWGznzYSx4gnB1fOniOGtqPHxC0ms,1842
+sqlalchemy/ext/declarative/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/ext/declarative/__pycache__/extensions.cpython-311.pyc,,
+sqlalchemy/ext/declarative/extensions.py,sha256=Bg_aQW5sJy5LDpy6AyMTCo5U_IpPJqwg5jQ1lV86Fwc,16541
+sqlalchemy/ext/horizontal_shard.py,sha256=2NygP6u9SsOlOqCEqkzNbcSshdxtfxOI78XysnJw3S8,8922
+sqlalchemy/ext/hybrid.py,sha256=OSy2ZB-4i46Ai5NYncBQ4VAd19clflN6esAUGAgKxJE,41939
+sqlalchemy/ext/indexable.py,sha256=RZmG2074pMoM9-A3evs2ZKqMn3M9uTc3izAI1cN6HQc,11255
+sqlalchemy/ext/instrumentation.py,sha256=ReSLFxqbHgwAKNwoQQmKHoqYvWCob_WuXlPAEUJk4pk,14386
+sqlalchemy/ext/mutable.py,sha256=nQ0lVZVjJoRXrebrF_XUdxFcHmvN3ROKZzibzypDZN8,32492
+sqlalchemy/ext/mypy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/ext/mypy/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/apply.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/infer.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/names.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/plugin.cpython-311.pyc,,
+sqlalchemy/ext/mypy/__pycache__/util.cpython-311.pyc,,
+sqlalchemy/ext/mypy/apply.py,sha256=9FIH7jxh6Rl1YDE_3tsacpfNb_8floNQkTuHaNgL7XU,9610
+sqlalchemy/ext/mypy/decl_class.py,sha256=buWnXWGOR71CADPZ0_51S49imTXDo-LjTjWsWhhgee0,17343
+sqlalchemy/ext/mypy/infer.py,sha256=otnyujWtI9x7IqsYMu-c21_AJigyAtsaHW6XmVXcaBk,18028
+sqlalchemy/ext/mypy/names.py,sha256=exMWKhQ7ouSFXojttr0ZadmigT5O_wFQ1rmZ4r7Ks4g,7930
+sqlalchemy/ext/mypy/plugin.py,sha256=6JnnsFCOJVwkF1o6FmXRhBYszq5gmli_lqLZJKMhALA,9245
+sqlalchemy/ext/mypy/util.py,sha256=NuIWpY4W5CXES-3q3lviisWuQhwtaQmkAejOspfrGls,8242
+sqlalchemy/ext/orderinglist.py,sha256=JtRiLDROBsDJnME4kZMDzr3FI6rheP-bd1M-C6zxDPU,13875
+sqlalchemy/ext/serializer.py,sha256=RC0aOS6nlFdA0Agkw_-3iiw7Ah2bZnY7sZVZFGj7vHI,5956
+sqlalchemy/future/__init__.py,sha256=tDG3ddqc3cRE61x7Q32ekTBQONsdy30drnW6KnIB92g,525
+sqlalchemy/future/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/future/__pycache__/engine.cpython-311.pyc,,
+sqlalchemy/future/engine.py,sha256=Ly-M3NGamVrpnA9XOG_nVLra5f7OlmTMmg7dMb2tn4s,16184
+sqlalchemy/future/orm/__init__.py,sha256=EKGpGVxFh3-ZA34c1Ujfy51Z_2oG05CFiSxk48pE1R8,289
+sqlalchemy/future/orm/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/inspection.py,sha256=Bcoh4cUJMKjZHcGQP-_Nz-swGXLVVWidj36W2F35Trg,3051
+sqlalchemy/log.py,sha256=0zxWZ9_FkRwYyjTvTaBGW9wMlRG0dSmbAb7SvW42EfY,7143
+sqlalchemy/orm/__init__.py,sha256=ECAf9d5L7wG58S3ijtNRJaQrdgX3WxDJxTlVVPk0hvk,10964
+sqlalchemy/orm/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/attributes.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/clsregistry.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/collections.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/context.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/decl_api.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/decl_base.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/dependency.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/descriptor_props.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/dynamic.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/evaluator.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/exc.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/identity.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/instrumentation.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/interfaces.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/loading.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/mapper.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/path_registry.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/persistence.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/properties.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/query.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/relationships.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/scoping.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/session.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/state.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/strategies.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/strategy_options.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/sync.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/unitofwork.cpython-311.pyc,,
+sqlalchemy/orm/__pycache__/util.cpython-311.pyc,,
+sqlalchemy/orm/attributes.py,sha256=u3tFz0hQdKyh_mCD53rWSKzaPnerquZiM9C71MHsOa4,77098
+sqlalchemy/orm/base.py,sha256=HZu51CAOyCjJqGGPJbFqOgqbbA_yQ06Lucxpf-J1B54,15068
+sqlalchemy/orm/clsregistry.py,sha256=i8-S8jCSsslTUlOXmfaxoDDkxy3nYGUiZVUeJlpDERA,13286
+sqlalchemy/orm/collections.py,sha256=YXLS4MyQIGWVAV5S3sXLvJKdfVCFAQsKFymOgxzkSuU,54723
+sqlalchemy/orm/context.py,sha256=HyOcHWsyDckCtuc9mVEXCjVkBNigXdReIp8EnGSZR4A,111259
+sqlalchemy/orm/decl_api.py,sha256=rZSz1jys3n_V2woNUZuV8nciN0VgDZFMAiQdNbLkr10,35564
+sqlalchemy/orm/decl_base.py,sha256=unKLbWcQZ3At3nbqh6wbK8YtyGpywuJoBoCB00KJse8,44746
+sqlalchemy/orm/dependency.py,sha256=RsQ6UtF0Ryl-hgMqw9mm5tqNCZa5bbW56_X1prm6R-8,46987
+sqlalchemy/orm/descriptor_props.py,sha256=mdVGbdKc4N8gCxV2RXDGMFZB3V2aWZARUUH9VOe0K1s,25987
+sqlalchemy/orm/dynamic.py,sha256=heJsZBQSckDO1k2fYd1x1tap6qEDoS2yogx9VapzIY4,15957
+sqlalchemy/orm/evaluator.py,sha256=DIHogWj0T5l0CNB7PfiPg-KlKhYKFJD53g7VHGZ92BY,7942
+sqlalchemy/orm/events.py,sha256=81htesUMxJZ1u0DhneIxKYzq9SYBWAHdmj7xSeXYTds,112280
+sqlalchemy/orm/exc.py,sha256=dCW9lmc-DpwTJaHo-q8TJac5dK2jWFc4Fes6V8Z_gUo,6532
+sqlalchemy/orm/identity.py,sha256=_UnI-6Beolu3xWGGERUQfVg0dc9sb-3G22Xv8yzfKFg,7233
+sqlalchemy/orm/instrumentation.py,sha256=L4pmTKaGvmRjd8UTThGF-QqScFIWWs9hx6AZA0-rOn0,20378
+sqlalchemy/orm/interfaces.py,sha256=sHcREdNqRuHwX_hXv9UbfwbY5GD60b5vDODQOsdZW6Y,32344
+sqlalchemy/orm/loading.py,sha256=5rAng8kIp5UOLtZd5nUjduDLIhUQ80Sodc9W-jSMc1E,49317
+sqlalchemy/orm/mapper.py,sha256=rpizqQ7CrtuwLTalpEtQ_jXjt_HoGXdwK1jmeaSushw,143240
+sqlalchemy/orm/path_registry.py,sha256=0Akeeayg-OM-pPOAxVCyggGINInYX8kXrQkYWOtesd0,16411
+sqlalchemy/orm/persistence.py,sha256=KW7iYNJpEHjUMVFr_pQkkyvoSC1cfSpmzRvVv1H_sgs,84250
+sqlalchemy/orm/properties.py,sha256=XmmjsU1XBTyIe1mX8DZ2EdavRutLWxO7QN1k2cJVJ4w,14665
+sqlalchemy/orm/query.py,sha256=9aBTx4yskglMfirPKc9u_RwjmtXz2s3Be7dKHCmcEtY,125553
+sqlalchemy/orm/relationships.py,sha256=1gF4dUPcvAqszVrwGXC1mp58A0kvWQswUh6DBOWCL08,143945
+sqlalchemy/orm/scoping.py,sha256=K4sY8l969uQigmm9VV1GL4XmIA505r_x_1yeDZSRWMQ,7257
+sqlalchemy/orm/session.py,sha256=hfwa5CPmkv33IFGvMUnFDEkGUIOy0e_I-dfOEJixmPc,162785
+sqlalchemy/orm/state.py,sha256=dqtNddMpqipJTxdYT8YCgRBBnRLx1aYSv4thMtNESrs,33524
+sqlalchemy/orm/strategies.py,sha256=GYe8eW9eH5zbmSuly-uS0dn33n0zxj71N3uYZj4nO-M,108082
+sqlalchemy/orm/strategy_options.py,sha256=Qaa2h2Ukq3H7k4ytlMfuziFhctDCGnsyrW5b3KIonDk,67454
+sqlalchemy/orm/sync.py,sha256=KRyKql_Pgjm_y8clsUOLe8jo5JzM1t6II2vCorbtRow,5824
+sqlalchemy/orm/unitofwork.py,sha256=XEMx8PhX-KdP9tQpVgB0mcqnPlVbpSPG4bSKW6zIMRE,27090
+sqlalchemy/orm/util.py,sha256=wSmgTqRDkBKuwuisftAqAnJ_18XLi2lRkPK8zXP3yBU,75314
+sqlalchemy/pool/__init__.py,sha256=dTuz0I0lQ1aj_BHoMzoBk4FW1rI-4ssLZfXi7826ja8,1603
+sqlalchemy/pool/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/pool/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/pool/__pycache__/dbapi_proxy.cpython-311.pyc,,
+sqlalchemy/pool/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/pool/__pycache__/impl.cpython-311.pyc,,
+sqlalchemy/pool/base.py,sha256=CQaopoyk_9yAiLYr1vcgct1LAb6uA2t1AGYoCGKtJfU,39107
+sqlalchemy/pool/dbapi_proxy.py,sha256=ZDa32bJzGunYw8OyS5g0GfLoRo-Qwrf7jcsGsA9StSg,4229
+sqlalchemy/pool/events.py,sha256=nVQfjW55gD6-DEtTIDUCx-cNHZCKtt7C3gsdqf-PFWg,10299
+sqlalchemy/pool/impl.py,sha256=m8kUBUGN3ZikSndBO8mcu2ym8kd_o8vEtLsDSycZXAI,15783
+sqlalchemy/processors.py,sha256=LWwr9g-qDHiike9UKqD1yX8ghCxjpAWRdQk7Mh5NepA,5745
+sqlalchemy/schema.py,sha256=FLG1OeHCucohyiShM_jvw4OJivdrWSAsI7MxPIX7Q1M,2413
+sqlalchemy/sql/__init__.py,sha256=ojeq7QnyQrUcO1Ia7nogzumgOfTKXk6Oib7HuH_hz6Y,4661
+sqlalchemy/sql/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/annotation.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/base.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/coercions.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/compiler.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/crud.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/ddl.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/default_comparator.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/dml.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/elements.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/events.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/expression.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/functions.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/lambdas.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/naming.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/operators.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/roles.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/schema.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/selectable.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/sqltypes.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/traversals.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/type_api.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/util.cpython-311.pyc,,
+sqlalchemy/sql/__pycache__/visitors.cpython-311.pyc,,
+sqlalchemy/sql/annotation.py,sha256=xGpbeieggvywgRlqerZxz6lYnuSob7C86rJQ87k6Va0,11502
+sqlalchemy/sql/base.py,sha256=grJ02HrUj2yoDqlrhbNR_J4RHSahsyFilmvVgnCKb2g,55897
+sqlalchemy/sql/coercions.py,sha256=r5bczqjtsm67jl6RiPxyY-ictLPqtPQO0OnhhSN2zCI,34530
+sqlalchemy/sql/compiler.py,sha256=9D8kz0YBIz7ojkhsUZeU68pg-ecsKAtSef9ymFhI06A,188380
+sqlalchemy/sql/crud.py,sha256=yMGTebDMvF2Hpdto3YSwK6GiRLPpSbRVcZby1zU3n4w,35967
+sqlalchemy/sql/ddl.py,sha256=OV8dpPN3tW0nepwxitfz05W804mGJX6I3HHNJsI0mDo,44208
+sqlalchemy/sql/default_comparator.py,sha256=GR_hgIHtrZWq6j6yTWpiOWTUjIts5gn-UBcE37JVvfk,11178
+sqlalchemy/sql/dml.py,sha256=xAI5vzJFY_Y8_AEhJCo1Cxj-2M9tZzljVcpQ7-iUnpM,54663
+sqlalchemy/sql/elements.py,sha256=Z8zavyqLnrAto9Z-JI28s6IR1w7B3No45JgDMjneLNE,181569
+sqlalchemy/sql/events.py,sha256=7TLLn-aA-vgg8YbWK04RzXNJQ_gh9zmEHlFJu1947iA,13246
+sqlalchemy/sql/expression.py,sha256=cyzp-pgHBfrgQ6_mRxo4T4zNSKIIzd40PlRLgwXI5aM,8828
+sqlalchemy/sql/functions.py,sha256=qwzMoP1OIn0Fnw54iGdWNzFB67ABnu6gTi0D94pCPx4,48482
+sqlalchemy/sql/lambdas.py,sha256=Jh4K1h_Vqp9bKlVGYrIFGfbFZ6WjhitVPyMtpEpeLZw,44913
+sqlalchemy/sql/naming.py,sha256=bmjEtvUW0Ccrc5tzH0_PcoPeA5jAtDLPJ4QxtKaAwe8,6786
+sqlalchemy/sql/operators.py,sha256=cJaehhLz2HWqEdFHtBQwasIqgpaUukegNmKJKrtVt84,48538
+sqlalchemy/sql/roles.py,sha256=ZTgs4PY4rneDh2suTVbmn25yGZyW34sztNWX8cOUf3M,5638
+sqlalchemy/sql/schema.py,sha256=pbLkR844wkM0uzIXTAyauACab3vor1IhmUhBreoqG94,195347
+sqlalchemy/sql/selectable.py,sha256=jj2zQ1vZvhJ2jLeTdBH5YIomtZq8oN1Z1mOro2ozQYg,237390
+sqlalchemy/sql/sqltypes.py,sha256=s1jHooEjUEIHj_-mmALSnLc-KmnwBNQ7h_4H5yWmdkA,114742
+sqlalchemy/sql/traversals.py,sha256=P0GP8F8RlM-lpL5jm3gWj7-NnE8klIXEcDmHk5Dmc-c,52719
+sqlalchemy/sql/type_api.py,sha256=IHOZMFl05LgcJ8FfqGGr703bEQEc8c56ru9vJdX-PEU,71036
+sqlalchemy/sql/util.py,sha256=JI2eMLpaDzZQjG3Cd4AopUmIMfzQXFIQVUJj8TG8gWw,35856
+sqlalchemy/sql/visitors.py,sha256=XLRAf08NKf5ndsNDIRY3wPJaaEBIIxl3DDI_dTKrh_s,27329
+sqlalchemy/testing/__init__.py,sha256=TKwXQsqFFV4gjeO48VGaLhCE99qhIVSQNxFrKdP6uNk,2850
+sqlalchemy/testing/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/assertions.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/assertsql.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/asyncio.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/config.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/engines.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/entities.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/exclusions.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/fixtures.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/mock.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/pickleable.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/profiling.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/provision.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/requirements.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/schema.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/util.cpython-311.pyc,,
+sqlalchemy/testing/__pycache__/warnings.cpython-311.pyc,,
+sqlalchemy/testing/assertions.py,sha256=fcCcIUk04m2XgpotqK2mRD5nKXsyOHXV8tchAAnfQyk,26502
+sqlalchemy/testing/assertsql.py,sha256=OIt0QyHKlFJ4zxu6WrX8_ufmBD9KrMgFrjsXTGkU3ys,14964
+sqlalchemy/testing/asyncio.py,sha256=B6ZqYcQpT6QtM8gR3o3AcZX32J6ZbWDqTTZGklVo5-I,3671
+sqlalchemy/testing/config.py,sha256=XhmzFNkEN_djORr4r6owvoIl3G5zA6Eo5neUiEJXy0E,6543
+sqlalchemy/testing/engines.py,sha256=s4h7bKB2Bqmu1rlquR2O88UktP03n6UVrrWkTNhqm3w,13392
+sqlalchemy/testing/entities.py,sha256=sOd9BlmZFPQFrBdCUlkOR8lxGEQNExkJmS_V2U5WIOk,3253
+sqlalchemy/testing/exclusions.py,sha256=zOthfVJs07z9wN2iAH0rGT39Q76Y_2cBuk5dPEW4wOA,13329
+sqlalchemy/testing/fixtures.py,sha256=Rc2Pa9Ae6xtDPqCPGQhB3UFl7h2_5F41TvdKocL7jvE,30924
+sqlalchemy/testing/mock.py,sha256=RUTHkpnxCQfsDlEZ_aQttL_3SXLATwxt4olgmSxAsJw,894
+sqlalchemy/testing/pickleable.py,sha256=QlwC2Cr7vKkHlj86t2Wlq9eGteZFXkvPpGlWAl9_g7Y,2886
+sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sqlalchemy/testing/plugin/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-311.pyc,,
+sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-311.pyc,,
+sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-311.pyc,,
+sqlalchemy/testing/plugin/__pycache__/reinvent_fixtures_py2k.cpython-311.pyc,,
+sqlalchemy/testing/plugin/bootstrap.py,sha256=038KOv89msOTFsWoDvCyPRb3ZTMv5eAOOKoGPHuZ7zs,1701
+sqlalchemy/testing/plugin/plugin_base.py,sha256=9Bg56KOsZSGW1jLHh_7fle85yFocyV8AGGVlswO9XAU,21540
+sqlalchemy/testing/plugin/pytestplugin.py,sha256=_NbB52E6sv6R9NJApMxMnwomH8y7iirfCYKnXvUH1g0,26133
+sqlalchemy/testing/plugin/reinvent_fixtures_py2k.py,sha256=MdakbJzFh8N_7gUpX-nFbGPFs3AZRsmDAe-7zucf0ls,3288
+sqlalchemy/testing/profiling.py,sha256=ullStV2c-R4jTQJMK1tMKZE5qtSZ-PB1LzHod_hA230,10566
+sqlalchemy/testing/provision.py,sha256=IPpsZg4Pc42mXGScKdLri0SjeWJrURXbBF1S9m6ftY8,12070
+sqlalchemy/testing/requirements.py,sha256=G-l-20BjZ6eMA7TIy3FO4Ck_T6acLz9XwBheQI4Dql0,43499
+sqlalchemy/testing/schema.py,sha256=INOq15yhNyANmheylSQBUlm0IWRaAkEX22BpHSMqn08,6544
+sqlalchemy/testing/suite/__init__.py,sha256=_firVc2uS3TMZ3vH2baQzNb17ubM78RHtb9kniSybmk,476
+sqlalchemy/testing/suite/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_cte.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_insert.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_results.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_select.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_types.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-311.pyc,,
+sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-311.pyc,,
+sqlalchemy/testing/suite/test_cte.py,sha256=XuTuaWblSXyO1OOUTShBBmNch7fBdGnlMD84ooVTqFY,6183
+sqlalchemy/testing/suite/test_ddl.py,sha256=UwbfljXHdWUen3muIcgnOPi-A4AO6F1QzSOiHf9lU-A,11762
+sqlalchemy/testing/suite/test_deprecations.py,sha256=8oLDFUswey8KjPFKRUsqMyGT5sUMMoPQr7-XyIBMehw,5059
+sqlalchemy/testing/suite/test_dialect.py,sha256=eR1VVOb2fm955zavpWkmMjipCva3QvEE177U0OG-0LY,10895
+sqlalchemy/testing/suite/test_insert.py,sha256=oKtVjFuxqdSV5uKj5-OxdSABupLp0pECkWkSLd2U_QA,11134
+sqlalchemy/testing/suite/test_reflection.py,sha256=p-m2BjuWh7jW2vXvY_LxYsfjW47HqGs9O9PUpfm1HIs,58130
+sqlalchemy/testing/suite/test_results.py,sha256=xcoSl1ueaHo8LgKZp0Z1lJ44Mhjf2hxlWs_LjNLBNiE,13983
+sqlalchemy/testing/suite/test_rowcount.py,sha256=GQQRXIWbb6SfD5hwtBC8qvkGAgi1rI5Pv3c59eoumck,4877
+sqlalchemy/testing/suite/test_select.py,sha256=is3BbULeOWOJTRCoUwPnh6Crue15FXfkXKqAkxrFeGM,55464
+sqlalchemy/testing/suite/test_sequence.py,sha256=eCyOQlynF8T0cLrIMz0PO6WuW8ktpFVYq_fQp5CQ298,8431
+sqlalchemy/testing/suite/test_types.py,sha256=airX8OuJJdft4DU8okOLecJbcUhC15urr60Yu1U8Qe4,48044
+sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=CndeAtV3DWJXxLbOoumqf4_mOOYcW_yNOrbKQ4cwFhw,6737
+sqlalchemy/testing/suite/test_update_delete.py,sha256=w9MMRqJCm7OW0Q5XaVjS6B8BGY_b_VvBeK3EWr7NKhU,1625
+sqlalchemy/testing/util.py,sha256=bvCWcESEPEO8QUTH0CcOa4Xg65EYK--V8Q_XeFcfGfE,12503
+sqlalchemy/testing/warnings.py,sha256=l9lI3heNOSbKreAhLcABpaA1e_6Ioi4l7q0mr5jY5OI,2270
+sqlalchemy/types.py,sha256=x8YDIEypMHOzWb7dzp67tW2WfDF7xtdh72HVDxm-aaY,2995
+sqlalchemy/util/__init__.py,sha256=75NADEtwE5GMCS27VcsEnTsTq1nSvXmJ2GY2aU3Q8hI,6373
+sqlalchemy/util/__pycache__/__init__.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/_collections.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/_compat_py3k.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/_preloaded.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/compat.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/concurrency.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/deprecations.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/langhelpers.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/queue.cpython-311.pyc,,
+sqlalchemy/util/__pycache__/topological.cpython-311.pyc,,
+sqlalchemy/util/_collections.py,sha256=Nulmym_NZYGN4OyE9cMtIVSoTwOzk3eJpSJ20l8j-lU,29139
+sqlalchemy/util/_compat_py3k.py,sha256=KibHVHAIlQfYdl8xs3ZhJQDlWEI6EhudTbOnMc2x9e4,2195
+sqlalchemy/util/_concurrency_py3k.py,sha256=5fTahmOgokaam-u-z7Xv0DYKR7YnK4TNjQqbVRYhoKQ,6598
+sqlalchemy/util/_preloaded.py,sha256=rx7QZ4T1zDZV5lktSvQlop3O0kdbCFVMmNDp5IOhpXQ,2396
+sqlalchemy/util/compat.py,sha256=cRcYIpcBc6aV_yboUTsKpmX1ssICP7kloCJRqEMsRBs,18281
+sqlalchemy/util/concurrency.py,sha256=LtozDo0PsiToyVmKzSDnu8qOMhRyGVjTNMsBiKro9d8,2278
+sqlalchemy/util/deprecations.py,sha256=RXg5M_MQhaopn00uTB0WEcz5yTTmPu2OCFPNklw5Uv4,11774
+sqlalchemy/util/langhelpers.py,sha256=RIlviqqBbBy1XhMnOwQHtmtAofNtMF79aCu3wa9Iycc,56288
+sqlalchemy/util/queue.py,sha256=FW6DSeO_GadaW0UA2EXjrBtFPRHO-dNGEoRwqHTfkMA,9293
+sqlalchemy/util/topological.py,sha256=MV1lkI2E0JdVIJVplggVo6iO_ZEVlUHRGvMW9AsXJRA,2859
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/WHEEL b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/WHEEL
new file mode 100644
index 0000000..63d8b2f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/WHEEL
@@ -0,0 +1,8 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_5_x86_64
+Tag: cp311-cp311-manylinux1_x86_64
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/top_level.txt
new file mode 100644
index 0000000..39fb2be
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/SQLAlchemy-1.4.42.dist-info/top_level.txt
@@ -0,0 +1 @@
+sqlalchemy
diff --git a/venv/lib/python3.11/site-packages/__pycache__/bottle.cpython-311.pyc b/venv/lib/python3.11/site-packages/__pycache__/bottle.cpython-311.pyc
new file mode 100644
index 0000000..b9e760d
Binary files /dev/null and b/venv/lib/python3.11/site-packages/__pycache__/bottle.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py b/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py
new file mode 100644
index 0000000..f987a53
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_distutils_hack/__init__.py
@@ -0,0 +1,222 @@
+# don't import any costly modules
+import sys
+import os
+
+
+is_pypy = '__pypy__' in sys.builtin_module_names
+
+
+def warn_distutils_present():
+ if 'distutils' not in sys.modules:
+ return
+ if is_pypy and sys.version_info < (3, 7):
+ # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
+ # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
+ return
+ import warnings
+
+ warnings.warn(
+ "Distutils was imported before Setuptools, but importing Setuptools "
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
+ "using distutils directly, ensure that setuptools is installed in the "
+ "traditional way (e.g. not an editable install), and/or make sure "
+ "that setuptools is always imported before distutils."
+ )
+
+
+def clear_distutils():
+ if 'distutils' not in sys.modules:
+ return
+ import warnings
+
+ warnings.warn("Setuptools is replacing distutils.")
+ mods = [
+ name
+ for name in sys.modules
+ if name == "distutils" or name.startswith("distutils.")
+ ]
+ for name in mods:
+ del sys.modules[name]
+
+
+def enabled():
+ """
+ Allow selection of distutils by environment variable.
+ """
+ which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
+ return which == 'local'
+
+
+def ensure_local_distutils():
+ import importlib
+
+ clear_distutils()
+
+ # With the DistutilsMetaFinder in place,
+ # perform an import to cause distutils to be
+ # loaded from setuptools._distutils. Ref #2906.
+ with shim():
+ importlib.import_module('distutils')
+
+ # check that submodules load as expected
+ core = importlib.import_module('distutils.core')
+ assert '_distutils' in core.__file__, core.__file__
+ assert 'setuptools._distutils.log' not in sys.modules
+
+
+def do_override():
+ """
+ Ensure that the local copy of distutils is preferred over stdlib.
+
+ See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
+ for more motivation.
+ """
+ if enabled():
+ warn_distutils_present()
+ ensure_local_distutils()
+
+
+class _TrivialRe:
+ def __init__(self, *patterns):
+ self._patterns = patterns
+
+ def match(self, string):
+ return all(pat in string for pat in self._patterns)
+
+
+class DistutilsMetaFinder:
+ def find_spec(self, fullname, path, target=None):
+ # optimization: only consider top level modules and those
+ # found in the CPython test suite.
+ if path is not None and not fullname.startswith('test.'):
+ return
+
+ method_name = 'spec_for_{fullname}'.format(**locals())
+ method = getattr(self, method_name, lambda: None)
+ return method()
+
+ def spec_for_distutils(self):
+ if self.is_cpython():
+ return
+
+ import importlib
+ import importlib.abc
+ import importlib.util
+
+ try:
+ mod = importlib.import_module('setuptools._distutils')
+ except Exception:
+ # There are a couple of cases where setuptools._distutils
+ # may not be present:
+ # - An older Setuptools without a local distutils is
+ # taking precedence. Ref #2957.
+ # - Path manipulation during sitecustomize removes
+ # setuptools from the path but only after the hook
+ # has been loaded. Ref #2980.
+ # In either case, fall back to stdlib behavior.
+ return
+
+ class DistutilsLoader(importlib.abc.Loader):
+ def create_module(self, spec):
+ mod.__name__ = 'distutils'
+ return mod
+
+ def exec_module(self, module):
+ pass
+
+ return importlib.util.spec_from_loader(
+ 'distutils', DistutilsLoader(), origin=mod.__file__
+ )
+
+ @staticmethod
+ def is_cpython():
+ """
+ Suppress supplying distutils for CPython (build and tests).
+ Ref #2965 and #3007.
+ """
+ return os.path.isfile('pybuilddir.txt')
+
+ def spec_for_pip(self):
+ """
+ Ensure stdlib distutils when running under pip.
+ See pypa/pip#8761 for rationale.
+ """
+ if self.pip_imported_during_build():
+ return
+ clear_distutils()
+ self.spec_for_distutils = lambda: None
+
+ @classmethod
+ def pip_imported_during_build(cls):
+ """
+ Detect if pip is being imported in a build script. Ref #2355.
+ """
+ import traceback
+
+ return any(
+ cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
+ )
+
+ @staticmethod
+ def frame_file_is_setup(frame):
+ """
+ Return True if the indicated frame suggests a setup.py file.
+ """
+ # some frames may not have __file__ (#2940)
+ return frame.f_globals.get('__file__', '').endswith('setup.py')
+
+ def spec_for_sensitive_tests(self):
+ """
+ Ensure stdlib distutils when running select tests under CPython.
+
+ python/cpython#91169
+ """
+ clear_distutils()
+ self.spec_for_distutils = lambda: None
+
+ sensitive_tests = (
+ [
+ 'test.test_distutils',
+ 'test.test_peg_generator',
+ 'test.test_importlib',
+ ]
+ if sys.version_info < (3, 10)
+ else [
+ 'test.test_distutils',
+ ]
+ )
+
+
+for name in DistutilsMetaFinder.sensitive_tests:
+ setattr(
+ DistutilsMetaFinder,
+ f'spec_for_{name}',
+ DistutilsMetaFinder.spec_for_sensitive_tests,
+ )
+
+
+DISTUTILS_FINDER = DistutilsMetaFinder()
+
+
+def add_shim():
+ DISTUTILS_FINDER in sys.meta_path or insert_shim()
+
+
+class shim:
+ def __enter__(self):
+ insert_shim()
+
+ def __exit__(self, exc, value, tb):
+ remove_shim()
+
+
+def insert_shim():
+ sys.meta_path.insert(0, DISTUTILS_FINDER)
+
+
+def remove_shim():
+ try:
+ sys.meta_path.remove(DISTUTILS_FINDER)
+ except ValueError:
+ pass
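The finder above only has an effect once it sits on `sys.meta_path`. A minimal sketch of how the shim is meant to be activated, using only the helpers defined in this file (`enabled()`, `add_shim()`) and the `SETUPTOOLS_USE_DISTUTILS` variable they already consult; the wrapper script itself is illustrative and not part of this diff:

    import os
    import _distutils_hack

    # Same opt-out knob that enabled() checks: any value other than 'local'
    # keeps the stdlib distutils.
    if os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local') == 'local':
        _distutils_hack.add_shim()  # put DISTUTILS_FINDER first on sys.meta_path

    import distutils
    # With the shim active and setuptools installed, this should resolve into
    # setuptools/_distutils/ rather than the standard library.
    print(distutils.__file__)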
diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..44b763f
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/__init__.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc
new file mode 100644
index 0000000..1ff034b
Binary files /dev/null and b/venv/lib/python3.11/site-packages/_distutils_hack/__pycache__/override.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/_distutils_hack/override.py b/venv/lib/python3.11/site-packages/_distutils_hack/override.py
new file mode 100644
index 0000000..2cc433a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/_distutils_hack/override.py
@@ -0,0 +1 @@
+__import__('_distutils_hack').do_override()
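override.py exists so that a bare import is enough to trigger the whole override. A quick check of the effect (illustrative; assumes setuptools is installed in this venv):

    import _distutils_hack.override  # side effect: runs do_override()
    import distutils
    print(distutils.__file__)  # expected to point under setuptools/_distutils/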
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/AUTHORS b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/AUTHORS
new file mode 100644
index 0000000..1b7869d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/AUTHORS
@@ -0,0 +1,64 @@
+Bottle is written and maintained by Marcel Hellkamp <marc@gsites.de>.
+
+Thanks to all the people who found bugs, sent patches, spread the word, helped each other on the mailing-list and made this project possible. I hope the following (alphabetically sorted) list is complete. If you miss your name on that list (or want your name removed) please :doc:`tell me ` or add it yourself.
+
+* acasajus
+* Adam R. Smith
+* Alexey Borzenkov
+* Alexis Daboville
+* Anton I. Sipos
+* Anton Kolechkin
+* apexi200sx
+* apheage
+* BillMa
+* Brad Greenlee
+* Brandon Gilmore
+* Branko Vukelic
+* Brian Sierakowski
+* Brian Wickman
+* Carl Scharenberg
+* Damien Degois
+* David Buxton
+* Duane Johnson
+* fcamel
+* Frank Murphy
+* Frederic Junod
+* goldfaber3012
+* Greg Milby
+* gstein
+* Ian Davis
+* Itamar Nabriski
+* Iuri de Silvio
+* Jaimie Murdock
+* Jeff Nichols
+* Jeremy Kelley
+* joegester
+* Johannes Krampf
+* Jonas Haag
+* Joshua Roesslein
+* Karl
+* Kevin Zuber
+* Kraken
+* Kyle Fritz
+* m35
+* Marcos Neves
+* masklinn
+* Michael Labbe
+* Michael Soulier
+* `reddit `_
+* Nicolas Vanhoren
+* Robert Rollins
+* rogererens
+* rwxrwx
+* Santiago Gala
+* Sean M. Collins
+* Sebastian Wollrath
+* Seth
+* Sigurd Høgsbro
+* Stuart Rackham
+* Sun Ning
+* Tomás A. Schertel
+* Tristan Zajonc
+* voltron
+* Wieland Hoffmann
+* zombat
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/LICENSE b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/LICENSE
new file mode 100644
index 0000000..cdd0c70
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012, Marcel Hellkamp.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/METADATA b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/METADATA
new file mode 100644
index 0000000..9cf5ed2
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/METADATA
@@ -0,0 +1,43 @@
+Metadata-Version: 2.1
+Name: bottle
+Version: 0.12.25
+Summary: Fast and simple WSGI-framework for small web-applications.
+Home-page: http://bottlepy.org/
+Author: Marcel Hellkamp
+Author-email: marc@gsites.de
+License: MIT
+Platform: any
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
+Classifier: Topic :: Internet :: WWW/HTTP :: HTTP Servers
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
+Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Server
+Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+License-File: LICENSE
+License-File: AUTHORS
+
+
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottlepy.org/
+
+Copyright (c) 2016, Marcel Hellkamp.
+License: MIT (see LICENSE for details)
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/RECORD b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/RECORD
new file mode 100644
index 0000000..a5fe85e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/RECORD
@@ -0,0 +1,11 @@
+../../../bin/__pycache__/bottle.cpython-311.pyc,,
+../../../bin/bottle.py,sha256=0F2U2N0T9sp9TZB7vNL02rcqoRJEa9hgiV2zOinvKxM,152025
+__pycache__/bottle.cpython-311.pyc,,
+bottle-0.12.25.dist-info/AUTHORS,sha256=A0Y_uWygTzQczXdwcMI8h6XqqWns2pGsJnZOGwu_IPo,1308
+bottle-0.12.25.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+bottle-0.12.25.dist-info/LICENSE,sha256=0OchHxw8GhxW850YvLB_J_SAyKlVJhd1bdo6M1kzuKY,1061
+bottle-0.12.25.dist-info/METADATA,sha256=rJhg3ktccumQ24G8lM8XPLztRVnpo4S4C4fjCRtOWdM,1836
+bottle-0.12.25.dist-info/RECORD,,
+bottle-0.12.25.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92
+bottle-0.12.25.dist-info/top_level.txt,sha256=cK8mpC1WUvVJAVL1XsjCoCGkD-0Yc-pcrqfH0fRXkhg,7
+bottle.py,sha256=iJVdWAfpOi2ksPZlyZtALczPj9aqqcNXrSXSClUCJwc,151993
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/WHEEL b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/WHEEL
new file mode 100644
index 0000000..57e3d84
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.38.4)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/top_level.txt
new file mode 100644
index 0000000..310dc0b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle-0.12.25.dist-info/top_level.txt
@@ -0,0 +1 @@
+bottle
diff --git a/venv/lib/python3.11/site-packages/bottle.py b/venv/lib/python3.11/site-packages/bottle.py
new file mode 100644
index 0000000..54ee51d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/bottle.py
@@ -0,0 +1,3809 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+Bottle is a fast and simple micro-framework for small web applications. It
+offers request dispatching (Routes) with url parameter support, templates,
+a built-in HTTP Server and adapters for many third party WSGI/HTTP-server and
+template engines - all in a single file and with no dependencies other than the
+Python Standard Library.
+
+Homepage and documentation: http://bottlepy.org/
+
+Copyright (c) 2016, Marcel Hellkamp.
+License: MIT (see LICENSE for details)
+"""
+
+from __future__ import with_statement
+
+__author__ = 'Marcel Hellkamp'
+__version__ = '0.12.25'
+__license__ = 'MIT'
+
+# The gevent server adapter needs to patch some modules before they are imported
+# This is why we parse the commandline parameters here but handle them later
+if __name__ == '__main__':
+ from optparse import OptionParser
+ _cmd_parser = OptionParser(usage="usage: %prog [options] package.module:app")
+ _opt = _cmd_parser.add_option
+ _opt("--version", action="store_true", help="show version number.")
+ _opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.")
+ _opt("-s", "--server", default='wsgiref', help="use SERVER as backend.")
+ _opt("-p", "--plugin", action="append", help="install additional plugin/s.")
+ _opt("--debug", action="store_true", help="start server in debug mode.")
+ _opt("--reload", action="store_true", help="auto-reload on file changes.")
+ _cmd_options, _cmd_args = _cmd_parser.parse_args()
+ if _cmd_options.server and _cmd_options.server.startswith('gevent'):
+ import gevent.monkey; gevent.monkey.patch_all()
+
+import base64, cgi, email.utils, functools, hmac, itertools, mimetypes,\
+ os, re, subprocess, sys, tempfile, threading, time, warnings, hashlib
+
+from datetime import date as datedate, datetime, timedelta
+from tempfile import TemporaryFile
+from traceback import format_exc, print_exc
+from unicodedata import normalize
+
+
+try: from simplejson import dumps as json_dumps, loads as json_lds
+except ImportError: # pragma: no cover
+ try: from json import dumps as json_dumps, loads as json_lds
+ except ImportError:
+ try: from django.utils.simplejson import dumps as json_dumps, loads as json_lds
+ except ImportError:
+ def json_dumps(data):
+ raise ImportError("JSON support requires Python 2.6 or simplejson.")
+ json_lds = json_dumps
+
+
+
+# We now try to fix 2.5/2.6/3.1/3.2 incompatibilities.
+# It ain't pretty but it works... Sorry for the mess.
+
+py = sys.version_info
+py3k = py >= (3, 0, 0)
+py25 = py < (2, 6, 0)
+py31 = (3, 1, 0) <= py < (3, 2, 0)
+
+# Workaround for the missing "as" keyword in py3k.
+def _e(): return sys.exc_info()[1]
+
+# Workaround for the "print is a keyword/function" Python 2/3 dilemma
+# and a fallback for mod_wsgi (restricts stdout/err attribute access)
+try:
+ _stdout, _stderr = sys.stdout.write, sys.stderr.write
+except IOError:
+ _stdout = lambda x: sys.stdout.write(x)
+ _stderr = lambda x: sys.stderr.write(x)
+
+# Lots of stdlib and builtin differences.
+if py3k:
+ import http.client as httplib
+ import _thread as thread
+ from urllib.parse import urljoin, SplitResult as UrlSplitResult
+ from urllib.parse import urlencode, quote as urlquote, unquote as urlunquote
+ urlunquote = functools.partial(urlunquote, encoding='latin1')
+ from http.cookies import SimpleCookie
+ if py >= (3, 3, 0):
+ from collections.abc import MutableMapping as DictMixin
+ from types import ModuleType as new_module
+ else:
+ from collections import MutableMapping as DictMixin
+ from imp import new_module
+ import pickle
+ from io import BytesIO
+ from configparser import ConfigParser
+ from inspect import getfullargspec
+ def getargspec(func):
+ spec = getfullargspec(func)
+ kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs)
+ return kwargs, spec[1], spec[2], spec[3]
+
+ basestring = str
+ unicode = str
+ json_loads = lambda s: json_lds(touni(s))
+ callable = lambda x: hasattr(x, '__call__')
+ imap = map
+ def _raise(*a): raise a[0](a[1]).with_traceback(a[2])
+else: # 2.x
+ import httplib
+ import thread
+ from urlparse import urljoin, SplitResult as UrlSplitResult
+ from urllib import urlencode, quote as urlquote, unquote as urlunquote
+ from Cookie import SimpleCookie
+ from itertools import imap
+ import cPickle as pickle
+ from imp import new_module
+ from StringIO import StringIO as BytesIO
+ from ConfigParser import SafeConfigParser as ConfigParser
+ from inspect import getargspec
+ if py25:
+ msg = "Python 2.5 support may be dropped in future versions of Bottle."
+ warnings.warn(msg, DeprecationWarning)
+ from UserDict import DictMixin
+ def next(it): return it.next()
+ bytes = str
+ else: # 2.6, 2.7
+ from collections import MutableMapping as DictMixin
+ unicode = unicode
+ json_loads = json_lds
+ eval(compile('def _raise(*a): raise a[0], a[1], a[2]', '', 'exec'))
+
+# Some helpers for string/byte handling
+def tob(s, enc='utf8'):
+ return s.encode(enc) if isinstance(s, unicode) else bytes(s)
+def touni(s, enc='utf8', err='strict'):
+ return s.decode(enc, err) if isinstance(s, bytes) else unicode(s)
+tonat = touni if py3k else tob
+
+# 3.2 fixes cgi.FieldStorage to accept bytes (which makes a lot of sense).
+# 3.1 needs a workaround.
+if py31:
+ from io import TextIOWrapper
+ class NCTextIOWrapper(TextIOWrapper):
+ def close(self): pass # Keep wrapped buffer open.
+
+
+# A bug in functools causes it to break if the wrapper is an instance method
+def update_wrapper(wrapper, wrapped, *a, **ka):
+ try: functools.update_wrapper(wrapper, wrapped, *a, **ka)
+ except AttributeError: pass
+
+
+
+# These helpers are used at module level and need to be defined first.
+# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense.
+
+def depr(message, hard=False):
+ warnings.warn(message, DeprecationWarning, stacklevel=3)
+
+def makelist(data): # This is just too handy
+ if isinstance(data, (tuple, list, set, dict)): return list(data)
+ elif data: return [data]
+ else: return []
+
+
+class DictProperty(object):
+ ''' Property that maps to a key in a local dict-like attribute. '''
+ def __init__(self, attr, key=None, read_only=False):
+ self.attr, self.key, self.read_only = attr, key, read_only
+
+ def __call__(self, func):
+ functools.update_wrapper(self, func, updated=[])
+ self.getter, self.key = func, self.key or func.__name__
+ return self
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ key, storage = self.key, getattr(obj, self.attr)
+ if key not in storage: storage[key] = self.getter(obj)
+ return storage[key]
+
+ def __set__(self, obj, value):
+ if self.read_only: raise AttributeError("Read-Only property.")
+ getattr(obj, self.attr)[self.key] = value
+
+ def __delete__(self, obj):
+ if self.read_only: raise AttributeError("Read-Only property.")
+ del getattr(obj, self.attr)[self.key]
+
+
+class cached_property(object):
+ ''' A property that is only computed once per instance and then replaces
+ itself with an ordinary attribute. Deleting the attribute resets the
+ property. '''
+
+ def __init__(self, func):
+ self.__doc__ = getattr(func, '__doc__')
+ self.func = func
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ value = obj.__dict__[self.func.__name__] = self.func(obj)
+ return value
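+    # Note on the pattern (illustrative, not an upstream comment): the first
+    # access stores the computed value in the instance __dict__ under the
+    # function name, so later lookups bypass the descriptor entirely; deleting
+    # that attribute (as Route.reset() does for 'call') re-arms the property.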
+
+
+class lazy_attribute(object):
+ ''' A property that caches itself to the class object. '''
+ def __init__(self, func):
+ functools.update_wrapper(self, func, updated=[])
+ self.getter = func
+
+ def __get__(self, obj, cls):
+ value = self.getter(cls)
+ setattr(cls, self.__name__, value)
+ return value
+
+
+
+
+
+
+###############################################################################
+# Exceptions and Events ########################################################
+###############################################################################
+
+
+class BottleException(Exception):
+ """ A base class for exceptions used by bottle. """
+ pass
+
+
+
+
+
+
+###############################################################################
+# Routing ######################################################################
+###############################################################################
+
+
+class RouteError(BottleException):
+ """ This is a base class for all routing related exceptions """
+
+
+class RouteReset(BottleException):
+ """ If raised by a plugin or request handler, the route is reset and all
+ plugins are re-applied. """
+
+class RouterUnknownModeError(RouteError): pass
+
+
+class RouteSyntaxError(RouteError):
+ """ The route parser found something not supported by this router. """
+
+
+class RouteBuildError(RouteError):
+ """ The route could not be built. """
+
+
+def _re_flatten(p):
+ ''' Turn all capturing groups in a regular expression pattern into
+ non-capturing groups. '''
+ if '(' not in p: return p
+ return re.sub(r'(\\*)(\(\?P<[^>]+>|\((?!\?))',
+ lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + '(?:', p)
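+# Worked example (illustrative, not an upstream comment):
+#   _re_flatten(r'/(?P<id>\d+)/edit')  ->  r'/(?:\d+)/edit'
+# so that when _compile() OR-joins many rules into one big regex, only the
+# outer per-rule group counts toward the 99-group limit and match.lastindex
+# still identifies which rule matched.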
+
+
+class Router(object):
+ ''' A Router is an ordered collection of route->target pairs. It is used to
+ efficiently match WSGI requests against a number of routes and return
+ the first target that satisfies the request. The target may be anything,
+ usually a string, ID or callable object. A route consists of a path-rule
+ and a HTTP method.
+
+ The path-rule is either a static path (e.g. `/contact`) or a dynamic
+        path that contains wildcards (e.g. `/wiki/<page>`). The wildcard syntax
+ and details on the matching order are described in docs:`routing`.
+ '''
+
+ default_pattern = '[^/]+'
+ default_filter = 're'
+
+ #: The current CPython regexp implementation does not allow more
+ #: than 99 matching groups per regular expression.
+ _MAX_GROUPS_PER_PATTERN = 99
+
+ def __init__(self, strict=False):
+ self.rules = [] # All rules in order
+ self._groups = {} # index of regexes to find them in dyna_routes
+ self.builder = {} # Data structure for the url builder
+ self.static = {} # Search structure for static routes
+ self.dyna_routes = {}
+ self.dyna_regexes = {} # Search structure for dynamic routes
+ #: If true, static routes are no longer checked first.
+ self.strict_order = strict
+ self.filters = {
+ 're': lambda conf:
+ (_re_flatten(conf or self.default_pattern), None, None),
+ 'int': lambda conf: (r'-?\d+', int, lambda x: str(int(x))),
+ 'float': lambda conf: (r'-?[\d.]+', float, lambda x: str(float(x))),
+ 'path': lambda conf: (r'.+?', None, None)}
+
+ def add_filter(self, name, func):
+ ''' Add a filter. The provided function is called with the configuration
+ string as parameter and must return a (regexp, to_python, to_url) tuple.
+ The first element is a string, the last two are callables or None. '''
+ self.filters[name] = func
+
+ rule_syntax = re.compile('(\\\\*)'\
+ '(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)'\
+ '|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)'\
+ '(?::((?:\\\\.|[^\\\\>])+)?)?)?>))')
+
+ def _itertokens(self, rule):
+ offset, prefix = 0, ''
+ for match in self.rule_syntax.finditer(rule):
+ prefix += rule[offset:match.start()]
+ g = match.groups()
+ if len(g[0])%2: # Escaped wildcard
+ prefix += match.group(0)[len(g[0]):]
+ offset = match.end()
+ continue
+ if prefix:
+ yield prefix, None, None
+ name, filtr, conf = g[4:7] if g[2] is None else g[1:4]
+ yield name, filtr or 'default', conf or None
+ offset, prefix = match.end(), ''
+ if offset <= len(rule) or prefix:
+ yield prefix+rule[offset:], None, None
+
+ def add(self, rule, method, target, name=None):
+ ''' Add a new rule or replace the target for an existing rule. '''
+ anons = 0 # Number of anonymous wildcards found
+ keys = [] # Names of keys
+ pattern = '' # Regular expression pattern with named groups
+ filters = [] # Lists of wildcard input filters
+ builder = [] # Data structure for the URL builder
+ is_static = True
+
+ for key, mode, conf in self._itertokens(rule):
+ if mode:
+ is_static = False
+ if mode == 'default': mode = self.default_filter
+ mask, in_filter, out_filter = self.filters[mode](conf)
+ if not key:
+ pattern += '(?:%s)' % mask
+ key = 'anon%d' % anons
+ anons += 1
+ else:
+ pattern += '(?P<%s>%s)' % (key, mask)
+ keys.append(key)
+ if in_filter: filters.append((key, in_filter))
+ builder.append((key, out_filter or str))
+ elif key:
+ pattern += re.escape(key)
+ builder.append((None, key))
+
+ self.builder[rule] = builder
+ if name: self.builder[name] = builder
+
+ if is_static and not self.strict_order:
+ self.static.setdefault(method, {})
+ self.static[method][self.build(rule)] = (target, None)
+ return
+
+ try:
+ re_pattern = re.compile('^(%s)$' % pattern)
+ re_match = re_pattern.match
+ except re.error:
+ raise RouteSyntaxError("Could not add Route: %s (%s)" % (rule, _e()))
+
+ if filters:
+ def getargs(path):
+ url_args = re_match(path).groupdict()
+ for name, wildcard_filter in filters:
+ try:
+ url_args[name] = wildcard_filter(url_args[name])
+ except ValueError:
+ raise HTTPError(400, 'Path has wrong format.')
+ return url_args
+ elif re_pattern.groupindex:
+ def getargs(path):
+ return re_match(path).groupdict()
+ else:
+ getargs = None
+
+ flatpat = _re_flatten(pattern)
+ whole_rule = (rule, flatpat, target, getargs)
+
+ if (flatpat, method) in self._groups:
+ if DEBUG:
+ msg = 'Route <%s %s> overwrites a previously defined route'
+ warnings.warn(msg % (method, rule), RuntimeWarning)
+ self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule
+ else:
+ self.dyna_routes.setdefault(method, []).append(whole_rule)
+ self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1
+
+ self._compile(method)
+
+ def _compile(self, method):
+ all_rules = self.dyna_routes[method]
+ comborules = self.dyna_regexes[method] = []
+ maxgroups = self._MAX_GROUPS_PER_PATTERN
+ for x in range(0, len(all_rules), maxgroups):
+ some = all_rules[x:x+maxgroups]
+ combined = (flatpat for (_, flatpat, _, _) in some)
+ combined = '|'.join('(^%s$)' % flatpat for flatpat in combined)
+ combined = re.compile(combined).match
+ rules = [(target, getargs) for (_, _, target, getargs) in some]
+ comborules.append((combined, rules))
+
+ def build(self, _name, *anons, **query):
+        ''' Build a URL by filling the wildcards in a rule. '''
+ builder = self.builder.get(_name)
+ if not builder: raise RouteBuildError("No route with that name.", _name)
+ try:
+ for i, value in enumerate(anons): query['anon%d'%i] = value
+ url = ''.join([f(query.pop(n)) if n else f for (n,f) in builder])
+ return url if not query else url+'?'+urlencode(query)
+ except KeyError:
+ raise RouteBuildError('Missing URL argument: %r' % _e().args[0])
+
+ def match(self, environ):
+        ''' Return a (target, url_args) tuple or raise HTTPError(400/404/405). '''
+ verb = environ['REQUEST_METHOD'].upper()
+ path = environ['PATH_INFO'] or '/'
+ target = None
+ if verb == 'HEAD':
+ methods = ['PROXY', verb, 'GET', 'ANY']
+ else:
+ methods = ['PROXY', verb, 'ANY']
+
+ for method in methods:
+ if method in self.static and path in self.static[method]:
+ target, getargs = self.static[method][path]
+ return target, getargs(path) if getargs else {}
+ elif method in self.dyna_regexes:
+ for combined, rules in self.dyna_regexes[method]:
+ match = combined(path)
+ if match:
+ target, getargs = rules[match.lastindex - 1]
+ return target, getargs(path) if getargs else {}
+
+ # No matching route found. Collect alternative methods for 405 response
+ allowed = set([])
+ nocheck = set(methods)
+ for method in set(self.static) - nocheck:
+ if path in self.static[method]:
+ allowed.add(method)
+ for method in set(self.dyna_regexes) - allowed - nocheck:
+ for combined, rules in self.dyna_regexes[method]:
+ match = combined(path)
+ if match:
+ allowed.add(method)
+ if allowed:
+ allow_header = ",".join(sorted(allowed))
+ raise HTTPError(405, "Method not allowed.", Allow=allow_header)
+
+ # No matching route and no alternative method found. We give up
+ raise HTTPError(404, "Not found: " + repr(path))
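+    # Illustrative use of the Router (hypothetical callback name, not upstream code):
+    #   router = Router()
+    #   router.add('/wiki/<page>', 'GET', target=show_page)
+    #   router.match({'REQUEST_METHOD': 'GET', 'PATH_INFO': '/wiki/Home'})
+    #   # -> (show_page, {'page': 'Home'})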
+
+
+
+
+
+
+class Route(object):
+ ''' This class wraps a route callback along with route specific metadata and
+ configuration and applies Plugins on demand. It is also responsible for
+        turning a URL path rule into a regular expression usable by the Router.
+ '''
+
+ def __init__(self, app, rule, method, callback, name=None,
+ plugins=None, skiplist=None, **config):
+ #: The application this route is installed to.
+ self.app = app
+ #: The path-rule string (e.g. ``/wiki/:page``).
+ self.rule = rule
+ #: The HTTP method as a string (e.g. ``GET``).
+ self.method = method
+ #: The original callback with no plugins applied. Useful for introspection.
+ self.callback = callback
+ #: The name of the route (if specified) or ``None``.
+ self.name = name or None
+ #: A list of route-specific plugins (see :meth:`Bottle.route`).
+ self.plugins = plugins or []
+ #: A list of plugins to not apply to this route (see :meth:`Bottle.route`).
+ self.skiplist = skiplist or []
+ #: Additional keyword arguments passed to the :meth:`Bottle.route`
+ #: decorator are stored in this dictionary. Used for route-specific
+ #: plugin configuration and meta-data.
+ self.config = ConfigDict().load_dict(config, make_namespaces=True)
+
+ def __call__(self, *a, **ka):
+ depr("Some APIs changed to return Route() instances instead of"\
+ " callables. Make sure to use the Route.call method and not to"\
+ " call Route instances directly.") #0.12
+ return self.call(*a, **ka)
+
+ @cached_property
+ def call(self):
+ ''' The route callback with all plugins applied. This property is
+ created on demand and then cached to speed up subsequent requests.'''
+ return self._make_callback()
+
+ def reset(self):
+ ''' Forget any cached values. The next time :attr:`call` is accessed,
+ all plugins are re-applied. '''
+ self.__dict__.pop('call', None)
+
+ def prepare(self):
+ ''' Do all on-demand work immediately (useful for debugging).'''
+ self.call
+
+ @property
+ def _context(self):
+ depr('Switch to Plugin API v2 and access the Route object directly.') #0.12
+ return dict(rule=self.rule, method=self.method, callback=self.callback,
+ name=self.name, app=self.app, config=self.config,
+ apply=self.plugins, skip=self.skiplist)
+
+ def all_plugins(self):
+ ''' Yield all Plugins affecting this route. '''
+ unique = set()
+ for p in reversed(self.app.plugins + self.plugins):
+ if True in self.skiplist: break
+ name = getattr(p, 'name', False)
+ if name and (name in self.skiplist or name in unique): continue
+ if p in self.skiplist or type(p) in self.skiplist: continue
+ if name: unique.add(name)
+ yield p
+
+ def _make_callback(self):
+ callback = self.callback
+ for plugin in self.all_plugins():
+ try:
+ if hasattr(plugin, 'apply'):
+ api = getattr(plugin, 'api', 1)
+ context = self if api > 1 else self._context
+ callback = plugin.apply(callback, context)
+ else:
+ callback = plugin(callback)
+ except RouteReset: # Try again with changed configuration.
+ return self._make_callback()
+ if not callback is self.callback:
+ update_wrapper(callback, self.callback)
+ return callback
+
+ def get_undecorated_callback(self):
+ ''' Return the callback. If the callback is a decorated function, try to
+ recover the original function. '''
+ func = self.callback
+ func = getattr(func, '__func__' if py3k else 'im_func', func)
+ closure_attr = '__closure__' if py3k else 'func_closure'
+ while hasattr(func, closure_attr) and getattr(func, closure_attr):
+ func = getattr(func, closure_attr)[0].cell_contents
+ return func
+
+ def get_callback_args(self):
+ ''' Return a list of argument names the callback (most likely) accepts
+ as keyword arguments. If the callback is a decorated function, try
+ to recover the original function before inspection. '''
+ return getargspec(self.get_undecorated_callback())[0]
+
+ def get_config(self, key, default=None):
+ ''' Lookup a config field and return its value, first checking the
+ route.config, then route.app.config.'''
+ for conf in (self.config, self.app.config):
+ if key in conf: return conf[key]
+ return default
+
+ def __repr__(self):
+ cb = self.get_undecorated_callback()
+ return '<%s %r %r>' % (self.method, self.rule, cb)
+
+
+
+
+
+
+###############################################################################
+# Application Object ###########################################################
+###############################################################################
+
+
+class Bottle(object):
+ """ Each Bottle object represents a single, distinct web application and
+ consists of routes, callbacks, plugins, resources and configuration.
+ Instances are callable WSGI applications.
+
+ :param catchall: If true (default), handle all exceptions. Turn off to
+ let debugging middleware handle exceptions.
+ """
+
+ def __init__(self, catchall=True, autojson=True):
+
+ #: A :class:`ConfigDict` for app specific configuration.
+ self.config = ConfigDict()
+ self.config._on_change = functools.partial(self.trigger_hook, 'config')
+ self.config.meta_set('autojson', 'validate', bool)
+ self.config.meta_set('catchall', 'validate', bool)
+ self.config['catchall'] = catchall
+ self.config['autojson'] = autojson
+
+ #: A :class:`ResourceManager` for application files
+ self.resources = ResourceManager()
+
+ self.routes = [] # List of installed :class:`Route` instances.
+ self.router = Router() # Maps requests to :class:`Route` instances.
+ self.error_handler = {}
+
+ # Core plugins
+ self.plugins = [] # List of installed plugins.
+ if self.config['autojson']:
+ self.install(JSONPlugin())
+ self.install(TemplatePlugin())
+
+ #: If true, most exceptions are caught and returned as :exc:`HTTPError`
+ catchall = DictProperty('config', 'catchall')
+
+ __hook_names = 'before_request', 'after_request', 'app_reset', 'config'
+ __hook_reversed = 'after_request'
+
+ @cached_property
+ def _hooks(self):
+ return dict((name, []) for name in self.__hook_names)
+
+ def add_hook(self, name, func):
+ ''' Attach a callback to a hook. Three hooks are currently implemented:
+
+ before_request
+ Executed once before each request. The request context is
+ available, but no routing has happened yet.
+ after_request
+ Executed once after each request regardless of its outcome.
+ app_reset
+ Called whenever :meth:`Bottle.reset` is called.
+ '''
+ if name in self.__hook_reversed:
+ self._hooks[name].insert(0, func)
+ else:
+ self._hooks[name].append(func)
+
+ def remove_hook(self, name, func):
+ ''' Remove a callback from a hook. '''
+ if name in self._hooks and func in self._hooks[name]:
+ self._hooks[name].remove(func)
+ return True
+
+ def trigger_hook(self, __name, *args, **kwargs):
+ ''' Trigger a hook and return a list of results. '''
+ return [hook(*args, **kwargs) for hook in self._hooks[__name][:]]
+
+ def hook(self, name):
+ """ Return a decorator that attaches a callback to a hook. See
+ :meth:`add_hook` for details."""
+ def decorator(func):
+ self.add_hook(name, func)
+ return func
+ return decorator
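+    # Illustrative usage of the hook API (hypothetical names, not upstream code):
+    #   @app.hook('before_request')
+    #   def open_request():
+    #       ...  # runs once per request, before routing happens
+    #   @app.hook('after_request')
+    #   def close_request():
+    #       ...  # runs once per request, regardless of its outcome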
+
+ def mount(self, prefix, app, **options):
+ ''' Mount an application (:class:`Bottle` or plain WSGI) to a specific
+ URL prefix. Example::
+
+ root_app.mount('/admin/', admin_app)
+
+ :param prefix: path prefix or `mount-point`. If it ends in a slash,
+ that slash is mandatory.
+ :param app: an instance of :class:`Bottle` or a WSGI application.
+
+ All other parameters are passed to the underlying :meth:`route` call.
+ '''
+ if isinstance(app, basestring):
+ depr('Parameter order of Bottle.mount() changed.', True) # 0.10
+
+ segments = [p for p in prefix.split('/') if p]
+ if not segments: raise ValueError('Empty path prefix.')
+ path_depth = len(segments)
+
+ def mountpoint_wrapper():
+ try:
+ request.path_shift(path_depth)
+ rs = HTTPResponse([])
+ def start_response(status, headerlist, exc_info=None):
+ if exc_info:
+ try:
+ _raise(*exc_info)
+ finally:
+ exc_info = None
+ rs.status = status
+ for name, value in headerlist: rs.add_header(name, value)
+ return rs.body.append
+ body = app(request.environ, start_response)
+ if body and rs.body: body = itertools.chain(rs.body, body)
+ rs.body = body or rs.body
+ return rs
+ finally:
+ request.path_shift(-path_depth)
+
+ options.setdefault('skip', True)
+ options.setdefault('method', 'PROXY')
+ options.setdefault('mountpoint', {'prefix': prefix, 'target': app})
+ options['callback'] = mountpoint_wrapper
+
+ self.route('/%s/<:re:.*>' % '/'.join(segments), **options)
+ if not prefix.endswith('/'):
+ self.route('/' + '/'.join(segments), **options)
+
+ def merge(self, routes):
+ ''' Merge the routes of another :class:`Bottle` application or a list of
+ :class:`Route` objects into this application. The routes keep their
+ 'owner', meaning that the :data:`Route.app` attribute is not
+ changed. '''
+ if isinstance(routes, Bottle):
+ routes = routes.routes
+ for route in routes:
+ self.add_route(route)
+
+ def install(self, plugin):
+ ''' Add a plugin to the list of plugins and prepare it for being
+ applied to all routes of this application. A plugin may be a simple
+ decorator or an object that implements the :class:`Plugin` API.
+ '''
+ if hasattr(plugin, 'setup'): plugin.setup(self)
+ if not callable(plugin) and not hasattr(plugin, 'apply'):
+ raise TypeError("Plugins must be callable or implement .apply()")
+ self.plugins.append(plugin)
+ self.reset()
+ return plugin
+
+ def uninstall(self, plugin):
+ ''' Uninstall plugins. Pass an instance to remove a specific plugin, a type
+ object to remove all plugins that match that type, a string to remove
+ all plugins with a matching ``name`` attribute or ``True`` to remove all
+ plugins. Return the list of removed plugins. '''
+ removed, remove = [], plugin
+ for i, plugin in list(enumerate(self.plugins))[::-1]:
+ if remove is True or remove is plugin or remove is type(plugin) \
+ or getattr(plugin, 'name', True) == remove:
+ removed.append(plugin)
+ del self.plugins[i]
+ if hasattr(plugin, 'close'): plugin.close()
+ if removed: self.reset()
+ return removed
+
+ def reset(self, route=None):
+ ''' Reset all routes (force plugins to be re-applied) and clear all
+ caches. If an ID or route object is given, only that specific route
+ is affected. '''
+ if route is None: routes = self.routes
+ elif isinstance(route, Route): routes = [route]
+ else: routes = [self.routes[route]]
+ for route in routes: route.reset()
+ if DEBUG:
+ for route in routes: route.prepare()
+ self.trigger_hook('app_reset')
+
+ def close(self):
+ ''' Close the application and all installed plugins. '''
+ for plugin in self.plugins:
+ if hasattr(plugin, 'close'): plugin.close()
+ self.stopped = True
+
+ def run(self, **kwargs):
+ ''' Calls :func:`run` with the same parameters. '''
+ run(self, **kwargs)
+
+ def match(self, environ):
+ """ Search for a matching route and return a (:class:`Route` , urlargs)
+ tuple. The second value is a dictionary with parameters extracted
+ from the URL. Raise :exc:`HTTPError` (404/405) on a non-match."""
+ return self.router.match(environ)
+
+ def get_url(self, routename, **kargs):
+ """ Return a string that matches a named route """
+ scriptname = request.environ.get('SCRIPT_NAME', '').strip('/') + '/'
+ location = self.router.build(routename, **kargs).lstrip('/')
+ return urljoin(urljoin('/', scriptname), location)
+
+ def add_route(self, route):
+ ''' Add a route object, but do not change the :data:`Route.app`
+ attribute.'''
+ self.routes.append(route)
+ self.router.add(route.rule, route.method, route, name=route.name)
+ if DEBUG: route.prepare()
+
+ def route(self, path=None, method='GET', callback=None, name=None,
+ apply=None, skip=None, **config):
+ """ A decorator to bind a function to a request URL. Example::
+
+ @app.route('/hello/:name')
+ def hello(name):
+ return 'Hello %s' % name
+
+ The ``:name`` part is a wildcard. See :class:`Router` for syntax
+ details.
+
+ :param path: Request path or a list of paths to listen to. If no
+ path is specified, it is automatically generated from the
+ signature of the function.
+ :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of
+ methods to listen to. (default: `GET`)
+ :param callback: An optional shortcut to avoid the decorator
+ syntax. ``route(..., callback=func)`` equals ``route(...)(func)``
+ :param name: The name for this route. (default: None)
+ :param apply: A decorator or plugin or a list of plugins. These are
+ applied to the route callback in addition to installed plugins.
+ :param skip: A list of plugins, plugin classes or names. Matching
+ plugins are not installed to this route. ``True`` skips all.
+
+ Any additional keyword arguments are stored as route-specific
+ configuration and passed to plugins (see :meth:`Plugin.apply`).
+ """
+ if callable(path): path, callback = None, path
+ plugins = makelist(apply)
+ skiplist = makelist(skip)
+ def decorator(callback):
+ # TODO: Documentation and tests
+ if isinstance(callback, basestring): callback = load(callback)
+ for rule in makelist(path) or yieldroutes(callback):
+ for verb in makelist(method):
+ verb = verb.upper()
+ route = Route(self, rule, verb, callback, name=name,
+ plugins=plugins, skiplist=skiplist, **config)
+ self.add_route(route)
+ return callback
+ return decorator(callback) if callback else decorator
+
+ def get(self, path=None, method='GET', **options):
+ """ Equals :meth:`route`. """
+ return self.route(path, method, **options)
+
+ def post(self, path=None, method='POST', **options):
+ """ Equals :meth:`route` with a ``POST`` method parameter. """
+ return self.route(path, method, **options)
+
+ def put(self, path=None, method='PUT', **options):
+ """ Equals :meth:`route` with a ``PUT`` method parameter. """
+ return self.route(path, method, **options)
+
+ def delete(self, path=None, method='DELETE', **options):
+ """ Equals :meth:`route` with a ``DELETE`` method parameter. """
+ return self.route(path, method, **options)
+
+ def error(self, code=500):
+ """ Decorator: Register an output handler for a HTTP error code"""
+ def wrapper(handler):
+ self.error_handler[int(code)] = handler
+ return handler
+ return wrapper
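+    # Illustrative usage (hypothetical handler name, not upstream code):
+    #   @app.error(404)
+    #   def not_found(err):          # err is the HTTPError instance
+    #       return 'Nothing here: %s' % request.path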
+
+ def default_error_handler(self, res):
+ return tob(template(ERROR_PAGE_TEMPLATE, e=res))
+
+ def _handle(self, environ):
+ try:
+
+ environ['bottle.app'] = self
+ request.bind(environ)
+ response.bind()
+
+ path = environ['bottle.raw_path'] = environ['PATH_INFO']
+ if py3k:
+ try:
+ environ['PATH_INFO'] = path.encode('latin1').decode('utf8')
+ except UnicodeError:
+ return HTTPError(400, 'Invalid path string. Expected UTF-8')
+
+ try:
+ self.trigger_hook('before_request')
+ route, args = self.router.match(environ)
+ environ['route.handle'] = route
+ environ['bottle.route'] = route
+ environ['route.url_args'] = args
+ return route.call(**args)
+ finally:
+ self.trigger_hook('after_request')
+
+ except HTTPResponse:
+ return _e()
+ except RouteReset:
+ route.reset()
+ return self._handle(environ)
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+ stacktrace = format_exc()
+ environ['wsgi.errors'].write(stacktrace)
+ return HTTPError(500, "Internal Server Error", _e(), stacktrace)
+
+ def _cast(self, out, peek=None):
+ """ Try to convert the parameter into something WSGI compatible and set
+ correct HTTP headers when possible.
+ Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like,
+ iterable of strings and iterable of unicodes
+ """
+
+ # Empty output is done here
+ if not out:
+ if 'Content-Length' not in response:
+ response['Content-Length'] = 0
+ return []
+ # Join lists of byte or unicode strings. Mixed lists are NOT supported
+ if isinstance(out, (tuple, list))\
+ and isinstance(out[0], (bytes, unicode)):
+ out = out[0][0:0].join(out) # b'abc'[0:0] -> b''
+ # Encode unicode strings
+ if isinstance(out, unicode):
+ out = out.encode(response.charset)
+ # Byte Strings are just returned
+ if isinstance(out, bytes):
+ if 'Content-Length' not in response:
+ response['Content-Length'] = len(out)
+ return [out]
+ # HTTPError or HTTPException (recursive, because they may wrap anything)
+ # TODO: Handle these explicitly in handle() or make them iterable.
+ if isinstance(out, HTTPError):
+ out.apply(response)
+ out = self.error_handler.get(out.status_code, self.default_error_handler)(out)
+ return self._cast(out)
+ if isinstance(out, HTTPResponse):
+ out.apply(response)
+ return self._cast(out.body)
+
+ # File-like objects.
+ if hasattr(out, 'read'):
+ if 'wsgi.file_wrapper' in request.environ:
+ return request.environ['wsgi.file_wrapper'](out)
+ elif hasattr(out, 'close') or not hasattr(out, '__iter__'):
+ return WSGIFileWrapper(out)
+
+ # Handle Iterables. We peek into them to detect their inner type.
+ try:
+ iout = iter(out)
+ first = next(iout)
+ while not first:
+ first = next(iout)
+ except StopIteration:
+ return self._cast('')
+ except HTTPResponse:
+ first = _e()
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+ first = HTTPError(500, 'Unhandled exception', _e(), format_exc())
+
+ # These are the inner types allowed in iterator or generator objects.
+ if isinstance(first, HTTPResponse):
+ return self._cast(first)
+ elif isinstance(first, bytes):
+ new_iter = itertools.chain([first], iout)
+ elif isinstance(first, unicode):
+ encoder = lambda x: x.encode(response.charset)
+ new_iter = imap(encoder, itertools.chain([first], iout))
+ else:
+ msg = 'Unsupported response type: %s' % type(first)
+ return self._cast(HTTPError(500, msg))
+ if hasattr(out, 'close'):
+ new_iter = _closeiter(new_iter, out.close)
+ return new_iter
+
+ def wsgi(self, environ, start_response):
+ """ The bottle WSGI-interface. """
+ try:
+ out = self._cast(self._handle(environ))
+ # rfc2616 section 4.3
+ if response._status_code in (100, 101, 204, 304)\
+ or environ['REQUEST_METHOD'] == 'HEAD':
+ if hasattr(out, 'close'): out.close()
+ out = []
+ start_response(response._status_line, response.headerlist)
+ return out
+ except (KeyboardInterrupt, SystemExit, MemoryError):
+ raise
+ except Exception:
+ if not self.catchall: raise
+            err = '<h1>Critical error while processing request: %s</h1>' \
+                  % html_escape(environ.get('PATH_INFO', '/'))
+            if DEBUG:
+                err += '<h2>Error:</h2>\n<pre>\n%s\n</pre>\n' \
+                       '<h2>Traceback:</h2>\n<pre>\n%s\n</pre>\n' \
+                       % (html_escape(repr(_e())), html_escape(format_exc()))
+ environ['wsgi.errors'].write(err)
+ headers = [('Content-Type', 'text/html; charset=UTF-8')]
+ start_response('500 INTERNAL SERVER ERROR', headers, sys.exc_info())
+ return [tob(err)]
+
+ def __call__(self, environ, start_response):
+ ''' Each instance of :class:'Bottle' is a WSGI application. '''
+ return self.wsgi(environ, start_response)
+
+
+
+
+
+
+###############################################################################
+# HTTP and WSGI Tools ##########################################################
+###############################################################################
+
+class BaseRequest(object):
+ """ A wrapper for WSGI environment dictionaries that adds a lot of
+ convenient access methods and properties. Most of them are read-only.
+
+ Adding new attributes to a request actually adds them to the environ
+        dictionary (as 'bottle.request.ext.<name>'). This is the recommended
+ way to store and access request-specific data.
+ """
+
+ __slots__ = ('environ')
+
+ #: Maximum size of memory buffer for :attr:`body` in bytes.
+ MEMFILE_MAX = 102400
+
+ def __init__(self, environ=None):
+ """ Wrap a WSGI environ dictionary. """
+ #: The wrapped WSGI environ dictionary. This is the only real attribute.
+ #: All other attributes actually are read-only properties.
+ self.environ = {} if environ is None else environ
+ self.environ['bottle.request'] = self
+
+ @DictProperty('environ', 'bottle.app', read_only=True)
+ def app(self):
+ ''' Bottle application handling this request. '''
+ raise RuntimeError('This request is not connected to an application.')
+
+ @DictProperty('environ', 'bottle.route', read_only=True)
+ def route(self):
+ """ The bottle :class:`Route` object that matches this request. """
+ raise RuntimeError('This request is not connected to a route.')
+
+ @DictProperty('environ', 'route.url_args', read_only=True)
+ def url_args(self):
+ """ The arguments extracted from the URL. """
+ raise RuntimeError('This request is not connected to a route.')
+
+ @property
+ def path(self):
+ ''' The value of ``PATH_INFO`` with exactly one prefixed slash (to fix
+ broken clients and avoid the "empty path" edge case). '''
+ return '/' + self.environ.get('PATH_INFO','').lstrip('/')
+
+ @property
+ def method(self):
+ ''' The ``REQUEST_METHOD`` value as an uppercase string. '''
+ return self.environ.get('REQUEST_METHOD', 'GET').upper()
+
+ @DictProperty('environ', 'bottle.request.headers', read_only=True)
+ def headers(self):
+ ''' A :class:`WSGIHeaderDict` that provides case-insensitive access to
+ HTTP request headers. '''
+ return WSGIHeaderDict(self.environ)
+
+ def get_header(self, name, default=None):
+ ''' Return the value of a request header, or a given default value. '''
+ return self.headers.get(name, default)
+
+ @DictProperty('environ', 'bottle.request.cookies', read_only=True)
+ def cookies(self):
+ """ Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT
+ decoded. Use :meth:`get_cookie` if you expect signed cookies. """
+ cookies = SimpleCookie(self.environ.get('HTTP_COOKIE','')).values()
+ return FormsDict((c.key, c.value) for c in cookies)
+
+ def get_cookie(self, key, default=None, secret=None):
+ """ Return the content of a cookie. To read a `Signed Cookie`, the
+ `secret` must match the one used to create the cookie (see
+ :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing
+ cookie or wrong signature), return a default value. """
+ value = self.cookies.get(key)
+ if secret and value:
+ dec = cookie_decode(value, secret) # (key, value) tuple or None
+ return dec[1] if dec and dec[0] == key else default
+ return value or default
+
+ @DictProperty('environ', 'bottle.request.query', read_only=True)
+ def query(self):
+ ''' The :attr:`query_string` parsed into a :class:`FormsDict`. These
+ values are sometimes called "URL arguments" or "GET parameters", but
+ not to be confused with "URL wildcards" as they are provided by the
+ :class:`Router`. '''
+ get = self.environ['bottle.get'] = FormsDict()
+ pairs = _parse_qsl(self.environ.get('QUERY_STRING', ''))
+ for key, value in pairs:
+ get[key] = value
+ return get
+
+ @DictProperty('environ', 'bottle.request.forms', read_only=True)
+ def forms(self):
+ """ Form values parsed from an `url-encoded` or `multipart/form-data`
+ encoded POST or PUT request body. The result is returned as a
+ :class:`FormsDict`. All keys and values are strings. File uploads
+ are stored separately in :attr:`files`. """
+ forms = FormsDict()
+ forms.recode_unicode = self.POST.recode_unicode
+ for name, item in self.POST.allitems():
+ if not isinstance(item, FileUpload):
+ forms[name] = item
+ return forms
+
+ @DictProperty('environ', 'bottle.request.params', read_only=True)
+ def params(self):
+ """ A :class:`FormsDict` with the combined values of :attr:`query` and
+ :attr:`forms`. File uploads are stored in :attr:`files`. """
+ params = FormsDict()
+ for key, value in self.query.allitems():
+ params[key] = value
+ for key, value in self.forms.allitems():
+ params[key] = value
+ return params
+
+ @DictProperty('environ', 'bottle.request.files', read_only=True)
+ def files(self):
+ """ File uploads parsed from `multipart/form-data` encoded POST or PUT
+ request body. The values are instances of :class:`FileUpload`.
+
+ """
+ files = FormsDict()
+ files.recode_unicode = self.POST.recode_unicode
+ for name, item in self.POST.allitems():
+ if isinstance(item, FileUpload):
+ files[name] = item
+ return files
+
+ @DictProperty('environ', 'bottle.request.json', read_only=True)
+ def json(self):
+ ''' If the ``Content-Type`` header is ``application/json``, this
+ property holds the parsed content of the request body. Only requests
+ smaller than :attr:`MEMFILE_MAX` are processed to avoid memory
+ exhaustion. '''
+ ctype = self.environ.get('CONTENT_TYPE', '').lower().split(';')[0]
+ if ctype == 'application/json':
+ b = self._get_body_string()
+ if not b:
+ return None
+ return json_loads(b)
+ return None
+
+ def _iter_body(self, read, bufsize):
+ maxread = max(0, self.content_length)
+ while maxread:
+ part = read(min(maxread, bufsize))
+ if not part: break
+ yield part
+ maxread -= len(part)
+
+ def _iter_chunked(self, read, bufsize):
+ err = HTTPError(400, 'Error while parsing chunked transfer body.')
+ rn, sem, bs = tob('\r\n'), tob(';'), tob('')
+ while True:
+ header = read(1)
+ while header[-2:] != rn:
+ c = read(1)
+ header += c
+ if not c: raise err
+ if len(header) > bufsize: raise err
+ size, _, _ = header.partition(sem)
+ try:
+ maxread = int(tonat(size.strip()), 16)
+ except ValueError:
+ raise err
+ if maxread == 0: break
+ buff = bs
+ while maxread > 0:
+ if not buff:
+ buff = read(min(maxread, bufsize))
+ part, buff = buff[:maxread], buff[maxread:]
+ if not part: raise err
+ yield part
+ maxread -= len(part)
+ if read(2) != rn:
+ raise err
+
+ @DictProperty('environ', 'bottle.request.body', read_only=True)
+ def _body(self):
+ body_iter = self._iter_chunked if self.chunked else self._iter_body
+ read_func = self.environ['wsgi.input'].read
+ body, body_size, is_temp_file = BytesIO(), 0, False
+ for part in body_iter(read_func, self.MEMFILE_MAX):
+ body.write(part)
+ body_size += len(part)
+ if not is_temp_file and body_size > self.MEMFILE_MAX:
+ body, tmp = TemporaryFile(mode='w+b'), body
+ body.write(tmp.getvalue())
+ del tmp
+ is_temp_file = True
+ self.environ['wsgi.input'] = body
+ body.seek(0)
+ return body
+
+ def _get_body_string(self):
+        ''' Read the body up to content-length or MEMFILE_MAX into a string. Raise
+            HTTPError(413) on requests that are too large. '''
+        clen = self.content_length
+        if clen > self.MEMFILE_MAX:
+            raise HTTPError(413, 'Request too large')
+        if clen < 0: clen = self.MEMFILE_MAX + 1
+        data = self.body.read(clen)
+        if len(data) > self.MEMFILE_MAX: # Fail fast
+            raise HTTPError(413, 'Request too large')
+ return data
+
+ @property
+ def body(self):
+ """ The HTTP request body as a seek-able file-like object. Depending on
+ :attr:`MEMFILE_MAX`, this is either a temporary file or a
+ :class:`io.BytesIO` instance. Accessing this property for the first
+ time reads and replaces the ``wsgi.input`` environ variable.
+ Subsequent accesses just do a `seek(0)` on the file object. """
+ self._body.seek(0)
+ return self._body
+
+ @property
+ def chunked(self):
+        ''' True if Chunked transfer encoding was used. '''
+ return 'chunked' in self.environ.get('HTTP_TRANSFER_ENCODING', '').lower()
+
+ #: An alias for :attr:`query`.
+ GET = query
+
+ @DictProperty('environ', 'bottle.request.post', read_only=True)
+ def POST(self):
+ """ The values of :attr:`forms` and :attr:`files` combined into a single
+ :class:`FormsDict`. Values are either strings (form values) or
+ instances of :class:`cgi.FieldStorage` (file uploads).
+ """
+ post = FormsDict()
+ # We default to application/x-www-form-urlencoded for everything that
+ # is not multipart and take the fast path (also: 3.1 workaround)
+ if not self.content_type.startswith('multipart/'):
+ pairs = _parse_qsl(tonat(self._get_body_string(), 'latin1'))
+ for key, value in pairs:
+ post[key] = value
+ return post
+
+ safe_env = {'QUERY_STRING':''} # Build a safe environment for cgi
+ for key in ('REQUEST_METHOD', 'CONTENT_TYPE', 'CONTENT_LENGTH'):
+ if key in self.environ: safe_env[key] = self.environ[key]
+ args = dict(fp=self.body, environ=safe_env, keep_blank_values=True)
+ if py31:
+ args['fp'] = NCTextIOWrapper(args['fp'], encoding='utf8',
+ newline='\n')
+ elif py3k:
+ args['encoding'] = 'utf8'
+ post.recode_unicode = False
+ data = cgi.FieldStorage(**args)
+ self['_cgi.FieldStorage'] = data #http://bugs.python.org/issue18394#msg207958
+ data = data.list or []
+ for item in data:
+ if item.filename is None:
+ post[item.name] = item.value
+ else:
+ post[item.name] = FileUpload(item.file, item.name,
+ item.filename, item.headers)
+ return post
+
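+ # Illustrative usage sketch (route and field names are examples only):
+ #
+ #   @route('/login', method='POST')
+ #   def login():
+ #       name = request.POST.get('name', '')   # plain form field -> str
+ #       # file fields show up as FileUpload instances instead of strings
+ #       return 'Hello %s' % name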
+ @property
+ def url(self):
+ """ The full request URI including hostname and scheme. If your app
+ lives behind a reverse proxy or load balancer and you get confusing
+ results, make sure that the ``X-Forwarded-Host`` header is set
+ correctly. """
+ return self.urlparts.geturl()
+
+ @DictProperty('environ', 'bottle.request.urlparts', read_only=True)
+ def urlparts(self):
+ ''' The :attr:`url` string as an :class:`urlparse.SplitResult` tuple.
+ The tuple contains (scheme, host, path, query_string and fragment),
+ but the fragment is always empty because it is not visible to the
+ server. '''
+ env = self.environ
+ http = env.get('HTTP_X_FORWARDED_PROTO') or env.get('wsgi.url_scheme', 'http')
+ host = env.get('HTTP_X_FORWARDED_HOST') or env.get('HTTP_HOST')
+ if not host:
+ # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients.
+ host = env.get('SERVER_NAME', '127.0.0.1')
+ port = env.get('SERVER_PORT')
+ if port and port != ('80' if http == 'http' else '443'):
+ host += ':' + port
+ path = urlquote(self.fullpath)
+ return UrlSplitResult(http, host, path, env.get('QUERY_STRING'), '')
+
+ @property
+ def fullpath(self):
+ """ Request path including :attr:`script_name` (if present). """
+ return urljoin(self.script_name, self.path.lstrip('/'))
+
+ @property
+ def query_string(self):
+ """ The raw :attr:`query` part of the URL (everything in between ``?``
+ and ``#``) as a string. """
+ return self.environ.get('QUERY_STRING', '')
+
+ @property
+ def script_name(self):
+ ''' The initial portion of the URL's `path` that was removed by a higher
+ level (server or routing middleware) before the application was
+ called. This script path is returned with leading and trailing
+ slashes. '''
+ script_name = self.environ.get('SCRIPT_NAME', '').strip('/')
+ return '/' + script_name + '/' if script_name else '/'
+
+ def path_shift(self, shift=1):
+ ''' Shift path segments from :attr:`path` to :attr:`script_name` and
+ vice versa.
+
+ :param shift: The number of path segments to shift. May be negative
+ to change the shift direction. (default: 1)
+ '''
+ script = self.environ.get('SCRIPT_NAME','/')
+ self['SCRIPT_NAME'], self['PATH_INFO'] = path_shift(script, self.path, shift)
+
+ @property
+ def content_length(self):
+ ''' The request body length as an integer. The client is responsible for
+ setting this header. Otherwise, the real length of the body is unknown
+ and -1 is returned. In this case, :attr:`body` will be empty. '''
+ return int(self.environ.get('CONTENT_LENGTH') or -1)
+
+ @property
+ def content_type(self):
+ ''' The Content-Type header as a lowercase-string (default: empty). '''
+ return self.environ.get('CONTENT_TYPE', '').lower()
+
+ @property
+ def is_xhr(self):
+ ''' True if the request was triggered by an XMLHttpRequest. This only
+ works with JavaScript libraries that support the `X-Requested-With`
+ header (most of the popular libraries do). '''
+ requested_with = self.environ.get('HTTP_X_REQUESTED_WITH','')
+ return requested_with.lower() == 'xmlhttprequest'
+
+ @property
+ def is_ajax(self):
+ ''' Alias for :attr:`is_xhr`. "Ajax" is not the right term. '''
+ return self.is_xhr
+
+ @property
+ def auth(self):
+ """ HTTP authentication data as a (user, password) tuple. This
+ implementation currently supports basic (not digest) authentication
+ only. If the authentication happened at a higher level (e.g. in the
+ front web-server or a middleware), the password field is None, but
+ the user field is looked up from the ``REMOTE_USER`` environ
+ variable. On any errors, None is returned. """
+ basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))
+ if basic: return basic
+ ruser = self.environ.get('REMOTE_USER')
+ if ruser: return (ruser, None)
+ return None
+
+ @property
+ def remote_route(self):
+ """ A list of all IPs that were involved in this request, starting with
+ the client IP and followed by zero or more proxies. This only works
+ if all proxies support the ``X-Forwarded-For`` header. Note
+ that this information can be forged by malicious clients. """
+ proxy = self.environ.get('HTTP_X_FORWARDED_FOR')
+ if proxy: return [ip.strip() for ip in proxy.split(',')]
+ remote = self.environ.get('REMOTE_ADDR')
+ return [remote] if remote else []
+
+ @property
+ def remote_addr(self):
+ """ The client IP as a string. Note that this information can be forged
+ by malicious clients. """
+ route = self.remote_route
+ return route[0] if route else None
+
+ def copy(self):
+ """ Return a new :class:`Request` with a shallow :attr:`environ` copy. """
+ return Request(self.environ.copy())
+
+ def get(self, value, default=None): return self.environ.get(value, default)
+ def __getitem__(self, key): return self.environ[key]
+ def __delitem__(self, key): self[key] = ""; del(self.environ[key])
+ def __iter__(self): return iter(self.environ)
+ def __len__(self): return len(self.environ)
+ def keys(self): return self.environ.keys()
+ def __setitem__(self, key, value):
+ """ Change an environ value and clear all caches that depend on it. """
+
+ if self.environ.get('bottle.request.readonly'):
+ raise KeyError('The environ dictionary is read-only.')
+
+ self.environ[key] = value
+ todelete = ()
+
+ if key == 'wsgi.input':
+ todelete = ('body', 'forms', 'files', 'params', 'post', 'json')
+ elif key == 'QUERY_STRING':
+ todelete = ('query', 'params')
+ elif key.startswith('HTTP_'):
+ todelete = ('headers', 'cookies')
+
+ for key in todelete:
+ self.environ.pop('bottle.request.'+key, None)
+
+ def __repr__(self):
+ return '<%s: %s %s>' % (self.__class__.__name__, self.method, self.url)
+
+ def __getattr__(self, name):
+ ''' Search in self.environ for additional user defined attributes. '''
+ try:
+ var = self.environ['bottle.request.ext.%s'%name]
+ return var.__get__(self) if hasattr(var, '__get__') else var
+ except KeyError:
+ raise AttributeError('Attribute %r not defined.' % name)
+
+ def __setattr__(self, name, value):
+ if name == 'environ': return object.__setattr__(self, name, value)
+ self.environ['bottle.request.ext.%s'%name] = value
+
+
+def _hkey(key):
+ if '\n' in key or '\r' in key or '\0' in key:
+ raise ValueError("Header names must not contain control characters: %r" % key)
+ return key.title().replace('_', '-')
+
+
+def _hval(value):
+ value = tonat(value)
+ if '\n' in value or '\r' in value or '\0' in value:
+ raise ValueError("Header value must not contain control characters: %r" % value)
+ return value
+
+
+
+class HeaderProperty(object):
+ def __init__(self, name, reader=None, writer=None, default=''):
+ self.name, self.default = name, default
+ self.reader, self.writer = reader, writer
+ self.__doc__ = 'Current value of the %r header.' % name.title()
+
+ def __get__(self, obj, cls):
+ if obj is None: return self
+ value = obj.get_header(self.name, self.default)
+ return self.reader(value) if self.reader else value
+
+ def __set__(self, obj, value):
+ obj[self.name] = self.writer(value) if self.writer else value
+
+ def __delete__(self, obj):
+ del obj[self.name]
+
+
+class BaseResponse(object):
+ """ Storage class for a response body as well as headers and cookies.
+
+ This class does support dict-like case-insensitive item-access to
+ headers, but is NOT a dict. Most notably, iterating over a response
+ yields parts of the body and not the headers.
+
+ :param body: The response body as one of the supported types.
+ :param status: Either an HTTP status code (e.g. 200) or a status line
+ including the reason phrase (e.g. '200 OK').
+ :param headers: A dictionary or a list of name-value pairs.
+
+ Additional keyword arguments are added to the list of headers.
+ Underscores in the header name are replaced with dashes.
+ """
+
+ default_status = 200
+ default_content_type = 'text/html; charset=UTF-8'
+
+ # Header blacklist for specific response codes
+ # (rfc2616 section 10.2.3 and 10.3.5)
+ bad_headers = {
+ 204: set(('Content-Type',)),
+ 304: set(('Allow', 'Content-Encoding', 'Content-Language',
+ 'Content-Length', 'Content-Range', 'Content-Type',
+ 'Content-Md5', 'Last-Modified'))}
+
+ def __init__(self, body='', status=None, headers=None, **more_headers):
+ self._cookies = None
+ self._headers = {}
+ self.body = body
+ self.status = status or self.default_status
+ if headers:
+ if isinstance(headers, dict):
+ headers = headers.items()
+ for name, value in headers:
+ self.add_header(name, value)
+ if more_headers:
+ for name, value in more_headers.items():
+ self.add_header(name, value)
+
+ def copy(self, cls=None):
+ ''' Returns a copy of self. '''
+ cls = cls or BaseResponse
+ assert issubclass(cls, BaseResponse)
+ copy = cls()
+ copy.status = self.status
+ copy._headers = dict((k, v[:]) for (k, v) in self._headers.items())
+ if self._cookies:
+ copy._cookies = SimpleCookie()
+ copy._cookies.load(self._cookies.output(header=''))
+ return copy
+
+ def __iter__(self):
+ return iter(self.body)
+
+ def close(self):
+ if hasattr(self.body, 'close'):
+ self.body.close()
+
+ @property
+ def status_line(self):
+ ''' The HTTP status line as a string (e.g. ``404 Not Found``).'''
+ return self._status_line
+
+ @property
+ def status_code(self):
+ ''' The HTTP status code as an integer (e.g. 404).'''
+ return self._status_code
+
+ def _set_status(self, status):
+ if isinstance(status, int):
+ code, status = status, _HTTP_STATUS_LINES.get(status)
+ elif ' ' in status:
+ status = status.strip()
+ code = int(status.split()[0])
+ else:
+ raise ValueError('String status line without a reason phrase.')
+ if not 100 <= code <= 999: raise ValueError('Status code out of range.')
+ self._status_code = code
+ self._status_line = str(status or ('%d Unknown' % code))
+
+ def _get_status(self):
+ return self._status_line
+
+ status = property(_get_status, _set_status, None,
+ ''' A writeable property to change the HTTP response status. It accepts
+ either a numeric code (100-999) or a string with a custom reason
+ phrase (e.g. "404 Brain not found"). Both :data:`status_line` and
+ :data:`status_code` are updated accordingly. The return value is
+ always a status string. ''')
+ del _get_status, _set_status
+
+ @property
+ def headers(self):
+ ''' An instance of :class:`HeaderDict`, a case-insensitive dict-like
+ view on the response headers. '''
+ hdict = HeaderDict()
+ hdict.dict = self._headers
+ return hdict
+
+ def __contains__(self, name): return _hkey(name) in self._headers
+ def __delitem__(self, name): del self._headers[_hkey(name)]
+ def __getitem__(self, name): return self._headers[_hkey(name)][-1]
+ def __setitem__(self, name, value): self._headers[_hkey(name)] = [_hval(value)]
+
+ def get_header(self, name, default=None):
+ ''' Return the value of a previously defined header. If there is no
+ header with that name, return a default value. '''
+ return self._headers.get(_hkey(name), [default])[-1]
+
+ def set_header(self, name, value):
+ ''' Create a new response header, replacing any previously defined
+ headers with the same name. '''
+ self._headers[_hkey(name)] = [_hval(value)]
+
+ def add_header(self, name, value):
+ ''' Add an additional response header, not removing duplicates. '''
+ self._headers.setdefault(_hkey(name), []).append(_hval(value))
+
+ def iter_headers(self):
+ ''' Yield (header, value) tuples, skipping headers that are not
+ allowed with the current response status code. '''
+ return self.headerlist
+
+ @property
+ def headerlist(self):
+ """ WSGI conform list of (header, value) tuples. """
+ out = []
+ headers = list(self._headers.items())
+ if 'Content-Type' not in self._headers:
+ headers.append(('Content-Type', [self.default_content_type]))
+ if self._status_code in self.bad_headers:
+ bad_headers = self.bad_headers[self._status_code]
+ headers = [h for h in headers if h[0] not in bad_headers]
+ out += [(name, val) for (name, vals) in headers for val in vals]
+ if self._cookies:
+ for c in self._cookies.values():
+ out.append(('Set-Cookie', _hval(c.OutputString())))
+ if py3k:
+ out = [(k, v.encode('utf8').decode('latin1')) for (k, v) in out]
+ return out
+
+ content_type = HeaderProperty('Content-Type')
+ content_length = HeaderProperty('Content-Length', reader=int)
+ expires = HeaderProperty('Expires',
+ reader=lambda x: datetime.utcfromtimestamp(parse_date(x)),
+ writer=lambda x: http_date(x))
+
+ @property
+ def charset(self, default='UTF-8'):
+ """ Return the charset specified in the content-type header (default: utf8). """
+ if 'charset=' in self.content_type:
+ return self.content_type.split('charset=')[-1].split(';')[0].strip()
+ return default
+
+ def set_cookie(self, name, value, secret=None, **options):
+ ''' Create a new cookie or replace an old one. If the `secret` parameter is
+ set, create a `Signed Cookie` (described below).
+
+ :param name: the name of the cookie.
+ :param value: the value of the cookie.
+ :param secret: a signature key required for signed cookies.
+
+ Additionally, this method accepts all RFC 2109 attributes that are
+ supported by :class:`cookie.Morsel`, including:
+
+ :param max_age: maximum age in seconds. (default: None)
+ :param expires: a datetime object or UNIX timestamp. (default: None)
+ :param domain: the domain that is allowed to read the cookie.
+ (default: current domain)
+ :param path: limits the cookie to a given path (default: current path)
+ :param secure: limit the cookie to HTTPS connections (default: off).
+ :param httponly: prevents client-side JavaScript from reading this cookie
+ (default: off, requires Python 2.6 or newer).
+
+ If neither `expires` nor `max_age` is set (default), the cookie will
+ expire at the end of the browser session (as soon as the browser
+ window is closed).
+
+ Signed cookies may store any pickle-able object and are
+ cryptographically signed to prevent manipulation. Keep in mind that
+ cookies are limited to 4kb in most browsers.
+
+ Warning: Signed cookies are not encrypted (the client can still see
+ the content) and not copy-protected (the client can restore an old
+ cookie). The main intention is to make pickling and unpickling
+ safe, not to store secret information on the client side.
+ '''
+ if not self._cookies:
+ self._cookies = SimpleCookie()
+
+ if secret:
+ value = touni(cookie_encode((name, value), secret))
+ elif not isinstance(value, basestring):
+ raise TypeError('Secret key missing for non-string Cookie.')
+
+ if len(value) > 4096: raise ValueError('Cookie value too long.')
+ self._cookies[name] = value
+
+ for key, value in options.items():
+ if key == 'max_age':
+ if isinstance(value, timedelta):
+ value = value.seconds + value.days * 24 * 3600
+ if key == 'expires':
+ if isinstance(value, (datedate, datetime)):
+ value = value.timetuple()
+ elif isinstance(value, (int, float)):
+ value = time.gmtime(value)
+ value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
+ self._cookies[name][key.replace('_', '-')] = value
+
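+ # Illustrative usage sketch (names and values are examples only):
+ #
+ #   response.set_cookie('account', 'bob', secret='change-me',   # signed cookie
+ #                       max_age=3600, path='/', httponly=True)
+ #   # later, in another handler, read it back with the same secret:
+ #   account = request.get_cookie('account', secret='change-me')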
+ def delete_cookie(self, key, **kwargs):
+ ''' Delete a cookie. Be sure to use the same `domain` and `path`
+ settings as used to create the cookie. '''
+ kwargs['max_age'] = -1
+ kwargs['expires'] = 0
+ self.set_cookie(key, '', **kwargs)
+
+ def __repr__(self):
+ out = ''
+ for name, value in self.headerlist:
+ out += '%s: %s\n' % (name.title(), value.strip())
+ return out
+
+
+def local_property(name=None):
+ if name: depr('local_property() is deprecated and will be removed.') #0.12
+ ls = threading.local()
+ def fget(self):
+ try: return ls.var
+ except AttributeError:
+ raise RuntimeError("Request context not initialized.")
+ def fset(self, value): ls.var = value
+ def fdel(self): del ls.var
+ return property(fget, fset, fdel, 'Thread-local property')
+
+
+class LocalRequest(BaseRequest):
+ ''' A thread-local subclass of :class:`BaseRequest` with a different
+ set of attributes for each thread. There is usually only one global
+ instance of this class (:data:`request`). If accessed during a
+ request/response cycle, this instance always refers to the *current*
+ request (even on a multithreaded server). '''
+ bind = BaseRequest.__init__
+ environ = local_property()
+
+
+class LocalResponse(BaseResponse):
+ ''' A thread-local subclass of :class:`BaseResponse` with a different
+ set of attributes for each thread. There is usually only one global
+ instance of this class (:data:`response`). Its attributes are used
+ to build the HTTP response at the end of the request/response cycle.
+ '''
+ bind = BaseResponse.__init__
+ _status_line = local_property()
+ _status_code = local_property()
+ _cookies = local_property()
+ _headers = local_property()
+ body = local_property()
+
+
+Request = BaseRequest
+Response = BaseResponse
+
+
+class HTTPResponse(Response, BottleException):
+ def __init__(self, body='', status=None, headers=None, **more_headers):
+ super(HTTPResponse, self).__init__(body, status, headers, **more_headers)
+
+ def apply(self, response):
+ response._status_code = self._status_code
+ response._status_line = self._status_line
+ response._headers = self._headers
+ response._cookies = self._cookies
+ response.body = self.body
+
+
+class HTTPError(HTTPResponse):
+ default_status = 500
+ def __init__(self, status=None, body=None, exception=None, traceback=None,
+ **options):
+ self.exception = exception
+ self.traceback = traceback
+ super(HTTPError, self).__init__(body, status, **options)
+
+
+
+
+
+###############################################################################
+# Plugins ######################################################################
+###############################################################################
+
+class PluginError(BottleException): pass
+
+
+class JSONPlugin(object):
+ name = 'json'
+ api = 2
+
+ def __init__(self, json_dumps=json_dumps):
+ self.json_dumps = json_dumps
+
+ def apply(self, callback, route):
+ dumps = self.json_dumps
+ if not dumps: return callback
+ def wrapper(*a, **ka):
+ try:
+ rv = callback(*a, **ka)
+ except HTTPResponse:
+ rv = _e()
+
+ if isinstance(rv, dict):
+ #Attempt to serialize, raises exception on failure
+ json_response = dumps(rv)
+ # Set content type only if serialization was successful
+ response.content_type = 'application/json'
+ return json_response
+ elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict):
+ rv.body = dumps(rv.body)
+ rv.content_type = 'application/json'
+ return rv
+
+ return wrapper
+
+
+class TemplatePlugin(object):
+ ''' This plugin applies the :func:`view` decorator to all routes with a
+ `template` config parameter. If the parameter is a tuple, the second
+ element must be a dict with additional options (e.g. `template_engine`)
+ or default variables for the template. '''
+ name = 'template'
+ api = 2
+
+ def apply(self, callback, route):
+ conf = route.config.get('template')
+ if isinstance(conf, (tuple, list)) and len(conf) == 2:
+ return view(conf[0], **conf[1])(callback)
+ elif isinstance(conf, str):
+ return view(conf)(callback)
+ else:
+ return callback
+
+
+#: Not a plugin, but part of the plugin API. TODO: Find a better place.
+class _ImportRedirect(object):
+ def __init__(self, name, impmask):
+ ''' Create a virtual package that redirects imports (see PEP 302). '''
+ self.name = name
+ self.impmask = impmask
+ self.module = sys.modules.setdefault(name, new_module(name))
+ self.module.__dict__.update({'__file__': __file__, '__path__': [],
+ '__all__': [], '__loader__': self})
+ sys.meta_path.append(self)
+
+ def find_module(self, fullname, path=None):
+ if '.' not in fullname: return
+ packname = fullname.rsplit('.', 1)[0]
+ if packname != self.name: return
+ return self
+
+ def load_module(self, fullname):
+ if fullname in sys.modules: return sys.modules[fullname]
+ modname = fullname.rsplit('.', 1)[1]
+ realname = self.impmask % modname
+ __import__(realname)
+ module = sys.modules[fullname] = sys.modules[realname]
+ setattr(self.module, modname, module)
+ module.__loader__ = self
+ return module
+
+
+
+
+
+
+###############################################################################
+# Common Utilities #############################################################
+###############################################################################
+
+
+class MultiDict(DictMixin):
+ """ This dict stores multiple values per key, but behaves exactly like a
+ normal dict in that it returns only the newest value for any given key.
+ There are special methods available to access the full list of values.
+ """
+
+ def __init__(self, *a, **k):
+ self.dict = dict((k, [v]) for (k, v) in dict(*a, **k).items())
+
+ def __len__(self): return len(self.dict)
+ def __iter__(self): return iter(self.dict)
+ def __contains__(self, key): return key in self.dict
+ def __delitem__(self, key): del self.dict[key]
+ def __getitem__(self, key): return self.dict[key][-1]
+ def __setitem__(self, key, value): self.append(key, value)
+ def keys(self): return self.dict.keys()
+
+ if py3k:
+ def values(self): return (v[-1] for v in self.dict.values())
+ def items(self): return ((k, v[-1]) for k, v in self.dict.items())
+ def allitems(self):
+ return ((k, v) for k, vl in self.dict.items() for v in vl)
+ iterkeys = keys
+ itervalues = values
+ iteritems = items
+ iterallitems = allitems
+
+ else:
+ def values(self): return [v[-1] for v in self.dict.values()]
+ def items(self): return [(k, v[-1]) for k, v in self.dict.items()]
+ def iterkeys(self): return self.dict.iterkeys()
+ def itervalues(self): return (v[-1] for v in self.dict.itervalues())
+ def iteritems(self):
+ return ((k, v[-1]) for k, v in self.dict.iteritems())
+ def iterallitems(self):
+ return ((k, v) for k, vl in self.dict.iteritems() for v in vl)
+ def allitems(self):
+ return [(k, v) for k, vl in self.dict.iteritems() for v in vl]
+
+ def get(self, key, default=None, index=-1, type=None):
+ ''' Return the most recent value for a key.
+
+ :param default: The default value to be returned if the key is not
+ present or the type conversion fails.
+ :param index: An index for the list of available values.
+ :param type: If defined, this callable is used to cast the value
+ into a specific type. Exceptions are suppressed and result in
+ the default value being returned.
+ '''
+ try:
+ val = self.dict[key][index]
+ return type(val) if type else val
+ except Exception:
+ pass
+ return default
+
+ def append(self, key, value):
+ ''' Add a new value to the list of values for this key. '''
+ self.dict.setdefault(key, []).append(value)
+
+ def replace(self, key, value):
+ ''' Replace the list of values with a single value. '''
+ self.dict[key] = [value]
+
+ def getall(self, key):
+ ''' Return a (possibly empty) list of values for a key. '''
+ return self.dict.get(key) or []
+
+ #: Aliases for WTForms to mimic other multi-dict APIs (Django)
+ getone = get
+ getlist = getall
+
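+ # Illustrative sketch of MultiDict semantics (not part of bottle):
+ #
+ #   md = MultiDict(a=1)
+ #   md['a'] = 2        # appends; item access always returns the newest value
+ #   md['a']            # -> 2
+ #   md.getall('a')     # -> [1, 2]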
+
+class FormsDict(MultiDict):
+ ''' This :class:`MultiDict` subclass is used to store request form data.
+ Additionally to the normal dict-like item access methods (which return
+ unmodified data as native strings), this container also supports
+ attribute-like access to its values. Attributes are automatically de-
+ or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing
+ attributes default to an empty string. '''
+
+ #: Encoding used for attribute values.
+ input_encoding = 'utf8'
+ #: If true (default), unicode strings are first encoded with `latin1`
+ #: and then decoded to match :attr:`input_encoding`.
+ recode_unicode = True
+
+ def _fix(self, s, encoding=None):
+ if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI
+ return s.encode('latin1').decode(encoding or self.input_encoding)
+ elif isinstance(s, bytes): # Python 2 WSGI
+ return s.decode(encoding or self.input_encoding)
+ else:
+ return s
+
+ def decode(self, encoding=None):
+ ''' Returns a copy with all keys and values de- or recoded to match
+ :attr:`input_encoding`. Some libraries (e.g. WTForms) want a
+ unicode dictionary. '''
+ copy = FormsDict()
+ enc = copy.input_encoding = encoding or self.input_encoding
+ copy.recode_unicode = False
+ for key, value in self.allitems():
+ copy.append(self._fix(key, enc), self._fix(value, enc))
+ return copy
+
+ def getunicode(self, name, default=None, encoding=None):
+ ''' Return the value as a unicode string, or the default. '''
+ try:
+ return self._fix(self[name], encoding)
+ except (UnicodeError, KeyError):
+ return default
+
+ def __getattr__(self, name, default=unicode()):
+ # Without this guard, pickle generates a cryptic TypeError:
+ if name.startswith('__') and name.endswith('__'):
+ return super(FormsDict, self).__getattr__(name)
+ return self.getunicode(name, default=default)
+
+class HeaderDict(MultiDict):
+ """ A case-insensitive version of :class:`MultiDict` that defaults to
+ replace the old value instead of appending it. """
+
+ def __init__(self, *a, **ka):
+ self.dict = {}
+ if a or ka: self.update(*a, **ka)
+
+ def __contains__(self, key): return _hkey(key) in self.dict
+ def __delitem__(self, key): del self.dict[_hkey(key)]
+ def __getitem__(self, key): return self.dict[_hkey(key)][-1]
+ def __setitem__(self, key, value): self.dict[_hkey(key)] = [_hval(value)]
+ def append(self, key, value): self.dict.setdefault(_hkey(key), []).append(_hval(value))
+ def replace(self, key, value): self.dict[_hkey(key)] = [_hval(value)]
+ def getall(self, key): return self.dict.get(_hkey(key)) or []
+ def get(self, key, default=None, index=-1):
+ return MultiDict.get(self, _hkey(key), default, index)
+ def filter(self, names):
+ for name in (_hkey(n) for n in names):
+ if name in self.dict:
+ del self.dict[name]
+
+
+class WSGIHeaderDict(DictMixin):
+ ''' This dict-like class wraps a WSGI environ dict and provides convenient
+ access to HTTP_* fields. Keys and values are native strings
+ (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI
+ environment contains non-native string values, these are de- or encoded
+ using a lossless 'latin1' character set.
+
+ The API will remain stable even on changes to the relevant PEPs.
+ Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one
+ that uses non-native strings.)
+ '''
+ #: List of keys that do not have a ``HTTP_`` prefix.
+ cgikeys = ('CONTENT_TYPE', 'CONTENT_LENGTH')
+
+ def __init__(self, environ):
+ self.environ = environ
+
+ def _ekey(self, key):
+ ''' Translate header field name to CGI/WSGI environ key. '''
+ key = key.replace('-','_').upper()
+ if key in self.cgikeys:
+ return key
+ return 'HTTP_' + key
+
+ def raw(self, key, default=None):
+ ''' Return the header value as is (may be bytes or unicode). '''
+ return self.environ.get(self._ekey(key), default)
+
+ def __getitem__(self, key):
+ return tonat(self.environ[self._ekey(key)], 'latin1')
+
+ def __setitem__(self, key, value):
+ raise TypeError("%s is read-only." % self.__class__)
+
+ def __delitem__(self, key):
+ raise TypeError("%s is read-only." % self.__class__)
+
+ def __iter__(self):
+ for key in self.environ:
+ if key[:5] == 'HTTP_':
+ yield key[5:].replace('_', '-').title()
+ elif key in self.cgikeys:
+ yield key.replace('_', '-').title()
+
+ def keys(self): return [x for x in self]
+ def __len__(self): return len(self.keys())
+ def __contains__(self, key): return self._ekey(key) in self.environ
+
+
+
+class ConfigDict(dict):
+ ''' A dict-like configuration storage with additional support for
+ namespaces, validators, meta-data, on_change listeners and more.
+
+ This storage is optimized for fast read access. Retrieving a key
+ or using non-altering dict methods (e.g. `dict.get()`) has no overhead
+ compared to a native dict.
+ '''
+ __slots__ = ('_meta', '_on_change')
+
+ class Namespace(DictMixin):
+
+ def __init__(self, config, namespace):
+ self._config = config
+ self._prefix = namespace
+
+ def __getitem__(self, key):
+ depr('Accessing namespaces as dicts is discouraged. '
+ 'Only use flat item access: '
+ 'cfg["names"]["pace"]["key"] -> cfg["name.space.key"]') #0.12
+ return self._config[self._prefix + '.' + key]
+
+ def __setitem__(self, key, value):
+ self._config[self._prefix + '.' + key] = value
+
+ def __delitem__(self, key):
+ del self._config[self._prefix + '.' + key]
+
+ def __iter__(self):
+ ns_prefix = self._prefix + '.'
+ for key in self._config:
+ ns, dot, name = key.rpartition('.')
+ if ns == self._prefix and name:
+ yield name
+
+ def keys(self): return [x for x in self]
+ def __len__(self): return len(self.keys())
+ def __contains__(self, key): return self._prefix + '.' + key in self._config
+ def __repr__(self): return '<Config.Namespace %s.*>' % self._prefix
+ def __str__(self): return '<Config.Namespace %s.*>' % self._prefix
+
+ # Deprecated ConfigDict features
+ def __getattr__(self, key):
+ depr('Attribute access is deprecated.') #0.12
+ if key not in self and key[0].isupper():
+ self[key] = ConfigDict.Namespace(self._config, self._prefix + '.' + key)
+ if key not in self and key.startswith('__'):
+ raise AttributeError(key)
+ return self.get(key)
+
+ def __setattr__(self, key, value):
+ if key in ('_config', '_prefix'):
+ self.__dict__[key] = value
+ return
+ depr('Attribute assignment is deprecated.') #0.12
+ if hasattr(DictMixin, key):
+ raise AttributeError('Read-only attribute.')
+ if key in self and self[key] and isinstance(self[key], self.__class__):
+ raise AttributeError('Non-empty namespace attribute.')
+ self[key] = value
+
+ def __delattr__(self, key):
+ if key in self:
+ val = self.pop(key)
+ if isinstance(val, self.__class__):
+ prefix = key + '.'
+ for key in self:
+ if key.startswith(prefix):
+ del self[prefix+key]
+
+ def __call__(self, *a, **ka):
+ depr('Calling ConfDict is deprecated. Use the update() method.') #0.12
+ self.update(*a, **ka)
+ return self
+
+ def __init__(self, *a, **ka):
+ self._meta = {}
+ self._on_change = lambda name, value: None
+ if a or ka:
+ depr('Constructor no longer accepts parameters.') #0.12
+ self.update(*a, **ka)
+
+ def load_config(self, filename):
+ ''' Load values from an *.ini style config file.
+
+ If the config file contains sections, their names are used as
+ namespaces for the values within. The two special sections
+ ``DEFAULT`` and ``bottle`` refer to the root namespace (no prefix).
+ '''
+ conf = ConfigParser()
+ conf.read(filename)
+ for section in conf.sections():
+ for key, value in conf.items(section):
+ if section not in ('DEFAULT', 'bottle'):
+ key = section + '.' + key
+ self[key] = value
+ return self
+
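+ # Illustrative sketch (hypothetical file name and contents, not part of bottle):
+ # given an ini file 'app.ini' containing   [sqlite]
+ #                                          db = /tmp/test.db
+ # config.load_config('app.ini') exposes the value as config['sqlite.db'],
+ # because section names become namespaces for the keys inside them.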
+ def load_dict(self, source, namespace='', make_namespaces=False):
+ ''' Import values from a dictionary structure. Nesting can be used to
+ represent namespaces.
+
+ >>> ConfigDict().load_dict({'name': {'space': {'key': 'value'}}})
+ {'name.space.key': 'value'}
+ '''
+ stack = [(namespace, source)]
+ while stack:
+ prefix, source = stack.pop()
+ if not isinstance(source, dict):
+ raise TypeError('Source is not a dict (%r)' % type(source))
+ for key, value in source.items():
+ if not isinstance(key, basestring):
+ raise TypeError('Key is not a string (%r)' % type(key))
+ full_key = prefix + '.' + key if prefix else key
+ if isinstance(value, dict):
+ stack.append((full_key, value))
+ if make_namespaces:
+ self[full_key] = self.Namespace(self, full_key)
+ else:
+ self[full_key] = value
+ return self
+
+ def update(self, *a, **ka):
+ ''' If the first parameter is a string, all keys are prefixed with this
+ namespace. Apart from that it works just as the usual dict.update().
+ Example: ``update('some.namespace', key='value')`` '''
+ prefix = ''
+ if a and isinstance(a[0], basestring):
+ prefix = a[0].strip('.') + '.'
+ a = a[1:]
+ for key, value in dict(*a, **ka).items():
+ self[prefix+key] = value
+
+ def setdefault(self, key, value):
+ if key not in self:
+ self[key] = value
+ return self[key]
+
+ def __setitem__(self, key, value):
+ if not isinstance(key, basestring):
+ raise TypeError('Key has type %r (not a string)' % type(key))
+
+ value = self.meta_get(key, 'filter', lambda x: x)(value)
+ if key in self and self[key] is value:
+ return
+ self._on_change(key, value)
+ dict.__setitem__(self, key, value)
+
+ def __delitem__(self, key):
+ dict.__delitem__(self, key)
+
+ def clear(self):
+ for key in self:
+ del self[key]
+
+ def meta_get(self, key, metafield, default=None):
+ ''' Return the value of a meta field for a key. '''
+ return self._meta.get(key, {}).get(metafield, default)
+
+ def meta_set(self, key, metafield, value):
+ ''' Set the meta field for a key to a new value. This triggers the
+ on-change handler for existing keys. '''
+ self._meta.setdefault(key, {})[metafield] = value
+ if key in self:
+ self[key] = self[key]
+
+ def meta_list(self, key):
+ ''' Return an iterable of meta field names defined for a key. '''
+ return self._meta.get(key, {}).keys()
+
+ # Deprecated ConfigDict features
+ def __getattr__(self, key):
+ depr('Attribute access is deprecated.') #0.12
+ if key not in self and key[0].isupper():
+ self[key] = self.Namespace(self, key)
+ if key not in self and key.startswith('__'):
+ raise AttributeError(key)
+ return self.get(key)
+
+ def __setattr__(self, key, value):
+ if key in self.__slots__:
+ return dict.__setattr__(self, key, value)
+ depr('Attribute assignment is deprecated.') #0.12
+ if hasattr(dict, key):
+ raise AttributeError('Read-only attribute.')
+ if key in self and self[key] and isinstance(self[key], self.Namespace):
+ raise AttributeError('Non-empty namespace attribute.')
+ self[key] = value
+
+ def __delattr__(self, key):
+ if key in self:
+ val = self.pop(key)
+ if isinstance(val, self.Namespace):
+ prefix = key + '.'
+ for key in self:
+ if key.startswith(prefix):
+ del self[prefix+key]
+
+ def __call__(self, *a, **ka):
+ depr('Calling ConfDict is deprecated. Use the update() method.') #0.12
+ self.update(*a, **ka)
+ return self
+
+
+
+class AppStack(list):
+ """ A stack-like list. Calling it returns the head of the stack. """
+
+ def __call__(self):
+ """ Return the current default application. """
+ return self[-1]
+
+ def push(self, value=None):
+ """ Add a new :class:`Bottle` instance to the stack """
+ if not isinstance(value, Bottle):
+ value = Bottle()
+ self.append(value)
+ return value
+
+
+class WSGIFileWrapper(object):
+
+ def __init__(self, fp, buffer_size=1024*64):
+ self.fp, self.buffer_size = fp, buffer_size
+ for attr in ('fileno', 'close', 'read', 'readlines', 'tell', 'seek'):
+ if hasattr(fp, attr): setattr(self, attr, getattr(fp, attr))
+
+ def __iter__(self):
+ buff, read = self.buffer_size, self.read
+ while True:
+ part = read(buff)
+ if not part: return
+ yield part
+
+
+class _closeiter(object):
+ ''' This only exists to be able to attach a .close method to iterators that
+ do not support attribute assignment (most of itertools). '''
+
+ def __init__(self, iterator, close=None):
+ self.iterator = iterator
+ self.close_callbacks = makelist(close)
+
+ def __iter__(self):
+ return iter(self.iterator)
+
+ def close(self):
+ for func in self.close_callbacks:
+ func()
+
+
+class ResourceManager(object):
+ ''' This class manages a list of search paths and helps to find and open
+ application-bound resources (files).
+
+ :param base: default value for :meth:`add_path` calls.
+ :param opener: callable used to open resources.
+ :param cachemode: controls which lookups are cached. One of 'all',
+ 'found' or 'none'.
+ '''
+
+ def __init__(self, base='./', opener=open, cachemode='all'):
+ self.opener = opener
+ self.base = base
+ self.cachemode = cachemode
+
+ #: A list of search paths. See :meth:`add_path` for details.
+ self.path = []
+ #: A cache for resolved paths. ``res.cache.clear()`` clears the cache.
+ self.cache = {}
+
+ def add_path(self, path, base=None, index=None, create=False):
+ ''' Add a new path to the list of search paths. Return False if the
+ path does not exist.
+
+ :param path: The new search path. Relative paths are turned into
+ an absolute and normalized form. If the path looks like a file
+ (not ending in `/`), the filename is stripped off.
+ :param base: Path used to absolutize relative search paths.
+ Defaults to :attr:`base` which defaults to ``os.getcwd()``.
+ :param index: Position within the list of search paths. Defaults
+ to last index (appends to the list).
+
+ The `base` parameter makes it easy to reference files installed
+ along with a python module or package::
+
+ res.add_path('./resources/', __file__)
+ '''
+ base = os.path.abspath(os.path.dirname(base or self.base))
+ path = os.path.abspath(os.path.join(base, os.path.dirname(path)))
+ path += os.sep
+ if path in self.path:
+ self.path.remove(path)
+ if create and not os.path.isdir(path):
+ os.makedirs(path)
+ if index is None:
+ self.path.append(path)
+ else:
+ self.path.insert(index, path)
+ self.cache.clear()
+ return os.path.exists(path)
+
+ def __iter__(self):
+ ''' Iterate over all existing files in all registered paths. '''
+ search = self.path[:]
+ while search:
+ path = search.pop()
+ if not os.path.isdir(path): continue
+ for name in os.listdir(path):
+ full = os.path.join(path, name)
+ if os.path.isdir(full): search.append(full)
+ else: yield full
+
+ def lookup(self, name):
+ ''' Search for a resource and return an absolute file path, or `None`.
+
+ The :attr:`path` list is searched in order. The first match is
+ returned. Symlinks are followed. The result is cached to speed up
+ future lookups. '''
+ if name not in self.cache or DEBUG:
+ for path in self.path:
+ fpath = os.path.join(path, name)
+ if os.path.isfile(fpath):
+ if self.cachemode in ('all', 'found'):
+ self.cache[name] = fpath
+ return fpath
+ if self.cachemode == 'all':
+ self.cache[name] = None
+ return self.cache[name]
+
+ def open(self, name, mode='r', *args, **kwargs):
+ ''' Find a resource and return a file object, or raise IOError. '''
+ fname = self.lookup(name)
+ if not fname: raise IOError("Resource %r not found." % name)
+ return self.opener(fname, mode=mode, *args, **kwargs)
+
+
+class FileUpload(object):
+
+ def __init__(self, fileobj, name, filename, headers=None):
+ ''' Wrapper for file uploads. '''
+ #: Open file(-like) object (BytesIO buffer or temporary file)
+ self.file = fileobj
+ #: Name of the upload form field
+ self.name = name
+ #: Raw filename as sent by the client (may contain unsafe characters)
+ self.raw_filename = filename
+ #: A :class:`HeaderDict` with additional headers (e.g. content-type)
+ self.headers = HeaderDict(headers) if headers else HeaderDict()
+
+ content_type = HeaderProperty('Content-Type')
+ content_length = HeaderProperty('Content-Length', reader=int, default=-1)
+
+ def get_header(self, name, default=None):
+ """ Return the value of a header within the mulripart part. """
+ return self.headers.get(name, default)
+
+ @cached_property
+ def filename(self):
+ ''' Name of the file on the client file system, but normalized to ensure
+ file system compatibility. An empty filename is returned as 'empty'.
+
+ Only ASCII letters, digits, dashes, underscores and dots are
+ allowed in the final filename. Accents are removed, if possible.
+ Whitespace is replaced by a single dash. Leading or trailing dots
+ or dashes are removed. The filename is limited to 255 characters.
+ '''
+ fname = self.raw_filename
+ if not isinstance(fname, unicode):
+ fname = fname.decode('utf8', 'ignore')
+ fname = normalize('NFKD', fname).encode('ASCII', 'ignore').decode('ASCII')
+ fname = os.path.basename(fname.replace('\\', os.path.sep))
+ fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
+ fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
+ return fname[:255] or 'empty'
+
+ def _copy_file(self, fp, chunk_size=2**16):
+ read, write, offset = self.file.read, fp.write, self.file.tell()
+ while 1:
+ buf = read(chunk_size)
+ if not buf: break
+ write(buf)
+ self.file.seek(offset)
+
+ def save(self, destination, overwrite=False, chunk_size=2**16):
+ ''' Save file to disk or copy its content to an open file(-like) object.
+ If *destination* is a directory, :attr:`filename` is added to the
+ path. Existing files are not overwritten by default (IOError).
+
+ :param destination: File path, directory or file(-like) object.
+ :param overwrite: If True, replace existing files. (default: False)
+ :param chunk_size: Bytes to read at a time. (default: 64kb)
+ '''
+ if isinstance(destination, basestring): # Except file-likes here
+ if os.path.isdir(destination):
+ destination = os.path.join(destination, self.filename)
+ if not overwrite and os.path.exists(destination):
+ raise IOError('File exists.')
+ with open(destination, 'wb') as fp:
+ self._copy_file(fp, chunk_size)
+ else:
+ self._copy_file(destination, chunk_size)
+
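+ # Illustrative usage sketch (route, field name and path are examples only):
+ #
+ #   @route('/upload', method='POST')
+ #   def do_upload():
+ #       upload = request.files.get('data')   # a FileUpload instance
+ #       upload.save('/tmp/uploads')          # stored under the sanitized .filename
+ #       return 'Stored as %s' % upload.filename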
+
+
+
+
+
+###############################################################################
+# Application Helper ###########################################################
+###############################################################################
+
+
+def abort(code=500, text='Unknown Error.'):
+ """ Aborts execution and causes a HTTP error. """
+ raise HTTPError(code, text)
+
+
+def redirect(url, code=None):
+ """ Aborts execution and causes a 303 or 302 redirect, depending on
+ the HTTP protocol version. """
+ if not code:
+ code = 303 if request.get('SERVER_PROTOCOL') == "HTTP/1.1" else 302
+ res = response.copy(cls=HTTPResponse)
+ res.status = code
+ res.body = ""
+ res.set_header('Location', urljoin(request.url, url))
+ raise res
+
+
+def _file_iter_range(fp, offset, bytes, maxread=1024*1024):
+ ''' Yield chunks from a range in a file. No chunk is bigger than maxread.'''
+ fp.seek(offset)
+ while bytes > 0:
+ part = fp.read(min(bytes, maxread))
+ if not part: break
+ bytes -= len(part)
+ yield part
+
+
+def static_file(filename, root, mimetype='auto', download=False, charset='UTF-8'):
+ """ Open a file in a safe way and return :exc:`HTTPResponse` with status
+ code 200, 304, 403 or 404. The ``Content-Type``, ``Content-Encoding``,
+ ``Content-Length`` and ``Last-Modified`` headers are set if possible.
+ Special support for ``If-Modified-Since``, ``Range`` and ``HEAD``
+ requests.
+
+ :param filename: Name or path of the file to send.
+ :param root: Root path for file lookups. Should be an absolute directory
+ path.
+ :param mimetype: Defines the content-type header (default: guess from
+ file extension)
+ :param download: If True, ask the browser to open a `Save as...` dialog
+ instead of opening the file with the associated program. You can
+ specify a custom filename as a string. If not specified, the
+ original filename is used (default: False).
+ :param charset: The charset to use for files with a ``text/*``
+ mime-type. (default: UTF-8)
+ """
+
+ root = os.path.abspath(root) + os.sep
+ filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
+ headers = dict()
+
+ if not filename.startswith(root):
+ return HTTPError(403, "Access denied.")
+ if not os.path.exists(filename) or not os.path.isfile(filename):
+ return HTTPError(404, "File does not exist.")
+ if not os.access(filename, os.R_OK):
+ return HTTPError(403, "You do not have permission to access this file.")
+
+ if mimetype == 'auto':
+ mimetype, encoding = mimetypes.guess_type(filename)
+ if encoding: headers['Content-Encoding'] = encoding
+
+ if mimetype:
+ if mimetype[:5] == 'text/' and charset and 'charset' not in mimetype:
+ mimetype += '; charset=%s' % charset
+ headers['Content-Type'] = mimetype
+
+ if download:
+ download = os.path.basename(filename if download == True else download)
+ headers['Content-Disposition'] = 'attachment; filename="%s"' % download
+
+ stats = os.stat(filename)
+ headers['Content-Length'] = clen = stats.st_size
+ lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
+ headers['Last-Modified'] = lm
+
+ ims = request.environ.get('HTTP_IF_MODIFIED_SINCE')
+ if ims:
+ ims = parse_date(ims.split(";")[0].strip())
+ if ims is not None and ims >= int(stats.st_mtime):
+ headers['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime())
+ return HTTPResponse(status=304, **headers)
+
+ body = '' if request.method == 'HEAD' else open(filename, 'rb')
+
+ headers["Accept-Ranges"] = "bytes"
+ ranges = request.environ.get('HTTP_RANGE')
+ if 'HTTP_RANGE' in request.environ:
+ ranges = list(parse_range_header(request.environ['HTTP_RANGE'], clen))
+ if not ranges:
+ return HTTPError(416, "Requested Range Not Satisfiable")
+ offset, end = ranges[0]
+ headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end-1, clen)
+ headers["Content-Length"] = str(end-offset)
+ if body: body = _file_iter_range(body, offset, end-offset)
+ return HTTPResponse(body, status=206, **headers)
+ return HTTPResponse(body, **headers)
+
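+ # Illustrative usage sketch (routes and paths are examples only):
+ #
+ #   @route('/static/<filepath:path>')
+ #   def serve_static(filepath):
+ #       return static_file(filepath, root='/var/www/static')
+ #
+ #   @route('/download/<name>')
+ #   def download(name):
+ #       return static_file(name, root='/var/www/files', download=True)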
+
+
+
+
+
+###############################################################################
+# HTTP Utilities and MISC (TODO) ###############################################
+###############################################################################
+
+
+def debug(mode=True):
+ """ Change the debug level.
+ There is only one debug level supported at the moment."""
+ global DEBUG
+ if mode: warnings.simplefilter('default')
+ DEBUG = bool(mode)
+
+def http_date(value):
+ if isinstance(value, (datedate, datetime)):
+ value = value.utctimetuple()
+ elif isinstance(value, (int, float)):
+ value = time.gmtime(value)
+ if not isinstance(value, basestring):
+ value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value)
+ return value
+
+def parse_date(ims):
+ """ Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch. """
+ try:
+ ts = email.utils.parsedate_tz(ims)
+ return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone
+ except (TypeError, ValueError, IndexError, OverflowError):
+ return None
+
+def parse_auth(header):
+ """ Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None"""
+ try:
+ method, data = header.split(None, 1)
+ if method.lower() == 'basic':
+ user, pwd = touni(base64.b64decode(tob(data))).split(':',1)
+ return user, pwd
+ except (KeyError, ValueError):
+ return None
+
+def parse_range_header(header, maxlen=0):
+ ''' Yield (start, end) ranges parsed from a HTTP Range header. Skip
+ unsatisfiable ranges. The end index is non-inclusive.'''
+ if not header or header[:6] != 'bytes=': return
+ ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r]
+ for start, end in ranges:
+ try:
+ if not start: # bytes=-100 -> last 100 bytes
+ start, end = max(0, maxlen-int(end)), maxlen
+ elif not end: # bytes=100- -> all but the first 100 bytes
+ start, end = int(start), maxlen
+ else: # bytes=100-200 -> bytes 100-200 (inclusive)
+ start, end = int(start), min(int(end)+1, maxlen)
+ if 0 <= start < end <= maxlen:
+ yield start, end
+ except ValueError:
+ pass
+
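+ # Illustrative sketch of parse_range_header() results (not part of bottle):
+ #
+ #   list(parse_range_header('bytes=0-99,500-', 1000))
+ #   # -> [(0, 100), (500, 1000)]   (end index is non-inclusive)
+ #   list(parse_range_header('bytes=-100', 1000))
+ #   # -> [(900, 1000)]             (the last 100 bytes)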
+def _parse_qsl(qs):
+ r = []
+ for pair in qs.split('&'):
+ if not pair: continue
+ nv = pair.split('=', 1)
+ if len(nv) != 2: nv.append('')
+ key = urlunquote(nv[0].replace('+', ' '))
+ value = urlunquote(nv[1].replace('+', ' '))
+ r.append((key, value))
+ return r
+
+def _lscmp(a, b):
+ ''' Compares two strings in a cryptographically safe way:
+ Runtime is not affected by length of common prefix. '''
+ return not sum(0 if x==y else 1 for x, y in zip(a, b)) and len(a) == len(b)
+
+
+def cookie_encode(data, key):
+ ''' Encode and sign a pickle-able object. Return a (byte) string '''
+ msg = base64.b64encode(pickle.dumps(data, -1))
+ sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())
+ return tob('!') + sig + tob('?') + msg
+
+
+def cookie_decode(data, key):
+ ''' Verify and decode an encoded string. Return an object or None.'''
+ data = tob(data)
+ if cookie_is_encoded(data):
+ sig, msg = data.split(tob('?'), 1)
+ if _lscmp(sig[1:], base64.b64encode(hmac.new(tob(key), msg, digestmod=hashlib.md5).digest())):
+ return pickle.loads(base64.b64decode(msg))
+ return None
+
+
+def cookie_is_encoded(data):
+ ''' Return True if the argument looks like an encoded cookie.'''
+ return bool(data.startswith(tob('!')) and tob('?') in data)
+
+
+def html_escape(string):
+ ''' Escape HTML special characters ``&<>`` and quotes ``'"``. '''
+ return string.replace('&','&amp;').replace('<','&lt;').replace('>','&gt;')\
+ .replace('"','&quot;').replace("'",'&#039;')
+
+
+def html_quote(string):
+ ''' Escape and quote a string to be used as an HTTP attribute.'''
+ return '"%s"' % html_escape(string).replace('\n','
')\
+ .replace('\r','
').replace('\t',' ')
+
+
+def yieldroutes(func):
+ """ Return a generator for routes that match the signature (name, args)
+ of the func parameter. This may yield more than one route if the function
+ takes optional keyword arguments. The output is best described by example::
+
+ a() -> '/a'
+ b(x, y) -> '/b/<x>/<y>'
+ c(x, y=5) -> '/c/<x>' and '/c/<x>/<y>'
+ d(x=5, y=6) -> '/d' and '/d/<x>' and '/d/<x>/<y>'
+ """
+ path = '/' + func.__name__.replace('__','/').lstrip('/')
+ spec = getargspec(func)
+ argc = len(spec[0]) - len(spec[3] or [])
+ path += ('/<%s>' * argc) % tuple(spec[0][:argc])
+ yield path
+ for arg in spec[0][argc:]:
+ path += '/<%s>' % arg
+ yield path
+
+
+def path_shift(script_name, path_info, shift=1):
+ ''' Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa.
+
+ :return: The modified paths.
+ :param script_name: The SCRIPT_NAME path.
+ :param path_info: The PATH_INFO path.
+ :param shift: The number of path fragments to shift. May be negative to
+ change the shift direction. (default: 1)
+ '''
+ if shift == 0: return script_name, path_info
+ pathlist = path_info.strip('/').split('/')
+ scriptlist = script_name.strip('/').split('/')
+ if pathlist and pathlist[0] == '': pathlist = []
+ if scriptlist and scriptlist[0] == '': scriptlist = []
+ if shift > 0 and shift <= len(pathlist):
+ moved = pathlist[:shift]
+ scriptlist = scriptlist + moved
+ pathlist = pathlist[shift:]
+ elif shift < 0 and shift >= -len(scriptlist):
+ moved = scriptlist[shift:]
+ pathlist = moved + pathlist
+ scriptlist = scriptlist[:shift]
+ else:
+ empty = 'SCRIPT_NAME' if shift < 0 else 'PATH_INFO'
+ raise AssertionError("Cannot shift. Nothing left from %s" % empty)
+ new_script_name = '/' + '/'.join(scriptlist)
+ new_path_info = '/' + '/'.join(pathlist)
+ if path_info.endswith('/') and pathlist: new_path_info += '/'
+ return new_script_name, new_path_info
+
+
+def auth_basic(check, realm="private", text="Access denied"):
+ ''' Callback decorator to require HTTP auth (basic).
+ TODO: Add route(check_auth=...) parameter. '''
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*a, **ka):
+ user, password = request.auth or (None, None)
+ if user is None or not check(user, password):
+ err = HTTPError(401, text)
+ err.add_header('WWW-Authenticate', 'Basic realm="%s"' % realm)
+ return err
+ return func(*a, **ka)
+ return wrapper
+ return decorator
+
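+ # Illustrative usage sketch (credentials are examples only):
+ #
+ #   def check_credentials(user, password):
+ #       return user == 'admin' and password == 'secret'
+ #
+ #   @route('/admin')
+ #   @auth_basic(check_credentials)
+ #   def admin_panel():
+ #       return 'Restricted area'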
+
+# Shortcuts for common Bottle methods.
+# They all refer to the current default application.
+
+def make_default_app_wrapper(name):
+ ''' Return a callable that relays calls to the current default app. '''
+ @functools.wraps(getattr(Bottle, name))
+ def wrapper(*a, **ka):
+ return getattr(app(), name)(*a, **ka)
+ return wrapper
+
+route = make_default_app_wrapper('route')
+get = make_default_app_wrapper('get')
+post = make_default_app_wrapper('post')
+put = make_default_app_wrapper('put')
+delete = make_default_app_wrapper('delete')
+error = make_default_app_wrapper('error')
+mount = make_default_app_wrapper('mount')
+hook = make_default_app_wrapper('hook')
+install = make_default_app_wrapper('install')
+uninstall = make_default_app_wrapper('uninstall')
+url = make_default_app_wrapper('get_url')
+
+
+
+
+
+
+
+###############################################################################
+# Server Adapter ###############################################################
+###############################################################################
+
+
+class ServerAdapter(object):
+ quiet = False
+ def __init__(self, host='127.0.0.1', port=8080, **options):
+ self.options = options
+ self.host = host
+ self.port = int(port)
+
+ def run(self, handler): # pragma: no cover
+ pass
+
+ def __repr__(self):
+ args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
+ return "%s(%s)" % (self.__class__.__name__, args)
+
+
+class CGIServer(ServerAdapter):
+ quiet = True
+ def run(self, handler): # pragma: no cover
+ from wsgiref.handlers import CGIHandler
+ def fixed_environ(environ, start_response):
+ environ.setdefault('PATH_INFO', '')
+ return handler(environ, start_response)
+ CGIHandler().run(fixed_environ)
+
+
+class FlupFCGIServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ import flup.server.fcgi
+ self.options.setdefault('bindAddress', (self.host, self.port))
+ flup.server.fcgi.WSGIServer(handler, **self.options).run()
+
+
+class WSGIRefServer(ServerAdapter):
+ def run(self, app): # pragma: no cover
+ from wsgiref.simple_server import WSGIRequestHandler, WSGIServer
+ from wsgiref.simple_server import make_server
+ import socket
+
+ class FixedHandler(WSGIRequestHandler):
+ def address_string(self): # Prevent reverse DNS lookups please.
+ return self.client_address[0]
+ def log_request(*args, **kw):
+ if not self.quiet:
+ return WSGIRequestHandler.log_request(*args, **kw)
+
+ handler_cls = self.options.get('handler_class', FixedHandler)
+ server_cls = self.options.get('server_class', WSGIServer)
+
+ if ':' in self.host: # Fix wsgiref for IPv6 addresses.
+ if getattr(server_cls, 'address_family') == socket.AF_INET:
+ class server_cls(server_cls):
+ address_family = socket.AF_INET6
+
+ srv = make_server(self.host, self.port, app, server_cls, handler_cls)
+ srv.serve_forever()
+
+
+class CherryPyServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ depr("The wsgi server part of cherrypy was split into a new "
+ "project called 'cheroot'. Use the 'cheroot' server "
+ "adapter instead of cherrypy.")
+ from cherrypy import wsgiserver # This will fail for CherryPy >= 9
+
+ self.options['bind_addr'] = (self.host, self.port)
+ self.options['wsgi_app'] = handler
+
+ certfile = self.options.get('certfile')
+ if certfile:
+ del self.options['certfile']
+ keyfile = self.options.get('keyfile')
+ if keyfile:
+ del self.options['keyfile']
+
+ server = wsgiserver.CherryPyWSGIServer(**self.options)
+ if certfile:
+ server.ssl_certificate = certfile
+ if keyfile:
+ server.ssl_private_key = keyfile
+
+ try:
+ server.start()
+ finally:
+ server.stop()
+
+
+class CherootServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from cheroot import wsgi
+ from cheroot.ssl import builtin
+ self.options['bind_addr'] = (self.host, self.port)
+ self.options['wsgi_app'] = handler
+ certfile = self.options.pop('certfile', None)
+ keyfile = self.options.pop('keyfile', None)
+ chainfile = self.options.pop('chainfile', None)
+ server = wsgi.Server(**self.options)
+ if certfile and keyfile:
+ server.ssl_adapter = builtin.BuiltinSSLAdapter(
+ certfile, keyfile, chainfile)
+ try:
+ server.start()
+ finally:
+ server.stop()
+
+
+class WaitressServer(ServerAdapter):
+ def run(self, handler):
+ from waitress import serve
+ serve(handler, host=self.host, port=self.port)
+
+
+class PasteServer(ServerAdapter):
+ def run(self, handler): # pragma: no cover
+ from paste import httpserver
+ from paste.translogger import TransLogger
+ handler = TransLogger(handler, setup_console_handler=(not self.quiet))
+ httpserver.serve(handler, host=self.host, port=str(self.port),
+ **self.options)
+
+
+class MeinheldServer(ServerAdapter):
+ def run(self, handler):
+ from meinheld import server
+ server.listen((self.host, self.port))
+ server.run(handler)
+
+
+class FapwsServer(ServerAdapter):
+ """ Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3 """
+ def run(self, handler): # pragma: no cover
+ import fapws._evwsgi as evwsgi
+ from fapws import base, config
+ port = self.port
+ if float(config.SERVER_IDENT[-2:]) > 0.4:
+ # fapws3 silently changed its API in 0.5
+ port = str(port)
+ evwsgi.start(self.host, port)
+ # fapws3 never releases the GIL. Complain upstream. I tried. No luck.
+ if 'BOTTLE_CHILD' in os.environ and not self.quiet:
+ _stderr("WARNING: Auto-reloading does not work with Fapws3.\n")
+ _stderr(" (Fapws3 breaks python thread support)\n")
+ evwsgi.set_base_module(base)
+ def app(environ, start_response):
+ environ['wsgi.multiprocess'] = False
+ return handler(environ, start_response)
+ evwsgi.wsgi_cb(('', app))
+ evwsgi.run()
+
+
+class TornadoServer(ServerAdapter):
+ """ The super hyped asynchronous server by facebook. Untested. """
+ def run(self, handler): # pragma: no cover
+ import tornado.wsgi, tornado.httpserver, tornado.ioloop
+ container = tornado.wsgi.WSGIContainer(handler)
+ server = tornado.httpserver.HTTPServer(container)
+ server.listen(port=self.port,address=self.host)
+ tornado.ioloop.IOLoop.instance().start()
+
+
+class AppEngineServer(ServerAdapter):
+ """ Adapter for Google App Engine. """
+ quiet = True
+ def run(self, handler):
+ from google.appengine.ext.webapp import util
+ # A main() function in the handler script enables 'App Caching'.
+ # Let's make sure it is there. This _really_ improves performance.
+ module = sys.modules.get('__main__')
+ if module and not hasattr(module, 'main'):
+ module.main = lambda: util.run_wsgi_app(handler)
+ util.run_wsgi_app(handler)
+
+
+class TwistedServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from twisted.web import server, wsgi
+ from twisted.python.threadpool import ThreadPool
+ from twisted.internet import reactor
+ thread_pool = ThreadPool()
+ thread_pool.start()
+ reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
+ factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler))
+ reactor.listenTCP(self.port, factory, interface=self.host)
+ reactor.run()
+
+
+class DieselServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from diesel.protocols.wsgi import WSGIApplication
+ app = WSGIApplication(handler, port=self.port)
+ app.run()
+
+
+class GeventServer(ServerAdapter):
+ """ Untested. Options:
+
+ * `fast` (default: False) uses libevent's http server, but has some
+ issues: No streaming, no pipelining, no SSL.
+ * See gevent.wsgi.WSGIServer() documentation for more options.
+ """
+ def run(self, handler):
+ from gevent import pywsgi, local
+ if not isinstance(threading.local(), local.local):
+ msg = "Bottle requires gevent.monkey.patch_all() (before import)"
+ raise RuntimeError(msg)
+ if self.options.pop('fast', None):
+ depr('The "fast" option has been deprecated and removed by Gevent.')
+ if self.quiet:
+ self.options['log'] = None
+ address = (self.host, self.port)
+ server = pywsgi.WSGIServer(address, handler, **self.options)
+ if 'BOTTLE_CHILD' in os.environ:
+ import signal
+ signal.signal(signal.SIGINT, lambda s, f: server.stop())
+ server.serve_forever()
+
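+# Usage sketch (illustrative, not from the upstream source): the monkey-patching
+# requirement shown here is the one enforced by the RuntimeError above; the app,
+# host and port values are placeholders.
+#
+#     from gevent import monkey; monkey.patch_all()   # must run before importing bottle
+#     import bottle
+#     app = bottle.Bottle()
+#     bottle.run(app, server='gevent', host='0.0.0.0', port=8080)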
+
+class GeventSocketIOServer(ServerAdapter):
+ def run(self,handler):
+ from socketio import server
+ address = (self.host, self.port)
+ server.SocketIOServer(address, handler, **self.options).serve_forever()
+
+
+class GunicornServer(ServerAdapter):
+ """ Untested. See http://gunicorn.org/configure.html for options. """
+ def run(self, handler):
+ from gunicorn.app.base import Application
+
+ config = {'bind': "%s:%d" % (self.host, int(self.port))}
+ config.update(self.options)
+
+ class GunicornApplication(Application):
+ def init(self, parser, opts, args):
+ return config
+
+ def load(self):
+ return handler
+
+ GunicornApplication().run()
+
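+# Usage sketch: keyword options are forwarded to gunicorn's configuration, so
+# any gunicorn setting such as ``workers`` can be passed (values are placeholders):
+#
+#     run(app, server='gunicorn', host='127.0.0.1', port=8080, workers=4)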
+
+class EventletServer(ServerAdapter):
+ """ Untested """
+ def run(self, handler):
+ from eventlet import wsgi, listen
+ try:
+ wsgi.server(listen((self.host, self.port)), handler,
+ log_output=(not self.quiet))
+ except TypeError:
+ # Fallback, if we have old version of eventlet
+ wsgi.server(listen((self.host, self.port)), handler)
+
+
+class RocketServer(ServerAdapter):
+ """ Untested. """
+ def run(self, handler):
+ from rocket import Rocket
+ server = Rocket((self.host, self.port), 'wsgi', { 'wsgi_app' : handler })
+ server.start()
+
+
+class BjoernServer(ServerAdapter):
+ """ Fast server written in C: https://github.com/jonashaag/bjoern """
+ def run(self, handler):
+ from bjoern import run
+ run(handler, self.host, self.port)
+
+
+class AutoServer(ServerAdapter):
+ """ Untested. """
+ adapters = [WaitressServer, PasteServer, TwistedServer, CherryPyServer,
+ CherootServer, WSGIRefServer]
+
+ def run(self, handler):
+ for sa in self.adapters:
+ try:
+ return sa(self.host, self.port, **self.options).run(handler)
+ except ImportError:
+ pass
+
+server_names = {
+ 'cgi': CGIServer,
+ 'flup': FlupFCGIServer,
+ 'wsgiref': WSGIRefServer,
+ 'waitress': WaitressServer,
+ 'cherrypy': CherryPyServer,
+ 'cheroot': CherootServer,
+ 'paste': PasteServer,
+ 'fapws3': FapwsServer,
+ 'tornado': TornadoServer,
+ 'gae': AppEngineServer,
+ 'twisted': TwistedServer,
+ 'diesel': DieselServer,
+ 'meinheld': MeinheldServer,
+ 'gunicorn': GunicornServer,
+ 'eventlet': EventletServer,
+ 'gevent': GeventServer,
+ 'geventSocketIO':GeventSocketIOServer,
+ 'rocket': RocketServer,
+ 'bjoern' : BjoernServer,
+ 'auto': AutoServer,
+}
+
+
+
+
+
+
+###############################################################################
+# Application Control ##########################################################
+###############################################################################
+
+
+def load(target, **namespace):
+ """ Import a module or fetch an object from a module.
+
+ * ``package.module`` returns `module` as a module object.
+ * ``pack.mod:name`` returns the module variable `name` from `pack.mod`.
+ * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result.
+
+ The last form accepts not only function calls, but any type of
+ expression. Keyword arguments passed to this function are available as
+ local variables. Example: ``import_string('re:compile(x)', x='[a-z]')``
+ """
+ module, target = target.split(":", 1) if ':' in target else (target, None)
+ if module not in sys.modules: __import__(module)
+ if not target: return sys.modules[module]
+ if target.isalnum(): return getattr(sys.modules[module], target)
+ package_name = module.split('.')[0]
+ namespace[package_name] = sys.modules[package_name]
+ return eval('%s.%s' % (module, target), namespace)
+
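+# Illustrative uses of the three target forms documented above (``myapp`` and
+# ``views`` are placeholder names):
+#
+#     mod = load('myapp.views')                 # the module object itself
+#     fn  = load('myapp.views:index')           # the variable ``index`` from that module
+#     pat = load('re:compile(x)', x='[a-z]+')   # evaluate an expression with keywords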
+
+def load_app(target):
+ """ Load a bottle application from a module and make sure that the import
+ does not affect the current default application, but returns a separate
+ application object. See :func:`load` for the target parameter. """
+ global NORUN; NORUN, nr_old = True, NORUN
+ try:
+ tmp = default_app.push() # Create a new "default application"
+ rv = load(target) # Import the target module
+ return rv if callable(rv) else tmp
+ finally:
+ default_app.remove(tmp) # Remove the temporary added default application
+ NORUN = nr_old
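+
+# Sketch: fetch an application object without disturbing the default app
+# (``myapp.wsgi`` and ``application`` are placeholder names):
+#
+#     application = load_app('myapp.wsgi:application')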
+
+_debug = debug
+def run(app=None, server='wsgiref', host='127.0.0.1', port=8080,
+ interval=1, reloader=False, quiet=False, plugins=None,
+ debug=None, **kargs):
+ """ Start a server instance. This method blocks until the server terminates.
+
+ :param app: WSGI application or target string supported by
+ :func:`load_app`. (default: :func:`default_app`)
+ :param server: Server adapter to use. See :data:`server_names` keys
+ for valid names or pass a :class:`ServerAdapter` subclass.
+ (default: `wsgiref`)
+ :param host: Server address to bind to. Pass ``0.0.0.0`` to listen on
+ all interfaces, including the external one. (default: 127.0.0.1)
+ :param port: Server port to bind to. Values below 1024 require root
+ privileges. (default: 8080)
+ :param reloader: Start auto-reloading server? (default: False)
+ :param interval: Auto-reloader interval in seconds (default: 1)
+ :param quiet: Suppress output to stdout and stderr? (default: False)
+ :param options: Options passed to the server adapter.
+ """
+ if NORUN: return
+ if reloader and not os.environ.get('BOTTLE_CHILD'):
+ try:
+ lockfile = None
+ fd, lockfile = tempfile.mkstemp(prefix='bottle.', suffix='.lock')
+ os.close(fd) # We only need this file to exist. We never write to it
+ while os.path.exists(lockfile):
+ args = [sys.executable] + sys.argv
+ environ = os.environ.copy()
+ environ['BOTTLE_CHILD'] = 'true'
+ environ['BOTTLE_LOCKFILE'] = lockfile
+ p = subprocess.Popen(args, env=environ)
+ while p.poll() is None: # Busy wait...
+ os.utime(lockfile, None) # I am alive!
+ time.sleep(interval)
+ if p.poll() != 3:
+ if os.path.exists(lockfile): os.unlink(lockfile)
+ sys.exit(p.poll())
+ except KeyboardInterrupt:
+ pass
+ finally:
+ if os.path.exists(lockfile):
+ os.unlink(lockfile)
+ return
+
+ try:
+ if debug is not None: _debug(debug)
+ app = app or default_app()
+ if isinstance(app, basestring):
+ app = load_app(app)
+ if not callable(app):
+ raise ValueError("Application is not callable: %r" % app)
+
+ for plugin in plugins or []:
+ app.install(plugin)
+
+ if server in server_names:
+ server = server_names.get(server)
+ if isinstance(server, basestring):
+ server = load(server)
+ if isinstance(server, type):
+ server = server(host=host, port=port, **kargs)
+ if not isinstance(server, ServerAdapter):
+ raise ValueError("Unknown or unsupported server: %r" % server)
+
+ server.quiet = server.quiet or quiet
+ if not server.quiet:
+ _stderr("Bottle v%s server starting up (using %s)...\n" % (__version__, repr(server)))
+ _stderr("Listening on http://%s:%d/\n" % (server.host, server.port))
+ _stderr("Hit Ctrl-C to quit.\n\n")
+
+ if reloader:
+ lockfile = os.environ.get('BOTTLE_LOCKFILE')
+ bgcheck = FileCheckerThread(lockfile, interval)
+ with bgcheck:
+ server.run(app)
+ if bgcheck.status == 'reload':
+ sys.exit(3)
+ else:
+ server.run(app)
+ except KeyboardInterrupt:
+ pass
+ except (SystemExit, MemoryError):
+ raise
+ except:
+ if not reloader: raise
+ if not getattr(server, 'quiet', quiet):
+ print_exc()
+ time.sleep(interval)
+ sys.exit(3)
+
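+# Typical invocation (sketch; the Bottle() instance and addresses are
+# placeholders, and any key from server_names or a ServerAdapter subclass may
+# be passed as ``server``):
+#
+#     from bottle import Bottle, run
+#     app = Bottle()
+#     run(app, host='0.0.0.0', port=8080, server='wsgiref',
+#         reloader=True, debug=True)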
+
+
+class FileCheckerThread(threading.Thread):
+ ''' Interrupt the main thread as soon as a changed module file is detected,
+ the lockfile gets deleted, or the lockfile gets too old. '''
+
+ def __init__(self, lockfile, interval):
+ threading.Thread.__init__(self)
+ self.lockfile, self.interval = lockfile, interval
+ #: Is one of 'reload', 'error' or 'exit'
+ self.status = None
+
+ def run(self):
+ exists = os.path.exists
+ mtime = lambda path: os.stat(path).st_mtime
+ files = dict()
+
+ for module in list(sys.modules.values()):
+ path = getattr(module, '__file__', '') or ''
+ if path[-4:] in ('.pyo', '.pyc'): path = path[:-1]
+ if path and exists(path): files[path] = mtime(path)
+
+ while not self.status:
+ if not exists(self.lockfile)\
+ or mtime(self.lockfile) < time.time() - self.interval - 5:
+ self.status = 'error'
+ thread.interrupt_main()
+ for path, lmtime in list(files.items()):
+ if not exists(path) or mtime(path) > lmtime:
+ self.status = 'reload'
+ thread.interrupt_main()
+ break
+ time.sleep(self.interval)
+
+ def __enter__(self):
+ self.start()
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ if not self.status: self.status = 'exit' # silent exit
+ self.join()
+ return exc_type is not None and issubclass(exc_type, KeyboardInterrupt)
+
+
+
+
+
+###############################################################################
+# Template Adapters ############################################################
+###############################################################################
+
+
+class TemplateError(HTTPError):
+ def __init__(self, message):
+ HTTPError.__init__(self, 500, message)
+
+
+class BaseTemplate(object):
+ """ Base class and minimal API for template adapters """
+ extensions = ['tpl','html','thtml','stpl']
+ settings = {} #used in prepare()
+ defaults = {} #used in render()
+
+ def __init__(self, source=None, name=None, lookup=[], encoding='utf8', **settings):
+ """ Create a new template.
+ If the source parameter (str or buffer) is missing, the name argument
+ is used to guess a template filename. Subclasses can assume that
+ self.source and/or self.filename are set. Both are strings.
+ The lookup, encoding and settings parameters are stored as instance
+ variables.
+ The lookup parameter stores a list containing directory paths.
+ The encoding parameter should be used to decode byte strings or files.
+ The settings parameter contains a dict for engine-specific settings.
+ """
+ self.name = name
+ self.source = source.read() if hasattr(source, 'read') else source
+ self.filename = source.filename if hasattr(source, 'filename') else None
+ self.lookup = [os.path.abspath(x) for x in lookup]
+ self.encoding = encoding
+ self.settings = self.settings.copy() # Copy from class variable
+ self.settings.update(settings) # Apply
+ if not self.source and self.name:
+ self.filename = self.search(self.name, self.lookup)
+ if not self.filename:
+ raise TemplateError('Template %s not found.' % repr(name))
+ if not self.source and not self.filename:
+ raise TemplateError('No template specified.')
+ self.prepare(**self.settings)
+
+ @classmethod
+ def search(cls, name, lookup=[]):
+ """ Search name in all directories specified in lookup.
+ First without, then with common extensions. Return first hit. """
+ if not lookup:
+ depr('The template lookup path list should not be empty.') #0.12
+ lookup = ['.']
+
+ if os.path.isabs(name) and os.path.isfile(name):
+ depr('Absolute template path names are deprecated.') #0.12
+ return os.path.abspath(name)
+
+ for spath in lookup:
+ spath = os.path.abspath(spath) + os.sep
+ fname = os.path.abspath(os.path.join(spath, name))
+ if not fname.startswith(spath): continue
+ if os.path.isfile(fname): return fname
+ for ext in cls.extensions:
+ if os.path.isfile('%s.%s' % (fname, ext)):
+ return '%s.%s' % (fname, ext)
+
+ @classmethod
+ def global_config(cls, key, *args):
+ ''' This reads or sets the global settings stored in class.settings. '''
+ if args:
+ cls.settings = cls.settings.copy() # Make settings local to class
+ cls.settings[key] = args[0]
+ else:
+ return cls.settings[key]
+
+ def prepare(self, **options):
+ """ Run preparations (parsing, caching, ...).
+ It should be possible to call this again to refresh a template or to
+ update settings.
+ """
+ raise NotImplementedError
+
+ def render(self, *args, **kwargs):
+ """ Render the template with the specified local variables and return
+ a single byte or unicode string. If it is a byte string, the encoding
+ must match self.encoding. This method must be thread-safe!
+ Local variables may be provided in dictionaries (args)
+ or directly, as keywords (kwargs).
+ """
+ raise NotImplementedError
+
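+# A minimal adapter following the prepare()/render() contract described above
+# (hypothetical ``VerbatimTemplate``, not shipped with bottle; it simply
+# returns the template text unchanged):
+#
+#     class VerbatimTemplate(BaseTemplate):
+#         def prepare(self, **options):
+#             if not self.source:
+#                 with open(self.filename, 'rb') as f:
+#                     self.source = f.read().decode(self.encoding)
+#         def render(self, *args, **kwargs):
+#             return self.source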
+
+class MakoTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from mako.template import Template
+ from mako.lookup import TemplateLookup
+ options.update({'input_encoding':self.encoding})
+ options.setdefault('format_exceptions', bool(DEBUG))
+ lookup = TemplateLookup(directories=self.lookup, **options)
+ if self.source:
+ self.tpl = Template(self.source, lookup=lookup, **options)
+ else:
+ self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ _defaults = self.defaults.copy()
+ _defaults.update(kwargs)
+ return self.tpl.render(**_defaults)
+
+
+class CheetahTemplate(BaseTemplate):
+ def prepare(self, **options):
+ from Cheetah.Template import Template
+ self.context = threading.local()
+ self.context.vars = {}
+ options['searchList'] = [self.context.vars]
+ if self.source:
+ self.tpl = Template(source=self.source, **options)
+ else:
+ self.tpl = Template(file=self.filename, **options)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ self.context.vars.update(self.defaults)
+ self.context.vars.update(kwargs)
+ out = str(self.tpl)
+ self.context.vars.clear()
+ return out
+
+
+class Jinja2Template(BaseTemplate):
+ def prepare(self, filters=None, tests=None, globals={}, **kwargs):
+ from jinja2 import Environment, FunctionLoader
+ if 'prefix' in kwargs: # TODO: to be removed after a while
+ raise RuntimeError('The keyword argument `prefix` has been removed. '
+ 'Use the full jinja2 environment name line_statement_prefix instead.')
+ self.env = Environment(loader=FunctionLoader(self.loader), **kwargs)
+ if filters: self.env.filters.update(filters)
+ if tests: self.env.tests.update(tests)
+ if globals: self.env.globals.update(globals)
+ if self.source:
+ self.tpl = self.env.from_string(self.source)
+ else:
+ self.tpl = self.env.get_template(self.filename)
+
+ def render(self, *args, **kwargs):
+ for dictarg in args: kwargs.update(dictarg)
+ _defaults = self.defaults.copy()
+ _defaults.update(kwargs)
+ return self.tpl.render(**_defaults)
+
+ def loader(self, name):
+ fname = self.search(name, self.lookup)
+ if not fname: return
+ with open(fname, "rb") as f:
+ return f.read().decode(self.encoding)
+
+
+class SimpleTemplate(BaseTemplate):
+
+ def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka):
+ self.cache = {}
+ enc = self.encoding
+ self._str = lambda x: touni(x, enc)
+ self._escape = lambda x: escape_func(touni(x, enc))
+ self.syntax = syntax
+ if noescape:
+ self._str, self._escape = self._escape, self._str
+
+ @cached_property
+ def co(self):
+ return compile(self.code, self.filename or '', 'exec')
+
+ @cached_property
+ def code(self):
+ source = self.source
+ if not source:
+ with open(self.filename, 'rb') as f:
+ source = f.read()
+ try:
+ source, encoding = touni(source), 'utf8'
+ except UnicodeError:
+ depr('Template encodings other than utf8 are no longer supported.') #0.11
+ source, encoding = touni(source, 'latin1'), 'latin1'
+ parser = StplParser(source, encoding=encoding, syntax=self.syntax)
+ code = parser.translate()
+ self.encoding = parser.encoding
+ return code
+
+ def _rebase(self, _env, _name=None, **kwargs):
+ if _name is None:
+ depr('Rebase function called without arguments.'
+ ' You were probably looking for {{base}}?', True) #0.12
+ _env['_rebase'] = (_name, kwargs)
+
+ def _include(self, _env, _name=None, **kwargs):
+ if _name is None:
+ depr('Rebase function called without arguments.'
+ ' You were probably looking for {{base}}?', True) #0.12
+ env = _env.copy()
+ env.update(kwargs)
+ if _name not in self.cache:
+ self.cache[_name] = self.__class__(name=_name, lookup=self.lookup)
+ return self.cache[_name].execute(env['_stdout'], env)
+
+ def execute(self, _stdout, kwargs):
+ env = self.defaults.copy()
+ env.update(kwargs)
+ env.update({'_stdout': _stdout, '_printlist': _stdout.extend,
+ 'include': functools.partial(self._include, env),
+ 'rebase': functools.partial(self._rebase, env), '_rebase': None,
+ '_str': self._str, '_escape': self._escape, 'get': env.get,
+ 'setdefault': env.setdefault, 'defined': env.__contains__ })
+ eval(self.co, env)
+ if env.get('_rebase'):
+ subtpl, rargs = env.pop('_rebase')
+ rargs['base'] = ''.join(_stdout) #copy stdout
+ del _stdout[:] # clear stdout
+ return self._include(env, subtpl, **rargs)
+ return env
+
+ def render(self, *args, **kwargs):
+ """ Render the template using keyword arguments as local variables. """
+ env = {}; stdout = []
+ for dictarg in args: env.update(dictarg)
+ env.update(kwargs)
+ self.execute(stdout, env)
+ return ''.join(stdout)
+
+
+class StplSyntaxError(TemplateError): pass
+
+
+class StplParser(object):
+ ''' Parser for stpl templates. '''
+ _re_cache = {} #: Cache for compiled re patterns
+ # This huge pile of voodoo magic splits python code into 8 different tokens.
+ # 1: All kinds of python strings (trust me, it works)
+ _re_tok = '([urbURB]?(?:\'\'(?!\')|""(?!")|\'{6}|"{6}' \
+ '|\'(?:[^\\\\\']|\\\\.)+?\'|"(?:[^\\\\"]|\\\\.)+?"' \
+ '|\'{3}(?:[^\\\\]|\\\\.|\\n)+?\'{3}' \
+ '|"{3}(?:[^\\\\]|\\\\.|\\n)+?"{3}))'
+ _re_inl = _re_tok.replace('|\\n','') # We re-use this string pattern later
+ # 2: Comments (until end of line, but not the newline itself)
+ _re_tok += '|(#.*)'
+ # 3,4: Open and close grouping tokens
+ _re_tok += '|([\\[\\{\\(])'
+ _re_tok += '|([\\]\\}\\)])'
+ # 5,6: Keywords that start or continue a python block (only start of line)
+ _re_tok += '|^([ \\t]*(?:if|for|while|with|try|def|class)\\b)' \
+ '|^([ \\t]*(?:elif|else|except|finally)\\b)'
+ # 7: Our special 'end' keyword (but only if it stands alone)
+ _re_tok += '|((?:^|;)[ \\t]*end[ \\t]*(?=(?:%(block_close)s[ \\t]*)?\\r?$|;|#))'
+ # 8: A customizable end-of-code-block template token (only end of line)
+ _re_tok += '|(%(block_close)s[ \\t]*(?=\\r?$))'
+ # 9: And finally, a single newline. The 10th token is 'everything else'
+ _re_tok += '|(\\r?\\n)'
+
+ # Match the start tokens of code areas in a template
+ _re_split = '(?m)^[ \t]*(\\\\?)((%(line_start)s)|(%(block_start)s))(%%?)'
+ # Match inline statements (may contain python strings)
+ _re_inl = '(?m)%%(inline_start)s((?:%s|[^\'"\n])*?)%%(inline_end)s' % _re_inl
+ _re_tok = '(?m)' + _re_tok
+
+ default_syntax = '<% %> % {{ }}'
+
+ def __init__(self, source, syntax=None, encoding='utf8'):
+ self.source, self.encoding = touni(source, encoding), encoding
+ self.set_syntax(syntax or self.default_syntax)
+ self.code_buffer, self.text_buffer = [], []
+ self.lineno, self.offset = 1, 0
+ self.indent, self.indent_mod = 0, 0
+ self.paren_depth = 0
+
+ def get_syntax(self):
+ ''' Tokens as a space separated string (default: <% %> % {{ }}) '''
+ return self._syntax
+
+ def set_syntax(self, syntax):
+ self._syntax = syntax
+ self._tokens = syntax.split()
+ if not syntax in self._re_cache:
+ names = 'block_start block_close line_start inline_start inline_end'
+ etokens = map(re.escape, self._tokens)
+ pattern_vars = dict(zip(names.split(), etokens))
+ patterns = (self._re_split, self._re_tok, self._re_inl)
+ patterns = [re.compile(p%pattern_vars) for p in patterns]
+ self._re_cache[syntax] = patterns
+ self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax]
+
+ syntax = property(get_syntax, set_syntax)
+
+ def translate(self):
+ if self.offset: raise RuntimeError('Parser is a one time instance.')
+ while True:
+ m = self.re_split.search(self.source[self.offset:])
+ if m:
+ text = self.source[self.offset:self.offset+m.start()]
+ self.text_buffer.append(text)
+ self.offset += m.end()
+ if m.group(1): # New escape syntax
+ line, sep, _ = self.source[self.offset:].partition('\n')
+ self.text_buffer.append(m.group(2)+m.group(5)+line+sep)
+ self.offset += len(line+sep)+1
+ continue
+ elif m.group(5): # Old escape syntax
+ depr('Escape code lines with a backslash.') #0.12
+ line, sep, _ = self.source[self.offset:].partition('\n')
+ self.text_buffer.append(m.group(2)+line+sep)
+ self.offset += len(line+sep)+1
+ continue
+ self.flush_text()
+ self.read_code(multiline=bool(m.group(4)))
+ else: break
+ self.text_buffer.append(self.source[self.offset:])
+ self.flush_text()
+ return ''.join(self.code_buffer)
+
+ def read_code(self, multiline):
+ code_line, comment = '', ''
+ while True:
+ m = self.re_tok.search(self.source[self.offset:])
+ if not m:
+ code_line += self.source[self.offset:]
+ self.offset = len(self.source)
+ self.write_code(code_line.strip(), comment)
+ return
+ code_line += self.source[self.offset:self.offset+m.start()]
+ self.offset += m.end()
+ _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups()
+ if (code_line or self.paren_depth > 0) and (_blk1 or _blk2): # a if b else c
+ code_line += _blk1 or _blk2
+ continue
+ if _str: # Python string
+ code_line += _str
+ elif _com: # Python comment (up to EOL)
+ comment = _com
+ if multiline and _com.strip().endswith(self._tokens[1]):
+ multiline = False # Allow end-of-block in comments
+ elif _po: # open parenthesis
+ self.paren_depth += 1
+ code_line += _po
+ elif _pc: # close parenthesis
+ if self.paren_depth > 0:
+ # we could check for matching parentheses here, but it's
+ # easier to leave that to python - just check counts
+ self.paren_depth -= 1
+ code_line += _pc
+ elif _blk1: # Start-block keyword (if/for/while/def/try/...)
+ code_line, self.indent_mod = _blk1, -1
+ self.indent += 1
+ elif _blk2: # Continue-block keyword (else/elif/except/...)
+ code_line, self.indent_mod = _blk2, -1
+ elif _end: # The non-standard 'end'-keyword (ends a block)
+ self.indent -= 1
+ elif _cend: # The end-code-block template token (usually '%>')
+ if multiline: multiline = False
+ else: code_line += _cend
+ else: # \n
+ self.write_code(code_line.strip(), comment)
+ self.lineno += 1
+ code_line, comment, self.indent_mod = '', '', 0
+ if not multiline:
+ break
+
+ def flush_text(self):
+ text = ''.join(self.text_buffer)
+ del self.text_buffer[:]
+ if not text: return
+ parts, pos, nl = [], 0, '\\\n'+' '*self.indent
+ for m in self.re_inl.finditer(text):
+ prefix, pos = text[pos:m.start()], m.end()
+ if prefix:
+ parts.append(nl.join(map(repr, prefix.splitlines(True))))
+ if prefix.endswith('\n'): parts[-1] += nl
+ parts.append(self.process_inline(m.group(1).strip()))
+ if pos < len(text):
+ prefix = text[pos:]
+ lines = prefix.splitlines(True)
+ if lines[-1].endswith('\\\\\n'): lines[-1] = lines[-1][:-3]
+ elif lines[-1].endswith('\\\\\r\n'): lines[-1] = lines[-1][:-4]
+ parts.append(nl.join(map(repr, lines)))
+ code = '_printlist((%s,))' % ', '.join(parts)
+ self.lineno += code.count('\n')+1
+ self.write_code(code)
+
+ def process_inline(self, chunk):
+ if chunk[0] == '!': return '_str(%s)' % chunk[1:]
+ return '_escape(%s)' % chunk
+
+ def write_code(self, line, comment=''):
+ line, comment = self.fix_backward_compatibility(line, comment)
+ code = ' ' * (self.indent+self.indent_mod)
+ code += line.lstrip() + comment + '\n'
+ self.code_buffer.append(code)
+
+ def fix_backward_compatibility(self, line, comment):
+ parts = line.strip().split(None, 2)
+ if parts and parts[0] in ('include', 'rebase'):
+ depr('The include and rebase keywords are functions now.') #0.12
+ if len(parts) == 1: return "_printlist([base])", comment
+ elif len(parts) == 2: return "_=%s(%r)" % tuple(parts), comment
+ else: return "_=%s(%r, %s)" % tuple(parts), comment
+ if self.lineno <= 2 and not line.strip() and 'coding' in comment:
+ m = re.match(r"#.*coding[:=]\s*([-\w.]+)", comment)
+ if m:
+ depr('PEP263 encoding strings in templates are deprecated.') #0.12
+ enc = m.group(1)
+ self.source = self.source.encode(self.encoding).decode(enc)
+ self.encoding = enc
+ return line, comment.replace('coding','coding*')
+ return line, comment
+
+
+def template(*args, **kwargs):
+ '''
+ Get a rendered template as a string iterator.
+ You can use a name, a filename or a template string as first parameter.
+ Template rendering arguments can be passed as dictionaries
+ or directly (as keyword arguments).
+ '''
+ tpl = args[0] if args else None
+ adapter = kwargs.pop('template_adapter', SimpleTemplate)
+ lookup = kwargs.pop('template_lookup', TEMPLATE_PATH)
+ tplid = (id(lookup), tpl)
+ if tplid not in TEMPLATES or DEBUG:
+ settings = kwargs.pop('template_settings', {})
+ if isinstance(tpl, adapter):
+ TEMPLATES[tplid] = tpl
+ if settings: TEMPLATES[tplid].prepare(**settings)
+ elif "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
+ TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings)
+ else:
+ TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings)
+ if not TEMPLATES[tplid]:
+ abort(500, 'Template (%s) not found' % tpl)
+ for dictarg in args[1:]: kwargs.update(dictarg)
+ return TEMPLATES[tplid].render(kwargs)
+
+mako_template = functools.partial(template, template_adapter=MakoTemplate)
+cheetah_template = functools.partial(template, template_adapter=CheetahTemplate)
+jinja2_template = functools.partial(template, template_adapter=Jinja2Template)
+
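+# Quick examples (sketch): a literal SimpleTemplate string, a named template
+# resolved against TEMPLATE_PATH (a file ./views/hello.tpl is assumed to
+# exist), and the Jinja2 variant (requires jinja2 to be installed):
+#
+#     template('Hello {{name}}!', name='World')       # -> 'Hello World!'
+#     template('hello', dict(name='World'))           # renders ./views/hello.tpl
+#     jinja2_template('Hello {{ name }}!', name='World')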
+
+def view(tpl_name, **defaults):
+ ''' Decorator: renders a template for a handler.
+ The handler can control its behavior like this:
+
+ - return a dict of template vars to fill out the template
+ - return something other than a dict and the view decorator will not
+ process the template, but return the handler result as is.
+ This includes returning an HTTPResponse(dict) to get,
+ for instance, JSON with autojson or other castfilters.
+ '''
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ result = func(*args, **kwargs)
+ if isinstance(result, (dict, DictMixin)):
+ tplvars = defaults.copy()
+ tplvars.update(result)
+ return template(tpl_name, **tplvars)
+ elif result is None:
+ return template(tpl_name, **defaults)
+ return result
+ return wrapper
+ return decorator
+
+mako_view = functools.partial(view, template_adapter=MakoTemplate)
+cheetah_view = functools.partial(view, template_adapter=CheetahTemplate)
+jinja2_view = functools.partial(view, template_adapter=Jinja2Template)
+
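+# Decorator sketch (assumes a template ``./views/hello_template.tpl``
+# containing something like ``Hello {{name}}!``):
+#
+#     @route('/hello/<name>')
+#     @view('hello_template')
+#     def hello(name):
+#         return dict(name=name)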
+
+
+
+
+
+###############################################################################
+# Constants and Globals ########################################################
+###############################################################################
+
+
+TEMPLATE_PATH = ['./', './views/']
+TEMPLATES = {}
+DEBUG = False
+NORUN = False # If set, run() does nothing. Used by load_app()
+
+#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found')
+HTTP_CODES = httplib.responses
+HTTP_CODES[418] = "I'm a teapot" # RFC 2324
+HTTP_CODES[422] = "Unprocessable Entity" # RFC 4918
+HTTP_CODES[428] = "Precondition Required"
+HTTP_CODES[429] = "Too Many Requests"
+HTTP_CODES[431] = "Request Header Fields Too Large"
+HTTP_CODES[511] = "Network Authentication Required"
+_HTTP_STATUS_LINES = dict((k, '%d %s'%(k,v)) for (k,v) in HTTP_CODES.items())
+
+#: The default template used for error pages. Override with @error()
+ERROR_PAGE_TEMPLATE = """
+%%try:
+ %%from %s import DEBUG, HTTP_CODES, request, touni
+ <html>
+ <head>
+ <title>Error: {{e.status}}</title>
+ </head>
+ <body>
+ <h1>Error: {{e.status}}</h1>
+ <p>Sorry, the requested URL <tt>{{repr(request.url)}}</tt>
+ caused an error:</p>
+ <pre>{{e.body}}</pre>
+ %%if DEBUG and e.exception:
+ <h2>Exception:</h2>
+ <pre>{{repr(e.exception)}}</pre>
+ %%end
+ %%if DEBUG and e.traceback:
+ <h2>Traceback:</h2>
+ <pre>{{e.traceback}}</pre>
+ %%end
+ </body>
+ </html>
+%%except ImportError:
+ <b>ImportError:</b> Could not generate the error page. Please add bottle to
+ the import path.
+%%end
+""" % __name__
+
+#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a
+#: request callback, this instance always refers to the *current* request
+#: (even on a multithreaded server).
+request = LocalRequest()
+
+#: A thread-safe instance of :class:`LocalResponse`. It is used to change the
+#: HTTP response for the *current* request.
+response = LocalResponse()
+
+#: A thread-safe namespace. Not used by Bottle.
+local = threading.local()
+
+# Initialize app stack (create first empty Bottle app)
+# BC: 0.6.4 and needed for run()
+app = default_app = AppStack()
+app.push()
+
+#: A virtual package that redirects import statements.
+#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`.
+ext = _ImportRedirect('bottle.ext' if __name__ == '__main__' else __name__+".ext", 'bottle_%s').module
+
+if __name__ == '__main__':
+ opt, args, parser = _cmd_options, _cmd_args, _cmd_parser
+ if opt.version:
+ _stdout('Bottle %s\n'%__version__)
+ sys.exit(0)
+ if not args:
+ parser.print_help()
+ _stderr('\nError: No application specified.\n')
+ sys.exit(1)
+
+ sys.path.insert(0, '.')
+ sys.modules.setdefault('bottle', sys.modules['__main__'])
+
+ host, port = (opt.bind or 'localhost'), 8080
+ if ':' in host and host.rfind(']') < host.rfind(':'):
+ host, port = host.rsplit(':', 1)
+ host = host.strip('[]')
+
+ run(args[0], host=host, port=int(port), server=opt.server,
+ reloader=opt.reload, plugins=opt.plugin, debug=opt.debug)
+
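+# Command-line sketch (flag names mirror the opt.* attributes used above;
+# ``mypackage.mymodule:app`` is a placeholder target):
+#
+#     python3 bottle.py --bind 0.0.0.0:8080 --server wsgiref --reload mypackage.mymodule:app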
+
+
+
+# THE END
diff --git a/venv/lib/python3.11/site-packages/distutils-precedence.pth b/venv/lib/python3.11/site-packages/distutils-precedence.pth
new file mode 100644
index 0000000..7f009fe
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/distutils-precedence.pth
@@ -0,0 +1 @@
+import os; var = 'SETUPTOOLS_USE_DISTUTILS'; enabled = os.environ.get(var, 'local') == 'local'; enabled and __import__('_distutils_hack').add_shim();
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/AUTHORS b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/AUTHORS
new file mode 100644
index 0000000..42a5c22
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/AUTHORS
@@ -0,0 +1,51 @@
+Original Authors
+----------------
+* Armin Rigo
+* Christian Tismer
+
+Contributors
+------------
+* Al Stone
+* Alexander Schmidt
+* Alexey Borzenkov
+* Andreas Schwab
+* Armin Ronacher
+* Bin Wang
+* Bob Ippolito
+* ChangBo Guo
+* Christoph Gohlke
+* Denis Bilenko
+* Dirk Mueller
+* Donovan Preston
+* Fantix King
+* Floris Bruynooghe
+* Fredrik Fornwall
+* Gerd Woetzel
+* Giel van Schijndel
+* Gökhan Karabulut
+* Gustavo Niemeyer
+* Guy Rozendorn
+* Hye-Shik Chang
+* Jared Kuolt
+* Jason Madden
+* Josh Snyder
+* Kyle Ambroff
+* Laszlo Boszormenyi
+* Mao Han
+* Marc Abramowitz
+* Marc Schlaich
+* Marcin Bachry
+* Matt Madison
+* Matt Turner
+* Michael Ellerman
+* Michael Matz
+* Ralf Schmitt
+* Robie Basak
+* Ronny Pfannschmidt
+* Samual M. Rushing
+* Tony Bowles
+* Tony Breeds
+* Trevor Bowen
+* Tulio Magno Quites Machado Filho
+* Ulrich Weigand
+* Victor Stinner
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/INSTALLER b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE
new file mode 100644
index 0000000..b73a4a1
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE
@@ -0,0 +1,30 @@
+The following files are derived from Stackless Python and are subject to the
+same license as Stackless Python:
+
+ src/greenlet/slp_platformselect.h
+ files in src/greenlet/platform/ directory
+
+See LICENSE.PSF and http://www.stackless.com/ for details.
+
+Unless otherwise noted, the files in greenlet have been released under the
+following MIT license:
+
+Copyright (c) Armin Rigo, Christian Tismer and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE.PSF b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE.PSF
new file mode 100644
index 0000000..d3b509a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/LICENSE.PSF
@@ -0,0 +1,47 @@
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011 Python Software Foundation; All Rights Reserved" are retained in Python
+alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/METADATA b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/METADATA
new file mode 100644
index 0000000..e87d0ab
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: greenlet
+Version: 3.0.3
+Summary: Lightweight in-process concurrent programming
+Home-page: https://greenlet.readthedocs.io/
+Author: Alexey Borzenkov
+Author-email: snaury@gmail.com
+Maintainer: Jason Madden
+Maintainer-email: jason@seecoresoftware.com
+License: MIT License
+Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues
+Project-URL: Source Code, https://github.com/python-greenlet/greenlet/
+Project-URL: Documentation, https://greenlet.readthedocs.io/
+Keywords: greenlet coroutine concurrency threads cooperative
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Operating System :: OS Independent
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+License-File: LICENSE.PSF
+License-File: AUTHORS
+Provides-Extra: docs
+Requires-Dist: Sphinx ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Provides-Extra: test
+Requires-Dist: objgraph ; extra == 'test'
+Requires-Dist: psutil ; extra == 'test'
+
+.. This file is included into docs/history.rst
+
+
+Greenlets are lightweight coroutines for in-process concurrent
+programming.
+
+The "greenlet" package is a spin-off of `Stackless`_, a version of
+CPython that supports micro-threads called "tasklets". Tasklets run
+pseudo-concurrently (typically in a single or a few OS-level threads)
+and are synchronized with data exchanges on "channels".
+
+A "greenlet", on the other hand, is a still more primitive notion of
+micro-thread with no implicit scheduling; coroutines, in other words.
+This is useful when you want to control exactly when your code runs.
+You can build custom scheduled micro-threads on top of greenlet;
+however, it seems that greenlets are useful on their own as a way to
+make advanced control flow structures. For example, we can recreate
+generators; the difference with Python's own generators is that our
+generators can call nested functions and the nested functions can
+yield values too. (Additionally, you don't need a "yield" keyword. See
+the example in `test_generator.py
+`_).
+
+Greenlets are provided as a C extension module for the regular unmodified
+interpreter.
+
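+A minimal switching sketch (illustrative only; it relies on nothing beyond the
+public ``greenlet.greenlet`` class and ``greenlet.getcurrent()``)::
+
+    import greenlet
+
+    def child(msg):
+        print("child received:", msg)
+        main.switch("hello from child")   # hand control back to the main greenlet
+
+    main = greenlet.getcurrent()
+    child_gl = greenlet.greenlet(child)
+    print(child_gl.switch("hi"))          # runs child(), then prints its reply
+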
+.. _`Stackless`: http://www.stackless.com
+
+
+Who is using Greenlet?
+======================
+
+There are several libraries that use Greenlet as a more flexible
+alternative to Python's built in coroutine support:
+
+ - `Concurrence`_
+ - `Eventlet`_
+ - `Gevent`_
+
+.. _Concurrence: http://opensource.hyves.org/concurrence/
+.. _Eventlet: http://eventlet.net/
+.. _Gevent: http://www.gevent.org/
+
+Getting Greenlet
+================
+
+The easiest way to get Greenlet is to install it with pip::
+
+ pip install greenlet
+
+
+Source code archives and binary distributions are available on the
+python package index at https://pypi.org/project/greenlet
+
+The source code repository is hosted on github:
+https://github.com/python-greenlet/greenlet
+
+Documentation is available on readthedocs.org:
+https://greenlet.readthedocs.io
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/RECORD b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/RECORD
new file mode 100644
index 0000000..ff63cc4
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/RECORD
@@ -0,0 +1,116 @@
+../../../include/site/python3.11/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755
+greenlet-3.0.3.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849
+greenlet-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+greenlet-3.0.3.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434
+greenlet-3.0.3.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424
+greenlet-3.0.3.dist-info/METADATA,sha256=CHtHlitUM_AS9hKoJfYLF3Vz-UFJlqRnhbRl2-1JrjU,3779
+greenlet-3.0.3.dist-info/RECORD,,
+greenlet-3.0.3.dist-info/WHEEL,sha256=xlJUan517virathN2lKmlOcMObJx20JZaCR_iv23glU,153
+greenlet-3.0.3.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9
+greenlet/TBrokenGreenlet.cpp,sha256=YgKaHkQV6_dKBrgS0HKDSqZroskv0IwSZDo4bsiwz3w,1029
+greenlet/TExceptionState.cpp,sha256=Ctg2YfyEYNjOYbteRB_oIJa9lNGyC7N1F3h4XqqQdg8,1367
+greenlet/TGreenlet.cpp,sha256=1xwAzGNqO68AZ4D5lD5DHmGPBohM6nv4BYnLatgIL68,25637
+greenlet/TGreenletGlobals.cpp,sha256=qLi1icS1UDSbefTkolz9TycEi_GOUblsEznMp0HFywQ,3268
+greenlet/TMainGreenlet.cpp,sha256=FvWtGJDKb64DLy0n-ddcTF6xJDwczPMKSm9mXSsHJKg,3365
+greenlet/TPythonState.cpp,sha256=QUoIQzF0HYmAJO_nwX5gXSSlMNL1mkxlN24KJCXIrIQ,14861
+greenlet/TStackState.cpp,sha256=VclDR-qiMeJjuiJxL9_u24MJiTgdSaYvr8bWQdTEZjY,7389
+greenlet/TThreadStateDestroy.cpp,sha256=EqZ-GjksrWNC20CY_P0yXN43wVRMYEh659SmRRqBaI4,7214
+greenlet/TUserGreenlet.cpp,sha256=b_Bmh4WZdS6I1yM2AfHRtd535WovtpYMkpfu2GQpaDs,23618
+greenlet/__init__.py,sha256=Dw4tovn18bpPaWQ4SK7jDJe24uV4ao264UfaT0uufxU,1723
+greenlet/__pycache__/__init__.cpython-311.pyc,,
+greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so,sha256=89kThwDfvkHXs3GXeuXnnZb-wShF60h1XyHXZYmkymU,1506232
+greenlet/greenlet.cpp,sha256=k9RZolayY79WgjPXwcA3Vcv48MuW7TAtogIZPaDD3gM,48815
+greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755
+greenlet/greenlet_allocator.hpp,sha256=kxyWW4Qdwlrc7ufgdb5vd6Y7jhauQ699Kod0mqiO1iM,1582
+greenlet/greenlet_compiler_compat.hpp,sha256=m7wvwrZqBoCQpDMTP-Z7whdXIES7e3AuXBgvPHSsfxg,4140
+greenlet/greenlet_cpython_add_pending.hpp,sha256=apAwIhGlgYrnYn03zWL6Sxy68kltDeb1e0QupZfb3DQ,6043
+greenlet/greenlet_cpython_compat.hpp,sha256=ZpN8gewZeOtd6T-mLidA7zteQ_P4vG8T1za_KPvCijg,3621
+greenlet/greenlet_exceptions.hpp,sha256=Dt8YdaQn8AK9nBfwU9rrDoMlR2Lw5aLTQV6ZAsHmfsw,3683
+greenlet/greenlet_greenlet.hpp,sha256=Ct_EAx4OJL6FvF5g3jV1ybSxnqzLVaRdPi2EcYT1iq4,27728
+greenlet/greenlet_internal.hpp,sha256=ZXH5zemWCN8wH8zAqMUGycvz_3IulRL6Gf2hZA6CknE,2703
+greenlet/greenlet_refs.hpp,sha256=ECkHKV1CVamtzmWWGKXXMpw8lXLeIzastXM9tfqlsNI,33864
+greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198
+greenlet/greenlet_thread_state.hpp,sha256=0UwJCNd86ifwM2yDd3QrNmHAECL-eNADHubwiB_XGA4,20614
+greenlet/greenlet_thread_state_dict_cleanup.hpp,sha256=tEN0rI1pZiEsdtr7Oda24gr52fGiHnYTLyM8Vme3Gns,3831
+greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867
+greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+greenlet/platform/__pycache__/__init__.cpython-311.pyc,,
+greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143
+greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307
+greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671
+greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748
+greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479
+greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892
+greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245
+greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746
+greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398
+greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331
+greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779
+greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928
+greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426
+greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860
+greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815
+greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941
+greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759
+greenlet/platform/switch_ppc_macosx.h,sha256=L8sB0c00V4G2_5cQCG3zX-23DKq3le_Dcj0sUDcACos,2624
+greenlet/platform/switch_ppc_unix.h,sha256=POy4bRBcH74Chfw4viFE9bVlZ-7BaNsFC0NnXr1L2tg,2652
+greenlet/platform/switch_riscv_unix.h,sha256=jX3vC_xZXiUho8tz4J6Ai8BNQB80yLn03fxkoMztVCU,740
+greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763
+greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797
+greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509
+greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841
+greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078
+greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805
+greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838
+greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059
+greenlet/slp_platformselect.h,sha256=JEnia_2HsTwdqvnnEsDxHQqalYvFJqx_CDsqvNUQYe8,3600
+greenlet/tests/__init__.py,sha256=F282jaIavKrhsYgHJEXtIQXKHdHpe9OJOPTK7R40JzI,9022
+greenlet/tests/__pycache__/__init__.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_cpp_exception.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_slp_switch.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-311.pyc,,
+greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-311.pyc,,
+greenlet/tests/__pycache__/leakcheck.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_contextvars.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_cpp.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_extension_interface.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_gc.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_generator.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_generator_nested.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_greenlet.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_greenlet_trash.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_leaks.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_stack_saved.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_throw.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_tracing.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_version.cpython-311.pyc,,
+greenlet/tests/__pycache__/test_weakref.cpython-311.pyc,,
+greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773
+greenlet/tests/_test_extension.cpython-311-x86_64-linux-gnu.so,sha256=cYvKKnDFhjTDjM_mYc_4l53g44Iz-CJR5woKXR6Ddqg,36624
+greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565
+greenlet/tests/_test_extension_cpp.cpython-311-x86_64-linux-gnu.so,sha256=de1fYlFMrBJRAwPKHWl-OMuBy8AmSXsh14FYYyLj6dI,57288
+greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263
+greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985
+greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961
+greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524
+greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956
+greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285
+greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817
+greenlet/tests/leakcheck.py,sha256=inbfM7_oVzd8jIKGxCgo4JqpFZaDAnWPkSULJ8vIE1s,11964
+greenlet/tests/test_contextvars.py,sha256=0n5pR_lbpAppc5wFfK0e1SwYLM-fsSFp72B5_ArLPGE,10348
+greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736
+greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829
+greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923
+greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240
+greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718
+greenlet/tests/test_greenlet.py,sha256=95qgDR-xtB0jzEFLirNx7HPUdwHikVMvDdyUoCvyjOo,45354
+greenlet/tests/test_greenlet_trash.py,sha256=P6r-3K4fmXX8foW8BVgthuqVKjicHMDvxfK7Al4x028,7508
+greenlet/tests/test_leaks.py,sha256=wskLqCAvqZ3qTZkam_wXzd-E5zelUjlXS5Ss8KshtZY,17465
+greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446
+greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712
+greenlet/tests/test_tracing.py,sha256=VlwzMU0C1noospZhuUMyB7MHw200emIvGCN_6G2p2ZU,8250
+greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339
+greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/WHEEL b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/WHEEL
new file mode 100644
index 0000000..cc43dbf
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_24_x86_64
+Tag: cp311-cp311-manylinux_2_28_x86_64
+
diff --git a/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/top_level.txt b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/top_level.txt
new file mode 100644
index 0000000..46725be
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet-3.0.3.dist-info/top_level.txt
@@ -0,0 +1 @@
+greenlet
diff --git a/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp
new file mode 100644
index 0000000..11a3bea
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TBrokenGreenlet.cpp
@@ -0,0 +1,45 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::UserGreenlet.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+
+#include "greenlet_greenlet.hpp"
+
+namespace greenlet {
+
+void* BrokenGreenlet::operator new(size_t UNUSED(count))
+{
+ return allocator.allocate(1);
+}
+
+
+void BrokenGreenlet::operator delete(void* ptr)
+{
+ return allocator.deallocate(static_cast<BrokenGreenlet*>(ptr),
+ 1);
+}
+
+greenlet::PythonAllocator<greenlet::BrokenGreenlet> greenlet::BrokenGreenlet::allocator;
+
+bool
+BrokenGreenlet::force_slp_switch_error() const noexcept
+{
+ return this->_force_slp_switch_error;
+}
+
+UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void)
+{
+ if (this->_force_switch_error) {
+ return switchstack_result_t(-1);
+ }
+ return UserGreenlet::g_switchstack();
+}
+
+}; //namespace greenlet
diff --git a/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp b/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp
new file mode 100644
index 0000000..ee6b191
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TExceptionState.cpp
@@ -0,0 +1,62 @@
+#ifndef GREENLET_EXCEPTION_STATE_CPP
+#define GREENLET_EXCEPTION_STATE_CPP
+
+#include <Python.h>
+#include "greenlet_greenlet.hpp"
+
+namespace greenlet {
+
+
+ExceptionState::ExceptionState()
+{
+ this->clear();
+}
+
+void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept
+{
+ this->exc_info = tstate->exc_info;
+ this->exc_state = tstate->exc_state;
+}
+
+void ExceptionState::operator>>(PyThreadState *const tstate) noexcept
+{
+ tstate->exc_state = this->exc_state;
+ tstate->exc_info =
+ this->exc_info ? this->exc_info : &tstate->exc_state;
+ this->clear();
+}
+
+void ExceptionState::clear() noexcept
+{
+ this->exc_info = nullptr;
+ this->exc_state.exc_value = nullptr;
+#if !GREENLET_PY311
+ this->exc_state.exc_type = nullptr;
+ this->exc_state.exc_traceback = nullptr;
+#endif
+ this->exc_state.previous_item = nullptr;
+}
+
+int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept
+{
+ Py_VISIT(this->exc_state.exc_value);
+#if !GREENLET_PY311
+ Py_VISIT(this->exc_state.exc_type);
+ Py_VISIT(this->exc_state.exc_traceback);
+#endif
+ return 0;
+}
+
+void ExceptionState::tp_clear() noexcept
+{
+ Py_CLEAR(this->exc_state.exc_value);
+#if !GREENLET_PY311
+ Py_CLEAR(this->exc_state.exc_type);
+ Py_CLEAR(this->exc_state.exc_traceback);
+#endif
+}
+
+
+}; // namespace greenlet
+
+#endif // GREENLET_EXCEPTION_STATE_CPP
diff --git a/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp
new file mode 100644
index 0000000..51f8995
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TGreenlet.cpp
@@ -0,0 +1,714 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::Greenlet.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+
+#include "greenlet_internal.hpp"
+#include "greenlet_greenlet.hpp"
+#include "greenlet_thread_state.hpp"
+
+#include "TGreenletGlobals.cpp"
+#include "TThreadStateDestroy.cpp"
+
+namespace greenlet {
+
+Greenlet::Greenlet(PyGreenlet* p)
+{
+ p ->pimpl = this;
+}
+
+Greenlet::~Greenlet()
+{
+ // XXX: Can't do this. tp_clear is a virtual function, and by the
+ // time we're here, we've sliced off our child classes.
+ //this->tp_clear();
+}
+
+Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack)
+ : stack_state(initial_stack)
+{
+ // can't use a delegating constructor because of
+ // MSVC for Python 2.7
+ p->pimpl = this;
+}
+
+bool
+Greenlet::force_slp_switch_error() const noexcept
+{
+ return false;
+}
+
+void
+Greenlet::release_args()
+{
+ this->switch_args.CLEAR();
+}
+
+/**
+ * CAUTION: This will allocate memory and may trigger garbage
+ * collection and arbitrary Python code.
+ */
+OwnedObject
+Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state))
+{
+ // If we're killed because we lost all references in the
+ // middle of a switch, that's ok. Don't reset the args/kwargs,
+ // we still want to pass them to the parent.
+ PyErr_SetString(mod_globs->PyExc_GreenletExit,
+ "Killing the greenlet because all references have vanished.");
+ // To get here it had to have run before
+ return this->g_switch();
+}
+
+inline void
+Greenlet::slp_restore_state() noexcept
+{
+#ifdef SLP_BEFORE_RESTORE_STATE
+ SLP_BEFORE_RESTORE_STATE();
+#endif
+ this->stack_state.copy_heap_to_stack(
+ this->thread_state()->borrow_current()->stack_state);
+}
+
+
+inline int
+Greenlet::slp_save_state(char *const stackref) noexcept
+{
+ // XXX: This used to happen in the middle, before saving, but
+ // after finding the next owner. Does that matter? This is
+ // only defined for Sparc/GCC where it flushes register
+ // windows to the stack (I think)
+#ifdef SLP_BEFORE_SAVE_STATE
+ SLP_BEFORE_SAVE_STATE();
+#endif
+ return this->stack_state.copy_stack_to_heap(stackref,
+ this->thread_state()->borrow_current()->stack_state);
+}
+
+/**
+ * CAUTION: This will allocate memory and may trigger garbage
+ * collection and arbitrary Python code.
+ */
+OwnedObject
+Greenlet::on_switchstack_or_initialstub_failure(
+ Greenlet* target,
+ const Greenlet::switchstack_result_t& err,
+ const bool target_was_me,
+ const bool was_initial_stub)
+{
+ // If we get here, either g_initialstub()
+ // failed, or g_switchstack() failed. Either one of those
+ // cases SHOULD leave us in the original greenlet with a valid stack.
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(
+ PyExc_SystemError,
+ was_initial_stub
+ ? "Failed to switch stacks into a greenlet for the first time."
+ : "Failed to switch stacks into a running greenlet.");
+ }
+ this->release_args();
+
+ if (target && !target_was_me) {
+ target->murder_in_place();
+ }
+
+ assert(!err.the_new_current_greenlet);
+ assert(!err.origin_greenlet);
+ return OwnedObject();
+
+}
+
+OwnedGreenlet
+Greenlet::g_switchstack_success() noexcept
+{
+ PyThreadState* tstate = PyThreadState_GET();
+ // restore the saved state
+ this->python_state >> tstate;
+ this->exception_state >> tstate;
+
+ // The thread state hasn't been changed yet.
+ ThreadState* thread_state = this->thread_state();
+ OwnedGreenlet result(thread_state->get_current());
+ thread_state->set_current(this->self());
+ //assert(thread_state->borrow_current().borrow() == this->_self);
+ return result;
+}
+
+Greenlet::switchstack_result_t
+Greenlet::g_switchstack(void)
+{
+ // if any of these assertions fail, it's likely because we
+ // switched away and tried to switch back to us. Early stages of
+ // switching are not reentrant because we re-use ``this->args()``.
+ // Switching away would happen if we trigger a garbage collection
+ // (by just using some Python APIs that happen to allocate Python
+ // objects) and some garbage had weakref callbacks or __del__ that
+ // switches (people don't write code like that by hand, but with
+ // gevent it's possible without realizing it)
+ assert(this->args() || PyErr_Occurred());
+ { /* save state */
+ if (this->thread_state()->is_current(this->self())) {
+ // Hmm, nothing to do.
+ // TODO: Does this bypass trace events that are
+ // important?
+ return switchstack_result_t(0,
+ this, this->thread_state()->borrow_current());
+ }
+ BorrowedGreenlet current = this->thread_state()->borrow_current();
+ PyThreadState* tstate = PyThreadState_GET();
+
+ current->python_state << tstate;
+ current->exception_state << tstate;
+ this->python_state.will_switch_from(tstate);
+ switching_thread_state = this;
+ current->expose_frames();
+ }
+ assert(this->args() || PyErr_Occurred());
+ // If this is the first switch into a greenlet, this will
+ // return twice, once with 1 in the new greenlet, once with 0
+ // in the origin.
+ int err;
+ if (this->force_slp_switch_error()) {
+ err = -1;
+ }
+ else {
+ err = slp_switch();
+ }
+
+ if (err < 0) { /* error */
+ // Tested by
+ // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running
+ //
+ // It's not clear if it's worth trying to clean up and
+ // continue here. Failing to switch stacks is a big deal which
+ // may not be recoverable (who knows what state the stack is in).
+ // Also, we've stolen references in preparation for calling
+ // ``g_switchstack_success()`` and we don't have a clean
+ // mechanism for backing that all out.
+ Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt.");
+ }
+
+ // No stack-based variables are valid anymore.
+
+ // But the global is volatile so we can reload it without the
+ // compiler caching it from earlier.
+ Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this
+ switching_thread_state = nullptr;
+ // except that no stack variables are valid, we would:
+ // assert(this == greenlet_that_switched_in);
+
+ // switchstack success is where we restore the exception state,
+ // etc. It returns the origin greenlet because it's convenient.
+
+ OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success();
+ assert(greenlet_that_switched_in->args() || PyErr_Occurred());
+ return switchstack_result_t(err, greenlet_that_switched_in, origin);
+}
+
+
+inline void
+Greenlet::check_switch_allowed() const
+{
+ // TODO: Make this take a parameter of the current greenlet,
+ // or current main greenlet, to make the check for
+ // cross-thread switching cheaper. Surely somewhere up the
+ // call stack we've already accessed the thread local variable.
+
+ // We expect to always have a main greenlet now; accessing the thread state
+ // created it. However, if we get here and cleanup has already
+ // begun because we're a greenlet that was running in a
+ // (now dead) thread, these invariants will not hold true. In
+ // fact, accessing `this->thread_state` may not even be possible.
+
+ // If the thread this greenlet was running in is dead,
+ // we'll still have a reference to a main greenlet, but the
+ // thread state pointer we have is bogus.
+ // TODO: Give the objects an API to determine if they belong
+ // to a dead thread.
+
+ const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage();
+
+ if (!main_greenlet) {
+ throw PyErrOccurred(mod_globs->PyExc_GreenletError,
+ "cannot switch to a garbage collected greenlet");
+ }
+
+ if (!main_greenlet->thread_state()) {
+ throw PyErrOccurred(mod_globs->PyExc_GreenletError,
+ "cannot switch to a different thread (which happens to have exited)");
+ }
+
+ // The main greenlet we found was from the .parent lineage.
+ // That may or may not have any relationship to the main
+ // greenlet of the running thread. We can't actually access
+ // our this->thread_state members to try to check that,
+ // because it could be in the process of getting destroyed,
+ // but setting the main_greenlet->thread_state member to NULL
+ // may not be visible yet. So we need to check against the
+ // current thread state (once the cheaper checks are out of
+ // the way)
+ const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet();
+ if (
+ // lineage main greenlet is not this thread's greenlet
+ current_main_greenlet != main_greenlet
+ || (
+ // attached to some thread
+ this->main_greenlet()
+ // XXX: Same condition as above. Was this supposed to be
+ // this->main_greenlet()?
+ && current_main_greenlet != main_greenlet)
+ // switching into a known dead thread (XXX: which, if we get here,
+ // is bad, because we just accessed the thread state, which is
+ // gone!)
+ || (!current_main_greenlet->thread_state())) {
+ // CAUTION: This may trigger memory allocations, gc, and
+ // arbitrary Python code.
+ throw PyErrOccurred(mod_globs->PyExc_GreenletError,
+ "cannot switch to a different thread");
+ }
+}
+
+const OwnedObject
+Greenlet::context() const
+{
+ using greenlet::PythonStateContext;
+ OwnedObject result;
+
+ if (this->is_currently_running_in_some_thread()) {
+ /* Currently running greenlet: context is stored in the thread state,
+ not the greenlet object. */
+ if (GET_THREAD_STATE().state().is_current(this->self())) {
+ result = PythonStateContext::context(PyThreadState_GET());
+ }
+ else {
+ throw ValueError(
+ "cannot get context of a "
+ "greenlet that is running in a different thread");
+ }
+ }
+ else {
+ /* Greenlet is not running: just return context. */
+ result = this->python_state.context();
+ }
+ if (!result) {
+ result = OwnedObject::None();
+ }
+ return result;
+}
+
+
+void
+Greenlet::context(BorrowedObject given)
+{
+ using greenlet::PythonStateContext;
+ if (!given) {
+ throw AttributeError("can't delete context attribute");
+ }
+ if (given.is_None()) {
+ /* "Empty context" is stored as NULL, not None. */
+ given = nullptr;
+ }
+
+ //checks type, incrs refcnt
+ greenlet::refs::OwnedContext context(given);
+ PyThreadState* tstate = PyThreadState_GET();
+
+ if (this->is_currently_running_in_some_thread()) {
+ if (!GET_THREAD_STATE().state().is_current(this->self())) {
+ throw ValueError("cannot set context of a greenlet"
+ " that is running in a different thread");
+ }
+
+ /* Currently running greenlet: context is stored in the thread state,
+ not the greenlet object. */
+ OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate));
+ PythonStateContext::context(tstate, context.relinquish_ownership());
+ }
+ else {
+ /* Greenlet is not running: just set context. Note that the
+ greenlet may be dead.*/
+ this->python_state.context() = context;
+ }
+}
+
+/**
+ * CAUTION: May invoke arbitrary Python code.
+ *
+ * Figure out what the result of ``greenlet.switch(arg, kwargs)``
+ * should be and transfers ownership of it to the left-hand-side.
+ *
+ * If switch() was just passed an arg tuple, then we'll just return that.
+ * If only keyword arguments were passed, then we'll pass the keyword
+ * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and
+ * return both.
+ *
+ * CAUTION: This may allocate a new tuple object, which may
+ * cause the Python garbage collector to run, which in turn may
+ * run arbitrary Python code that switches.
+ */
+OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept
+{
+ // Because this may invoke arbitrary Python code, which could
+ // result in switching back to us, we need to get the
+ // arguments locally on the stack.
+ assert(rhs);
+ OwnedObject args = rhs.args();
+ OwnedObject kwargs = rhs.kwargs();
+ rhs.CLEAR();
+ // We shouldn't be called twice for the same switch.
+ assert(args || kwargs);
+ assert(!rhs);
+
+ if (!kwargs) {
+ lhs = args;
+ }
+ else if (!PyDict_Size(kwargs.borrow())) {
+ lhs = args;
+ }
+ else if (!PySequence_Length(args.borrow())) {
+ lhs = kwargs;
+ }
+ else {
+ // PyTuple_Pack allocates memory, may GC, may run arbitrary
+ // Python code.
+ lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow()));
+ }
+ return lhs;
+}
+
+static OwnedObject
+g_handle_exit(const OwnedObject& greenlet_result)
+{
+ if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) {
+ /* catch and ignore GreenletExit */
+ PyErrFetchParam val;
+ PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam());
+ if (!val) {
+ return OwnedObject::None();
+ }
+ return OwnedObject(val);
+ }
+
+ if (greenlet_result) {
+ // package the result into a 1-tuple
+ // PyTuple_Pack increments the reference of its arguments,
+ // so we always need to decref the greenlet result;
+ // the owner will do that.
+ return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow()));
+ }
+
+ return OwnedObject();
+}
+
+
+
+/**
+ * May run arbitrary Python code.
+ */
+OwnedObject
+Greenlet::g_switch_finish(const switchstack_result_t& err)
+{
+ assert(err.the_new_current_greenlet == this);
+
+ ThreadState& state = *this->thread_state();
+ // Because calling the trace function could do arbitrary things,
+ // including switching away from this greenlet and then maybe
+ // switching back, we need to capture the arguments now so that
+ // they don't change.
+ OwnedObject result;
+ if (this->args()) {
+ result <<= this->args();
+ }
+ else {
+ assert(PyErr_Occurred());
+ }
+ assert(!this->args());
+ try {
+ // Our only caller handles the bad error case
+ assert(err.status >= 0);
+ assert(state.borrow_current() == this->self());
+ if (OwnedObject tracefunc = state.get_tracefunc()) {
+ assert(result || PyErr_Occurred());
+ g_calltrace(tracefunc,
+ result ? mod_globs->event_switch : mod_globs->event_throw,
+ err.origin_greenlet,
+ this->self());
+ }
+ // The above could have invoked arbitrary Python code, but
+ // it couldn't switch back to this object and *also*
+ // throw an exception, so the args won't have changed.
+
+ if (PyErr_Occurred()) {
+ // We get here if we fell off the end of the run() function
+ // raising an exception. The switch itself was
+ // successful, but the function raised.
+ // valgrind reports that memory allocated here can still
+ // be reached after a test run.
+ throw PyErrOccurred::from_current();
+ }
+ return result;
+ }
+ catch (const PyErrOccurred&) {
+ /* Turn switch errors into switch throws */
+ /* Turn trace errors into switch throws */
+ this->release_args();
+ throw;
+ }
+}
+
+void
+Greenlet::g_calltrace(const OwnedObject& tracefunc,
+ const greenlet::refs::ImmortalEventName& event,
+ const BorrowedGreenlet& origin,
+ const BorrowedGreenlet& target)
+{
+ PyErrPieces saved_exc;
+ try {
+ TracingGuard tracing_guard;
+ // TODO: We have saved the active exception (if any) that's
+ // about to be raised. In the 'throw' case, we could provide
+ // the exception to the tracefunction, which seems very helpful.
+ tracing_guard.CallTraceFunction(tracefunc, event, origin, target);
+ }
+ catch (const PyErrOccurred&) {
+ // In case of exceptions trace function is removed,
+ // and any existing exception is replaced with the tracing
+ // exception.
+ GET_THREAD_STATE().state().set_tracefunc(Py_None);
+ throw;
+ }
+
+ saved_exc.PyErrRestore();
+ assert(
+ (event == mod_globs->event_throw && PyErr_Occurred())
+ || (event == mod_globs->event_switch && !PyErr_Occurred())
+ );
+}
+
+void
+Greenlet::murder_in_place()
+{
+ if (this->active()) {
+ assert(!this->is_currently_running_in_some_thread());
+ this->deactivate_and_free();
+ }
+}
+
+inline void
+Greenlet::deactivate_and_free()
+{
+ if (!this->active()) {
+ return;
+ }
+ // Throw away any saved stack.
+ this->stack_state = StackState();
+ assert(!this->stack_state.active());
+ // Throw away any Python references.
+ // We're holding a borrowed reference to the last
+ // frame we executed. Since we borrowed it, the
+ // normal traversal, clear, and dealloc functions
+ // ignore it, meaning it leaks. (The thread state
+ // object can't find it to clear it when that's
+ // deallocated either, because by definition if we
+ // got an object on this list, it wasn't
+ // running and the thread state doesn't have
+ // this frame.)
+ // So here, we *do* clear it.
+ this->python_state.tp_clear(true);
+}
+
+bool
+Greenlet::belongs_to_thread(const ThreadState* thread_state) const
+{
+ if (!this->thread_state() // not running anywhere, or thread
+ // exited
+ || !thread_state) { // same, or there is no thread state.
+ return false;
+ }
+ return true;
+}
+
+
+void
+Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state)
+{
+ /* Cannot raise an exception to kill the greenlet if
+ it is not running in the same thread! */
+ if (this->belongs_to_thread(current_thread_state)) {
+ assert(current_thread_state);
+ // To get here it had to have run before
+ /* Send the greenlet a GreenletExit exception. */
+
+ // We don't care about the return value, only whether an
+ // exception happened.
+ this->throw_GreenletExit_during_dealloc(*current_thread_state);
+ return;
+ }
+
+ // Not the same thread! Temporarily save the greenlet
+ // into its thread's deleteme list, *if* it exists.
+ // If that thread has already exited, and processed its pending
+ // cleanup, we'll never be able to clean everything up: we won't
+ // be able to raise an exception.
+ // That's mostly OK! Since we can't add it to a list, our refcount
+ // won't increase, and we'll go ahead with the DECREFs later.
+ ThreadState *const thread_state = this->thread_state();
+ if (thread_state) {
+ thread_state->delete_when_thread_running(this->self());
+ }
+ else {
+ // The thread is dead, we can't raise an exception.
+ // We need to make it look non-active, though, so that dealloc
+ // finishes killing it.
+ this->deactivate_and_free();
+ }
+ return;
+}
+
+
+int
+Greenlet::tp_traverse(visitproc visit, void* arg)
+{
+
+ int result;
+ if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) {
+ return result;
+ }
+ //XXX: This is ugly. But so is handling everything having to do
+ //with the top frame.
+ bool visit_top_frame = this->was_running_in_dead_thread();
+ // When true, the thread is dead. Our implicit weak reference to the
+ // frame is now all that's left; we consider ourselves to
+ // strongly own it now.
+ if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) {
+ return result;
+ }
+ return 0;
+}
+
+int
+Greenlet::tp_clear()
+{
+ bool own_top_frame = this->was_running_in_dead_thread();
+ this->exception_state.tp_clear();
+ this->python_state.tp_clear(own_top_frame);
+ return 0;
+}
+
+bool Greenlet::is_currently_running_in_some_thread() const
+{
+ return this->stack_state.active() && !this->python_state.top_frame();
+}
+
+#if GREENLET_PY312
+void GREENLET_NOINLINE(Greenlet::expose_frames)()
+{
+ if (!this->python_state.top_frame()) {
+ return;
+ }
+
+ _PyInterpreterFrame* last_complete_iframe = nullptr;
+ _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame;
+ while (iframe) {
+ // We must make a copy before looking at the iframe contents,
+ // since iframe might point to a portion of the greenlet's C stack
+ // that was spilled when switching greenlets.
+ _PyInterpreterFrame iframe_copy;
+ this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe));
+ if (!_PyFrame_IsIncomplete(&iframe_copy)) {
+ // If the iframe were OWNED_BY_CSTACK then it would always be
+ // incomplete. Since it's not incomplete, it's not on the C stack
+ // and we can access it through the original `iframe` pointer
+ // directly. This is important since GetFrameObject might
+ // lazily _create_ the frame object and we don't want the
+ // interpreter to lose track of it.
+ assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK);
+
+ // We really want to just write:
+ // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe);
+ // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject
+ // which is not a visible symbol in libpython. The easiest
+ // way to get a public function to call it is using
+ // PyFrame_GetBack, which is defined as follows:
+ // assert(frame != NULL);
+ // assert(!_PyFrame_IsIncomplete(frame->f_frame));
+ // PyFrameObject *back = frame->f_back;
+ // if (back == NULL) {
+ // _PyInterpreterFrame *prev = frame->f_frame->previous;
+ // prev = _PyFrame_GetFirstComplete(prev);
+ // if (prev) {
+ // back = _PyFrame_GetFrameObject(prev);
+ // }
+ // }
+ // return (PyFrameObject*)Py_XNewRef(back);
+ if (!iframe->frame_obj) {
+ PyFrameObject dummy_frame;
+ _PyInterpreterFrame dummy_iframe;
+ dummy_frame.f_back = nullptr;
+ dummy_frame.f_frame = &dummy_iframe;
+ // force the iframe to be considered complete without
+ // needing to check its code object:
+ dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR;
+ dummy_iframe.previous = iframe;
+ assert(!_PyFrame_IsIncomplete(&dummy_iframe));
+ // Drop the returned reference immediately; the iframe
+ // continues to hold a strong reference
+ Py_XDECREF(PyFrame_GetBack(&dummy_frame));
+ assert(iframe->frame_obj);
+ }
+
+ // This is a complete frame, so make the last one of those we saw
+ // point at it, bypassing any incomplete frames (which may have
+ // been on the C stack) in between the two. We're overwriting
+ // last_complete_iframe->previous and need that to be reversible,
+ // so we store the original previous ptr in the frame object
+ // (which we must have created on a previous iteration through
+ // this loop). The frame object has a bunch of storage that is
+ // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only
+ // occurs when the frame object outlives the frame's execution,
+ // which can't have happened yet because the frame is currently
+ // executing as far as the interpreter is concerned. So, we can
+ // reuse it for our own purposes.
+ assert(iframe->owner == FRAME_OWNED_BY_THREAD
+ || iframe->owner == FRAME_OWNED_BY_GENERATOR);
+ if (last_complete_iframe) {
+ assert(last_complete_iframe->frame_obj);
+ memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0],
+ &last_complete_iframe->previous, sizeof(void *));
+ last_complete_iframe->previous = iframe;
+ }
+ last_complete_iframe = iframe;
+ }
+ // Frames that are OWNED_BY_FRAME_OBJECT are linked via the
+ // frame's f_back while all others are linked via the iframe's
+ // previous ptr. Since all the frames we traverse are running
+ // as far as the interpreter is concerned, we don't have to
+ // worry about the OWNED_BY_FRAME_OBJECT case.
+ iframe = iframe_copy.previous;
+ }
+
+ // Give the outermost complete iframe a null previous pointer to
+ // account for any potential incomplete/C-stack iframes between it
+ // and the actual top-of-stack
+ if (last_complete_iframe) {
+ assert(last_complete_iframe->frame_obj);
+ memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0],
+ &last_complete_iframe->previous, sizeof(void *));
+ last_complete_iframe->previous = nullptr;
+ }
+}
+#else
+void Greenlet::expose_frames()
+{
+
+}
+#endif
+
+}; // namespace greenlet
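Editor's note (not part of this diff): the `operator<<=` above decides what a `switch()` call returns on the other side: the args tuple when only positional arguments were passed, the kwargs dict when only keyword arguments were passed, and an (args, kwargs) pair when both were. A small sketch through the public API, assuming a standard greenlet install:

    import greenlet

    def child():
        print(main.switch())    # -> (1, 2)             positional-only: the args tuple
        print(main.switch())    # -> {'a': 1}           keyword-only: the kwargs dict
        print(main.switch())    # -> ((1,), {'a': 2})   both: an (args, kwargs) pair

    main = greenlet.getcurrent()
    g = greenlet.greenlet(child)
    g.switch()        # start the child; it parks in its first main.switch()
    g.switch(1, 2)
    g.switch(a=1)
    g.switch(1, a=2)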
diff --git a/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp b/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp
new file mode 100644
index 0000000..c71c963
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TGreenletGlobals.cpp
@@ -0,0 +1,94 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of GreenletGlobals.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#ifndef T_GREENLET_GLOBALS
+#define T_GREENLET_GLOBALS
+
+#include "greenlet_refs.hpp"
+#include "greenlet_exceptions.hpp"
+#include "greenlet_thread_support.hpp"
+#include "greenlet_thread_state.hpp"
+
+namespace greenlet {
+
+// This encapsulates what were previously module global "constants"
+// established at init time.
+// This is a step towards Python3 style module state that allows
+// reloading.
+//
+// In an earlier iteration of this code, we used placement new to be
+// able to allocate this object statically still, so that references
+// to its members don't incur an extra pointer indirection.
+// But under some scenarios, that could result in crashes at
+// shutdown because apparently the destructor was getting run twice?
+class GreenletGlobals
+{
+
+public:
+ const greenlet::refs::ImmortalEventName event_switch;
+ const greenlet::refs::ImmortalEventName event_throw;
+ const greenlet::refs::ImmortalException PyExc_GreenletError;
+ const greenlet::refs::ImmortalException PyExc_GreenletExit;
+ const greenlet::refs::ImmortalObject empty_tuple;
+ const greenlet::refs::ImmortalObject empty_dict;
+ const greenlet::refs::ImmortalString str_run;
+ Mutex* const thread_states_to_destroy_lock;
+ greenlet::cleanup_queue_t thread_states_to_destroy;
+
+ GreenletGlobals() :
+ event_switch("switch"),
+ event_throw("throw"),
+ PyExc_GreenletError("greenlet.error"),
+ PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException),
+ empty_tuple(Require(PyTuple_New(0))),
+ empty_dict(Require(PyDict_New())),
+ str_run("run"),
+ thread_states_to_destroy_lock(new Mutex())
+ {}
+
+ ~GreenletGlobals()
+ {
+ // This object is (currently) effectively immortal, and not
+ // just because of those placement new tricks; if we try to
+ // deallocate the static object we allocated, and overwrote,
+ // we would be doing so at C++ teardown time, which is after
+ // the final Python GIL is released, and we can't use the API
+ // then.
+ // (The members will still be destructed, but they also don't
+ // do any deallocation.)
+ }
+
+ void queue_to_destroy(ThreadState* ts) const
+ {
+ // we're currently accessed through a static const object,
+ // implicitly marking our members as const, so code can't just
+ // call push_back (or pop_back) without casting away the
+ // const.
+ //
+ // Do that for callers.
+ greenlet::cleanup_queue_t& q = const_cast<greenlet::cleanup_queue_t&>(this->thread_states_to_destroy);
+ q.push_back(ts);
+ }
+
+ ThreadState* take_next_to_destroy() const
+ {
+ greenlet::cleanup_queue_t& q = const_cast<greenlet::cleanup_queue_t&>(this->thread_states_to_destroy);
+ ThreadState* result = q.back();
+ q.pop_back();
+ return result;
+ }
+};
+
+}; // namespace greenlet
+
+static const greenlet::GreenletGlobals* mod_globs;
+
+#endif // T_GREENLET_GLOBALS
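Editor's note (not part of this diff): GreenletGlobals constructs the two module-level exception types used throughout this code; on the Python side they surface as greenlet.error and greenlet.GreenletExit. The constructor above derives GreenletExit from BaseException, so a bare `except Exception:` will not swallow it. A quick check, assuming a standard greenlet install:

    import greenlet

    assert issubclass(greenlet.error, Exception)
    assert issubclass(greenlet.GreenletExit, BaseException)
    assert not issubclass(greenlet.GreenletExit, Exception)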
diff --git a/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp
new file mode 100644
index 0000000..c33aadb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TMainGreenlet.cpp
@@ -0,0 +1,155 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::MainGreenlet.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+
+#include "greenlet_greenlet.hpp"
+#include "greenlet_thread_state.hpp"
+
+
+// Protected by the GIL. Incremented when we create a main greenlet,
+// in a new thread, decremented when it is destroyed.
+static Py_ssize_t G_TOTAL_MAIN_GREENLETS;
+
+namespace greenlet {
+greenlet::PythonAllocator<MainGreenlet> MainGreenlet::allocator;
+
+void* MainGreenlet::operator new(size_t UNUSED(count))
+{
+ return allocator.allocate(1);
+}
+
+
+void MainGreenlet::operator delete(void* ptr)
+{
+ return allocator.deallocate(static_cast<MainGreenlet*>(ptr),
+ 1);
+}
+
+
+MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state)
+ : Greenlet(p, StackState::make_main()),
+ _self(p),
+ _thread_state(state)
+{
+ G_TOTAL_MAIN_GREENLETS++;
+}
+
+MainGreenlet::~MainGreenlet()
+{
+ G_TOTAL_MAIN_GREENLETS--;
+ this->tp_clear();
+}
+
+ThreadState*
+MainGreenlet::thread_state() const noexcept
+{
+ return this->_thread_state;
+}
+
+void
+MainGreenlet::thread_state(ThreadState* t) noexcept
+{
+ assert(!t);
+ this->_thread_state = t;
+}
+
+BorrowedGreenlet
+MainGreenlet::self() const noexcept
+{
+ return BorrowedGreenlet(this->_self.borrow());
+}
+
+
+const BorrowedMainGreenlet
+MainGreenlet::main_greenlet() const
+{
+ return this->_self;
+}
+
+BorrowedMainGreenlet
+MainGreenlet::find_main_greenlet_in_lineage() const
+{
+ return BorrowedMainGreenlet(this->_self);
+}
+
+bool
+MainGreenlet::was_running_in_dead_thread() const noexcept
+{
+ return !this->_thread_state;
+}
+
+OwnedObject
+MainGreenlet::g_switch()
+{
+ try {
+ this->check_switch_allowed();
+ }
+ catch (const PyErrOccurred&) {
+ this->release_args();
+ throw;
+ }
+
+ switchstack_result_t err = this->g_switchstack();
+ if (err.status < 0) {
+ // XXX: This code path is untested, but it is shared
+ // with the UserGreenlet path that is tested.
+ return this->on_switchstack_or_initialstub_failure(
+ this,
+ err,
+ true, // target was me
+ false // was initial stub
+ );
+ }
+
+ return err.the_new_current_greenlet->g_switch_finish(err);
+}
+
+int
+MainGreenlet::tp_traverse(visitproc visit, void* arg)
+{
+ if (this->_thread_state) {
+ // we've already traversed main, (self), don't do it again.
+ int result = this->_thread_state->tp_traverse(visit, arg, false);
+ if (result) {
+ return result;
+ }
+ }
+ return Greenlet::tp_traverse(visit, arg);
+}
+
+const OwnedObject&
+MainGreenlet::run() const
+{
+ throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::run(const BorrowedObject UNUSED(nrun))
+{
+ throw AttributeError("Main greenlets do not have a run attribute.");
+}
+
+void
+MainGreenlet::parent(const BorrowedObject raw_new_parent)
+{
+ if (!raw_new_parent) {
+ throw AttributeError("can't delete attribute");
+ }
+ throw AttributeError("cannot set the parent of a main greenlet");
+}
+
+const OwnedGreenlet
+MainGreenlet::parent() const
+{
+ return OwnedGreenlet(); // null becomes None
+}
+
+}; // namespace greenlet
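Editor's note (not part of this diff): the run/parent overrides above are observable from Python: the main greenlet of a thread reports no parent and rejects both reading `run` and reassigning `parent`. A sketch, assuming the standard greenlet API:

    import greenlet

    main = greenlet.getcurrent()      # the main greenlet of this thread
    assert main.parent is None        # MainGreenlet::parent() returns null -> None

    try:
        main.run
    except AttributeError:
        pass                          # "Main greenlets do not have a run attribute."

    try:
        main.parent = main
    except AttributeError:
        pass                          # "cannot set the parent of a main greenlet"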
diff --git a/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp b/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp
new file mode 100644
index 0000000..465d417
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TPythonState.cpp
@@ -0,0 +1,375 @@
+#ifndef GREENLET_PYTHON_STATE_CPP
+#define GREENLET_PYTHON_STATE_CPP
+
+#include <Python.h>
+#include "greenlet_greenlet.hpp"
+
+namespace greenlet {
+
+PythonState::PythonState()
+ : _top_frame()
+#if GREENLET_USE_CFRAME
+ ,cframe(nullptr)
+ ,use_tracing(0)
+#endif
+#if GREENLET_PY312
+ ,py_recursion_depth(0)
+ ,c_recursion_depth(0)
+#else
+ ,recursion_depth(0)
+#endif
+ ,trash_delete_nesting(0)
+#if GREENLET_PY311
+ ,current_frame(nullptr)
+ ,datastack_chunk(nullptr)
+ ,datastack_top(nullptr)
+ ,datastack_limit(nullptr)
+#endif
+{
+#if GREENLET_USE_CFRAME
+ /*
+ The PyThreadState->cframe pointer usually points to memory on
+ the stack, allocated in a call into PyEval_EvalFrameDefault.
+
+ Initially, before any evaluation begins, it points to the
+ initial PyThreadState object's ``root_cframe`` object, which is
+ statically allocated for the lifetime of the thread.
+
+ A greenlet can last for longer than a call to
+ PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer
+ to be the current ``PyThreadState->cframe``; nor could we use
+ one from the greenlet parent for the same reason. Yet a further
+ no: we can't allocate one scoped to the greenlet and then
+ destroy it when the greenlet is deallocated, because inside the
+ interpreter the _PyCFrame objects form a linked list, and that too
+ can result in accessing memory beyond its dynamic lifetime (if
+ the greenlet doesn't actually finish before it dies, its entry
+ could still be in the list).
+
+ Using the ``root_cframe`` is problematic, though, because its
+ members are never modified by the interpreter and are set to 0,
+ meaning that its ``use_tracing`` flag is never updated. We don't
+ want to modify that value in the ``root_cframe`` ourself: it
+ *shouldn't* matter much because we should probably never get
+ back to the point where that's the only cframe on the stack;
+ even if it did matter, the major consequence of an incorrect
+ value for ``use_tracing`` is that if it's true the interpreter
+ does some extra work --- however, it's just good code hygiene.
+
+ Our solution: before a greenlet runs, after its initial
+ creation, it uses the ``root_cframe`` just to have something to
+ put there. However, once the greenlet is actually switched to
+ for the first time, ``g_initialstub`` (which doesn't actually
+ "return" while the greenlet is running) stores a new _PyCFrame on
+ its local stack, and copies the appropriate values from the
+ currently running _PyCFrame; this is then made the _PyCFrame for the
+ newly-minted greenlet. ``g_initialstub`` then proceeds to call
+ ``glet.run()``, which results in ``PyEval_...`` adding the
+ _PyCFrame to the list. Switches continue as normal. Finally, when
+ the greenlet finishes, the call to ``glet.run()`` returns and
+ the _PyCFrame is taken out of the linked list and the stack value
+ is now unused and free to expire.
+
+ XXX: I think we can do better. If we're deallocing in the same
+ thread, can't we traverse the list and unlink our frame?
+ Can we just keep a reference to the thread state in case we
+ dealloc in another thread? (Is that even possible if we're still
+ running and haven't returned from g_initialstub?)
+ */
+ this->cframe = &PyThreadState_GET()->root_cframe;
+#endif
+}
+
+
+inline void PythonState::may_switch_away() noexcept
+{
+#if GREENLET_PY311
+ // PyThreadState_GetFrame is probably going to have to allocate a
+ // new frame object. That may trigger garbage collection. Because
+ // we call this during the early phases of a switch (it doesn't
+ // matter to which greenlet, as this has a global effect), if a GC
+ // triggers a switch away, two things can happen, both bad:
+ // - We might not get switched back to, halting forward progress.
+ // this is pathological, but possible.
+ // - We might get switched back to with a different set of
+ // arguments or a throw instead of a switch. That would corrupt
+ // our state (specifically, PyErr_Occurred() and this->args()
+ // would no longer agree).
+ //
+ // Thus, when we call this API, we need to have GC disabled.
+ // This method serves as a bottleneck we call when maybe beginning
+ // a switch. In this way, it is always safe -- no risk of GC -- to
+ // use ``_GetFrame()`` whenever we need to, just as it was in
+ // <=3.10 (because subsequent calls will be cached and not
+ // allocate memory).
+
+ GCDisabledGuard no_gc;
+ Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET()));
+#endif
+}
+
+void PythonState::operator<<(const PyThreadState *const tstate) noexcept
+{
+ this->_context.steal(tstate->context);
+#if GREENLET_USE_CFRAME
+ /*
+ IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because
+ the call to ``slp_switch()`` changes the contents of the stack,
+ you cannot read from ``ts_current->cframe`` after that call and
+ necessarily get the same values you get from reading it here.
+ Anything you need to restore from now to then must be saved in a
+ global/threadlocal variable (because we can't use stack
+ variables here either). For things that need to persist across
+ the switch, use `will_switch_from`.
+ */
+ this->cframe = tstate->cframe;
+ #if !GREENLET_PY312
+ this->use_tracing = tstate->cframe->use_tracing;
+ #endif
+#endif // GREENLET_USE_CFRAME
+#if GREENLET_PY311
+ #if GREENLET_PY312
+ this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining;
+ this->c_recursion_depth = C_RECURSION_LIMIT - tstate->c_recursion_remaining;
+ #else // not 312
+ this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining;
+ #endif // GREENLET_PY312
+ this->current_frame = tstate->cframe->current_frame;
+ this->datastack_chunk = tstate->datastack_chunk;
+ this->datastack_top = tstate->datastack_top;
+ this->datastack_limit = tstate->datastack_limit;
+
+ PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate);
+ Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new
+ // reference.
+ this->_top_frame.steal(frame);
+ #if GREENLET_PY312
+ this->trash_delete_nesting = tstate->trash.delete_nesting;
+ #else // not 312
+ this->trash_delete_nesting = tstate->trash_delete_nesting;
+ #endif // GREENLET_PY312
+#else // Not 311
+ this->recursion_depth = tstate->recursion_depth;
+ this->_top_frame.steal(tstate->frame);
+ this->trash_delete_nesting = tstate->trash_delete_nesting;
+#endif // GREENLET_PY311
+}
+
+#if GREENLET_PY312
+void GREENLET_NOINLINE(PythonState::unexpose_frames)()
+{
+ if (!this->top_frame()) {
+ return;
+ }
+
+ // See GreenletState::expose_frames() and the comment on frames_were_exposed
+ // for more information about this logic.
+ _PyInterpreterFrame *iframe = this->_top_frame->f_frame;
+ while (iframe != nullptr) {
+ _PyInterpreterFrame *prev_exposed = iframe->previous;
+ assert(iframe->frame_obj);
+ memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0],
+ sizeof(void *));
+ iframe = prev_exposed;
+ }
+}
+#else
+void PythonState::unexpose_frames()
+{}
+#endif
+
+void PythonState::operator>>(PyThreadState *const tstate) noexcept
+{
+ tstate->context = this->_context.relinquish_ownership();
+ /* Incrementing this value invalidates the contextvars cache,
+ which would otherwise remain valid across switches */
+ tstate->context_ver++;
+#if GREENLET_USE_CFRAME
+ tstate->cframe = this->cframe;
+ /*
+ If we were tracing, we need to keep tracing.
+ There should never be the possibility of hitting the
+ root_cframe here. See note above about why we can't
+ just copy this from ``origin->cframe->use_tracing``.
+ */
+ #if !GREENLET_PY312
+ tstate->cframe->use_tracing = this->use_tracing;
+ #endif
+#endif // GREENLET_USE_CFRAME
+#if GREENLET_PY311
+ #if GREENLET_PY312
+ tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth;
+ tstate->c_recursion_remaining = C_RECURSION_LIMIT - this->c_recursion_depth;
+ this->unexpose_frames();
+ #else // \/ 3.11
+ tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth;
+ #endif // GREENLET_PY312
+ tstate->cframe->current_frame = this->current_frame;
+ tstate->datastack_chunk = this->datastack_chunk;
+ tstate->datastack_top = this->datastack_top;
+ tstate->datastack_limit = this->datastack_limit;
+ this->_top_frame.relinquish_ownership();
+ #if GREENLET_PY312
+ tstate->trash.delete_nesting = this->trash_delete_nesting;
+ #else // not 3.12
+ tstate->trash_delete_nesting = this->trash_delete_nesting;
+ #endif // GREENLET_PY312
+#else // not 3.11
+ tstate->frame = this->_top_frame.relinquish_ownership();
+ tstate->recursion_depth = this->recursion_depth;
+ tstate->trash_delete_nesting = this->trash_delete_nesting;
+#endif // GREENLET_PY311
+}
+
+inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept
+{
+#if GREENLET_USE_CFRAME && !GREENLET_PY312
+ // The weird thing is, we don't actually save this for an
+ // effect on the current greenlet, it's saved for an
+ // effect on the target greenlet. That is, we want
+ // continuity of this setting across the greenlet switch.
+ this->use_tracing = origin_tstate->cframe->use_tracing;
+#endif
+}
+
+void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept
+{
+ this->_top_frame = nullptr;
+#if GREENLET_PY312
+ this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining;
+ // XXX: TODO: Comment from a reviewer:
+ // Should this be ``C_RECURSION_LIMIT - tstate->c_recursion_remaining``?
+ // But to me it looks more like that might not be the right
+ // initialization either?
+ this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining;
+#elif GREENLET_PY311
+ this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining;
+#else
+ this->recursion_depth = tstate->recursion_depth;
+#endif
+}
+// TODO: Better state management about when we own the top frame.
+int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept
+{
+ Py_VISIT(this->_context.borrow());
+ if (own_top_frame) {
+ Py_VISIT(this->_top_frame.borrow());
+ }
+ return 0;
+}
+
+void PythonState::tp_clear(bool own_top_frame) noexcept
+{
+ PythonStateContext::tp_clear();
+ // If we get here owning a frame,
+ // we got dealloc'd without being finished. We may or may not be
+ // in the same thread.
+ if (own_top_frame) {
+ this->_top_frame.CLEAR();
+ }
+}
+
+#if GREENLET_USE_CFRAME
+void PythonState::set_new_cframe(_PyCFrame& frame) noexcept
+{
+ frame = *PyThreadState_GET()->cframe;
+ /* Make the target greenlet refer to the stack value. */
+ this->cframe = &frame;
+ /*
+ And restore the link to the previous frame so this one gets
+ unlinked appropriately.
+ */
+ this->cframe->previous = &PyThreadState_GET()->root_cframe;
+}
+#endif
+
+const PythonState::OwnedFrame& PythonState::top_frame() const noexcept
+{
+ return this->_top_frame;
+}
+
+void PythonState::did_finish(PyThreadState* tstate) noexcept
+{
+#if GREENLET_PY311
+ // See https://github.com/gevent/gevent/issues/1924 and
+ // https://github.com/python-greenlet/greenlet/issues/328. In
+ // short, Python 3.11 allocates memory for frames as a sort of
+ // linked list that's kept as part of PyThreadState in the
+ // ``datastack_chunk`` member and friends. These are saved and
+ // restored as part of switching greenlets.
+ //
+ // When we initially switch to a greenlet, we set those to NULL.
+ // That causes the frame management code to treat this like a
+ // brand new thread and start a fresh list of chunks, beginning
+ // with a new "root" chunk. As we make calls in this greenlet,
+ // those chunks get added, and as calls return, they get popped.
+ // But the frame code (pystate.c) is careful to make sure that the
+ // root chunk never gets popped.
+ //
+ // Thus, when a greenlet exits for the last time, there will be at
+ // least a single root chunk that we must be responsible for
+ // deallocating.
+ //
+ // The complex part is that these chunks are allocated and freed
+ // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public
+ // functions, and they aren't exported for linking. It so happens
+ // that we know they are just thin wrappers around the Arena
+ // allocator, so we can use that directly to deallocate in a
+ // compatible way.
+ //
+ // CAUTION: Check this implementation detail on every major version.
+ //
+ // It might be nice to be able to do this in our destructor, but
+ // can we be sure that no one else is using that memory? Plus, as
+ // described below, our pointers may not even be valid anymore. As
+ // a special case, there is one time that we know we can do this,
+ // and that's from the destructor of the associated UserGreenlet
+ // (NOT main greenlet)
+ PyObjectArenaAllocator alloc;
+ _PyStackChunk* chunk = nullptr;
+ if (tstate) {
+ // We really did finish, we can never be switched to again.
+ chunk = tstate->datastack_chunk;
+ // Unfortunately, we can't do much sanity checking. Our
+ // this->datastack_chunk pointer is out of date (evaluation may
+ // have popped down through it already) so we can't verify that
+ // we deallocate it. I don't think we can even check datastack_top
+ // for the same reason.
+
+ PyObject_GetArenaAllocator(&alloc);
+ tstate->datastack_chunk = nullptr;
+ tstate->datastack_limit = nullptr;
+ tstate->datastack_top = nullptr;
+
+ }
+ else if (this->datastack_chunk) {
+ // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're
+ // still holding a stack chunk, it's garbage because we know
+ // we can never switch back to let cPython clean it up.
+ // Because the last time we got switched away from, and we
+ // haven't run since then, we know our chain is valid and can
+ // be dealloced.
+ chunk = this->datastack_chunk;
+ PyObject_GetArenaAllocator(&alloc);
+ }
+
+ if (alloc.free && chunk) {
+ // In case the arena mechanism has been torn down already.
+ while (chunk) {
+ _PyStackChunk *prev = chunk->previous;
+ chunk->previous = nullptr;
+ alloc.free(alloc.ctx, chunk, chunk->size);
+ chunk = prev;
+ }
+ }
+
+ this->datastack_chunk = nullptr;
+ this->datastack_limit = nullptr;
+ this->datastack_top = nullptr;
+#endif
+}
+
+
+}; // namespace greenlet
+
+#endif // GREENLET_PYTHON_STATE_CPP
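Editor's note (not part of this diff): among the interpreter state saved above is the contextvars context (tstate->context), which greenlet exposes as gr_context. Each greenlet therefore runs in its own context, and a newly started greenlet begins with an empty one. A sketch of that visible behaviour, assuming greenlet >= 1.0:

    import contextvars
    import greenlet

    var = contextvars.ContextVar("var", default="unset")

    def child():
        print(var.get())    # -> "unset": the child starts in its own empty context
        var.set("child")
        main.switch()

    main = greenlet.getcurrent()
    g = greenlet.greenlet(child)
    var.set("main")
    g.switch()
    print(var.get())        # -> "main": the child's var.set() stayed in its context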
diff --git a/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp b/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp
new file mode 100644
index 0000000..9aab596
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TStackState.cpp
@@ -0,0 +1,265 @@
+#ifndef GREENLET_STACK_STATE_CPP
+#define GREENLET_STACK_STATE_CPP
+
+#include "greenlet_greenlet.hpp"
+
+namespace greenlet {
+
+#ifdef GREENLET_USE_STDIO
+#include <iostream>
+using std::cerr;
+using std::endl;
+
+std::ostream& operator<<(std::ostream& os, const StackState& s)
+{
+ os << "StackState(stack_start=" << (void*)s._stack_start
+ << ", stack_stop=" << (void*)s.stack_stop
+ << ", stack_copy=" << (void*)s.stack_copy
+ << ", stack_saved=" << s._stack_saved
+ << ", stack_prev=" << s.stack_prev
+ << ", addr=" << &s
+ << ")";
+ return os;
+}
+#endif
+
+StackState::StackState(void* mark, StackState& current)
+ : _stack_start(nullptr),
+ stack_stop((char*)mark),
+ stack_copy(nullptr),
+ _stack_saved(0),
+ /* Skip a dying greenlet */
+ stack_prev(current._stack_start
+ ? &current
+ : current.stack_prev)
+{
+}
+
+StackState::StackState()
+ : _stack_start(nullptr),
+ stack_stop(nullptr),
+ stack_copy(nullptr),
+ _stack_saved(0),
+ stack_prev(nullptr)
+{
+}
+
+StackState::StackState(const StackState& other)
+// can't use a delegating constructor because of
+// MSVC for Python 2.7
+ : _stack_start(nullptr),
+ stack_stop(nullptr),
+ stack_copy(nullptr),
+ _stack_saved(0),
+ stack_prev(nullptr)
+{
+ this->operator=(other);
+}
+
+StackState& StackState::operator=(const StackState& other)
+{
+ if (&other == this) {
+ return *this;
+ }
+ if (other._stack_saved) {
+ throw std::runtime_error("Refusing to steal memory.");
+ }
+
+ //If we have memory allocated, dispose of it
+ this->free_stack_copy();
+
+ this->_stack_start = other._stack_start;
+ this->stack_stop = other.stack_stop;
+ this->stack_copy = other.stack_copy;
+ this->_stack_saved = other._stack_saved;
+ this->stack_prev = other.stack_prev;
+ return *this;
+}
+
+inline void StackState::free_stack_copy() noexcept
+{
+ PyMem_Free(this->stack_copy);
+ this->stack_copy = nullptr;
+ this->_stack_saved = 0;
+}
+
+inline void StackState::copy_heap_to_stack(const StackState& current) noexcept
+{
+
+ /* Restore the heap copy back into the C stack */
+ if (this->_stack_saved != 0) {
+ memcpy(this->_stack_start, this->stack_copy, this->_stack_saved);
+ this->free_stack_copy();
+ }
+ StackState* owner = const_cast<StackState*>(&current);
+ if (!owner->_stack_start) {
+ owner = owner->stack_prev; /* greenlet is dying, skip it */
+ }
+ while (owner && owner->stack_stop <= this->stack_stop) {
+ // cerr << "\tOwner: " << owner << endl;
+ owner = owner->stack_prev; /* find greenlet with more stack */
+ }
+ this->stack_prev = owner;
+ // cerr << "\tFinished with: " << *this << endl;
+}
+
+inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept
+{
+ /* Save more of g's stack into the heap -- at least up to 'stop'
+ g->stack_stop |________|
+ | |
+ | __ stop . . . . .
+ | | ==> . .
+ |________| _______
+ | | | |
+ | | | |
+ g->stack_start | | |_______| g->stack_copy
+ */
+ intptr_t sz1 = this->_stack_saved;
+ intptr_t sz2 = stop - this->_stack_start;
+ assert(this->_stack_start);
+ if (sz2 > sz1) {
+ char* c = (char*)PyMem_Realloc(this->stack_copy, sz2);
+ if (!c) {
+ PyErr_NoMemory();
+ return -1;
+ }
+ memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1);
+ this->stack_copy = c;
+ this->_stack_saved = sz2;
+ }
+ return 0;
+}
+
+inline int StackState::copy_stack_to_heap(char* const stackref,
+ const StackState& current) noexcept
+{
+ /* must free all the C stack up to target_stop */
+ const char* const target_stop = this->stack_stop;
+
+ StackState* owner = const_cast<StackState*>(&current);
+ assert(owner->_stack_saved == 0); // everything is present on the stack
+ if (!owner->_stack_start) {
+ owner = owner->stack_prev; /* not saved if dying */
+ }
+ else {
+ owner->_stack_start = stackref;
+ }
+
+ while (owner->stack_stop < target_stop) {
+ /* ts_current is entirely within the area to free */
+ if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) {
+ return -1; /* XXX */
+ }
+ owner = owner->stack_prev;
+ }
+ if (owner != this) {
+ if (owner->copy_stack_to_heap_up_to(target_stop)) {
+ return -1; /* XXX */
+ }
+ }
+ return 0;
+}
+
+inline bool StackState::started() const noexcept
+{
+ return this->stack_stop != nullptr;
+}
+
+inline bool StackState::main() const noexcept
+{
+ return this->stack_stop == (char*)-1;
+}
+
+inline bool StackState::active() const noexcept
+{
+ return this->_stack_start != nullptr;
+}
+
+inline void StackState::set_active() noexcept
+{
+ assert(this->_stack_start == nullptr);
+ this->_stack_start = (char*)1;
+}
+
+inline void StackState::set_inactive() noexcept
+{
+ this->_stack_start = nullptr;
+ // XXX: What if we still have memory out there?
+ // That case is actually triggered by
+ // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks)
+ // and
+ // test_issue251_issue252_need_to_collect_in_background
+ // (greenlet.tests.test_leaks.TestLeaks)
+ //
+ // Those objects never get deallocated, so the destructor never
+ // runs.
+ // It *seems* safe to clean up the memory here?
+ if (this->_stack_saved) {
+ this->free_stack_copy();
+ }
+}
+
+inline intptr_t StackState::stack_saved() const noexcept
+{
+ return this->_stack_saved;
+}
+
+inline char* StackState::stack_start() const noexcept
+{
+ return this->_stack_start;
+}
+
+
+inline StackState StackState::make_main() noexcept
+{
+ StackState s;
+ s._stack_start = (char*)1;
+ s.stack_stop = (char*)-1;
+ return s;
+}
+
+StackState::~StackState()
+{
+ if (this->_stack_saved != 0) {
+ this->free_stack_copy();
+ }
+}
+
+void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const
+{
+ char* dest = static_cast<char*>(vdest);
+ const char* src = static_cast<const char*>(vsrc);
+ if (src + n <= this->_stack_start
+ || src >= this->_stack_start + this->_stack_saved
+ || this->_stack_saved == 0) {
+ // Nothing we're copying was spilled from the stack
+ memcpy(dest, src, n);
+ return;
+ }
+
+ if (src < this->_stack_start) {
+ // Copy the part before the saved stack.
+ // We know src + n > _stack_start due to the test above.
+ const size_t nbefore = this->_stack_start - src;
+ memcpy(dest, src, nbefore);
+ dest += nbefore;
+ src += nbefore;
+ n -= nbefore;
+ }
+ // We know src >= _stack_start after the before-copy, and
+ // src < _stack_start + _stack_saved due to the first if condition
+ size_t nspilled = std::min<size_t>(n, this->_stack_start + this->_stack_saved - src);
+ memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled);
+ dest += nspilled;
+ src += nspilled;
+ n -= nspilled;
+ if (n > 0) {
+ // Copy the part after the saved stack
+ memcpy(dest, src, n);
+ }
+}
+
+}; // namespace greenlet
+
+#endif // GREENLET_STACK_STATE_CPP
diff --git a/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp b/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp
new file mode 100644
index 0000000..a149a1a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TThreadStateDestroy.cpp
@@ -0,0 +1,195 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of the ThreadState destructors.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#ifndef T_THREADSTATE_DESTROY
+#define T_THREADSTATE_DESTROY
+
+#include "greenlet_greenlet.hpp"
+#include "greenlet_thread_state.hpp"
+#include "greenlet_thread_support.hpp"
+#include "greenlet_cpython_add_pending.hpp"
+#include "TGreenletGlobals.cpp"
+
+namespace greenlet {
+
+struct ThreadState_DestroyWithGIL
+{
+ ThreadState_DestroyWithGIL(ThreadState* state)
+ {
+ if (state && state->has_main_greenlet()) {
+ DestroyWithGIL(state);
+ }
+ }
+
+ static int
+ DestroyWithGIL(ThreadState* state)
+ {
+ // Holding the GIL.
+ // Passed a non-shared pointer to the actual thread state.
+ // state -> main greenlet
+ assert(state->has_main_greenlet());
+ PyGreenlet* main(state->borrow_main_greenlet());
+ // When we need to do cross-thread operations, we check this.
+ // A NULL value means the thread died some time ago.
+ // We do this here, rather than in a Python dealloc function
+ // for the greenlet, in case there's still a reference out
+ // there.
+ static_cast<MainGreenlet*>(main->pimpl)->thread_state(nullptr);
+
+ delete state; // Deleting this runs the destructor, DECREFs the main greenlet.
+ return 0;
+ }
+};
+
+
+
+struct ThreadState_DestroyNoGIL
+{
+ // ensure this is actually defined.
+ static_assert(GREENLET_BROKEN_PY_ADD_PENDING == 1 || GREENLET_BROKEN_PY_ADD_PENDING == 0,
+ "GREENLET_BROKEN_PY_ADD_PENDING not defined correctly.");
+
+#if GREENLET_BROKEN_PY_ADD_PENDING
+ static int _push_pending_call(struct _pending_calls *pending,
+ int (*func)(void *), void *arg)
+ {
+ int i = pending->last;
+ int j = (i + 1) % NPENDINGCALLS;
+ if (j == pending->first) {
+ return -1; /* Queue full */
+ }
+ pending->calls[i].func = func;
+ pending->calls[i].arg = arg;
+ pending->last = j;
+ return 0;
+ }
+
+ static int AddPendingCall(int (*func)(void *), void *arg)
+ {
+ _PyRuntimeState *runtime = &_PyRuntime;
+ if (!runtime) {
+ // obviously impossible
+ return 0;
+ }
+ struct _pending_calls *pending = &runtime->ceval.pending;
+ if (!pending->lock) {
+ return 0;
+ }
+ int result = 0;
+ PyThread_acquire_lock(pending->lock, WAIT_LOCK);
+ if (!pending->finishing) {
+ result = _push_pending_call(pending, func, arg);
+ }
+ PyThread_release_lock(pending->lock);
+ SIGNAL_PENDING_CALLS(&runtime->ceval);
+ return result;
+ }
+#else
+ // Python < 3.8 or >= 3.9
+ static int AddPendingCall(int (*func)(void*), void* arg)
+ {
+ return Py_AddPendingCall(func, arg);
+ }
+#endif
+
+ ThreadState_DestroyNoGIL(ThreadState* state)
+ {
+ // We are *NOT* holding the GIL. Our thread is in the middle
+ // of its death throes and the Python thread state is already
+ // gone so we can't use most Python APIs. One that is safe is
+ // ``Py_AddPendingCall``, unless the interpreter itself has
+ // been torn down. There is a limited number of calls that can
+ // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we
+ // coalesce these calls using our own queue.
+ if (state && state->has_main_greenlet()) {
+ // mark the thread as dead ASAP.
+ // this is racy! If we try to throw or switch to a
+ // greenlet from this thread from some other thread before
+ // we clear the state pointer, it won't realize the state
+ // is dead which can crash the process.
+ PyGreenlet* p = state->borrow_main_greenlet();
+ assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr);
+ static_cast<MainGreenlet*>(p->pimpl)->thread_state(nullptr);
+ }
+
+ // NOTE: Because we're not holding the GIL here, some other
+ // Python thread could run and call ``os.fork()``, which would
+ // be bad if that happened while we are holding the cleanup
+ // lock (it wouldn't function in the child process).
+ // Make a best effort to try to keep the duration we hold the
+ // lock short.
+ // TODO: On platforms that support it, use ``pthread_atfork`` to
+ // drop this lock.
+ LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+
+ if (state && state->has_main_greenlet()) {
+ // Because we don't have the GIL, this is a race condition.
+ if (!PyInterpreterState_Head()) {
+ // We have to leak the thread state, if the
+ // interpreter has shut down when we're getting
+ // deallocated, we can't run the cleanup code that
+ // deleting it would imply.
+ return;
+ }
+
+ mod_globs->queue_to_destroy(state);
+ if (mod_globs->thread_states_to_destroy.size() == 1) {
+ // We added the first item to the queue. We need to schedule
+ // the cleanup.
+ int result = ThreadState_DestroyNoGIL::AddPendingCall(
+ ThreadState_DestroyNoGIL::DestroyQueueWithGIL,
+ NULL);
+ if (result < 0) {
+ // Hmm, what can we do here?
+ fprintf(stderr,
+ "greenlet: WARNING: failed in call to Py_AddPendingCall; "
+ "expect a memory leak.\n");
+ }
+ }
+ }
+ }
+
+ static int
+ DestroyQueueWithGIL(void* UNUSED(arg))
+ {
+ // We're holding the GIL here, so no Python code should be able to
+ // run to call ``os.fork()``.
+ while (1) {
+ ThreadState* to_destroy;
+ {
+ LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+ if (mod_globs->thread_states_to_destroy.empty()) {
+ break;
+ }
+ to_destroy = mod_globs->take_next_to_destroy();
+ }
+ // Drop the lock while we do the actual deletion.
+ ThreadState_DestroyWithGIL::DestroyWithGIL(to_destroy);
+ }
+ return 0;
+ }
+
+};
+
+}; // namespace greenlet
+
+// The intent when GET_THREAD_STATE() is needed multiple times in a
+// function is to take a reference to its return value in a local
+// variable, to avoid the thread-local indirection. On some platforms
+// (macOS), accessing a thread-local involves a function call (plus an
+// initial function call in each function that uses a thread local);
+// in contrast, static volatile variables are at some pre-computed
+// offset.
+typedef greenlet::ThreadStateCreator<greenlet::ThreadState_DestroyNoGIL> ThreadStateCreator;
+static thread_local ThreadStateCreator g_thread_state_global;
+#define GET_THREAD_STATE() g_thread_state_global
+
+#endif //T_THREADSTATE_DESTROY
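Editor's note (not part of this diff): one visible consequence of this teardown path is that once a thread exits, its ThreadState is destroyed and the main greenlet's thread-state pointer is nulled, so switching into a greenlet that was started in that thread fails with greenlet.error. A rough sketch; the exact message and timing depend on the greenlet version:

    import threading
    import greenlet

    holder = {}

    def worker():
        def child():
            greenlet.getcurrent().parent.switch()   # park without finishing
        g = greenlet.greenlet(child)
        g.switch()                                  # start the child; it parks
        holder["g"] = g

    t = threading.Thread(target=worker)
    t.start()
    t.join()                                        # the greenlet's home thread is gone

    try:
        holder["g"].switch()
    except greenlet.error as exc:
        print(exc)   # e.g. "cannot switch to a different thread (which happens to have exited)"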
diff --git a/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp
new file mode 100644
index 0000000..495a794
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/TUserGreenlet.cpp
@@ -0,0 +1,667 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/**
+ * Implementation of greenlet::UserGreenlet.
+ *
+ * Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+
+#include "greenlet_internal.hpp"
+#include "greenlet_greenlet.hpp"
+#include "greenlet_thread_state.hpp"
+#include "TThreadStateDestroy.cpp"
+
+
+namespace greenlet {
+using greenlet::refs::BorrowedMainGreenlet;
+greenlet::PythonAllocator<UserGreenlet> UserGreenlet::allocator;
+
+void* UserGreenlet::operator new(size_t UNUSED(count))
+{
+ return allocator.allocate(1);
+}
+
+
+void UserGreenlet::operator delete(void* ptr)
+{
+ return allocator.deallocate(static_cast<UserGreenlet*>(ptr),
+ 1);
+}
+
+
+UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent)
+ : Greenlet(p), _parent(the_parent)
+{
+ this->_self = p;
+}
+
+UserGreenlet::~UserGreenlet()
+{
+ // Python 3.11: If we don't clear out the raw frame datastack
+ // when deleting an unfinished greenlet,
+ // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails.
+ this->python_state.did_finish(nullptr);
+ this->tp_clear();
+}
+
+BorrowedGreenlet
+UserGreenlet::self() const noexcept
+{
+ return this->_self;
+}
+
+
+
+const BorrowedMainGreenlet
+UserGreenlet::main_greenlet() const
+{
+ return this->_main_greenlet;
+}
+
+
+BorrowedMainGreenlet
+UserGreenlet::find_main_greenlet_in_lineage() const
+{
+ if (this->started()) {
+ assert(this->_main_greenlet);
+ return BorrowedMainGreenlet(this->_main_greenlet);
+ }
+
+ if (!this->_parent) {
+ /* garbage collected greenlet in chain */
+ // XXX: WHAT?
+ return BorrowedMainGreenlet(nullptr);
+ }
+
+ return this->_parent->find_main_greenlet_in_lineage();
+}
+
+
+/**
+ * CAUTION: This will allocate memory and may trigger garbage
+ * collection and arbitrary Python code.
+ */
+OwnedObject
+UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state)
+{
+ /* The dying greenlet cannot be a parent of ts_current
+ because the 'parent' field chain would hold a
+ reference */
+ UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state);
+
+ // We don't care about the return value, only whether an
+ // exception happened. Whether or not an exception happens,
+ // we need to restore the parent in case the greenlet gets
+ // resurrected.
+ return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state);
+}
+
+ThreadState*
+UserGreenlet::thread_state() const noexcept
+{
+ // TODO: maybe make this throw, if the thread state isn't there?
+ // if (!this->main_greenlet) {
+ // throw std::runtime_error("No thread state"); // TODO: Better exception
+ // }
+ if (!this->_main_greenlet) {
+ return nullptr;
+ }
+ return this->_main_greenlet->thread_state();
+}
+
+
+bool
+UserGreenlet::was_running_in_dead_thread() const noexcept
+{
+ return this->_main_greenlet && !this->thread_state();
+}
+
+OwnedObject
+UserGreenlet::g_switch()
+{
+ assert(this->args() || PyErr_Occurred());
+
+ try {
+ this->check_switch_allowed();
+ }
+ catch (const PyErrOccurred&) {
+ this->release_args();
+ throw;
+ }
+
+ // Switching greenlets used to attempt to clean out ones that need
+ // to be deleted *if* we detected a thread switch. Should it still do
+ // that?
+ // An issue is that if we delete a greenlet from another thread,
+ // it gets queued to this thread, and ``kill_greenlet()`` switches
+ // back into the greenlet
+
+ /* find the real target by ignoring dead greenlets,
+ and if necessary starting a greenlet. */
+ switchstack_result_t err;
+ Greenlet* target = this;
+ // TODO: probably cleaner to handle the case where we do
+ // switch to ourself separately from the other cases.
+ // This can probably even further be simplified if we keep
+ // track of the switching_state we're going for and just call
+ // into g_switch() if it's not ourself. The main problem with that
+ // is that we would be using more stack space.
+ bool target_was_me = true;
+ bool was_initial_stub = false;
+ while (target) {
+ if (target->active()) {
+ if (!target_was_me) {
+ target->args() <<= this->args();
+ assert(!this->args());
+ }
+ err = target->g_switchstack();
+ break;
+ }
+ if (!target->started()) {
+ // We never encounter a main greenlet that's not started.
+ assert(!target->main());
+ UserGreenlet* real_target = static_cast<UserGreenlet*>(target);
+ assert(real_target);
+ void* dummymarker;
+ was_initial_stub = true;
+ if (!target_was_me) {
+ target->args() <<= this->args();
+ assert(!this->args());
+ }
+ try {
+ // This can only throw back to us while we're
+ // still in this greenlet. Once the new greenlet
+ // is bootstrapped, it has its own exception state.
+ err = real_target->g_initialstub(&dummymarker);
+ }
+ catch (const PyErrOccurred&) {
+ this->release_args();
+ throw;
+ }
+ catch (const GreenletStartedWhileInPython&) {
+ // The greenlet was started sometime before this
+ // greenlet actually switched to it, i.e.,
+ // "concurrent" calls to switch() or throw().
+ // We need to retry the switch.
+ // Note that the current greenlet has been reset
+ // to this one (or we wouldn't be running!)
+ continue;
+ }
+ break;
+ }
+
+ target = target->parent();
+ target_was_me = false;
+ }
+ // The ``this`` pointer and all other stack or register based
+ // variables are invalid now, at least where things succeed
+ // above.
+ // But this one, probably not so much? It's not clear if it's
+ // safe to throw an exception at this point.
+
+ if (err.status < 0) {
+ // If we get here, either g_initialstub()
+ // failed, or g_switchstack() failed. Either one of those
+ // cases SHOULD leave us in the original greenlet with a valid
+ // stack.
+ return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub);
+ }
+
+ // err.the_new_current_greenlet would be the same as ``target``,
+ // if target wasn't probably corrupt.
+ return err.the_new_current_greenlet->g_switch_finish(err);
+}
+
+
+
+Greenlet::switchstack_result_t
+UserGreenlet::g_initialstub(void* mark)
+{
+ OwnedObject run;
+
+ // We need to grab a reference to the current switch arguments
+ // in case we're entered concurrently during the call to
+ // GetAttr() and have to try again.
+ // We'll restore them when we return in that case.
+ // Scope them tightly to avoid ref leaks.
+ {
+ SwitchingArgs args(this->args());
+
+ /* save exception in case getattr clears it */
+ PyErrPieces saved;
+
+ /*
+ self.run is the object to call in the new greenlet.
+ This could run arbitrary python code and switch greenlets!
+ */
+ run = this->_self.PyRequireAttr(mod_globs->str_run);
+ /* restore saved exception */
+ saved.PyErrRestore();
+
+
+ /* recheck that it's safe to switch in case greenlet reparented anywhere above */
+ this->check_switch_allowed();
+
+ /* by the time we got here another start could happen elsewhere,
+ * that means it should now be a regular switch.
+ * This can happen if the Python code is a subclass that implements
+ * __getattribute__ or __getattr__, or makes ``run`` a descriptor;
+ * all of those can run arbitrary code that switches back into
+ * this greenlet.
+ */
+ if (this->stack_state.started()) {
+ // the successful switch cleared these out, we need to
+ // restore our version. They will be copied on up to the
+ // next target.
+ assert(!this->args());
+ this->args() <<= args;
+ throw GreenletStartedWhileInPython();
+ }
+ }
+
+ // Sweet, if we got here, we have the go-ahead and will switch
+ // greenlets.
+ // Nothing we do from here on out should allow for a thread or
+ // greenlet switch: No arbitrary calls to Python, including
+ // decref'ing
+
+#if GREENLET_USE_CFRAME
+ /* OK, we need it, we're about to switch greenlets, save the state. */
+ /*
+ See green_new(). This is a stack-allocated variable used
+ while *self* is in PyObject_Call().
+ We want to defer copying the state info until we're sure
+ we need it and are in a stable place to do so.
+ */
+ _PyCFrame trace_info;
+
+ this->python_state.set_new_cframe(trace_info);
+#endif
+ /* start the greenlet */
+ ThreadState& thread_state = GET_THREAD_STATE().state();
+ this->stack_state = StackState(mark,
+ thread_state.borrow_current()->stack_state);
+ this->python_state.set_initial_state(PyThreadState_GET());
+ this->exception_state.clear();
+ this->_main_greenlet = thread_state.get_main_greenlet();
+
+ /* perform the initial switch */
+ switchstack_result_t err = this->g_switchstack();
+ /* returns twice!
+ The 1st time with ``err == 1``: we are in the new greenlet.
+ This one owns a greenlet that used to be current.
+ The 2nd time with ``err <= 0``: back in the caller's
+ greenlet; this happens if the child finishes or switches
+ explicitly to us. Either way, the ``err`` variable is
+ created twice at the same memory location, but possibly
+ having different ``origin`` values. Note that it's not
+ constructed for the second time until the switch actually happens.
+ */
+ if (err.status == 1) {
+ // In the new greenlet.
+
+ // This never returns! Calling inner_bootstrap steals
+ // the contents of our run object within this stack frame, so
+ // it is not valid to do anything with it.
+ try {
+ this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(),
+ run.relinquish_ownership());
+ }
+ // Getting a C++ exception here isn't good. It's probably a
+ // bug in the underlying greenlet, meaning it's probably a
+ // C++ extension. We're going to abort anyway, but try to
+ // display some nice information *if* possible. Some obscure
+ // platforms don't properly support this (old 32-bit Arm, see
+ // https://github.com/python-greenlet/greenlet/issues/385); that's not
+ // great, but should usually be OK because, as mentioned above, we're
+ // terminating anyway.
+ //
+ // The catching is tested by
+ // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``.
+ //
+ // PyErrOccurred can theoretically be thrown by
+ // inner_bootstrap() -> g_switch_finish(), but that should
+ // never make it back to here. It is a std::exception and
+ // would be caught if it is.
+ catch (const std::exception& e) {
+ std::string base = "greenlet: Unhandled C++ exception: ";
+ base += e.what();
+ Py_FatalError(base.c_str());
+ }
+ catch (...) {
+ // Some compilers/runtimes use exceptions internally.
+ // It appears that GCC on Linux with libstdc++ throws an
+ // exception internally at process shutdown time to unwind
+ // stacks and clean up resources. Depending on exactly
+ // where we are when the process exits, that could result
+ // in an unknown exception getting here. If we
+ // Py_FatalError() or abort() here, we interfere with
+ // orderly process shutdown. Throwing the exception on up
+ // is the right thing to do.
+ //
+ // gevent's ``examples/dns_mass_resolve.py`` demonstrates this.
+#ifndef NDEBUG
+ fprintf(stderr,
+ "greenlet: inner_bootstrap threw unknown exception; "
+ "is the process terminating?\n");
+#endif
+ throw;
+ }
+ Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n");
+ }
+
+
+ // In contrast, notice that we're keeping the origin greenlet
+ // around as an owned reference; we need it to call the trace
+ // function for the switch back into the parent. It was only
+ // captured at the time the switch actually happened, though,
+ // so we haven't been keeping an extra reference around this
+ // whole time.
+
+ /* back in the parent */
+ if (err.status < 0) {
+ /* start failed badly, restore greenlet state */
+ this->stack_state = StackState();
+ this->_main_greenlet.CLEAR();
+ // CAUTION: This may run arbitrary Python code.
+ run.CLEAR(); // inner_bootstrap didn't run, we own the reference.
+ }
+
+ // In the success case, the spawned code (inner_bootstrap) will
+ // take care of decrefing this, so we relinquish ownership so as
+ // to not double-decref.
+
+ run.relinquish_ownership();
+
+ return err;
+}
+
+
+void
+UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run)
+{
+ // The arguments here would be another great place for move.
+ // As it is, we take them as a reference so that when we clear
+ // them we clear what's on the stack above us. Do that NOW, and
+ // without using a C++ RAII object,
+ // so there's no way that exiting the parent frame can clear it,
+ // or we clear it unexpectedly. This arises in the context of the
+ // interpreter shutting down. See https://github.com/python-greenlet/greenlet/issues/325
+ //PyObject* run = _run.relinquish_ownership();
+
+ /* in the new greenlet */
+ assert(this->thread_state()->borrow_current() == this->_self);
+ // C++ exceptions cannot propagate to the parent greenlet from
+ // here. (TODO: Do we need a catch(...) clause, perhaps on the
+ // function itself? All we could do is terminate the program.)
+ // NOTE: On 32-bit Windows, the call chain is extremely
+ // important here in ways that are subtle, having to do with
+ // the depth of the SEH list. The call to restore it MUST NOT
+ // add a new SEH handler to the list, or we'll restore it to
+ // the wrong thing.
+ this->thread_state()->restore_exception_state();
+ /* stack variables from above are no good and also will not unwind! */
+ // EXCEPT: That can't be true, we access run, among others, here.
+
+ this->stack_state.set_active(); /* running */
+
+ // We're about to possibly run Python code again, which
+ // could switch back/away to/from us, so we need to grab the
+ // arguments locally.
+ SwitchingArgs args;
+ args <<= this->args();
+ assert(!this->args());
+
+ // XXX: We could clear this much earlier, right?
+ // Or would that introduce the possibility of running Python
+ // code when we don't want to?
+ // CAUTION: This may run arbitrary Python code.
+ this->_run_callable.CLEAR();
+
+
+ // The first switch we need to manually call the trace
+ // function here instead of in g_switch_finish, because we
+ // never return there.
+ if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) {
+ OwnedGreenlet trace_origin;
+ trace_origin = origin_greenlet;
+ try {
+ g_calltrace(tracefunc,
+ args ? mod_globs->event_switch : mod_globs->event_throw,
+ trace_origin,
+ this->_self);
+ }
+ catch (const PyErrOccurred&) {
+ /* Turn trace errors into switch throws */
+ args.CLEAR();
+ }
+ }
+
+ // We no longer need the origin, it was only here for
+ // tracing.
+ // We may never actually exit this stack frame so we need
+ // to explicitly clear it.
+ // This could run Python code and switch.
+ Py_CLEAR(origin_greenlet);
+
+ OwnedObject result;
+ if (!args) {
+ /* pending exception */
+ result = NULL;
+ }
+ else {
+ /* call g.run(*args, **kwargs) */
+ // This could result in further switches
+ try {
+ //result = run.PyCall(args.args(), args.kwargs());
+ // CAUTION: Just invoking this, before the function even
+ // runs, may cause memory allocations, which may trigger
+ // GC, which may run arbitrary Python code.
+ result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow()));
+ }
+ catch (...) {
+ // Unhandled C++ exception!
+
+ // If we declare ourselves as noexcept, if we don't catch
+ // this here, most platforms will just abort() the
+ // process. But on 64-bit Windows with older versions of
+ // the C runtime, this can actually corrupt memory and
+ // just return. We see this when compiling with the
+ // Windows 7.0 SDK targeting Windows Server 2008, but not
+ // when using the Appveyor Visual Studio 2019 image. So
+ // this currently only affects Python 2.7 on Windows 64.
+ // That is, the tests pass and the runtime aborts
+ // everywhere else.
+ //
+ // However, if we catch it and try to continue with a
+ // Python error, then all Windows 64 bit platforms corrupt
+ // memory. So all we can do is manually abort, hopefully
+ // with a good error message. (Note that the above was
+ // tested WITHOUT the `/EHr` switch being used at compile
+ // time, so MSVC may have "optimized" out important
+ // checking. Using that switch, we may be in a better
+ // place in terms of memory corruption.) But sometimes it
+ // can't be caught here at all, which is confusing but not
+ // terribly surprising; so again, the G_NOEXCEPT_WIN32
+ // plus "/EHr".
+ //
+ // Hopefully the basic C stdlib is still functional enough
+ // for us to at least print an error.
+ //
+ // It gets more complicated than that, though, on some
+ // platforms, specifically at least Linux/gcc/libstdc++. They use
+ // an exception to unwind the stack when a background
+ // thread exits. (See comments about noexcept.) So this
+ // may not actually represent anything untoward. On those
+ // platforms we allow throws of this to propagate, or
+ // attempt to anyway.
+# if defined(WIN32) || defined(_WIN32)
+ Py_FatalError(
+ "greenlet: Unhandled C++ exception from a greenlet run function. "
+ "Because memory is likely corrupted, terminating process.");
+ std::abort();
+#else
+ throw;
+#endif
+ }
+ }
+ // These lines may run arbitrary code
+ args.CLEAR();
+ Py_CLEAR(run);
+
+ if (!result
+ && mod_globs->PyExc_GreenletExit.PyExceptionMatches()
+ && (this->args())) {
+ // This can happen, for example, if our only reference
+ // goes away after we switch back to the parent.
+ // See test_dealloc_switch_args_not_lost
+ PyErrPieces clear_error;
+ result <<= this->args();
+ result = single_result(result);
+ }
+ this->release_args();
+ this->python_state.did_finish(PyThreadState_GET());
+
+ result = g_handle_exit(result);
+ assert(this->thread_state()->borrow_current() == this->_self);
+
+ /* jump back to parent */
+ this->stack_state.set_inactive(); /* dead */
+
+
+ // TODO: Can we decref some things here? Release our main greenlet
+ // and maybe parent?
+ for (Greenlet* parent = this->_parent;
+ parent;
+ parent = parent->parent()) {
+ // We need to somewhere consume a reference to
+ // the result; in most cases we'll never have control
+ // back in this stack frame again. Calling
+ // green_switch actually adds another reference!
+ // This would probably be clearer with a specific API
+ // to hand results to the parent.
+ parent->args() <<= result;
+ assert(!result);
+ // The parent greenlet now owns the result; in the
+ // typical case we'll never get back here to assign to
+ // result and thus release the reference.
+ try {
+ result = parent->g_switch();
+ }
+ catch (const PyErrOccurred&) {
+ // Ignore, keep passing the error on up.
+ }
+
+ /* Return here means switch to parent failed,
+ * in which case we throw *current* exception
+ * to the next parent in chain.
+ */
+ assert(!result);
+ }
+ /* We ran out of parents, cannot continue */
+ PyErr_WriteUnraisable(this->self().borrow_o());
+ Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; "
+ "cannot continue");
+ std::abort();
+}
+
+void
+UserGreenlet::run(const BorrowedObject nrun)
+{
+ if (this->started()) {
+ throw AttributeError(
+ "run cannot be set "
+ "after the start of the greenlet");
+ }
+ this->_run_callable = nrun;
+}
+
+const OwnedGreenlet
+UserGreenlet::parent() const
+{
+ return this->_parent;
+}
+
+void
+UserGreenlet::parent(const BorrowedObject raw_new_parent)
+{
+ if (!raw_new_parent) {
+ throw AttributeError("can't delete attribute");
+ }
+
+ BorrowedMainGreenlet main_greenlet_of_new_parent;
+ BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could
+ // throw
+ // TypeError!
+ for (BorrowedGreenlet p = new_parent; p; p = p->parent()) {
+ if (p == this->_self) {
+ throw ValueError("cyclic parent chain");
+ }
+ main_greenlet_of_new_parent = p->main_greenlet();
+ }
+
+ if (!main_greenlet_of_new_parent) {
+ throw ValueError("parent must not be garbage collected");
+ }
+
+ if (this->started()
+ && this->_main_greenlet != main_greenlet_of_new_parent) {
+ throw ValueError("parent cannot be on a different thread");
+ }
+
+ this->_parent = new_parent;
+}
+
+void
+UserGreenlet::murder_in_place()
+{
+ this->_main_greenlet.CLEAR();
+ Greenlet::murder_in_place();
+}
+
+bool
+UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const
+{
+ return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet();
+}
+
+
+int
+UserGreenlet::tp_traverse(visitproc visit, void* arg)
+{
+ Py_VISIT(this->_parent.borrow_o());
+ Py_VISIT(this->_main_greenlet.borrow_o());
+ Py_VISIT(this->_run_callable.borrow_o());
+
+ return Greenlet::tp_traverse(visit, arg);
+}
+
+int
+UserGreenlet::tp_clear()
+{
+ Greenlet::tp_clear();
+ this->_parent.CLEAR();
+ this->_main_greenlet.CLEAR();
+ this->_run_callable.CLEAR();
+ return 0;
+}
+
+UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p,
+ const ThreadState& thread_state)
+ : oldparent(p->_parent),
+ greenlet(p)
+{
+ p->_parent = thread_state.get_current();
+}
+
+UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard()
+{
+ this->greenlet->_parent = oldparent;
+ oldparent.CLEAR();
+}
+
+}; //namespace greenlet
diff --git a/venv/lib/python3.11/site-packages/greenlet/__init__.py b/venv/lib/python3.11/site-packages/greenlet/__init__.py
new file mode 100644
index 0000000..298a19d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/__init__.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+"""
+The root of the greenlet package.
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+__all__ = [
+ '__version__',
+ '_C_API',
+
+ 'GreenletExit',
+ 'error',
+
+ 'getcurrent',
+ 'greenlet',
+
+ 'gettrace',
+ 'settrace',
+]
+
+# pylint:disable=no-name-in-module
+
+###
+# Metadata
+###
+__version__ = '3.0.3'
+from ._greenlet import _C_API # pylint:disable=no-name-in-module
+
+###
+# Exceptions
+###
+from ._greenlet import GreenletExit
+from ._greenlet import error
+
+###
+# greenlets
+###
+from ._greenlet import getcurrent
+from ._greenlet import greenlet
+
+###
+# tracing
+###
+try:
+ from ._greenlet import gettrace
+ from ._greenlet import settrace
+except ImportError:
+ # Tracing wasn't supported.
+ # XXX: The option to disable it was removed in 1.0,
+ # so this branch should be dead code.
+ pass
+
+###
+# Constants
+# These constants aren't documented and aren't recommended.
+# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS
+# is the same as ``sys.version_info[:2] >= 3.7``
+###
+from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import
+from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import
+from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import
+
+# Controlling the use of the gc module. Provisional API for this greenlet
+# implementation in 2.0.
+from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import
+from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import
+from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import
+
+# Other APIs in the _greenlet module are for test support.
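+# Usage sketch of the names re-exported above (illustrative only, not part of
+# the upstream file):
+#
+#   from greenlet import greenlet, getcurrent
+#
+#   def child():
+#       # switch back to whoever started us, handing them a value
+#       getcurrent().parent.switch("hello from child")
+#
+#   g = greenlet(child)
+#   print(g.switch())   # prints "hello from child"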
diff --git a/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..1323b1b
Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/__pycache__/__init__.cpython-311.pyc differ
diff --git a/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..1d293a3
Binary files /dev/null and b/venv/lib/python3.11/site-packages/greenlet/_greenlet.cpython-311-x86_64-linux-gnu.so differ
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp b/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp
new file mode 100644
index 0000000..5a9818e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet.cpp
@@ -0,0 +1,1494 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+/* Format with:
+ * clang-format -i --style=file src/greenlet/greenlet.c
+ *
+ *
+ * Fix missing braces with:
+ * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements"
+*/
+#include <cstdlib>
+#include <string>
+#include <algorithm>
+#include <exception>
+
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include "structmember.h" // PyMemberDef
+
+#include "greenlet_internal.hpp"
+// Code after this point can assume access to things declared in stdint.h,
+// including the fixed-width types. This goes for the platform-specific switch functions
+// as well.
+#include "greenlet_refs.hpp"
+#include "greenlet_slp_switch.hpp"
+#include "greenlet_thread_state.hpp"
+#include "greenlet_thread_support.hpp"
+#include "greenlet_greenlet.hpp"
+
+#include "TGreenletGlobals.cpp"
+#include "TThreadStateDestroy.cpp"
+#include "TGreenlet.cpp"
+#include "TMainGreenlet.cpp"
+#include "TUserGreenlet.cpp"
+#include "TBrokenGreenlet.cpp"
+#include "TExceptionState.cpp"
+#include "TPythonState.cpp"
+#include "TStackState.cpp"
+
+
+using greenlet::LockGuard;
+using greenlet::LockInitError;
+using greenlet::PyErrOccurred;
+using greenlet::Require;
+
+using greenlet::g_handle_exit;
+using greenlet::single_result;
+
+using greenlet::Greenlet;
+using greenlet::UserGreenlet;
+using greenlet::MainGreenlet;
+using greenlet::BrokenGreenlet;
+using greenlet::ThreadState;
+using greenlet::PythonState;
+
+
+
+// ******* Implementation of things from included files
+template <typename T>
+greenlet::refs::_BorrowedGreenlet<T>& greenlet::refs::_BorrowedGreenlet<T>::operator=(const greenlet::refs::BorrowedObject& other)
+{
+ this->_set_raw_pointer(static_cast<PyObject*>(other));
+ return *this;
+}
+
+template <typename T>
+inline greenlet::refs::_BorrowedGreenlet<T>::operator Greenlet*() const noexcept
+{
+ if (!this->p) {
+ return nullptr;
+ }
+ return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+template <typename T>
+greenlet::refs::_BorrowedGreenlet<T>::_BorrowedGreenlet(const BorrowedObject& p)
+ : BorrowedReference(nullptr)
+{
+
+ this->_set_raw_pointer(p.borrow());
+}
+
+template <typename T>
+inline greenlet::refs::_OwnedGreenlet<T>::operator Greenlet*() const noexcept
+{
+ if (!this->p) {
+ return nullptr;
+ }
+ return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+
+
+#ifdef __clang__
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wmissing-field-initializers"
+# pragma clang diagnostic ignored "-Wwritable-strings"
+#elif defined(__GNUC__)
+# pragma GCC diagnostic push
+// warning: ISO C++ forbids converting a string constant to ‘char*’
+// (The python APIs aren't const correct and accept writable char*)
+# pragma GCC diagnostic ignored "-Wwrite-strings"
+#endif
+
+
+/***********************************************************
+
+A PyGreenlet is a range of C stack addresses that must be
+saved and restored in such a way that the full range of the
+stack contains valid data when we switch to it.
+
+Stack layout for a greenlet:
+
+ | ^^^ |
+ | older data |
+ | |
+ stack_stop . |_______________|
+ . | |
+ . | greenlet data |
+ . | in stack |
+ . * |_______________| . . _____________ stack_copy + stack_saved
+ . | | | |
+ . | data | |greenlet data|
+ . | unrelated | | saved |
+ . | to | | in heap |
+ stack_start . | this | . . |_____________| stack_copy
+ | greenlet |
+ | |
+ | newer data |
+ | vvv |
+
+
+Note that a greenlet's stack data is typically partly at its correct
+place in the stack, and partly saved away in the heap, but always in
+the above configuration: two blocks, the more recent one in the heap
+and the older one still in the stack (either block may be empty).
+
+Greenlets are chained: each points to the previous greenlet, which is
+the one that owns the data currently in the C stack above my
+stack_stop. The currently running greenlet is the first element of
+this chain. The main (initial) greenlet is the last one. Greenlets
+whose stack is entirely in the heap can be skipped from the chain.
+
+The chain is not related to execution order, but only to the order
+in which bits of C stack happen to belong to greenlets at a particular
+point in time.
+
+The main greenlet doesn't have a stack_stop: it is responsible for the
+complete rest of the C stack, and we don't know where it begins. We
+use (char*) -1, the largest possible address.
+
+States:
+ stack_stop == NULL && stack_start == NULL: did not start yet
+ stack_stop != NULL && stack_start == NULL: already finished
+ stack_stop != NULL && stack_start != NULL: active
+
+The running greenlet's stack_start is undefined but not NULL.
+
+ ***********************************************************/
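+/* Restating the table above as predicates (a sketch only; in the C++ sources
+   these live behind the started()/active() accessors used throughout this file):
+
+       started(g)  :=  g->stack_stop  != NULL
+       active(g)   :=  g->stack_start != NULL   (implies started)
+       finished(g) :=  started(g) && !active(g)
+*/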
+
+static PyGreenlet*
+green_create_main(ThreadState* state)
+{
+ PyGreenlet* gmain;
+
+ /* create the main greenlet for this thread */
+ gmain = (PyGreenlet*)PyType_GenericAlloc(&PyGreenlet_Type, 0);
+ if (gmain == NULL) {
+ Py_FatalError("green_create_main failed to alloc");
+ return NULL;
+ }
+ new MainGreenlet(gmain, state);
+
+ assert(Py_REFCNT(gmain) == 1);
+ return gmain;
+}
+
+
+
+/***********************************************************/
+
+/* Some functions must not be inlined:
+ * slp_restore_state, when inlined into slp_switch might cause
+ it to restore stack over its own local variables
+ * slp_save_state, when inlined would add its own local
+ variables to the saved stack, wasting space
+ * slp_switch, cannot be inlined for obvious reasons
+ * g_initialstub, when inlined would receive a pointer into its
+ own stack frame, leading to incomplete stack save/restore
+
+g_initialstub is a member function and declared virtual so that the
+compiler always calls it through a vtable.
+
+slp_save_state and slp_restore_state are also member functions. They
+are called from trampoline functions that themselves are declared as
+not eligible for inlining.
+*/
+
+extern "C" {
+static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref)
+{
+ return switching_thread_state->slp_save_state(stackref);
+}
+static void GREENLET_NOINLINE(slp_restore_state_trampoline)()
+{
+ switching_thread_state->slp_restore_state();
+}
+}
+
+
+/***********************************************************/
+
+static PyGreenlet*
+green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds))
+{
+ PyGreenlet* o =
+ (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict);
+ if (o) {
+ new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current());
+ assert(Py_REFCNT(o) == 1);
+ }
+ return o;
+}
+
+static PyGreenlet*
+green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds))
+{
+ PyGreenlet* o =
+ (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict);
+ if (o) {
+ new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current());
+ assert(Py_REFCNT(o) == 1);
+ }
+ return o;
+}
+
+static int
+green_setrun(BorrowedGreenlet self, BorrowedObject nrun, void* c);
+static int
+green_setparent(BorrowedGreenlet self, BorrowedObject nparent, void* c);
+
+static int
+green_init(BorrowedGreenlet self, BorrowedObject args, BorrowedObject kwargs)
+{
+ PyArgParseParam run;
+ PyArgParseParam nparent;
+ static const char* const kwlist[] = {
+ "run",
+ "parent",
+ NULL
+ };
+
+ // recall: The O specifier does NOT increase the reference count.
+ if (!PyArg_ParseTupleAndKeywords(
+ args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) {
+ return -1;
+ }
+
+ if (run) {
+ if (green_setrun(self, run, NULL)) {
+ return -1;
+ }
+ }
+ if (nparent && !nparent.is_None()) {
+ return green_setparent(self, nparent, NULL);
+ }
+ return 0;
+}
+
+
+
+static int
+green_traverse(PyGreenlet* self, visitproc visit, void* arg)
+{
+ // We must only visit referenced objects, i.e. only objects
+ // Py_INCREF'ed by this greenlet (directly or indirectly):
+ //
+ // - stack_prev is not visited: holds previous stack pointer, but it's not
+ // referenced
+ // - frames are not visited as we don't strongly reference them;
+ // alive greenlets are not garbage collected
+ // anyway. This can be a problem, however, if this greenlet is
+ // never allowed to finish, and is referenced from the frame: we
+ // have an uncollectible cycle in that case. Note that the
+ // frame object itself is also frequently not even tracked by the GC
+ // starting with Python 3.7 (frames are allocated by the
+ // interpreter untracked, and only become tracked when their
+ // evaluation is finished if they have a refcount > 1). All of
+ // this is to say that we should probably strongly reference
+ // the frame object. Doing so, while always allowing GC on a
+ // greenlet, solves several leaks for us.
+
+ Py_VISIT(self->dict);
+ if (!self->pimpl) {
+ // Hmm. I have seen this at interpreter shutdown time,
+ // I think. That's very odd because this doesn't go away until
+ // we're ``green_dealloc()``, at which point we shouldn't be
+ // traversed anymore.
+ return 0;
+ }
+
+ return self->pimpl->tp_traverse(visit, arg);
+}
+
+static int
+green_is_gc(BorrowedGreenlet self)
+{
+ int result = 0;
+ /* Main greenlet can be garbage collected since it can only
+ become unreachable if the underlying thread exited.
+ Active greenlets --- including those that are suspended ---
+ cannot be garbage collected, however.
+ */
+ if (self->main() || !self->active()) {
+ result = 1;
+ }
+ // The main greenlet pointer will eventually go away after the thread dies.
+ if (self->was_running_in_dead_thread()) {
+ // Our thread is dead! We can never run again. Might as well
+ // GC us. Note that if a tuple containing only us and other
+ // immutable objects had been scanned before this, when we
+ // would have returned 0, the tuple will take itself out of GC
+ // tracking and never be investigated again. So that could
+ // result in both us and the tuple leaking due to an
+ // unreachable/uncollectible reference. The same goes for
+ // dictionaries.
+ //
+ // It's not a great idea to be changing our GC state on the
+ // fly.
+ result = 1;
+ }
+ return result;
+}
+
+
+static int
+green_clear(PyGreenlet* self)
+{
+ /* Greenlet is only cleared if it is about to be collected.
+ Since active greenlets are not garbage collectable, we can
+ be sure that, even if they are deallocated during clear,
+ nothing they reference is in unreachable or finalizers,
+ so even if it switches we are relatively safe. */
+ // XXX: Are we responsible for clearing weakrefs here?
+ Py_CLEAR(self->dict);
+ return self->pimpl->tp_clear();
+}
+
+/**
+ * Returns 0 on failure (the object was resurrected) or 1 on success.
+ **/
+static int
+_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self)
+{
+ /* Hacks hacks hacks copied from instance_dealloc() */
+ /* Temporarily resurrect the greenlet. */
+ assert(self.REFCNT() == 0);
+ Py_SET_REFCNT(self.borrow(), 1);
+ /* Save the current exception, if any. */
+ PyErrPieces saved_err;
+ try {
+ // BY THE TIME WE GET HERE, the state may actually be going
+ // away
+ // if we're shutting down the interpreter and freeing thread
+ // entries,
+ // this could result in freeing greenlets that were leaked. So
+ // we can't try to read the state.
+ self->deallocing_greenlet_in_thread(
+ self->thread_state()
+ ? static_cast<ThreadState*>(GET_THREAD_STATE())
+ : nullptr);
+ }
+ catch (const PyErrOccurred&) {
+ PyErr_WriteUnraisable(self.borrow_o());
+ /* XXX what else should we do? */
+ }
+ /* Check for no resurrection must be done while we keep
+ * our internal reference, otherwise PyFile_WriteObject
+ * causes recursion if using Py_INCREF/Py_DECREF
+ */
+ if (self.REFCNT() == 1 && self->active()) {
+ /* Not resurrected, but still not dead!
+ XXX what else should we do? we complain. */
+ PyObject* f = PySys_GetObject("stderr");
+ Py_INCREF(self.borrow_o()); /* leak! */
+ if (f != NULL) {
+ PyFile_WriteString("GreenletExit did not kill ", f);
+ PyFile_WriteObject(self.borrow_o(), f, 0);
+ PyFile_WriteString("\n", f);
+ }
+ }
+ /* Restore the saved exception. */
+ saved_err.PyErrRestore();
+ /* Undo the temporary resurrection; can't use DECREF here,
+ * it would cause a recursive call.
+ */
+ assert(self.REFCNT() > 0);
+
+ Py_ssize_t refcnt = self.REFCNT() - 1;
+ Py_SET_REFCNT(self.borrow_o(), refcnt);
+ if (refcnt != 0) {
+ /* Resurrected! */
+ _Py_NewReference(self.borrow_o());
+ Py_SET_REFCNT(self.borrow_o(), refcnt);
+ /* Better to use tp_finalizer slot (PEP 442)
+ * and call ``PyObject_CallFinalizerFromDealloc``,
+ * but that's only supported in Python 3.4+; see
+ * Modules/_io/iobase.c for an example.
+ *
+ * The following approach is copied from iobase.c in CPython 2.7.
+ * (along with much of this function in general). Here's their
+ * comment:
+ *
+ * When called from a heap type's dealloc, the type will be
+ * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). */
+ if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) {
+ Py_INCREF(self.TYPE());
+ }
+
+ PyObject_GC_Track((PyObject*)self);
+
+ _Py_DEC_REFTOTAL;
+#ifdef COUNT_ALLOCS
+ --Py_TYPE(self)->tp_frees;
+ --Py_TYPE(self)->tp_allocs;
+#endif /* COUNT_ALLOCS */
+ return 0;
+ }
+ return 1;
+}
+
+
+static void
+green_dealloc(PyGreenlet* self)
+{
+ PyObject_GC_UnTrack(self);
+ BorrowedGreenlet me(self);
+ if (me->active()
+ && me->started()
+ && !me->main()) {
+ if (!_green_dealloc_kill_started_non_main_greenlet(me)) {
+ return;
+ }
+ }
+
+ if (self->weakreflist != NULL) {
+ PyObject_ClearWeakRefs((PyObject*)self);
+ }
+ Py_CLEAR(self->dict);
+
+ if (self->pimpl) {
+ // In case deleting this, which frees some memory,
+ // somehow winds up calling back into us. That's usually a
+ // bug in our code.
+ Greenlet* p = self->pimpl;
+ self->pimpl = nullptr;
+ delete p;
+ }
+ // and finally we're done. self is now invalid.
+ Py_TYPE(self)->tp_free((PyObject*)self);
+}
+
+
+
+static OwnedObject
+throw_greenlet(BorrowedGreenlet self, PyErrPieces& err_pieces)
+{
+ PyObject* result = nullptr;
+ err_pieces.PyErrRestore();
+ assert(PyErr_Occurred());
+ if (self->started() && !self->active()) {
+ /* dead greenlet: turn GreenletExit into a regular return */
+ result = g_handle_exit(OwnedObject()).relinquish_ownership();
+ }
+ self->args() <<= result;
+
+ return single_result(self->g_switch());
+}
+
+
+
+PyDoc_STRVAR(
+ green_switch_doc,
+ "switch(*args, **kwargs)\n"
+ "\n"
+ "Switch execution to this greenlet.\n"
+ "\n"
+ "If this greenlet has never been run, then this greenlet\n"
+ "will be switched to using the body of ``self.run(*args, **kwargs)``.\n"
+ "\n"
+ "If the greenlet is active (has been run, but was switch()'ed\n"
+ "out before leaving its run function), then this greenlet will\n"
+ "be resumed and the return value to its switch call will be\n"
+ "None if no arguments are given, the given argument if one\n"
+ "argument is given, or the args tuple and keyword args dict if\n"
+ "multiple arguments are given.\n"
+ "\n"
+ "If the greenlet is dead, or is the current greenlet then this\n"
+ "function will simply return the arguments using the same rules as\n"
+ "above.\n");
+
+static PyObject*
+green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs)
+{
+ using greenlet::SwitchingArgs;
+ SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs));
+ self->pimpl->may_switch_away();
+ self->pimpl->args() <<= switch_args;
+
+ // If we're switching out of a greenlet, and that switch is the
+ // last thing the greenlet does, the greenlet ought to be able to
+ // go ahead and die at that point. Currently, someone else must
+ // manually switch back to the greenlet so that we "fall off the
+ // end" and can perform cleanup. You'd think we'd be able to
+ // figure out that this is happening using the frame's ``f_lasti``
+ // member, which is supposed to be an index into
+ // ``frame->f_code->co_code``, the bytecode string. However, in
+ // recent interpreters, ``f_lasti`` tends not to be updated thanks
+ // to things like the PREDICT() macros in ceval.c. So it doesn't
+ // really work to do that in many cases. For example, the Python
+ // code:
+ // def run():
+ // greenlet.getcurrent().parent.switch()
+ // produces bytecode of len 16, with the actual call to switch()
+ // being at index 10 (in Python 3.10). However, the reported
+ // ``f_lasti`` we actually see is...5! (Which happens to be the
+ // second byte of the CALL_METHOD op for ``getcurrent()``).
+
+ try {
+ //OwnedObject result = single_result(self->pimpl->g_switch());
+ OwnedObject result(single_result(self->pimpl->g_switch()));
+#ifndef NDEBUG
+ // Note that the current greenlet isn't necessarily self. If self
+ // finished, we went to one of its parents.
+ assert(!self->pimpl->args());
+
+ const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current();
+ // It's possible it's never been switched to.
+ assert(!current->args());
+#endif
+ PyObject* p = result.relinquish_ownership();
+
+ if (!p && !PyErr_Occurred()) {
+ // This shouldn't be happening anymore, so the asserts
+ // are there for debug builds. Non-debug builds
+ // crash "gracefully" in this case, although there is an
+ // argument to be made for killing the process in all
+ // cases --- for this to be the case, our switches
+ // probably nested in an incorrect way, so the state is
+ // suspicious. Nothing should be corrupt though, just
+ // confused at the Python level. Letting this propagate is
+ // probably good enough.
+ assert(p || PyErr_Occurred());
+ throw PyErrOccurred(
+ mod_globs->PyExc_GreenletError,
+ "Greenlet.switch() returned NULL without an exception set."
+ );
+ }
+ return p;
+ }
+ catch(const PyErrOccurred&) {
+ return nullptr;
+ }
+}
+
+PyDoc_STRVAR(
+ green_throw_doc,
+ "Switches execution to this greenlet, but immediately raises the\n"
+ "given exception in this greenlet. If no argument is provided, the "
+ "exception\n"
+ "defaults to `greenlet.GreenletExit`. The normal exception\n"
+ "propagation rules apply, as described for `switch`. Note that calling "
+ "this\n"
+ "method is almost equivalent to the following::\n"
+ "\n"
+ " def raiser():\n"
+ " raise typ, val, tb\n"
+ " g_raiser = greenlet(raiser, parent=g)\n"
+ " g_raiser.switch()\n"
+ "\n"
+ "except that this trick does not work for the\n"
+ "`greenlet.GreenletExit` exception, which would not propagate\n"
+ "from ``g_raiser`` to ``g``.\n");
+
+static PyObject*
+green_throw(PyGreenlet* self, PyObject* args)
+{
+ PyArgParseParam typ(mod_globs->PyExc_GreenletExit);
+ PyArgParseParam val;
+ PyArgParseParam tb;
+
+ if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) {
+ return nullptr;
+ }
+
+ assert(typ.borrow() || val.borrow());
+
+ self->pimpl->may_switch_away();
+ try {
+ // Both normalizing the error and the actual throw_greenlet
+ // could throw PyErrOccurred.
+ PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow());
+
+ return throw_greenlet(self, err_pieces).relinquish_ownership();
+ }
+ catch (const PyErrOccurred&) {
+ return nullptr;
+ }
+}
+
+static int
+green_bool(PyGreenlet* self)
+{
+ return self->pimpl->active();
+}
+
+/**
+ * CAUTION: Allocates memory, may run GC and arbitrary Python code.
+ */
+static PyObject*
+green_getdict(PyGreenlet* self, void* UNUSED(context))
+{
+ if (self->dict == NULL) {
+ self->dict = PyDict_New();
+ if (self->dict == NULL) {
+ return NULL;
+ }
+ }
+ Py_INCREF(self->dict);
+ return self->dict;
+}
+
+static int
+green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context))
+{
+ PyObject* tmp;
+
+ if (val == NULL) {
+ PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted");
+ return -1;
+ }
+ if (!PyDict_Check(val)) {
+ PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary");
+ return -1;
+ }
+ tmp = self->dict;
+ Py_INCREF(val);
+ self->dict = val;
+ Py_XDECREF(tmp);
+ return 0;
+}
+
+static bool
+_green_not_dead(BorrowedGreenlet self)
+{
+ // XXX: Where else should we do this?
+ // Probably on entry to most Python-facing functions?
+ if (self->was_running_in_dead_thread()) {
+ self->deactivate_and_free();
+ return false;
+ }
+ return self->active() || !self->started();
+}
+
+
+static PyObject*
+green_getdead(BorrowedGreenlet self, void* UNUSED(context))
+{
+ if (_green_not_dead(self)) {
+ Py_RETURN_FALSE;
+ }
+ else {
+ Py_RETURN_TRUE;
+ }
+}
+
+static PyObject*
+green_get_stack_saved(PyGreenlet* self, void* UNUSED(context))
+{
+ return PyLong_FromSsize_t(self->pimpl->stack_saved());
+}
+
+
+static PyObject*
+green_getrun(BorrowedGreenlet self, void* UNUSED(context))
+{
+ try {
+ OwnedObject result(self->run());
+ return result.relinquish_ownership();
+ }
+ catch(const PyErrOccurred&) {
+ return nullptr;
+ }
+}
+
+
+
+
+
+static int
+green_setrun(BorrowedGreenlet self, BorrowedObject nrun, void* UNUSED(context))
+{
+ try {
+ self->run(nrun);
+ return 0;
+ }
+ catch(const PyErrOccurred&) {
+ return -1;
+ }
+}
+
+static PyObject*
+green_getparent(BorrowedGreenlet self, void* UNUSED(context))
+{
+ return self->parent().acquire_or_None();
+}
+
+
+
+static int
+green_setparent(BorrowedGreenlet self, BorrowedObject nparent, void* UNUSED(context))
+{
+ try {
+ self->parent(nparent);
+ }
+ catch(const PyErrOccurred&) {
+ return -1;
+ }
+ return 0;
+}
+
+
+static PyObject*
+green_getcontext(const PyGreenlet* self, void* UNUSED(context))
+{
+ const Greenlet *const g = self->pimpl;
+ try {
+ OwnedObject result(g->context());
+ return result.relinquish_ownership();
+ }
+ catch(const PyErrOccurred&) {
+ return nullptr;
+ }
+}
+
+static int
+green_setcontext(BorrowedGreenlet self, PyObject* nctx, void* UNUSED(context))
+{
+ try {
+ self->context(nctx);
+ return 0;
+ }
+ catch(const PyErrOccurred&) {
+ return -1;
+ }
+}
+
+
+static PyObject*
+green_getframe(BorrowedGreenlet self, void* UNUSED(context))
+{
+ const PythonState::OwnedFrame& top_frame = self->top_frame();
+ return top_frame.acquire_or_None();
+}
+
+
+static PyObject*
+green_getstate(PyGreenlet* self)
+{
+ PyErr_Format(PyExc_TypeError,
+ "cannot serialize '%s' object",
+ Py_TYPE(self)->tp_name);
+ return nullptr;
+}
+
+static PyObject*
+green_repr(BorrowedGreenlet self)
+{
+ /*
+ Return a string like
+
+
+ The handling of greenlets across threads is not super good.
+ We mostly use the internal definitions of these terms, but they
+ generally should make sense to users as well.
+ */
+ PyObject* result;
+ int never_started = !self->started() && !self->active();
+
+ const char* const tp_name = Py_TYPE(self)->tp_name;
+
+ if (_green_not_dead(self)) {
+ /* XXX: The otid= is almost useless because you can't correlate it to
+ any thread identifier exposed to Python. We could use
+ PyThreadState_GET()->thread_id, but we'd need to save that in the
+ greenlet, or save the whole PyThreadState object itself.
+
+ As it stands, it's only useful for identifying greenlets from the same thread.
+ */
+ const char* state_in_thread;
+ if (self->was_running_in_dead_thread()) {
+ // The thread it was running in is dead!
+ // This can happen, especially at interpreter shut down.
+ // It complicates debugging output because it may be
+ // impossible to access the current thread state at that
+ // time. Thus, don't access the current thread state.
+ state_in_thread = " (thread exited)";
+ }
+ else {
+ state_in_thread = GET_THREAD_STATE().state().is_current(self)
+ ? " current"
+ : (self->started() ? " suspended" : "");
+ }
+ result = PyUnicode_FromFormat(
+ "<%s object at %p (otid=%p)%s%s%s%s>",
+ tp_name,
+ self.borrow_o(),
+ self->thread_state(),
+ state_in_thread,
+ self->active() ? " active" : "",
+ never_started ? " pending" : " started",
+ self->main() ? " main" : ""
+ );
+ }
+ else {
+ result = PyUnicode_FromFormat(
+ "<%s object at %p (otid=%p) %sdead>",
+ tp_name,
+ self.borrow_o(),
+ self->thread_state(),
+ self->was_running_in_dead_thread()
+ ? "(thread exited) "
+ : ""
+ );
+ }
+
+ return result;
+}
+
+/*****************************************************************************
+ * C interface
+ *
+ * These are exported using the CObject API
+ */
+extern "C" {
+static PyGreenlet*
+PyGreenlet_GetCurrent(void)
+{
+ return GET_THREAD_STATE().state().get_current().relinquish_ownership();
+}
+
+static int
+PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent)
+{
+ return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL);
+}
+
+static PyGreenlet*
+PyGreenlet_New(PyObject* run, PyGreenlet* parent)
+{
+ using greenlet::refs::NewDictReference;
+ // In the past, we didn't use green_new and green_init, but that
+ // was a maintenance issue because we duplicated code. This way is
+ // much safer, but slightly slower. If that's a problem, we could
+ // refactor green_init to separate argument parsing from initialization.
+ OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr));
+ if (!g) {
+ return NULL;
+ }
+
+ try {
+ NewDictReference kwargs;
+ if (run) {
+ kwargs.SetItem(mod_globs->str_run, run);
+ }
+ if (parent) {
+ kwargs.SetItem("parent", (PyObject*)parent);
+ }
+
+ Require(green_init(g, mod_globs->empty_tuple, kwargs));
+ }
+ catch (const PyErrOccurred&) {
+ return nullptr;
+ }
+
+ return g.relinquish_ownership();
+}
+
+static PyObject*
+PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return NULL;
+ }
+
+ if (args == NULL) {
+ args = mod_globs->empty_tuple;
+ }
+
+ if (kwargs == NULL || !PyDict_Check(kwargs)) {
+ kwargs = NULL;
+ }
+
+ return green_switch(self, args, kwargs);
+}
+
+static PyObject*
+PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return nullptr;
+ }
+ try {
+ PyErrPieces err_pieces(typ, val, tb);
+ return throw_greenlet(self, err_pieces).relinquish_ownership();
+ }
+ catch (const PyErrOccurred&) {
+ return nullptr;
+ }
+}
+
+static int
+Extern_PyGreenlet_MAIN(PyGreenlet* self)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return -1;
+ }
+ return self->pimpl->main();
+}
+
+static int
+Extern_PyGreenlet_ACTIVE(PyGreenlet* self)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return -1;
+ }
+ return self->pimpl->active();
+}
+
+static int
+Extern_PyGreenlet_STARTED(PyGreenlet* self)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return -1;
+ }
+ return self->pimpl->started();
+}
+
+static PyGreenlet*
+Extern_PyGreenlet_GET_PARENT(PyGreenlet* self)
+{
+ if (!PyGreenlet_Check(self)) {
+ PyErr_BadArgument();
+ return NULL;
+ }
+ // This can return NULL even if there is no exception
+ return self->pimpl->parent().acquire();
+}
+} // extern C.
+
+/** End C API ****************************************************************/
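+/* Usage sketch for the exported C API above, as seen from another extension
+ * module (illustrative only; assumes the public ``greenlet.h`` header, which
+ * exposes these entry points through the ``_C_API`` capsule; error handling
+ * kept minimal):
+ *
+ *   #include <greenlet/greenlet.h>
+ *
+ *   static PyObject*
+ *   spawn_and_switch(PyObject* run)
+ *   {
+ *       PyGreenlet_Import();                        // binds the capsule pointers
+ *       if (PyErr_Occurred()) {
+ *           return NULL;
+ *       }
+ *       PyGreenlet* g = PyGreenlet_New(run, NULL);  // parent defaults to current
+ *       if (!g) {
+ *           return NULL;
+ *       }
+ *       PyObject* result = PyGreenlet_Switch(g, NULL, NULL);
+ *       Py_DECREF(g);
+ *       return result;                              // NULL on error, as usual
+ *   }
+ */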
+
+static PyMethodDef green_methods[] = {
+ {"switch",
+ reinterpret_cast<PyCFunction>(green_switch),
+ METH_VARARGS | METH_KEYWORDS,
+ green_switch_doc},
+ {"throw", (PyCFunction)green_throw, METH_VARARGS, green_throw_doc},
+ {"__getstate__", (PyCFunction)green_getstate, METH_NOARGS, NULL},
+ {NULL, NULL} /* sentinel */
+};
+
+static PyGetSetDef green_getsets[] = {
+ /* name, getter, setter, doc, context pointer */
+ {"__dict__", (getter)green_getdict, (setter)green_setdict, /*XXX*/ NULL},
+ {"run", (getter)green_getrun, (setter)green_setrun, /*XXX*/ NULL},
+ {"parent", (getter)green_getparent, (setter)green_setparent, /*XXX*/ NULL},
+ {"gr_frame", (getter)green_getframe, NULL, /*XXX*/ NULL},
+ {"gr_context",
+ (getter)green_getcontext,
+ (setter)green_setcontext,
+ /*XXX*/ NULL},
+ {"dead", (getter)green_getdead, NULL, /*XXX*/ NULL},
+ {"_stack_saved", (getter)green_get_stack_saved, NULL, /*XXX*/ NULL},
+ {NULL}
+};
+
+static PyMemberDef green_members[] = {
+ {NULL}
+};
+
+static PyNumberMethods green_as_number = {
+ NULL, /* nb_add */
+ NULL, /* nb_subtract */
+ NULL, /* nb_multiply */
+ NULL, /* nb_remainder */
+ NULL, /* nb_divmod */
+ NULL, /* nb_power */
+ NULL, /* nb_negative */
+ NULL, /* nb_positive */
+ NULL, /* nb_absolute */
+ (inquiry)green_bool, /* nb_bool */
+};
+
+
+PyTypeObject PyGreenlet_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "greenlet.greenlet", /* tp_name */
+ sizeof(PyGreenlet), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)green_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ (reprfunc)green_repr, /* tp_repr */
+ &green_as_number, /* tp_as _number*/
+ 0, /* tp_as _sequence*/
+ 0, /* tp_as _mapping*/
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer*/
+ G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
+ "greenlet(run=None, parent=None) -> greenlet\n\n"
+ "Creates a new greenlet object (without running it).\n\n"
+ " - *run* -- The callable to invoke.\n"
+ " - *parent* -- The parent greenlet. The default is the current "
+ "greenlet.", /* tp_doc */
+ (traverseproc)green_traverse, /* tp_traverse */
+ (inquiry)green_clear, /* tp_clear */
+ 0, /* tp_richcompare */
+ offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ green_methods, /* tp_methods */
+ green_members, /* tp_members */
+ green_getsets, /* tp_getset */
+ 0, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ offsetof(PyGreenlet, dict), /* tp_dictoffset */
+ (initproc)green_init, /* tp_init */
+ PyType_GenericAlloc, /* tp_alloc */
+ (newfunc)green_new, /* tp_new */
+ PyObject_GC_Del, /* tp_free */
+ (inquiry)green_is_gc, /* tp_is_gc */
+};
+
+
+
+static PyObject*
+green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context))
+{
+ BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
+ return PyBool_FromLong(broken->_force_switch_error);
+}
+
+static int
+green_unswitchable_setforce(PyGreenlet* self, BorrowedObject nforce, void* UNUSED(context))
+{
+ if (!nforce) {
+ PyErr_SetString(
+ PyExc_AttributeError,
+ "Cannot delete force_switch_error"
+ );
+ return -1;
+ }
+ BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
+ int is_true = PyObject_IsTrue(nforce);
+ if (is_true == -1) {
+ return -1;
+ }
+ broken->_force_switch_error = is_true;
+ return 0;
+}
+
+static PyObject*
+green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context))
+{
+ BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
+ return PyBool_FromLong(broken->_force_slp_switch_error);
+}
+
+static int
+green_unswitchable_setforceslp(PyGreenlet* self, BorrowedObject nforce, void* UNUSED(context))
+{
+ if (!nforce) {
+ PyErr_SetString(
+ PyExc_AttributeError,
+ "Cannot delete force_slp_switch_error"
+ );
+ return -1;
+ }
+ BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
+ int is_true = PyObject_IsTrue(nforce);
+ if (is_true == -1) {
+ return -1;
+ }
+ broken->_force_slp_switch_error = is_true;
+ return 0;
+}
+
+static PyGetSetDef green_unswitchable_getsets[] = {
+ /* name, getter, setter, doc, context pointer */
+ {"force_switch_error",
+ (getter)green_unswitchable_getforce,
+ (setter)green_unswitchable_setforce,
+ /*XXX*/ NULL},
+ {"force_slp_switch_error",
+ (getter)green_unswitchable_getforceslp,
+ (setter)green_unswitchable_setforceslp,
+ /*XXX*/ NULL},
+
+ {NULL}
+};
+
+PyTypeObject PyGreenletUnswitchable_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "greenlet._greenlet.UnswitchableGreenlet",
+ 0, /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)green_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as _number*/
+ 0, /* tp_as _sequence*/
+ 0, /* tp_as _mapping*/
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer*/
+ G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
+ "Undocumented internal class", /* tp_doc */
+ (traverseproc)green_traverse, /* tp_traverse */
+ (inquiry)green_clear, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ 0, /* tp_methods */
+ 0, /* tp_members */
+ green_unswitchable_getsets, /* tp_getset */
+ &PyGreenlet_Type, /* tp_base */
+ 0, /* tp_dict */
+ 0, /* tp_descr_get */
+ 0, /* tp_descr_set */
+ 0, /* tp_dictoffset */
+ (initproc)green_init, /* tp_init */
+ PyType_GenericAlloc, /* tp_alloc */
+ (newfunc)green_unswitchable_new, /* tp_new */
+ PyObject_GC_Del, /* tp_free */
+ (inquiry)green_is_gc, /* tp_is_gc */
+};
+
+
+PyDoc_STRVAR(mod_getcurrent_doc,
+ "getcurrent() -> greenlet\n"
+ "\n"
+ "Returns the current greenlet (i.e. the one which called this "
+ "function).\n");
+
+static PyObject*
+mod_getcurrent(PyObject* UNUSED(module))
+{
+ return GET_THREAD_STATE().state().get_current().relinquish_ownership_o();
+}
+
+PyDoc_STRVAR(mod_settrace_doc,
+ "settrace(callback) -> object\n"
+ "\n"
+ "Sets a new tracing function and returns the previous one.\n");
+static PyObject*
+mod_settrace(PyObject* UNUSED(module), PyObject* args)
+{
+ PyArgParseParam tracefunc;
+ if (!PyArg_ParseTuple(args, "O", &tracefunc)) {
+ return NULL;
+ }
+ ThreadState& state = GET_THREAD_STATE();
+ OwnedObject previous = state.get_tracefunc();
+ if (!previous) {
+ previous = Py_None;
+ }
+
+ state.set_tracefunc(tracefunc);
+
+ return previous.relinquish_ownership();
+}
+
+PyDoc_STRVAR(mod_gettrace_doc,
+ "gettrace() -> object\n"
+ "\n"
+ "Returns the currently set tracing function, or None.\n");
+
+static PyObject*
+mod_gettrace(PyObject* UNUSED(module))
+{
+ OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc();
+ if (!tracefunc) {
+ tracefunc = Py_None;
+ }
+ return tracefunc.relinquish_ownership();
+}
+
+PyDoc_STRVAR(mod_set_thread_local_doc,
+ "set_thread_local(key, value) -> None\n"
+ "\n"
+ "Set a value in the current thread-local dictionary. Debbuging only.\n");
+
+static PyObject*
+mod_set_thread_local(PyObject* UNUSED(module), PyObject* args)
+{
+ PyArgParseParam key;
+ PyArgParseParam value;
+ PyObject* result = NULL;
+
+ if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) {
+ if(PyDict_SetItem(
+ PyThreadState_GetDict(), // borrow
+ key,
+ value) == 0 ) {
+ // success
+ Py_INCREF(Py_None);
+ result = Py_None;
+ }
+ }
+ return result;
+}
+
+PyDoc_STRVAR(mod_get_pending_cleanup_count_doc,
+ "get_pending_cleanup_count() -> Integer\n"
+ "\n"
+ "Get the number of greenlet cleanup operations pending. Testing only.\n");
+
+
+static PyObject*
+mod_get_pending_cleanup_count(PyObject* UNUSED(module))
+{
+ LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock);
+ return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size());
+}
+
+PyDoc_STRVAR(mod_get_total_main_greenlets_doc,
+ "get_total_main_greenlets() -> Integer\n"
+ "\n"
+ "Quickly return the number of main greenlets that exist. Testing only.\n");
+
+static PyObject*
+mod_get_total_main_greenlets(PyObject* UNUSED(module))
+{
+ return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS);
+}
+
+PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc,
+ "get_clocks_used_doing_optional_cleanup() -> Integer\n"
+ "\n"
+ "Get the number of clock ticks the program has used doing optional "
+ "greenlet cleanup.\n"
+ "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n"
+ "that leaked after a thread exited. This requires invoking Python's garbage collector,\n"
+ "which may have a performance cost proportional to the number of live objects.\n"
+ "This function returns the amount of processor time\n"
+ "greenlet has used to do this. In programs that run with very large amounts of live\n"
+ "objects, this metric can be used to decide whether the cost of doing this cleanup\n"
+ "is worth the memory leak being corrected. If not, you can disable the cleanup\n"
+ "using ``enable_optional_cleanup(False)``.\n"
+ "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n"
+ "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n"
+ "by dividing by the value of CLOCKS_PER_SEC."
+ "If cleanup has been disabled, returns None."
+ "\n"
+ "This is an implementation specific, provisional API. It may be changed or removed\n"
+ "in the future.\n"
+ ".. versionadded:: 2.0"
+ );
+static PyObject*
+mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module))
+{
+ std::clock_t& clocks = ThreadState::clocks_used_doing_gc();
+
+ if (clocks == std::clock_t(-1)) {
+ Py_RETURN_NONE;
+ }
+ // This might not actually work on some implementations; clock_t
+ // is an opaque type.
+ return PyLong_FromSsize_t(clocks);
+}
+
+PyDoc_STRVAR(mod_enable_optional_cleanup_doc,
+ "enable_optional_cleanup(bool) -> None\n"
+ "\n"
+ "Enable or disable optional cleanup operations.\n"
+ "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n"
+ );
+static PyObject*
+mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag)
+{
+ int is_true = PyObject_IsTrue(flag);
+ if (is_true == -1) {
+ return nullptr;
+ }
+
+ std::clock_t& clocks = ThreadState::clocks_used_doing_gc();
+ if (is_true) {
+ // If we already have a value, we don't want to lose it.
+ if (clocks == std::clock_t(-1)) {
+ clocks = 0;
+ }
+ }
+ else {
+ clocks = std::clock_t(-1);
+ }
+ Py_RETURN_NONE;
+}
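+
+// A minimal usage sketch for the two functions above, from the point of view of
+// an embedding application driving the interpreter through the C API. The module
+// and function names come from the docstrings; the one-second threshold and the
+// abbreviated error handling are hypothetical.
+//
+//     PyObject* mod = PyImport_ImportModule("greenlet._greenlet");
+//     if (mod) {
+//         PyObject* clocks = PyObject_CallMethod(
+//             mod, "get_clocks_used_doing_optional_cleanup", NULL);
+//         if (clocks && clocks != Py_None) {
+//             double seconds = PyLong_AsDouble(clocks) / CLOCKS_PER_SEC;
+//             if (seconds > 1.0) {
+//                 // Cleanup costs more than this workload cares to pay; turn it off.
+//                 Py_XDECREF(PyObject_CallMethod(
+//                     mod, "enable_optional_cleanup", "i", 0));
+//             }
+//         }
+//         Py_XDECREF(clocks);
+//         Py_DECREF(mod);
+//     }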
+
+PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc,
+ "get_tstate_trash_delete_nesting() -> Integer\n"
+ "\n"
+ "Return the 'trash can' nesting level. Testing only.\n");
+static PyObject*
+mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module))
+{
+ PyThreadState* tstate = PyThreadState_GET();
+
+#if GREENLET_PY312
+ return PyLong_FromLong(tstate->trash.delete_nesting);
+#else
+ return PyLong_FromLong(tstate->trash_delete_nesting);
+#endif
+}
+
+static PyMethodDef GreenMethods[] = {
+ {"getcurrent",
+ (PyCFunction)mod_getcurrent,
+ METH_NOARGS,
+ mod_getcurrent_doc},
+ {"settrace", (PyCFunction)mod_settrace, METH_VARARGS, mod_settrace_doc},
+ {"gettrace", (PyCFunction)mod_gettrace, METH_NOARGS, mod_gettrace_doc},
+ {"set_thread_local", (PyCFunction)mod_set_thread_local, METH_VARARGS, mod_set_thread_local_doc},
+ {"get_pending_cleanup_count", (PyCFunction)mod_get_pending_cleanup_count, METH_NOARGS, mod_get_pending_cleanup_count_doc},
+ {"get_total_main_greenlets", (PyCFunction)mod_get_total_main_greenlets, METH_NOARGS, mod_get_total_main_greenlets_doc},
+ {"get_clocks_used_doing_optional_cleanup", (PyCFunction)mod_get_clocks_used_doing_optional_cleanup, METH_NOARGS, mod_get_clocks_used_doing_optional_cleanup_doc},
+ {"enable_optional_cleanup", (PyCFunction)mod_enable_optional_cleanup, METH_O, mod_enable_optional_cleanup_doc},
+ {"get_tstate_trash_delete_nesting", (PyCFunction)mod_get_tstate_trash_delete_nesting, METH_NOARGS, mod_get_tstate_trash_delete_nesting_doc},
+ {NULL, NULL} /* Sentinel */
+};
+
+static const char* const copy_on_greentype[] = {
+ "getcurrent",
+ "error",
+ "GreenletExit",
+ "settrace",
+ "gettrace",
+ NULL
+};
+
+static struct PyModuleDef greenlet_module_def = {
+ PyModuleDef_HEAD_INIT,
+ "greenlet._greenlet",
+ NULL,
+ -1,
+ GreenMethods,
+};
+
+
+
+static PyObject*
+greenlet_internal_mod_init() noexcept
+{
+ static void* _PyGreenlet_API[PyGreenlet_API_pointers];
+
+ try {
+ CreatedModule m(greenlet_module_def);
+
+ Require(PyType_Ready(&PyGreenlet_Type));
+ Require(PyType_Ready(&PyGreenletUnswitchable_Type));
+
+ mod_globs = new greenlet::GreenletGlobals;
+ ThreadState::init();
+
+ m.PyAddObject("greenlet", PyGreenlet_Type);
+ m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type);
+ m.PyAddObject("error", mod_globs->PyExc_GreenletError);
+ m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit);
+
+ m.PyAddObject("GREENLET_USE_GC", 1);
+ m.PyAddObject("GREENLET_USE_TRACING", 1);
+ m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L);
+ m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L);
+
+ OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC));
+ m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec);
+
+ /* also publish module-level data as attributes of the greentype. */
+ // XXX: This is weird, and enables a strange pattern of
+ // confusing the class greenlet with the module greenlet; with
+ // the exception of (possibly) ``getcurrent()``, this
+ // shouldn't be encouraged so don't add new items here.
+ for (const char* const* p = copy_on_greentype; *p; p++) {
+ OwnedObject o = m.PyRequireAttr(*p);
+ PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow());
+ }
+
+ /*
+ * Expose C API
+ */
+
+ /* types */
+ _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type;
+
+ /* exceptions */
+ _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError;
+ _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit;
+
+ /* methods */
+ _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New;
+ _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent;
+ _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw;
+ _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch;
+ _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent;
+
+ /* Previously macros, but now need to be functions externally. */
+ _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN;
+ _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED;
+ _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE;
+ _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT;
+
+ /* XXX: Note that our module name is ``greenlet._greenlet``, but for
+ backwards compatibility with existing C code, we need the _C_API to
+ be directly in greenlet.
+ */
+ const NewReference c_api_object(Require(
+ PyCapsule_New(
+ (void*)_PyGreenlet_API,
+ "greenlet._C_API",
+ NULL)));
+ m.PyAddObject("_C_API", c_api_object);
+ assert(c_api_object.REFCNT() == 2);
+
+ // cerr << "Sizes:"
+ // << "\n\tGreenlet : " << sizeof(Greenlet)
+ // << "\n\tUserGreenlet : " << sizeof(UserGreenlet)
+ // << "\n\tMainGreenlet : " << sizeof(MainGreenlet)
+ // << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState)
+ // << "\n\tPythonState : " << sizeof(greenlet::PythonState)
+ // << "\n\tStackState : " << sizeof(greenlet::StackState)
+ // << "\n\tSwitchingArgs : " << sizeof(greenlet::SwitchingArgs)
+ // << "\n\tOwnedObject : " << sizeof(greenlet::refs::OwnedObject)
+ // << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject)
+ // << "\n\tPyGreenlet : " << sizeof(PyGreenlet)
+ // << endl;
+
+ return m.borrow(); // But really it's the main reference.
+ }
+ catch (const LockInitError& e) {
+ PyErr_SetString(PyExc_MemoryError, e.what());
+ return NULL;
+ }
+ catch (const PyErrOccurred&) {
+ return NULL;
+ }
+
+}
+
+extern "C" {
+
+PyMODINIT_FUNC
+PyInit__greenlet(void)
+{
+ return greenlet_internal_mod_init();
+}
+
+}; // extern C
+
+#ifdef __clang__
+# pragma clang diagnostic pop
+#elif defined(__GNUC__)
+# pragma GCC diagnostic pop
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet.h b/venv/lib/python3.11/site-packages/greenlet/greenlet.h
new file mode 100644
index 0000000..d02a16e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet.h
@@ -0,0 +1,164 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+
+/* Greenlet object interface */
+
+#ifndef Py_GREENLETOBJECT_H
+#define Py_GREENLETOBJECT_H
+
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is deprecated and undocumented. It does not change. */
+#define GREENLET_VERSION "1.0.0"
+
+#ifndef GREENLET_MODULE
+#define implementation_ptr_t void*
+#endif
+
+typedef struct _greenlet {
+ PyObject_HEAD
+ PyObject* weakreflist;
+ PyObject* dict;
+ implementation_ptr_t pimpl;
+} PyGreenlet;
+
+#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type))
+
+
+/* C API functions */
+
+/* Total number of symbols that are exported */
+#define PyGreenlet_API_pointers 12
+
+#define PyGreenlet_Type_NUM 0
+#define PyExc_GreenletError_NUM 1
+#define PyExc_GreenletExit_NUM 2
+
+#define PyGreenlet_New_NUM 3
+#define PyGreenlet_GetCurrent_NUM 4
+#define PyGreenlet_Throw_NUM 5
+#define PyGreenlet_Switch_NUM 6
+#define PyGreenlet_SetParent_NUM 7
+
+#define PyGreenlet_MAIN_NUM 8
+#define PyGreenlet_STARTED_NUM 9
+#define PyGreenlet_ACTIVE_NUM 10
+#define PyGreenlet_GET_PARENT_NUM 11
+
+#ifndef GREENLET_MODULE
+/* This section is used by modules that use the greenlet C API */
+static void** _PyGreenlet_API = NULL;
+
+# define PyGreenlet_Type \
+ (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM])
+
+# define PyExc_GreenletError \
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM])
+
+# define PyExc_GreenletExit \
+ ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM])
+
+/*
+ * PyGreenlet_New(PyObject *args)
+ *
+ * greenlet.greenlet(run, parent=None)
+ */
+# define PyGreenlet_New \
+ (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \
+ _PyGreenlet_API[PyGreenlet_New_NUM])
+
+/*
+ * PyGreenlet_GetCurrent(void)
+ *
+ * greenlet.getcurrent()
+ */
+# define PyGreenlet_GetCurrent \
+ (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM])
+
+/*
+ * PyGreenlet_Throw(
+ * PyGreenlet *greenlet,
+ * PyObject *typ,
+ * PyObject *val,
+ * PyObject *tb)
+ *
+ * g.throw(...)
+ */
+# define PyGreenlet_Throw \
+ (*(PyObject * (*)(PyGreenlet * self, \
+ PyObject * typ, \
+ PyObject * val, \
+ PyObject * tb)) \
+ _PyGreenlet_API[PyGreenlet_Throw_NUM])
+
+/*
+ * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args)
+ *
+ * g.switch(*args, **kwargs)
+ */
+# define PyGreenlet_Switch \
+ (*(PyObject * \
+ (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \
+ _PyGreenlet_API[PyGreenlet_Switch_NUM])
+
+/*
+ * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent)
+ *
+ * g.parent = new_parent
+ */
+# define PyGreenlet_SetParent \
+ (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \
+ _PyGreenlet_API[PyGreenlet_SetParent_NUM])
+
+/*
+ * PyGreenlet_GetParent(PyObject* greenlet)
+ *
+ * return greenlet.parent;
+ *
+ * This could return NULL even if there is no exception active.
+ * If it does not return NULL, you are responsible for decrementing the
+ * reference count.
+ */
+# define PyGreenlet_GetParent \
+ (*(PyGreenlet* (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM])
+
+/*
+ * deprecated, undocumented alias.
+ */
+# define PyGreenlet_GET_PARENT PyGreenlet_GetParent
+
+# define PyGreenlet_MAIN \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_MAIN_NUM])
+
+# define PyGreenlet_STARTED \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_STARTED_NUM])
+
+# define PyGreenlet_ACTIVE \
+ (*(int (*)(PyGreenlet*)) \
+ _PyGreenlet_API[PyGreenlet_ACTIVE_NUM])
+
+
+
+
+/* Macro that imports greenlet and initializes C API */
+/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we
+ keep the older definition to be sure older code that might have a copy of
+ the header still works. */
+# define PyGreenlet_Import() \
+ { \
+ _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \
+ }
+
+#endif /* GREENLET_MODULE */
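+
+/*
+ * A minimal sketch of how an extension module outside of greenlet might use
+ * this C API. The include path and the function name ``run_in_greenlet`` are
+ * assumptions for illustration, and error handling is abbreviated.
+ *
+ *     #include <Python.h>
+ *     #include "greenlet.h"
+ *
+ *     static PyObject*
+ *     run_in_greenlet(PyObject* Py_UNUSED(module), PyObject* callable)
+ *     {
+ *         PyGreenlet_Import();             // fills in _PyGreenlet_API
+ *         if (PyErr_Occurred()) {
+ *             return NULL;                 // greenlet._C_API could not be imported
+ *         }
+ *         // The parent defaults to the current greenlet when NULL is passed.
+ *         PyGreenlet* spawned = PyGreenlet_New(callable, NULL);
+ *         if (!spawned) {
+ *             return NULL;
+ *         }
+ *         // Switch to it with no arguments; its return value comes back here.
+ *         PyObject* noargs = PyTuple_New(0);
+ *         PyObject* result = PyGreenlet_Switch(spawned, noargs, NULL);
+ *         Py_XDECREF(noargs);
+ *         Py_DECREF(spawned);
+ *         return result;
+ *     }
+ */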
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GREENLETOBJECT_H */
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp
new file mode 100644
index 0000000..b452f54
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_allocator.hpp
@@ -0,0 +1,63 @@
+#ifndef GREENLET_ALLOCATOR_HPP
+#define GREENLET_ALLOCATOR_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <memory>
+#include "greenlet_compiler_compat.hpp"
+
+
+namespace greenlet
+{
+ // This allocator is stateless; all instances are identical.
+ // It can *ONLY* be used when we're sure we're holding the GIL
+ // (Python's allocators require the GIL).
+ template <class T>
+ struct PythonAllocator : public std::allocator<T> {
+
+ PythonAllocator(const PythonAllocator& UNUSED(other))
+ : std::allocator<T>()
+ {
+ }
+
+ PythonAllocator(const std::allocator<T> other)
+ : std::allocator<T>(other)
+ {}
+
+ template <class U>
+ PythonAllocator(const std::allocator<U>& other)
+ : std::allocator<T>(other)
+ {
+ }
+
+ PythonAllocator() : std::allocator<T>() {}
+
+ T* allocate(size_t number_objects, const void* UNUSED(hint)=0)
+ {
+ void* p;
+ if (number_objects == 1)
+ p = PyObject_Malloc(sizeof(T));
+ else
+ p = PyMem_Malloc(sizeof(T) * number_objects);
+ return static_cast<T*>(p);
+ }
+
+ void deallocate(T* t, size_t n)
+ {
+ void* p = t;
+ if (n == 1) {
+ PyObject_Free(p);
+ }
+ else
+ PyMem_Free(p);
+ }
+ // This member is deprecated in C++17 and removed in C++20
+ template< class U >
+ struct rebind {
+ typedef PythonAllocator<U> other;
+ };
+
+ };
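+
+ // A minimal sketch of the intended usage pattern: a class declares a static
+ // PythonAllocator for itself and routes its operator new/delete through it,
+ // so single objects come from PyObject_Malloc. The class name ``Example`` is
+ // hypothetical; the greenlet classes that declare a static PythonAllocator
+ // member are presumably wired up along these lines.
+ //
+ //     class Example
+ //     {
+ //     private:
+ //         static PythonAllocator<Example> allocator;
+ //     public:
+ //         static void* operator new(size_t UNUSED(count))
+ //         {
+ //             return allocator.allocate(1);
+ //         }
+ //         static void operator delete(void* ptr)
+ //         {
+ //             allocator.deallocate(static_cast<Example*>(ptr), 1);
+ //         }
+ //     };
+ //     PythonAllocator<Example> Example::allocator;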
+}
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp
new file mode 100644
index 0000000..ee5bbdd
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_compiler_compat.hpp
@@ -0,0 +1,95 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+#ifndef GREENLET_COMPILER_COMPAT_HPP
+#define GREENLET_COMPILER_COMPAT_HPP
+
+/**
+ * Definitions to aid with compatibility with different compilers.
+ *
+ * .. caution:: Use extreme care with noexcept.
+ * Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on
+ * Linux, implement stack unwinding by throwing an uncatchable
+ * exception, one that specifically does not appear to be an active
+ * exception to the rest of the runtime. If this happens while we're in a noexcept function,
+ * we have violated our dynamic exception contract, and so the runtime
+ * will call std::terminate(), which kills the process with the
+ * unhelpful message "terminate called without an active exception".
+ *
+ * This has happened in this scenario: A background thread is running
+ * a greenlet that has made a native call and released the GIL.
+ * Meanwhile, the main thread finishes and starts shutting down the
+ * interpreter. When the background thread is scheduled again and
+ * attempts to obtain the GIL, it notices that the interpreter is
+ * exiting and calls ``pthread_exit()``. This in turn starts to unwind
+ * the stack by throwing that exception. But we had the ``PyCall``
+ * functions annotated as noexcept, so the runtime terminated us.
+ *
+ * #2 0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ * #3 0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6
+ * #4 0x00007fab26f34de6 in ?? () from /lib/x86_64-linux-gnu/libgcc_s.so.1
+ * #6 0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130
+ * #7 0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280
+ * #8 __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36
+ * #9 0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370
+ * #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224
+ * #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467
+ * #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203
+ * #13 0x00000000006101cd in socket_gethostbyname
+ */
+
+#include
+
+# if defined(__clang__)
+# define G_FP_TMPL_STATIC static
+# else
+// GCC has no problem allowing static function pointers, but emits
+// tons of warnings about "whose type uses the anonymous namespace [-Wsubobject-linkage]"
+# define G_FP_TMPL_STATIC
+# endif
+
+# define G_NO_COPIES_OF_CLS(Cls) private: \
+ Cls(const Cls& other) = delete; \
+ Cls& operator=(const Cls& other) = delete
+
+# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \
+ Cls& operator=(const Cls& other) = delete
+
+# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \
+ Cls(const Cls& other) = delete;
+
+
+// CAUTION: MSVC is stupidly picky:
+//
+// "The compiler ignores, without warning, any __declspec keywords
+// placed after * or & and in front of the variable identifier in a
+// declaration."
+// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160)
+//
+// So pointer return types must be handled differently (because of the
+// trailing *), or you get inscrutable compiler warnings like "error
+// C2059: syntax error: ''"
+//
+// In C++ 11, there is a standard syntax for attributes, and
+// GCC defines an attribute to use with this: [[gnu::noinline]].
+// In the future, this is expected to become standard.
+
+#if defined(__GNUC__) || defined(__clang__)
+/* We used to check for GCC 4+ or 3.4+, but those compilers are
+ laughably out of date. Just assume they support it. */
+# define GREENLET_NOINLINE(name) __attribute__((noinline)) name
+# define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name
+# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__))
+#elif defined(_MSC_VER)
+/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */
+# define GREENLET_NOINLINE(name) __declspec(noinline) name
+# define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name
+# define UNUSED(x) UNUSED_ ## x
+#endif
+
+#if defined(_MSC_VER)
+# define G_NOEXCEPT_WIN32 noexcept
+#else
+# define G_NOEXCEPT_WIN32
+#endif
+
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_add_pending.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_add_pending.hpp
new file mode 100644
index 0000000..0d28efd
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_add_pending.hpp
@@ -0,0 +1,172 @@
+#ifndef GREENLET_CPYTHON_ADD_PENDING_HPP
+#define GREENLET_CPYTHON_ADD_PENDING_HPP
+
+#if (PY_VERSION_HEX >= 0x30800A0 && PY_VERSION_HEX < 0x3090000) && !(defined(_WIN32) || defined(WIN32))
+// XXX: From Python 3.8a3 [1] up until Python 3.9a6 [2][3],
+// ``Py_AddPendingCall`` would try to produce a Python exception if
+// the interpreter was in the beginning of shutting down when this
+// function is called. However, ``Py_AddPendingCall`` doesn't require
+// the GIL, and we are absolutely not holding it when we make that
+// call. That means that trying to create the Python exception is
+// using the C API in an undefined state; here the C API detects this
+// and aborts the process with an error ("Fatal Python error: Python
+// memory allocator called without holding the GIL": Add ->
+// PyErr_SetString -> PyUnicode_New -> PyObject_Malloc). This arises
+// (obviously) in multi-threaded programs and happens if one thread is
+// exiting and cleaning up its thread-local data while the other
+// thread is trying to shut down the interpreter. A crash on shutdown
+// is still a crash and could result in data loss (e.g., daemon
+// threads are still running, pending signal handlers may be present,
+// buffers may not be flushed, there may be __del__ methods that need to run,
+// etc), so we have to work around it.
+//
+// Of course, we can (and do) check for whether the interpreter is
+// shutting down before calling ``Py_AddPendingCall``, but that's a
+// race condition since we don't hold the GIL, and so we may not
+// actually get the right answer. Plus, ``Py_FinalizeEx`` actually
+// calls ``_Py_FinishPendingCalls`` (which sets the pending->finishing
+// flag, which is used to gate creating the exception) *before*
+// publishing any other data that would let us detect the shutdown
+// (such as runtime->finalizing). So that point is moot.
+//
+// Our solution for those versions is to inline the same code, without
+// the problematic bit that sets the exception. Unfortunately, all of
+// the structure definitions are private/opaque, *and* we can't
+// actually count on being able to include their definitions from
+// ``internal/pycore_*``, because on some platforms those header files
+// are incomplete (i.e., on macOS with macports 3.8, the includes are
+// fine, but on Ubuntu jammy with 3.8 from ppa:deadsnakes or GitHub
+// Actions 3.8 (I think it's Ubuntu 18.04), they can't be used; at
+// least, I couldn't get them to work). So we need to define the
+// structures and _PyRuntime data member ourself. Yet more
+// unfortunately, _PyRuntime won't link on Windows, so we can only do
+// this on other platforms.
+//
+// [1] https://github.com/python/cpython/commit/842a2f07f2f08a935ef470bfdaeef40f87490cfc
+// [2] https://github.com/python/cpython/commit/cfc3c2f8b34d3864717ab584c5b6c260014ba55a
+// [3] https://github.com/python/cpython/issues/81308
+# define GREENLET_BROKEN_PY_ADD_PENDING 1
+
+// When defining these structures, the important thing is to get
+// binary compatibility, i.e., structure layout. For that, we only
+// need to define fields up to the ones we use; after that they're
+// irrelevant UNLESS the structure is included in another structure
+// *before* the structure we're interested in --- in that case, it
+// must be complete. Ellipses indicate elided trailing members.
+// Pointer types are changed to void* to keep from having to define
+// more structures.
+
+// From "internal/pycore_atomic.h"
+
+// There are several different definitions of this, including the
+// plain ``int`` version, a ``volatile int``, and an ``_Atomic int``.
+// I don't think any of those change the size/layout.
+typedef struct _Py_atomic_int {
+ volatile int _value;
+} _Py_atomic_int;
+
+// This needs too much infrastructure, so we just do a regular store.
+#define _Py_atomic_store_relaxed(ATOMIC_VAL, NEW_VAL) \
+ (ATOMIC_VAL)->_value = NEW_VAL
+
+
+
+// From "internal/pycore_pymem.h"
+#define NUM_GENERATIONS 3
+
+
+struct gc_generation {
+ PyGC_Head head; // We already have this defined.
+ int threshold;
+ int count;
+};
+struct gc_generation_stats {
+ Py_ssize_t collections;
+ Py_ssize_t collected;
+ Py_ssize_t uncollectable;
+};
+
+struct _gc_runtime_state {
+ void *trash_delete_later;
+ int trash_delete_nesting;
+ int enabled;
+ int debug;
+ struct gc_generation generations[NUM_GENERATIONS];
+ void *generation0;
+ struct gc_generation permanent_generation;
+ struct gc_generation_stats generation_stats[NUM_GENERATIONS];
+ int collecting;
+ void *garbage;
+ void *callbacks;
+ Py_ssize_t long_lived_total;
+ Py_ssize_t long_lived_pending;
+};
+
+// From "internal/pycore_pystate.h"
+struct _pending_calls {
+ int finishing;
+ PyThread_type_lock lock;
+ _Py_atomic_int calls_to_do;
+ int async_exc;
+#define NPENDINGCALLS 32
+ struct {
+ int (*func)(void *);
+ void *arg;
+ } calls[NPENDINGCALLS];
+ int first;
+ int last;
+};
+
+struct _ceval_runtime_state {
+ int recursion_limit;
+ int tracing_possible;
+ _Py_atomic_int eval_breaker;
+ _Py_atomic_int gil_drop_request;
+ struct _pending_calls pending;
+ // ...
+};
+
+typedef struct pyruntimestate {
+ int preinitializing;
+ int preinitialized;
+ int core_initialized;
+ int initialized;
+ void *finalizing;
+
+ struct pyinterpreters {
+ PyThread_type_lock mutex;
+ void *head;
+ void *main;
+ int64_t next_id;
+ } interpreters;
+ // XXX Remove this field once we have a tp_* slot.
+ struct _xidregistry {
+ PyThread_type_lock mutex;
+ void *head;
+ } xidregistry;
+
+ unsigned long main_thread;
+
+#define NEXITFUNCS 32
+ void (*exitfuncs[NEXITFUNCS])(void);
+ int nexitfuncs;
+
+ struct _gc_runtime_state gc;
+ struct _ceval_runtime_state ceval;
+ // ...
+} _PyRuntimeState;
+
+#define SIGNAL_PENDING_CALLS(ceval) \
+ do { \
+ _Py_atomic_store_relaxed(&(ceval)->pending.calls_to_do, 1); \
+ _Py_atomic_store_relaxed(&(ceval)->eval_breaker, 1); \
+ } while (0)
+
+extern _PyRuntimeState _PyRuntime;
+
+#else
+# define GREENLET_BROKEN_PY_ADD_PENDING 0
+#endif
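+
+// A rough sketch of how the definitions above can be combined into a
+// Py_AddPendingCall replacement that never touches the exception machinery.
+// It mirrors CPython's own pending-call queue handling; the function name is
+// hypothetical and the details may differ from what greenlet's C++ source
+// actually does.
+//
+//     #if GREENLET_BROKEN_PY_ADD_PENDING
+//     static int
+//     sketch_add_pending_call(int (*func)(void*), void* arg)
+//     {
+//         _PyRuntimeState* runtime = &_PyRuntime;
+//         struct _pending_calls* pending = &runtime->ceval.pending;
+//         if (!pending->lock || pending->finishing) {
+//             return -1;
+//         }
+//         PyThread_acquire_lock(pending->lock, WAIT_LOCK);
+//         int i = pending->last;
+//         int j = (i + 1) % NPENDINGCALLS;
+//         int result = -1;
+//         if (j != pending->first) {               // queue not full
+//             pending->calls[i].func = func;
+//             pending->calls[i].arg = arg;
+//             pending->last = j;
+//             result = 0;
+//         }
+//         SIGNAL_PENDING_CALLS(&runtime->ceval);   // wake the eval loop
+//         PyThread_release_lock(pending->lock);
+//         return result;
+//     }
+//     #endif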
+
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp
new file mode 100644
index 0000000..cdc1617
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_cpython_compat.hpp
@@ -0,0 +1,127 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+#ifndef GREENLET_CPYTHON_COMPAT_H
+#define GREENLET_CPYTHON_COMPAT_H
+
+/**
+ * Helpers for compatibility with multiple versions of CPython.
+ */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+
+
+#if PY_VERSION_HEX >= 0x30A00B1
+# define GREENLET_PY310 1
+/*
+Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member.
+See https://github.com/python/cpython/pull/25276
+We have to save and restore this as well.
+*/
+# define GREENLET_USE_CFRAME 1
+#else
+# define GREENLET_USE_CFRAME 0
+# define GREENLET_PY310 0
+#endif
+
+
+
+#if PY_VERSION_HEX >= 0x30B00A4
+/*
+Greenlet won't compile on anything older than Python 3.11 alpha 4 (see
+https://bugs.python.org/issue46090). Summary of breaking internal changes:
+- Python 3.11 alpha 1 changed how frame objects are represented internally.
+ - https://github.com/python/cpython/pull/30122
+- Python 3.11 alpha 3 changed how recursion limits are stored.
+ - https://github.com/python/cpython/pull/29524
+- Python 3.11 alpha 4 changed how exception state is stored. It also includes a
+ change to help greenlet save and restore the interpreter frame "data stack".
+ - https://github.com/python/cpython/pull/30122
+ - https://github.com/python/cpython/pull/30234
+*/
+# define GREENLET_PY311 1
+#else
+# define GREENLET_PY311 0
+#endif
+
+
+#if PY_VERSION_HEX >= 0x30C0000
+# define GREENLET_PY312 1
+#else
+# define GREENLET_PY312 0
+#endif
+
+#ifndef Py_SET_REFCNT
+/* Py_REFCNT and Py_SIZE macros are converted to functions
+https://bugs.python.org/issue39573 */
+# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt)
+#endif
+
+#ifndef _Py_DEC_REFTOTAL
+/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by:
+ https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924
+
+ The symbol we use to replace it was removed by at least 3.12.
+*/
+# ifdef Py_REF_DEBUG
+# if GREENLET_PY312
+# define _Py_DEC_REFTOTAL
+# else
+# define _Py_DEC_REFTOTAL _Py_RefTotal--
+# endif
+# else
+# define _Py_DEC_REFTOTAL
+# endif
+#endif
+// Define these flags like Cython does if we're on an old version.
+#ifndef Py_TPFLAGS_CHECKTYPES
+ #define Py_TPFLAGS_CHECKTYPES 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_INDEX
+ #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+#ifndef Py_TPFLAGS_HAVE_NEWBUFFER
+ #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#ifndef Py_TPFLAGS_HAVE_VERSION_TAG
+ #define Py_TPFLAGS_HAVE_VERSION_TAG 0
+#endif
+
+#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC
+
+
+#if PY_VERSION_HEX < 0x03090000
+// The official version only became available in 3.9
+# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o)
+#endif
+
+
+// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_EnterTracing(PyThreadState *tstate)
+{
+ tstate->tracing++;
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = 0;
+#else
+ tstate->use_tracing = 0;
+#endif
+}
+#endif
+
+// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2
+#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION)
+static inline void PyThreadState_LeaveTracing(PyThreadState *tstate)
+{
+ tstate->tracing--;
+ int use_tracing = (tstate->c_tracefunc != NULL
+ || tstate->c_profilefunc != NULL);
+#if PY_VERSION_HEX >= 0x030A00A1
+ tstate->cframe->use_tracing = use_tracing;
+#else
+ tstate->use_tracing = use_tracing;
+#endif
+}
+#endif
+
+#endif /* GREENLET_CPYTHON_COMPAT_H */
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp
new file mode 100644
index 0000000..3807018
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_exceptions.hpp
@@ -0,0 +1,150 @@
+#ifndef GREENLET_EXCEPTIONS_HPP
+#define GREENLET_EXCEPTIONS_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+#include <stdexcept>
+#include <string>
+
+#ifdef __clang__
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wunused-function"
+#endif
+
+namespace greenlet {
+
+ class PyErrOccurred : public std::runtime_error
+ {
+ public:
+
+ // CAUTION: In debug builds, may run arbitrary Python code.
+ static const PyErrOccurred
+ from_current()
+ {
+ assert(PyErr_Occurred());
+#ifndef NDEBUG
+ // This is not exception safe, and
+ // not necessarily safe in general (what if it switches?)
+ // But we only do this in debug mode, where we are in
+ // tight control of what exceptions are getting raised and
+ // can prevent those issues.
+
+ // You can't call PyObject_Str with a pending exception.
+ PyObject* typ;
+ PyObject* val;
+ PyObject* tb;
+
+ PyErr_Fetch(&typ, &val, &tb);
+ PyObject* typs = PyObject_Str(typ);
+ PyObject* vals = PyObject_Str(val ? val : typ);
+ const char* typ_msg = PyUnicode_AsUTF8(typs);
+ const char* val_msg = PyUnicode_AsUTF8(vals);
+ PyErr_Restore(typ, val, tb);
+
+ std::string msg(typ_msg);
+ msg += ": ";
+ msg += val_msg;
+ PyErrOccurred ex(msg);
+ Py_XDECREF(typs);
+ Py_XDECREF(vals);
+
+ return ex;
+#else
+ return PyErrOccurred();
+#endif
+ }
+
+ PyErrOccurred() : std::runtime_error("")
+ {
+ assert(PyErr_Occurred());
+ }
+
+ PyErrOccurred(const std::string& msg) : std::runtime_error(msg)
+ {
+ assert(PyErr_Occurred());
+ }
+
+ PyErrOccurred(PyObject* exc_kind, const char* const msg)
+ : std::runtime_error(msg)
+ {
+ PyErr_SetString(exc_kind, msg);
+ }
+
+ PyErrOccurred(PyObject* exc_kind, const std::string msg)
+ : std::runtime_error(msg)
+ {
+ // This copies the c_str, so we don't have any lifetime
+ // issues to worry about.
+ PyErr_SetString(exc_kind, msg.c_str());
+ }
+ };
+
+ class TypeError : public PyErrOccurred
+ {
+ public:
+ TypeError(const char* const what)
+ : PyErrOccurred(PyExc_TypeError, what)
+ {
+ }
+ TypeError(const std::string what)
+ : PyErrOccurred(PyExc_TypeError, what)
+ {
+ }
+ };
+
+ class ValueError : public PyErrOccurred
+ {
+ public:
+ ValueError(const char* const what)
+ : PyErrOccurred(PyExc_ValueError, what)
+ {
+ }
+ };
+
+ class AttributeError : public PyErrOccurred
+ {
+ public:
+ AttributeError(const char* const what)
+ : PyErrOccurred(PyExc_AttributeError, what)
+ {
+ }
+ };
+
+ /**
+ * Calls `Py_FatalError` when constructed, so you can't actually
+ * throw this. It just makes static analysis easier.
+ */
+ class PyFatalError : public std::runtime_error
+ {
+ public:
+ PyFatalError(const char* const msg)
+ : std::runtime_error(msg)
+ {
+ Py_FatalError(msg);
+ }
+ };
+
+ static inline PyObject*
+ Require(PyObject* p, const std::string& msg="")
+ {
+ if (!p) {
+ throw PyErrOccurred(msg);
+ }
+ return p;
+ };
+
+ static inline void
+ Require(const int retval)
+ {
+ if (retval < 0) {
+ throw PyErrOccurred();
+ }
+ };
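+
+ // A minimal sketch of the intended pattern, assuming a hypothetical module
+ // function: helpers signal failure by throwing one of the subclasses above
+ // (their constructors set the Python error state), and the outermost C-API
+ // entry point catches PyErrOccurred and returns NULL.
+ //
+ //     static PyObject*
+ //     some_entry_point(PyObject* Py_UNUSED(module), PyObject* arg)
+ //     {
+ //         try {
+ //             if (!PyLong_Check(arg)) {
+ //                 throw TypeError("expected an int");
+ //             }
+ //             return Require(PyNumber_Negative(arg));
+ //         }
+ //         catch (const PyErrOccurred&) {
+ //             return NULL; // the Python error state is already set
+ //         }
+ //     }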
+
+
+};
+#ifdef __clang__
+# pragma clang diagnostic pop
+#endif
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_greenlet.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_greenlet.hpp
new file mode 100644
index 0000000..d52ce1f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_greenlet.hpp
@@ -0,0 +1,805 @@
+#ifndef GREENLET_GREENLET_HPP
+#define GREENLET_GREENLET_HPP
+/*
+ * Declarations of the core data structures.
+*/
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_refs.hpp"
+#include "greenlet_cpython_compat.hpp"
+#include "greenlet_allocator.hpp"
+
+using greenlet::refs::OwnedObject;
+using greenlet::refs::OwnedGreenlet;
+using greenlet::refs::OwnedMainGreenlet;
+using greenlet::refs::BorrowedGreenlet;
+
+#if PY_VERSION_HEX < 0x30B00A6
+# define _PyCFrame CFrame
+# define _PyInterpreterFrame _interpreter_frame
+#endif
+
+#if GREENLET_PY312
+# include "internal/pycore_frame.h"
+#endif
+
+// XXX: TODO: Work to remove all virtual functions
+// for speed of calling and size of objects (no vtable).
+// One pattern is the Curiously Recurring Template
+namespace greenlet
+{
+ class ExceptionState
+ {
+ private:
+ G_NO_COPIES_OF_CLS(ExceptionState);
+
+ // Even though these are borrowed objects, we actually own
+ // them, when they're not null.
+ // XXX: Express that in the API.
+ private:
+ _PyErr_StackItem* exc_info;
+ _PyErr_StackItem exc_state;
+ public:
+ ExceptionState();
+ void operator<<(const PyThreadState *const tstate) noexcept;
+ void operator>>(PyThreadState* tstate) noexcept;
+ void clear() noexcept;
+
+ int tp_traverse(visitproc visit, void* arg) noexcept;
+ void tp_clear() noexcept;
+ };
+
+ template<typename T>
+ void operator<<(const PyThreadState *const tstate, T& exc);
+
+ class PythonStateContext
+ {
+ protected:
+ greenlet::refs::OwnedContext _context;
+ public:
+ inline const greenlet::refs::OwnedContext& context() const
+ {
+ return this->_context;
+ }
+ inline greenlet::refs::OwnedContext& context()
+ {
+ return this->_context;
+ }
+
+ inline void tp_clear()
+ {
+ this->_context.CLEAR();
+ }
+
+ template<typename T>
+ inline static PyObject* context(T* tstate)
+ {
+ return tstate->context;
+ }
+
+ template<typename T>
+ inline static void context(T* tstate, PyObject* new_context)
+ {
+ tstate->context = new_context;
+ tstate->context_ver++;
+ }
+ };
+ class SwitchingArgs;
+ class PythonState : public PythonStateContext
+ {
+ public:
+ typedef greenlet::refs::OwnedReference<PyFrameObject> OwnedFrame;
+ private:
+ G_NO_COPIES_OF_CLS(PythonState);
+ // We own this if we're suspended (although currently we don't
+ // tp_traverse into it; that's a TODO). If we're running, it's
+ // empty. If we get deallocated and *still* have a frame, it
+ // won't be reachable from the place that normally decref's
+ // it, so we need to do it (hence owning it).
+ OwnedFrame _top_frame;
+#if GREENLET_USE_CFRAME
+ _PyCFrame* cframe;
+ int use_tracing;
+#endif
+#if GREENLET_PY312
+ int py_recursion_depth;
+ int c_recursion_depth;
+#else
+ int recursion_depth;
+#endif
+ int trash_delete_nesting;
+#if GREENLET_PY311
+ _PyInterpreterFrame* current_frame;
+ _PyStackChunk* datastack_chunk;
+ PyObject** datastack_top;
+ PyObject** datastack_limit;
+#endif
+ // The PyInterpreterFrame list on 3.12+ contains some entries that are
+ // on the C stack, which can't be directly accessed while a greenlet is
+ // suspended. In order to keep greenlet gr_frame introspection working,
+ // we adjust stack switching to rewrite the interpreter frame list
+ // to skip these C-stack frames; we call this "exposing" the greenlet's
+ // frames because it makes them valid to work with in Python. Then when
+ // the greenlet is resumed we need to remember to reverse the operation
+ // we did. The C-stack frames are "entry frames" which are a low-level
+ // interpreter detail; they're not needed for introspection, but do
+ // need to be present for the eval loop to work.
+ void unexpose_frames();
+
+ public:
+
+ PythonState();
+ // You can use this for testing whether we have a frame
+ // or not. It returns const so they can't modify it.
+ const OwnedFrame& top_frame() const noexcept;
+
+ inline void operator<<(const PyThreadState *const tstate) noexcept;
+ inline void operator>>(PyThreadState* tstate) noexcept;
+ void clear() noexcept;
+
+ int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept;
+ void tp_clear(bool own_top_frame) noexcept;
+ void set_initial_state(const PyThreadState* const tstate) noexcept;
+#if GREENLET_USE_CFRAME
+ void set_new_cframe(_PyCFrame& frame) noexcept;
+#endif
+
+ inline void may_switch_away() noexcept;
+ inline void will_switch_from(PyThreadState *const origin_tstate) noexcept;
+ void did_finish(PyThreadState* tstate) noexcept;
+ };
+
+ class StackState
+ {
+ // By having only plain C (POD) members, no virtual functions
+ // or bases, we get a trivial assignment operator generated
+ // for us. However, that's not safe since we do manage memory.
+ // So we declare an assignment operator that only works if we
+ // don't have any memory allocated. (We don't use
+ // std::shared_ptr for reference counting just to keep this
+ // object small)
+ private:
+ char* _stack_start;
+ char* stack_stop;
+ char* stack_copy;
+ intptr_t _stack_saved;
+ StackState* stack_prev;
+ inline int copy_stack_to_heap_up_to(const char* const stop) noexcept;
+ inline void free_stack_copy() noexcept;
+
+ public:
+ /**
+ * Creates a started, but inactive, state, using *current*
+ * as the previous.
+ */
+ StackState(void* mark, StackState& current);
+ /**
+ * Creates an inactive, unstarted, state.
+ */
+ StackState();
+ ~StackState();
+ StackState(const StackState& other);
+ StackState& operator=(const StackState& other);
+ inline void copy_heap_to_stack(const StackState& current) noexcept;
+ inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept;
+ inline bool started() const noexcept;
+ inline bool main() const noexcept;
+ inline bool active() const noexcept;
+ inline void set_active() noexcept;
+ inline void set_inactive() noexcept;
+ inline intptr_t stack_saved() const noexcept;
+ inline char* stack_start() const noexcept;
+ static inline StackState make_main() noexcept;
+#ifdef GREENLET_USE_STDIO
+ friend std::ostream& operator<<(std::ostream& os, const StackState& s);
+#endif
+
+ // Fill in [dest, dest + n) with the values that would be at
+ // [src, src + n) while this greenlet is running. This is like memcpy
+ // except that if the greenlet is suspended it accounts for the portion
+ // of the greenlet's stack that was spilled to the heap. `src` may
+ // be on this greenlet's stack, or on the heap, but not on a different
+ // greenlet's stack.
+ void copy_from_stack(void* dest, const void* src, size_t n) const;
+ };
+#ifdef GREENLET_USE_STDIO
+ std::ostream& operator<<(std::ostream& os, const StackState& s);
+#endif
+
+ class SwitchingArgs
+ {
+ private:
+ G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs);
+ // If args and kwargs are both false (NULL), this is a *throw*, not a
+ // switch. PyErr_... must have been called already.
+ OwnedObject _args;
+ OwnedObject _kwargs;
+ public:
+
+ SwitchingArgs()
+ {}
+
+ SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs)
+ : _args(args),
+ _kwargs(kwargs)
+ {}
+
+ SwitchingArgs(const SwitchingArgs& other)
+ : _args(other._args),
+ _kwargs(other._kwargs)
+ {}
+
+ const OwnedObject& args()
+ {
+ return this->_args;
+ }
+
+ const OwnedObject& kwargs()
+ {
+ return this->_kwargs;
+ }
+
+ /**
+ * Moves ownership from the argument to this object.
+ */
+ SwitchingArgs& operator<<=(SwitchingArgs& other)
+ {
+ if (this != &other) {
+ this->_args = other._args;
+ this->_kwargs = other._kwargs;
+ other.CLEAR();
+ }
+ return *this;
+ }
+
+ /**
+ * Acquires ownership of the argument (consumes the reference).
+ */
+ SwitchingArgs& operator<<=(PyObject* args)
+ {
+ this->_args = OwnedObject::consuming(args);
+ this->_kwargs.CLEAR();
+ return *this;
+ }
+
+ /**
+ * Acquires ownership of the argument.
+ *
+ * Sets the args to be the given value; clears the kwargs.
+ */
+ SwitchingArgs& operator<<=(OwnedObject& args)
+ {
+ assert(&args != &this->_args);
+ this->_args = args;
+ this->_kwargs.CLEAR();
+ args.CLEAR();
+
+ return *this;
+ }
+
+ explicit operator bool() const noexcept
+ {
+ return this->_args || this->_kwargs;
+ }
+
+ inline void CLEAR()
+ {
+ this->_args.CLEAR();
+ this->_kwargs.CLEAR();
+ }
+
+ const std::string as_str() const noexcept
+ {
+ return PyUnicode_AsUTF8(
+ OwnedObject::consuming(
+ PyUnicode_FromFormat(
+ "SwitchingArgs(args=%R, kwargs=%R)",
+ this->_args.borrow(),
+ this->_kwargs.borrow()
+ )
+ ).borrow()
+ );
+ }
+ };
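+
+ // A small sketch of the ownership-moving ``<<=`` idiom above; ``some_object``
+ // is a hypothetical borrowed PyObject*.
+ //
+ //     SwitchingArgs pending;
+ //     pending <<= PyTuple_Pack(1, some_object); // consumes the new tuple reference
+ //     SwitchingArgs handed_over;
+ //     handed_over <<= pending;                  // moves ownership; pending is cleared
+ //     assert(!pending && handed_over);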
+
+ class ThreadState;
+
+ class UserGreenlet;
+ class MainGreenlet;
+
+ class Greenlet
+ {
+ private:
+ G_NO_COPIES_OF_CLS(Greenlet);
+ private:
+ // XXX: Work to remove these.
+ friend class ThreadState;
+ friend class UserGreenlet;
+ friend class MainGreenlet;
+ protected:
+ ExceptionState exception_state;
+ SwitchingArgs switch_args;
+ StackState stack_state;
+ PythonState python_state;
+ Greenlet(PyGreenlet* p, const StackState& initial_state);
+ public:
+ Greenlet(PyGreenlet* p);
+ virtual ~Greenlet();
+
+ const OwnedObject context() const;
+
+ // You MUST call this _very_ early in the switching process to
+ // prepare anything that may need prepared. This might perform
+ // garbage collections or otherwise run arbitrary Python code.
+ //
+ // One specific use of it is for Python 3.11+, preventing
+ // running arbitrary code at unsafe times. See
+ // PythonState::may_switch_away().
+ inline void may_switch_away()
+ {
+ this->python_state.may_switch_away();
+ }
+
+ inline void context(refs::BorrowedObject new_context);
+
+ inline SwitchingArgs& args()
+ {
+ return this->switch_args;
+ }
+
+ virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0;
+
+ inline intptr_t stack_saved() const noexcept
+ {
+ return this->stack_state.stack_saved();
+ }
+
+ // This is used by the macro SLP_SAVE_STATE to compute the
+ // difference in stack sizes. It might be nice to handle the
+ // computation ourself, but the type of the result
+ // varies by platform, so doing it in the macro is the
+ // simplest way.
+ inline const char* stack_start() const noexcept
+ {
+ return this->stack_state.stack_start();
+ }
+
+ virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state);
+ virtual OwnedObject g_switch() = 0;
+ /**
+ * Force the greenlet to appear dead. Used when it's not
+ * possible to throw an exception into a greenlet anymore.
+ *
+ * This loses access to the thread state and the main greenlet.
+ */
+ virtual void murder_in_place();
+
+ /**
+ * Called when somebody notices we were running in a dead
+ * thread to allow cleaning up resources (because we can't
+ * raise GreenletExit into it anymore).
+ * This is very similar to ``murder_in_place()``, except that
+ * it DOES NOT lose the main greenlet or thread state.
+ */
+ inline void deactivate_and_free();
+
+
+ // Called when some thread wants to deallocate a greenlet
+ // object.
+ // The thread may or may not be the same thread the greenlet
+ // was running in.
+ // The thread state will be null if the thread the greenlet
+ // was running in was known to have exited.
+ void deallocing_greenlet_in_thread(const ThreadState* current_state);
+
+ // Must be called on 3.12+ before exposing a suspended greenlet's
+ // frames to user code. This rewrites the linked list of interpreter
+ // frames to skip the ones that are being stored on the C stack (which
+ // can't be safely accessed while the greenlet is suspended because
+ // that stack space might be hosting a different greenlet), and
+ // sets PythonState::frames_were_exposed so we remember to restore
+ // the original list before resuming the greenlet. The C-stack frames
+ // are a low-level interpreter implementation detail; while they're
+ // important to the bytecode eval loop, they're superfluous for
+ // introspection purposes.
+ void expose_frames();
+
+
+ // TODO: Figure out how to make these non-public.
+ inline void slp_restore_state() noexcept;
+ inline int slp_save_state(char *const stackref) noexcept;
+
+ inline bool is_currently_running_in_some_thread() const;
+ virtual bool belongs_to_thread(const ThreadState* state) const;
+
+ inline bool started() const
+ {
+ return this->stack_state.started();
+ }
+ inline bool active() const
+ {
+ return this->stack_state.active();
+ }
+ inline bool main() const
+ {
+ return this->stack_state.main();
+ }
+ virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0;
+
+ virtual const OwnedGreenlet parent() const = 0;
+ virtual void parent(const refs::BorrowedObject new_parent) = 0;
+
+ inline const PythonState::OwnedFrame& top_frame()
+ {
+ return this->python_state.top_frame();
+ }
+
+ virtual const OwnedObject& run() const = 0;
+ virtual void run(const refs::BorrowedObject nrun) = 0;
+
+
+ virtual int tp_traverse(visitproc visit, void* arg);
+ virtual int tp_clear();
+
+
+ // Return the thread state that the greenlet is running in, or
+ // null if the greenlet is not running or the thread is known
+ // to have exited.
+ virtual ThreadState* thread_state() const noexcept = 0;
+
+ // Return true if the greenlet is known to have been running
+ // (active) in a thread that has now exited.
+ virtual bool was_running_in_dead_thread() const noexcept = 0;
+
+ // Return a borrowed greenlet that is the Python object
+ // this object represents.
+ virtual BorrowedGreenlet self() const noexcept = 0;
+
+ // For testing. If this returns true, we should pretend that
+ // slp_switch() failed.
+ virtual bool force_slp_switch_error() const noexcept;
+
+ protected:
+ inline void release_args();
+
+ // The functions that must not be inlined are declared virtual.
+ // We also mark them as protected, not private, so that the
+ // compiler is forced to call them through a function pointer.
+ // (A sufficiently smart compiler could directly call a private
+ // virtual function since it can never be overridden in a
+ // subclass).
+
+ // Also TODO: Switch away from integer error codes and to enums,
+ // or throw exceptions when possible.
+ struct switchstack_result_t
+ {
+ int status;
+ Greenlet* the_new_current_greenlet;
+ OwnedGreenlet origin_greenlet;
+
+ switchstack_result_t()
+ : status(0),
+ the_new_current_greenlet(nullptr)
+ {}
+
+ switchstack_result_t(int err)
+ : status(err),
+ the_new_current_greenlet(nullptr)
+ {}
+
+ switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin)
+ : status(err),
+ the_new_current_greenlet(state),
+ origin_greenlet(origin)
+ {
+ }
+
+ switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin)
+ : status(err),
+ the_new_current_greenlet(state),
+ origin_greenlet(origin)
+ {
+ }
+
+ switchstack_result_t(const switchstack_result_t& other)
+ : status(other.status),
+ the_new_current_greenlet(other.the_new_current_greenlet),
+ origin_greenlet(other.origin_greenlet)
+ {}
+
+ switchstack_result_t& operator=(const switchstack_result_t& other)
+ {
+ this->status = other.status;
+ this->the_new_current_greenlet = other.the_new_current_greenlet;
+ this->origin_greenlet = other.origin_greenlet;
+ return *this;
+ }
+ };
+
+ OwnedObject on_switchstack_or_initialstub_failure(
+ Greenlet* target,
+ const switchstack_result_t& err,
+ const bool target_was_me=false,
+ const bool was_initial_stub=false);
+
+ // Returns the previous greenlet we just switched away from.
+ virtual OwnedGreenlet g_switchstack_success() noexcept;
+
+
+ // Check the preconditions for switching to this greenlet; if they
+ // aren't met, throws PyErrOccurred. Most callers will want to
+ // catch this and clear the arguments
+ inline void check_switch_allowed() const;
+ class GreenletStartedWhileInPython : public std::runtime_error
+ {
+ public:
+ GreenletStartedWhileInPython() : std::runtime_error("")
+ {}
+ };
+
+ protected:
+
+
+ /**
+ Perform a stack switch into this greenlet.
+
+ This temporarily sets the global variable
+ ``switching_thread_state`` to this greenlet; as soon as the
+ call to ``slp_switch`` completes, this is reset to NULL.
+ Consequently, this depends on the GIL.
+
+ TODO: Adopt the stackman model and pass ``slp_switch`` a
+ callback function and context pointer; this eliminates the
+ need for global variables altogether.
+
+ Because the stack switch happens in this function, this
+ function can't use its own stack (local) variables, set
+ before the switch, and then accessed after the switch.
+
+ Further, you can't even access ``g_thread_state_global``
+ before and after the switch from the global variable.
+ Because it is thread local some compilers cache it in a
+ register/on the stack, notably new versions of MSVC; this
+ breaks with strange crashes sometime later, because writing
+ to anything in ``g_thread_state_global`` after the switch
+ is actually writing to random memory. For this reason, we
+ call a non-inlined function to finish the operation. (XXX:
+ The ``/GT`` MSVC compiler argument probably fixes that.)
+
+ It is very important that stack switch is 'atomic', i.e. no
+ calls into other Python code allowed (except very few that
+ are safe), because global variables are very fragile. (This
+ should no longer be the case with thread-local variables.)
+
+ */
+ // Made virtual to facilitate subclassing UserGreenlet for testing.
+ virtual switchstack_result_t g_switchstack(void);
+
+class TracingGuard
+{
+private:
+ PyThreadState* tstate;
+public:
+ TracingGuard()
+ : tstate(PyThreadState_GET())
+ {
+ PyThreadState_EnterTracing(this->tstate);
+ }
+
+ ~TracingGuard()
+ {
+ PyThreadState_LeaveTracing(this->tstate);
+ this->tstate = nullptr;
+ }
+
+ inline void CallTraceFunction(const OwnedObject& tracefunc,
+ const greenlet::refs::ImmortalEventName& event,
+ const BorrowedGreenlet& origin,
+ const BorrowedGreenlet& target)
+ {
+ // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut
+ // function for that that's specialized to avoid the Py_BuildValue
+ // string parsing, or start with just using "ON" format with PyTuple_Pack(2,
+ // origin, target). That seems like what the N format is meant
+ // for.
+ // XXX: Why does event not automatically cast back to a PyObject?
+ // It tries to call the "deleted constructor ImmortalEventName
+ // const" instead.
+ assert(tracefunc);
+ assert(event);
+ assert(origin);
+ assert(target);
+ greenlet::refs::NewReference retval(
+ PyObject_CallFunction(
+ tracefunc.borrow(),
+ "O(OO)",
+ event.borrow(),
+ origin.borrow(),
+ target.borrow()
+ ));
+ if (!retval) {
+ throw PyErrOccurred::from_current();
+ }
+ }
+};
+
+ static void
+ g_calltrace(const OwnedObject& tracefunc,
+ const greenlet::refs::ImmortalEventName& event,
+ const greenlet::refs::BorrowedGreenlet& origin,
+ const BorrowedGreenlet& target);
+ private:
+ OwnedObject g_switch_finish(const switchstack_result_t& err);
+
+ };
+
+ class UserGreenlet : public Greenlet
+ {
+ private:
+ static greenlet::PythonAllocator<UserGreenlet> allocator;
+ BorrowedGreenlet _self;
+ OwnedMainGreenlet _main_greenlet;
+ OwnedObject _run_callable;
+ OwnedGreenlet _parent;
+ public:
+ static void* operator new(size_t UNUSED(count));
+ static void operator delete(void* ptr);
+
+ UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent);
+ virtual ~UserGreenlet();
+
+ virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const;
+ virtual bool was_running_in_dead_thread() const noexcept;
+ virtual ThreadState* thread_state() const noexcept;
+ virtual OwnedObject g_switch();
+ virtual const OwnedObject& run() const
+ {
+ if (this->started() || !this->_run_callable) {
+ throw AttributeError("run");
+ }
+ return this->_run_callable;
+ }
+ virtual void run(const refs::BorrowedObject nrun);
+
+ virtual const OwnedGreenlet parent() const;
+ virtual void parent(const refs::BorrowedObject new_parent);
+
+ virtual const refs::BorrowedMainGreenlet main_greenlet() const;
+
+ virtual BorrowedGreenlet self() const noexcept;
+ virtual void murder_in_place();
+ virtual bool belongs_to_thread(const ThreadState* state) const;
+ virtual int tp_traverse(visitproc visit, void* arg);
+ virtual int tp_clear();
+ class ParentIsCurrentGuard
+ {
+ private:
+ OwnedGreenlet oldparent;
+ UserGreenlet* greenlet;
+ G_NO_COPIES_OF_CLS(ParentIsCurrentGuard);
+ public:
+ ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state);
+ ~ParentIsCurrentGuard();
+ };
+ virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state);
+ protected:
+ virtual switchstack_result_t g_initialstub(void* mark);
+ private:
+ // This function isn't meant to return.
+ // This accepts raw pointers and the ownership of them at the
+ // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``.
+ void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run);
+ };
+
+ class BrokenGreenlet : public UserGreenlet
+ {
+ private:
+ static greenlet::PythonAllocator<BrokenGreenlet> allocator;
+ public:
+ bool _force_switch_error = false;
+ bool _force_slp_switch_error = false;
+
+ static void* operator new(size_t UNUSED(count));
+ static void operator delete(void* ptr);
+ BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent)
+ : UserGreenlet(p, the_parent)
+ {}
+ virtual ~BrokenGreenlet()
+ {}
+
+ virtual switchstack_result_t g_switchstack(void);
+ virtual bool force_slp_switch_error() const noexcept;
+
+ };
+
+ class MainGreenlet : public Greenlet
+ {
+ private:
+ static greenlet::PythonAllocator<MainGreenlet> allocator;
+ refs::BorrowedMainGreenlet _self;
+ ThreadState* _thread_state;
+ G_NO_COPIES_OF_CLS(MainGreenlet);
+ public:
+ static void* operator new(size_t UNUSED(count));
+ static void operator delete(void* ptr);
+
+ MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*);
+ virtual ~MainGreenlet();
+
+
+ virtual const OwnedObject& run() const;
+ virtual void run(const refs::BorrowedObject nrun);
+
+ virtual const OwnedGreenlet parent() const;
+ virtual void parent(const refs::BorrowedObject new_parent);
+
+ virtual const refs::BorrowedMainGreenlet main_greenlet() const;
+
+ virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const;
+ virtual bool was_running_in_dead_thread() const noexcept;
+ virtual ThreadState* thread_state() const noexcept;
+ void thread_state(ThreadState*) noexcept;
+ virtual OwnedObject g_switch();
+ virtual BorrowedGreenlet self() const noexcept;
+ virtual int tp_traverse(visitproc visit, void* arg);
+ };
+
+ // Instantiate one on the stack to save the GC state,
+ // and then disable GC. When it goes out of scope, GC will be
+ // restored to its original state. Sadly, these APIs are only
+ // available on 3.10+; luckily, we only need them on 3.11+.
+#if GREENLET_PY310
+ class GCDisabledGuard
+ {
+ private:
+ int was_enabled = 0;
+ public:
+ GCDisabledGuard()
+ : was_enabled(PyGC_IsEnabled())
+ {
+ PyGC_Disable();
+ }
+
+ ~GCDisabledGuard()
+ {
+ if (this->was_enabled) {
+ PyGC_Enable();
+ }
+ }
+ };
+#endif
+
+ OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept;
+
+ //TODO: Greenlet::g_switch() should call this automatically on its
+ //return value. As it is, the module code is calling it.
+ static inline OwnedObject
+ single_result(const OwnedObject& results)
+ {
+ if (results
+ && PyTuple_Check(results.borrow())
+ && PyTuple_GET_SIZE(results.borrow()) == 1) {
+ PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0);
+ assert(result);
+ return OwnedObject::owning(result);
+ }
+ return results;
+ }
+
+
+ static OwnedObject
+ g_handle_exit(const OwnedObject& greenlet_result);
+
+
+ template<typename T>
+ void operator<<(const PyThreadState *const lhs, T& rhs)
+ {
+ rhs.operator<<(lhs);
+ }
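+
+ // The ``<<`` / ``>>`` members on the *State classes read from and write back
+ // to a PyThreadState, and the free template above lets the thread state also
+ // appear on the left-hand side. A hypothetical use around a stack switch:
+ //
+ //     PyThreadState* tstate = PyThreadState_GET();
+ //     ExceptionState saved;
+ //     saved << tstate;   // capture the exception state from the thread
+ //     // ... the actual stack switch happens here ...
+ //     saved >> tstate;   // write it back into the (possibly new) thread state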
+
+} // namespace greenlet ;
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp
new file mode 100644
index 0000000..c8e3849
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_internal.hpp
@@ -0,0 +1,106 @@
+/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
+#ifndef GREENLET_INTERNAL_H
+#define GREENLET_INTERNAL_H
+#ifdef __clang__
+# pragma clang diagnostic push
+# pragma clang diagnostic ignored "-Wunused-function"
+# pragma clang diagnostic ignored "-Wmissing-field-initializers"
+# pragma clang diagnostic ignored "-Wunused-variable"
+#endif
+
+/**
+ * Implementation helpers.
+ *
+ * C++ templates and inline functions should go here.
+ */
+#define PY_SSIZE_T_CLEAN
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_cpython_compat.hpp"
+#include "greenlet_exceptions.hpp"
+#include "greenlet_greenlet.hpp"
+#include "greenlet_allocator.hpp"
+
+#include <vector>
+#include <string>
+
+#define GREENLET_MODULE
+struct _greenlet;
+typedef struct _greenlet PyGreenlet;
+namespace greenlet {
+
+ class ThreadState;
+
+};
+
+
+#define implementation_ptr_t greenlet::Greenlet*
+
+
+#include "greenlet.h"
+
+G_FP_TMPL_STATIC inline void
+greenlet::refs::MainGreenletExactChecker(void *p)
+{
+ if (!p) {
+ return;
+ }
+ // We control the class of the main greenlet exactly.
+ if (Py_TYPE(p) != &PyGreenlet_Type) {
+ std::string err("MainGreenlet: Expected exactly a greenlet, not a ");
+ err += Py_TYPE(p)->tp_name;
+ throw greenlet::TypeError(err);
+ }
+
+ // Greenlets from dead threads no longer respond to main() with a
+ // true value; so in that case we need to perform an additional
+ // check.
+ Greenlet* g = ((PyGreenlet*)p)->pimpl;
+ if (g->main()) {
+ return;
+ }
+ if (!dynamic_cast<MainGreenlet*>(g)) {
+ std::string err("MainGreenlet: Expected exactly a main greenlet, not a ");
+ err += Py_TYPE(p)->tp_name;
+ throw greenlet::TypeError(err);
+ }
+}
+
+
+
+template <typename T, greenlet::refs::TypeChecker TC>
+inline greenlet::Greenlet* greenlet::refs::_OwnedGreenlet<T, TC>::operator->() const noexcept
+{
+ return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+template <typename T, greenlet::refs::TypeChecker TC>
+inline greenlet::Greenlet* greenlet::refs::_BorrowedGreenlet<T, TC>::operator->() const noexcept
+{
+ return reinterpret_cast<PyGreenlet*>(this->p)->pimpl;
+}
+
+#include <memory>
+#include <stdexcept>
+
+
+extern PyTypeObject PyGreenlet_Type;
+
+
+
+/**
+ * Forward declarations needed in multiple files.
+ */
+static PyGreenlet* green_create_main(greenlet::ThreadState*);
+static PyObject* green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs);
+static int green_is_gc(BorrowedGreenlet self);
+
+#ifdef __clang__
+# pragma clang diagnostic pop
+#endif
+
+
+#endif
+
+// Local Variables:
+// flycheck-clang-include-path: ("../../include" "/opt/local/Library/Frameworks/Python.framework/Versions/3.10/include/python3.10")
+// End:
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp
new file mode 100644
index 0000000..72ee68b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_refs.hpp
@@ -0,0 +1,1100 @@
+#ifndef GREENLET_REFS_HPP
+#define GREENLET_REFS_HPP
+
+#define PY_SSIZE_T_CLEAN
+#include <Python.h>
+//#include "greenlet_internal.hpp"
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_cpython_compat.hpp"
+#include "greenlet_exceptions.hpp"
+
+struct _greenlet;
+struct _PyMainGreenlet;
+
+typedef struct _greenlet PyGreenlet;
+extern PyTypeObject PyGreenlet_Type;
+
+
+#ifdef GREENLET_USE_STDIO
+#include <iostream>
+using std::cerr;
+using std::endl;
+#endif
+
+namespace greenlet
+{
+ class Greenlet;
+
+ namespace refs
+ {
+ // Type checkers throw a TypeError if the argument is not
+ // null, and isn't of the required Python type.
+ // (We can't use most of the defined type checkers
+ // like PyList_Check, etc, directly, because they are
+ // implemented as macros.)
+ typedef void (*TypeChecker)(void*);
+
+ G_FP_TMPL_STATIC inline void
+ NoOpChecker(void*)
+ {
+ return;
+ }
+
+ G_FP_TMPL_STATIC inline void
+ GreenletChecker(void *p)
+ {
+ if (!p) {
+ return;
+ }
+
+ PyTypeObject* typ = Py_TYPE(p);
+ // fast, common path. (PyObject_TypeCheck is a macro or
+ // static inline function, and it also does a
+ // direct comparison of the type pointers, but its fast
+ // path only handles one type)
+ if (typ == &PyGreenlet_Type) {
+ return;
+ }
+
+ if (!PyObject_TypeCheck(p, &PyGreenlet_Type)) {
+ std::string err("GreenletChecker: Expected any type of greenlet, not ");
+ err += Py_TYPE(p)->tp_name;
+ throw TypeError(err);
+ }
+ }
+
+ G_FP_TMPL_STATIC inline void
+ MainGreenletExactChecker(void *p);
+
+ template<typename T=PyObject, TypeChecker TC=NoOpChecker>
+ class PyObjectPointer;
+
+ template<typename T=PyObject, TypeChecker TC=NoOpChecker>
+ class OwnedReference;
+
+
+ template<typename T=PyObject, TypeChecker TC=NoOpChecker>
+ class BorrowedReference;
+
+ typedef BorrowedReference<PyObject> BorrowedObject;
+ typedef OwnedReference<PyObject> OwnedObject;
+
+ class ImmortalObject;
+ class ImmortalString;
+
+ template<typename T=PyGreenlet, TypeChecker TC=GreenletChecker>
+ class _OwnedGreenlet;
+
+ typedef _OwnedGreenlet<PyGreenlet, GreenletChecker> OwnedGreenlet;
+ typedef _OwnedGreenlet<PyGreenlet, MainGreenletExactChecker> OwnedMainGreenlet;
+
+ template<typename T=PyGreenlet, TypeChecker TC=GreenletChecker>
+ class _BorrowedGreenlet;
+
+ typedef _BorrowedGreenlet<PyGreenlet, GreenletChecker> BorrowedGreenlet;
+
+ G_FP_TMPL_STATIC inline void
+ ContextExactChecker(void *p)
+ {
+ if (!p) {
+ return;
+ }
+ if (!PyContext_CheckExact(p)) {
+ throw TypeError(
+ "greenlet context must be a contextvars.Context or None"
+ );
+ }
+ }
+
+ typedef OwnedReference<PyObject, ContextExactChecker> OwnedContext;
+ }
+}
+
+namespace greenlet {
+
+
+ namespace refs {
+ // A set of classes to make reference counting rules in python
+ // code explicit.
+ //
+ // Rules of use:
+ // (1) Functions returning a new reference that the caller of the
+ // function is expected to dispose of should return a
+ // ``OwnedObject`` object. This object automatically releases its
+ // reference when it goes out of scope. It works like a ``std::shared_ptr``
+ // and can be copied or used as a function parameter (but don't do
+ // that). Note that constructing a ``OwnedObject`` from a
+ // PyObject* steals the reference.
+ // (2) Parameters to functions should be either a
+ // ``OwnedObject&``, or, more generally, a ``PyObjectPointer&``.
+ // If the function needs to create its own new reference, it can
+ // do so by copying to a local ``OwnedObject``.
+ // (3) Functions returning an existing pointer that is NOT
+ // incref'd, and which the caller MUST NOT decref,
+ // should return a ``BorrowedObject``.
+
+ //
+ // For a class with a single pointer member, whose constructor
+ // does nothing but copy a pointer parameter into the member, and
+ // which can then be converted back to the pointer type, compilers
+ // generate code that's the same as just passing the pointer.
+ // That is, func(BorrowedObject x) called like ``PyObject* p =
+ // ...; f(p)`` has 0 overhead. Similarly, they "unpack" to the
+ // pointer type with 0 overhead.
+ //
+ // If there are no virtual functions, no complex inheritance (maybe?) and
+ // no destructor, these can be directly used as parameters in
+ // Python callbacks like tp_init: the layout is the same as a
+ // single pointer. Only subclasses with trivial constructors that
+ // do nothing but set the single pointer member are safe to use
+ // that way.
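+ //
+ // As a hypothetical sketch of rules (1)-(3) (the function names below
+ // are invented for illustration and appear nowhere else in this file):
+ //
+ //     OwnedObject make_repr(const PyObjectPointer<>& obj)
+ //     {
+ //         // PyObject_Repr returns a new reference; consuming() takes
+ //         // ownership, so no manual Py_DECREF is ever needed.
+ //         return OwnedObject::consuming(PyObject_Repr(obj.borrow_o()));
+ //     }
+ //
+ //     // Borrowed parameter: the callee neither increfs nor decrefs.
+ //     void log_value(const BorrowedObject value);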
+
+
+ // This is the base class for things that can be done with a
+ // PyObject pointer. It assumes nothing about memory management.
+ // NOTE: Nothing is virtual, so subclasses shouldn't add new
+ // storage fields or try to override these methods.
+ template<typename T, TypeChecker TC>
+ class PyObjectPointer
+ {
+ public:
+ typedef T PyType;
+ protected:
+ T* p;
+ public:
+ explicit PyObjectPointer(T* it=nullptr) : p(it)
+ {
+ TC(p);
+ }
+
+ // We don't allow automatic casting to PyObject* at this
+ // level, because then we could be passed to Py_DECREF/INCREF,
+ // but we want nothing to do with memory management. If you
+ // know better, then you can use the get() method, like on a
+ // std::shared_ptr. Except we name it borrow() to clarify that
+ // if this is a reference-tracked object, the pointer you get
+ // back will go away when the object does.
+ // TODO: This should probably not exist here, but be moved
+ // down to relevant sub-types.
+
+ inline T* borrow() const noexcept
+ {
+ return this->p;
+ }
+
+ PyObject* borrow_o() const noexcept
+ {
+ return reinterpret_cast<PyObject*>(this->p);
+ }
+
+ inline T* operator->() const noexcept
+ {
+ return this->p;
+ }
+
+ bool is_None() const noexcept
+ {
+ return this->p == Py_None;
+ }
+
+ inline PyObject* acquire_or_None() const noexcept
+ {
+ PyObject* result = this->p ? reinterpret_cast<PyObject*>(this->p) : Py_None;
+ Py_INCREF(result);
+ return result;
+ }
+
+ explicit operator bool() const noexcept
+ {
+ return p != nullptr;
+ }
+
+ inline Py_ssize_t REFCNT() const noexcept
+ {
+ return p ? Py_REFCNT(p) : -42;
+ }
+
+ inline PyTypeObject* TYPE() const noexcept
+ {
+ return p ? Py_TYPE(p) : nullptr;
+ }
+
+ inline OwnedObject PyStr() const noexcept;
+ inline const std::string as_str() const noexcept;
+ inline OwnedObject PyGetAttr(const ImmortalObject& name) const noexcept;
+ inline OwnedObject PyRequireAttr(const char* const name) const;
+ inline OwnedObject PyRequireAttr(const ImmortalString& name) const;
+ inline OwnedObject PyCall(const BorrowedObject& arg) const;
+ inline OwnedObject PyCall(PyGreenlet* arg) const ;
+ inline OwnedObject PyCall(PyObject* arg) const ;
+ // PyObject_Call(this, args, kwargs);
+ inline OwnedObject PyCall(const BorrowedObject args,
+ const BorrowedObject kwargs) const;
+ inline OwnedObject PyCall(const OwnedObject& args,
+ const OwnedObject& kwargs) const;
+
+ protected:
+ void _set_raw_pointer(void* t)
+ {
+ TC(t);
+ p = reinterpret_cast<T*>(t);
+ }
+ void* _get_raw_pointer() const
+ {
+ return p;
+ }
+ };
+
+#ifdef GREENLET_USE_STDIO
+ template<typename T, TypeChecker TC>
+ std::ostream& operator<<(std::ostream& os, const PyObjectPointer<T, TC>& s)
+ {
+ const std::type_info& t = typeid(s);
+ os << t.name()
+ << "(addr=" << s.borrow()
+ << ", refcnt=" << s.REFCNT()
+ << ", value=" << s.as_str()
+ << ")";
+
+ return os;
+ }
+#endif
+
+ template<typename T, TypeChecker TC>
+ inline bool operator==(const PyObjectPointer<T, TC>& lhs, const void* const rhs) noexcept
+ {
+ return lhs.borrow_o() == rhs;
+ }
+
+ template<typename T, TypeChecker TC, typename X, TypeChecker XC>
+ inline bool operator==(const PyObjectPointer<T, TC>& lhs, const PyObjectPointer<X, XC>& rhs) noexcept
+ {
+ return lhs.borrow_o() == rhs.borrow_o();
+ }
+
+ template<typename T, TypeChecker TC, typename X, TypeChecker XC>
+ inline bool operator!=(const PyObjectPointer<T, TC>& lhs,
+ const PyObjectPointer<X, XC>& rhs) noexcept
+ {
+ return lhs.borrow_o() != rhs.borrow_o();
+ }
+
+ template<typename T, TypeChecker TC>
+ class OwnedReference : public PyObjectPointer<T, TC>
+ {
+ private:
+ friend class OwnedList;
+
+ protected:
+ explicit OwnedReference(T* it) : PyObjectPointer<T, TC>(it)
+ {
+ }
+
+ public:
+
+ // Constructors
+
+ static OwnedReference consuming(PyObject* p)
+ {
+ return OwnedReference(reinterpret_cast<T*>(p));
+ }
+
+ static OwnedReference owning(T* p)
+ {
+ OwnedReference result(p);
+ Py_XINCREF(result.p);
+ return result;
+ }
+
+ OwnedReference() : PyObjectPointer<T, TC>(nullptr)
+ {}
+
+ explicit OwnedReference(const PyObjectPointer<>& other)
+ : PyObjectPointer<T, TC>(nullptr)
+ {
+ T* op = other.borrow();
+ TC(op);
+ this->p = other.borrow();
+ Py_XINCREF(this->p);
+ }
+
+ // It would be good to make use of the C++11 distinction
+ // between move and copy operations, e.g., constructing from a
+ // pointer should be a move operation.
+ // In the common case of ``OwnedObject x = Py_SomeFunction()``,
+ // the call to the copy constructor will be elided completely.
+ OwnedReference(const OwnedReference& other)
+ : PyObjectPointer<T, TC>(other.p)
+ {
+ Py_XINCREF(this->p);
+ }
+
+ static OwnedReference None()
+ {
+ Py_INCREF(Py_None);
+ return OwnedReference(Py_None);
+ }
+
+ // We can assign from exactly our type without any extra checking
+ OwnedReference& operator=(const OwnedReference& other)
+ {
+ Py_XINCREF(other.p);
+ const T* tmp = this->p;
+ this->p = other.p;
+ Py_XDECREF(tmp);
+ return *this;
+ }
+
+ OwnedReference& operator=(const BorrowedReference<T, TC> other)
+ {
+ return this->operator=(other.borrow());
+ }
+
+ OwnedReference& operator=(T* const other)
+ {
+ TC(other);
+ Py_XINCREF(other);
+ T* tmp = this->p;
+ this->p = other;
+ Py_XDECREF(tmp);
+ return *this;
+ }
+
+ // We can assign from an arbitrary reference type
+ // if it passes our check.
+ template<typename X, TypeChecker XC>
+ OwnedReference<T, TC>& operator=(const OwnedReference<X, XC>& other)
+ {
+ X* op = other.borrow();
+ TC(op);
+ return this->operator=(reinterpret_cast<T*>(op));
+ }
+
+ inline void steal(T* other)
+ {
+ assert(this->p == nullptr);
+ TC(other);
+ this->p = other;
+ }
+
+ T* relinquish_ownership()
+ {
+ T* result = this->p;
+ this->p = nullptr;
+ return result;
+ }
+
+ T* acquire() const
+ {
+ // Return a new reference.
+ // TODO: This may go away when we have reference objects
+ // throughout the code.
+ Py_XINCREF(this->p);
+ return this->p;
+ }
+
+ // Nothing else declares a destructor, we're the leaf, so we
+ // should be able to get away without virtual.
+ ~OwnedReference()
+ {
+ Py_CLEAR(this->p);
+ }
+
+ void CLEAR()
+ {
+ Py_CLEAR(this->p);
+ assert(this->p == nullptr);
+ }
+ };
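+
+ // Hedged sketch, for illustration only (the dictionary here is
+ // hypothetical): the two factories differ only in whether they take
+ // over an existing reference or add a new one.
+ //
+ //     // PyDict_New() returns a new reference: take it over.
+ //     OwnedObject d = OwnedObject::consuming(PyDict_New());
+ //     // PyDict_GetItemString() returns a borrowed reference: add one.
+ //     OwnedObject v = OwnedObject::owning(
+ //         PyDict_GetItemString(d.borrow(), "key"));
+ //     // Both references are released automatically at end of scope.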
+
+ static inline
+ void operator<<=(PyObject*& target, OwnedObject& o)
+ {
+ target = o.relinquish_ownership();
+ }
+
+ class NewReference : public OwnedObject
+ {
+ private:
+ G_NO_COPIES_OF_CLS(NewReference);
+ public:
+ // Consumes the reference. Only use this
+ // for API return values.
+ NewReference(PyObject* it) : OwnedObject(it)
+ {
+ }
+ };
+
+ class NewDictReference : public NewReference
+ {
+ private:
+ G_NO_COPIES_OF_CLS(NewDictReference);
+ public:
+ NewDictReference() : NewReference(PyDict_New())
+ {
+ if (!this->p) {
+ throw PyErrOccurred();
+ }
+ }
+
+ void SetItem(const char* const key, PyObject* value)
+ {
+ Require(PyDict_SetItemString(this->p, key, value));
+ }
+
+ void SetItem(const PyObjectPointer<>& key, PyObject* value)
+ {
+ Require(PyDict_SetItem(this->p, key.borrow_o(), value));
+ }
+ };
+
+ template<typename T, TypeChecker TC>
+ class _OwnedGreenlet: public OwnedReference<T, TC>
+ {
+ private:
+ protected:
+ _OwnedGreenlet(T* it) : OwnedReference<T, TC>(it)
+ {}
+
+ public:
+ _OwnedGreenlet() : OwnedReference<T, TC>()
+ {}
+
+ _OwnedGreenlet(const _OwnedGreenlet& other) : OwnedReference<T, TC>(other)
+ {
+ }
+ _OwnedGreenlet(OwnedMainGreenlet& other) :
+ OwnedReference<T, TC>(reinterpret_cast<T*>(other.acquire()))
+ {
+ }
+ _OwnedGreenlet(const BorrowedGreenlet& other);
+ // Steals a reference.
+ static _OwnedGreenlet consuming(PyGreenlet* it)
+ {
+ return _OwnedGreenlet(reinterpret_cast<T*>(it));
+ }
+
+ inline _OwnedGreenlet& operator=(const OwnedGreenlet& other)
+ {
+ return this->operator=(other.borrow());
+ }
+
+ inline _OwnedGreenlet& operator=(const BorrowedGreenlet& other);
+
+ _OwnedGreenlet& operator=(const OwnedMainGreenlet& other)
+ {
+ PyGreenlet* owned = other.acquire();
+ Py_XDECREF(this->p);
+ this->p = reinterpret_cast<T*>(owned);
+ return *this;
+ }
+
+ _OwnedGreenlet& operator=(T* const other)
+ {
+ OwnedReference<T, TC>::operator=(other);
+ return *this;
+ }
+
+ T* relinquish_ownership()
+ {
+ T* result = this->p;
+ this->p = nullptr;
+ return result;
+ }
+
+ PyObject* relinquish_ownership_o()
+ {
+ return reinterpret_cast<PyObject*>(relinquish_ownership());
+ }
+
+ inline Greenlet* operator->() const noexcept;
+ inline operator Greenlet*() const noexcept;
+ };
+
+ template<typename T, TypeChecker TC>
+ class BorrowedReference : public PyObjectPointer<T, TC>
+ {
+ public:
+ // Allow implicit creation from PyObject* pointers as we
+ // transition to using these classes. Also allow automatic
+ // conversion to PyObject* for passing to C API calls and even
+ // for Py_INCREF/DECREF, because we ourselves do no memory management.
+ BorrowedReference(T* it) : PyObjectPointer<T, TC>(it)
+ {}
+
+ BorrowedReference(const PyObjectPointer<T, TC>& ref) : PyObjectPointer<T, TC>(ref.borrow())
+ {}
+
+ BorrowedReference() : PyObjectPointer<T, TC>(nullptr)
+ {}
+
+ operator T*() const
+ {
+ return this->p;
+ }
+ };
+
+ typedef BorrowedReference<PyObject> BorrowedObject;
+ //typedef BorrowedReference<PyGreenlet, GreenletChecker> BorrowedGreenlet;
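+
+ // Illustrative only (this helper is not defined anywhere in greenlet):
+ // because BorrowedReference converts implicitly from and to T*, an
+ // existing call site keeps compiling, at no extra cost, when a raw
+ // PyObject* parameter is tightened to BorrowedObject:
+ //
+ //     static inline Py_ssize_t length_of(BorrowedObject obj)
+ //     {
+ //         return PyObject_Length(obj); // converts back to PyObject*
+ //     }
+ //     // caller: PyObject* p = ...; Py_ssize_t n = length_of(p);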
+
+ template<typename T, TypeChecker TC>
+ class _BorrowedGreenlet : public BorrowedReference<T, TC>
+ {
+ public:
+ _BorrowedGreenlet() :
+ BorrowedReference<T, TC>(nullptr)
+ {}
+
+ _BorrowedGreenlet(T* it) :
+ BorrowedReference<T, TC>(it)
+ {}
+
+ _BorrowedGreenlet(const BorrowedObject& it);
+
+ _BorrowedGreenlet(const OwnedGreenlet& it) :
+ BorrowedReference<T, TC>(it.borrow())
+ {}
+
+ _BorrowedGreenlet& operator=(const BorrowedObject& other);
+
+ // We get one of these for PyGreenlet, but one for PyObject
+ // is handy as well
+ operator PyObject*() const
+ {
+ return reinterpret_cast<PyObject*>(this->p);
+ }
+ inline Greenlet* operator->() const noexcept;
+ inline operator Greenlet*() const noexcept;
+ };
+
+ typedef _BorrowedGreenlet<PyGreenlet, GreenletChecker> BorrowedGreenlet;
+
+ template<typename T, TypeChecker TC>
+ _OwnedGreenlet<T, TC>::_OwnedGreenlet(const BorrowedGreenlet& other)
+ : OwnedReference<T, TC>(reinterpret_cast<T*>(other.borrow()))
+ {
+ Py_XINCREF(this->p);
+ }
+
+
+ class BorrowedMainGreenlet
+ : public _BorrowedGreenlet<PyGreenlet, MainGreenletExactChecker>
+ {
+ public:
+ BorrowedMainGreenlet(const OwnedMainGreenlet& it) :
+ _BorrowedGreenlet(it.borrow())
+ {}
+ BorrowedMainGreenlet(PyGreenlet* it=nullptr)
+ : _BorrowedGreenlet(it)
+ {}
+ };
+
+ template<typename T, TypeChecker TC>
+ _OwnedGreenlet<T, TC>& _OwnedGreenlet<T, TC>::operator=(const BorrowedGreenlet& other)
+ {
+ return this->operator=(other.borrow());
+ }
+
+
+ class ImmortalObject : public PyObjectPointer<>
+ {
+ private:
+ G_NO_ASSIGNMENT_OF_CLS(ImmortalObject);
+ public:
+ explicit ImmortalObject(PyObject* it) : PyObjectPointer<>(it)
+ {
+ }
+
+ ImmortalObject(const ImmortalObject& other)
+ : PyObjectPointer<>(other.p)
+ {
+
+ }
+
+ /**
+ * Become the new owner of the object. Does not change the
+ * reference count.
+ */
+ ImmortalObject& operator=(PyObject* it)
+ {
+ assert(this->p == nullptr);
+ this->p = it;
+ return *this;
+ }
+
+ static ImmortalObject consuming(PyObject* it)
+ {
+ return ImmortalObject(it);
+ }
+
+ inline operator PyObject*() const
+ {
+ return this->p;
+ }
+ };
+
+ class ImmortalString : public ImmortalObject
+ {
+ private:
+ G_NO_COPIES_OF_CLS(ImmortalString);
+ const char* str;
+ public:
+ ImmortalString(const char* const str) :
+ ImmortalObject(str ? Require(PyUnicode_InternFromString(str)) : nullptr)
+ {
+ this->str = str;
+ }
+
+ inline ImmortalString& operator=(const char* const str)
+ {
+ if (!this->p) {
+ this->p = Require(PyUnicode_InternFromString(str));
+ this->str = str;
+ }
+ else {
+ assert(this->str == str);
+ }
+ return *this;
+ }
+
+ inline operator std::string() const
+ {
+ return this->str;
+ }
+
+ };
+
+ class ImmortalEventName : public ImmortalString
+ {
+ private:
+ G_NO_COPIES_OF_CLS(ImmortalEventName);
+ public:
+ ImmortalEventName(const char* const str) : ImmortalString(str)
+ {}
+ };
+
+ class ImmortalException : public ImmortalObject
+ {
+ private:
+ G_NO_COPIES_OF_CLS(ImmortalException);
+ public:
+ ImmortalException(const char* const name, PyObject* base=nullptr) :
+ ImmortalObject(name
+ // Python 2.7 isn't const correct
+ ? Require(PyErr_NewException((char*)name, base, nullptr))
+ : nullptr)
+ {}
+
+ inline bool PyExceptionMatches() const
+ {
+ return PyErr_ExceptionMatches(this->p) > 0;
+ }
+
+ };
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyStr() const noexcept
+ {
+ if (!this->p) {
+ return OwnedObject();
+ }
+ return OwnedObject::consuming(PyObject_Str(reinterpret_cast<PyObject*>(this->p)));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline const std::string PyObjectPointer<T, TC>::as_str() const noexcept
+ {
+ // NOTE: This is not Python exception safe.
+ if (this->p) {
+ // The Python APIs return a cached char* value that's only valid
+ // as long as the original object stays around, and we're
+ // about to (probably) toss it. Hence the copy to std::string.
+ OwnedObject py_str = this->PyStr();
+ if (!py_str) {
+ return "(nil)";
+ }
+ return PyUnicode_AsUTF8(py_str.borrow());
+ }
+ return "(nil)";
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyGetAttr(const ImmortalObject& name) const noexcept
+ {
+ assert(this->p);
+ return OwnedObject::consuming(PyObject_GetAttr(reinterpret_cast<PyObject*>(this->p), name));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyRequireAttr(const char* const name) const
+ {
+ assert(this->p);
+ return OwnedObject::consuming(Require(PyObject_GetAttrString(this->p, name), name));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyRequireAttr(const ImmortalString& name) const
+ {
+ assert(this->p);
+ return OwnedObject::consuming(Require(
+ PyObject_GetAttr(
+ reinterpret_cast<PyObject*>(this->p),
+ name
+ ),
+ name
+ ));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyCall(const BorrowedObject& arg) const
+ {
+ return this->PyCall(arg.borrow());
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyCall(PyGreenlet* arg) const
+ {
+ return this->PyCall(reinterpret_cast<PyObject*>(arg));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyCall(PyObject* arg) const
+ {
+ assert(this->p);
+ return OwnedObject::consuming(PyObject_CallFunctionObjArgs(this->p, arg, NULL));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyCall(const BorrowedObject args,
+ const BorrowedObject kwargs) const
+ {
+ assert(this->p);
+ return OwnedObject::consuming(PyObject_Call(this->p, args, kwargs));
+ }
+
+ template<typename T, TypeChecker TC>
+ inline OwnedObject PyObjectPointer<T, TC>::PyCall(const OwnedObject& args,
+ const OwnedObject& kwargs) const
+ {
+ assert(this->p);
+ return OwnedObject::consuming(PyObject_Call(this->p, args.borrow(), kwargs.borrow()));
+ }
+
+ G_FP_TMPL_STATIC inline void
+ ListChecker(void * p)
+ {
+ if (!p) {
+ return;
+ }
+ if (!PyList_Check(p)) {
+ throw TypeError("Expected a list");
+ }
+ }
+
+ class OwnedList : public OwnedReference<PyObject, ListChecker>
+ {
+ private:
+ G_NO_ASSIGNMENT_OF_CLS(OwnedList);
+ public:
+ // TODO: Would like to use move.
+ explicit OwnedList(const OwnedObject& other)
+ : OwnedReference<PyObject, ListChecker>(other)
+ {
+ }
+
+ OwnedList& operator=(const OwnedObject& other)
+ {
+ if (other && PyList_Check(other.p)) {
+ // Valid list. Own a new reference to it, discard the
+ // reference to what we did own.
+ PyObject* new_ptr = other.p;
+ Py_INCREF(new_ptr);
+ Py_XDECREF(this->p);
+ this->p = new_ptr;
+ }
+ else {
+ // Either the other object was NULL (an error) or it
+ // wasn't a list. Either way, we're now invalidated.
+ Py_XDECREF(this->p);
+ this->p = nullptr;
+ }
+ return *this;
+ }
+
+ inline bool empty() const
+ {
+ return PyList_GET_SIZE(p) == 0;
+ }
+
+ inline Py_ssize_t size() const
+ {
+ return PyList_GET_SIZE(p);
+ }
+
+ inline BorrowedObject at(const Py_ssize_t index) const
+ {
+ return PyList_GET_ITEM(p, index);
+ }
+
+ inline void clear()
+ {
+ PyList_SetSlice(p, 0, PyList_GET_SIZE(p), NULL);
+ }
+ };
+
+ // Use this to represent the module object used at module init
+ // time.
+ // This could either be a borrowed (Py2) or new (Py3) reference;
+ // either way, we don't want to do any memory management
+ // on it here, Python itself will handle that.
+ // XXX: Actually, that's not quite right. On Python 3, if an
+ // exception occurs before we return to the interpreter, this will
+ // leak; but all previous versions also had that problem.
+ class CreatedModule : public PyObjectPointer<>
+ {
+ private:
+ G_NO_COPIES_OF_CLS(CreatedModule);
+ public:
+ CreatedModule(PyModuleDef& mod_def) : PyObjectPointer<>(
+ Require(PyModule_Create(&mod_def)))
+ {
+ }
+
+ // PyAddObject(): Add a reference to the object to the module.
+ // On return, the reference count of the object is unchanged.
+ //
+ // The docs warn that PyModule_AddObject only steals the
+ // reference on success, so if it fails after we've incref'd
+ // or allocated, we're responsible for the decref.
+ void PyAddObject(const char* name, const long new_bool)
+ {
+ OwnedObject p = OwnedObject::consuming(Require(PyBool_FromLong(new_bool)));
+ this->PyAddObject(name, p);
+ }
+
+ void PyAddObject(const char* name, const OwnedObject& new_object)
+ {
+ // The caller already owns a reference they will decref
+ // when their variable goes out of scope, we still need to
+ // incref/decref.
+ this->PyAddObject(name, new_object.borrow());
+ }
+
+ void PyAddObject(const char* name, const ImmortalObject& new_object)
+ {
+ this->PyAddObject(name, new_object.borrow());
+ }
+
+ void PyAddObject(const char* name, PyTypeObject& type)
+ {
+ this->PyAddObject(name, reinterpret_cast<PyObject*>(&type));
+ }
+
+ void PyAddObject(const char* name, PyObject* new_object)
+ {
+ Py_INCREF(new_object);
+ try {
+ Require(PyModule_AddObject(this->p, name, new_object));
+ }
+ catch (const PyErrOccurred&) {
+ Py_DECREF(p);
+ throw;
+ }
+ }
+ };
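+
+ // Hypothetical module-init sketch (the module definition name is
+ // invented for the example; it is not declared in this header):
+ //
+ //     static PyModuleDef greenlet_module_def = { /* ... */ };
+ //     CreatedModule m(greenlet_module_def);        // throws PyErrOccurred on failure
+ //     m.PyAddObject("GREENLET_USE_GC", 1L);        // stores Py_True
+ //     m.PyAddObject("greenlet", PyGreenlet_Type);  // stores the type object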
+
+ class PyErrFetchParam : public PyObjectPointer<>
+ {
+ // Not an owned object, because we can't be initialized with
+ // one, and we only sometimes acquire ownership.
+ private:
+ G_NO_COPIES_OF_CLS(PyErrFetchParam);
+ public:
+ // To allow declaring these and passing them to
+ // PyErr_Fetch we implement the empty constructor,
+ // and the address operator.
+ PyErrFetchParam() : PyObjectPointer<>(nullptr)
+ {
+ }
+
+ PyObject** operator&()
+ {
+ return &this->p;
+ }
+
+ // This allows us to pass one directly without the &,
+ // BUT it has higher precedence than the bool operator
+ // if it's not explicit.
+ operator PyObject**()
+ {
+ return &this->p;
+ }
+
+ // We don't want to be able to pass these to Py_DECREF and
+ // such so we don't have the implicit PyObject* conversion.
+
+ inline PyObject* relinquish_ownership()
+ {
+ PyObject* result = this->p;
+ this->p = nullptr;
+ return result;
+ }
+
+ ~PyErrFetchParam()
+ {
+ Py_XDECREF(p);
+ }
+ };
+
+ class OwnedErrPiece : public OwnedObject
+ {
+ private:
+
+ public:
+ // Unlike OwnedObject, this increments the refcount.
+ OwnedErrPiece(PyObject* p=nullptr) : OwnedObject(p)
+ {
+ this->acquire();
+ }
+
+ PyObject** operator&()
+ {
+ return &this->p;
+ }
+
+ inline operator PyObject*() const
+ {
+ return this->p;
+ }
+
+ operator PyTypeObject*() const
+ {
+ return reinterpret_cast<PyTypeObject*>(this->p);
+ }
+ };
+
+ class PyErrPieces
+ {
+ private:
+ OwnedErrPiece type;
+ OwnedErrPiece instance;
+ OwnedErrPiece traceback;
+ bool restored;
+ public:
+ // Takes new references; if we're destroyed before
+ // restoring the error, we drop the references.
+ PyErrPieces(PyObject* t, PyObject* v, PyObject* tb) :
+ type(t),
+ instance(v),
+ traceback(tb),
+ restored(0)
+ {
+ this->normalize();
+ }
+
+ PyErrPieces() :
+ restored(0)
+ {
+ // PyErr_Fetch transfers ownership to us, so
+ // we don't actually need to INCREF; but we *do*
+ // need to DECREF if we're not restored.
+ PyErrFetchParam t, v, tb;
+ PyErr_Fetch(&t, &v, &tb);
+ type.steal(t.relinquish_ownership());
+ instance.steal(v.relinquish_ownership());
+ traceback.steal(tb.relinquish_ownership());
+ }
+
+ void PyErrRestore()
+ {
+ // can only do this once
+ assert(!this->restored);
+ this->restored = true;
+ PyErr_Restore(
+ this->type.relinquish_ownership(),
+ this->instance.relinquish_ownership(),
+ this->traceback.relinquish_ownership());
+ assert(!this->type && !this->instance && !this->traceback);
+ }
+
+ private:
+ void normalize()
+ {
+ // First, check the traceback argument, replacing None,
+ // with NULL
+ if (traceback.is_None()) {
+ traceback = nullptr;
+ }
+
+ if (traceback && !PyTraceBack_Check(traceback.borrow())) {
+ throw PyErrOccurred(PyExc_TypeError,
+ "throw() third argument must be a traceback object");
+ }
+
+ if (PyExceptionClass_Check(type)) {
+ // If we just had a type, we'll now have a type and
+ // instance.
+ // The type's refcount will have gone up by one
+ // because of the instance and the instance will have
+ // a refcount of one. Either way, we owned, and still
+ // do own, exactly one reference.
+ PyErr_NormalizeException(&type, &instance, &traceback);
+
+ }
+ else if (PyExceptionInstance_Check(type)) {
+ /* Raising an instance --- usually that means an
+ object that is a subclass of BaseException, but on
+ Python 2, that can also mean an arbitrary old-style
+ object. The value should be a dummy. */
+ if (instance && !instance.is_None()) {
+ throw PyErrOccurred(
+ PyExc_TypeError,
+ "instance exception may not have a separate value");
+ }
+ /* Normalize to raise <class>, <instance> */
+ this->instance = this->type;
+ this->type = PyExceptionInstance_Class(instance.borrow());
+
+ /*
+ It would be tempting to do this:
+
+ Py_ssize_t type_count = Py_REFCNT(Py_TYPE(instance.borrow()));
+ this->type = PyExceptionInstance_Class(instance.borrow());
+ assert(this->type.REFCNT() == type_count + 1);
+
+ But that doesn't work on Python 2 in the case of
+ old-style instances: The result of Py_TYPE is going to
+ be the global shared <type 'instance'> that all
+ old-style classes have, while the return of Instance_Class()
+ will be the Python-level class object. The two are unrelated.
+ */
+ }
+ else {
+ /* Not something you can raise. throw() fails. */
+ PyErr_Format(PyExc_TypeError,
+ "exceptions must be classes, or instances, not %s",
+ Py_TYPE(type.borrow())->tp_name);
+ throw PyErrOccurred();
+ }
+ }
+ };
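+
+ // Illustrative use (the cleanup function is hypothetical): PyErrPieces
+ // preserves an in-flight exception across code that may clobber the
+ // thread's error indicator, then puts it back exactly once.
+ //
+ //     PyErrPieces saved;             // PyErr_Fetch: we own the pieces now
+ //     run_cleanup_that_may_fail();   // may set and clear its own errors
+ //     saved.PyErrRestore();          // original exception is active again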
+
+ // PyArg_Parse's O argument returns a borrowed reference.
+ class PyArgParseParam : public BorrowedObject
+ {
+ private:
+ G_NO_COPIES_OF_CLS(PyArgParseParam);
+ public:
+ explicit PyArgParseParam(PyObject* p=nullptr) : BorrowedObject(p)
+ {
+ }
+
+ inline PyObject** operator&()
+ {
+ return &this->p;
+ }
+ };
+
+};};
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp
new file mode 100644
index 0000000..bd4b7ae
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_slp_switch.hpp
@@ -0,0 +1,99 @@
+#ifndef GREENLET_SLP_SWITCH_HPP
+#define GREENLET_SLP_SWITCH_HPP
+
+#include "greenlet_compiler_compat.hpp"
+#include "greenlet_refs.hpp"
+
+/*
+ * the following macros are spliced into the OS/compiler
+ * specific code, in order to simplify maintenance.
+ */
+// We can save about 10% of the time it takes to switch greenlets if
+// we thread the thread state through the slp_save_state() and the
+// following slp_restore_state() calls from
+// slp_switch()->g_switchstack() (which already needs to access it).
+//
+// However:
+//
+// that requires changing the prototypes and implementations of the
+// switching functions. If we just change the prototype of
+// slp_switch() to accept the argument and update the macros, without
+// changing the implementation of slp_switch(), we get crashes on
+// 64-bit Linux and 32-bit x86 (for reasons that aren't 100% clear);
+// on the other hand, 64-bit macOS seems to be fine. Also, 64-bit
+// windows is an issue because slp_switch is written fully in assembly
+// and currently ignores its argument so some code would have to be
+// adjusted there to pass the argument on to the
+// ``slp_save_state_asm()`` function (but interestingly, because of
+// the calling convention, the extra argument is just ignored and
+// things function fine, albeit slower, if we just modify
+// ``slp_save_state_asm()`` to fetch the pointer to pass to the
+// macro.)
+//
+// Our compromise is to use a *global*, untracked, weak, pointer
+// to the necessary thread state during the process of switching only.
+// This is safe because we're protected by the GIL, and if we're
+// running this code, the thread isn't exiting. This also nets us a
+// 10-12% speed improvement.
+
+static greenlet::Greenlet* volatile switching_thread_state = nullptr;
+
+
+extern "C" {
+static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref);
+static void GREENLET_NOINLINE(slp_restore_state_trampoline)();
+}
+
+
+#define SLP_SAVE_STATE(stackref, stsizediff) \
+do { \
+ assert(switching_thread_state); \
+ stackref += STACK_MAGIC; \
+ if (slp_save_state_trampoline((char*)stackref)) \
+ return -1; \
+ if (!switching_thread_state->active()) \
+ return 1; \
+ stsizediff = switching_thread_state->stack_start() - (char*)stackref; \
+} while (0)
+
+#define SLP_RESTORE_STATE() slp_restore_state_trampoline()
+
+#define SLP_EVAL
+extern "C" {
+#define slp_switch GREENLET_NOINLINE(slp_switch)
+#include "slp_platformselect.h"
+}
+#undef slp_switch
+
+#ifndef STACK_MAGIC
+# error \
+ "greenlet needs to be ported to this platform, or taught how to detect your compiler properly."
+#endif /* !STACK_MAGIC */
+
+
+
+#ifdef EXTERNAL_ASM
+/* CCP addition: Make these functions, to be called from assembler.
+ * The token include file for the given platform should enable the
+ * EXTERNAL_ASM define so that this is included.
+ */
+extern "C" {
+intptr_t
+slp_save_state_asm(intptr_t* ref)
+{
+ intptr_t diff;
+ SLP_SAVE_STATE(ref, diff);
+ return diff;
+}
+
+void
+slp_restore_state_asm(void)
+{
+ SLP_RESTORE_STATE();
+}
+
+extern int slp_switch(void);
+};
+#endif
+
+#endif
diff --git a/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_state.hpp b/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_state.hpp
new file mode 100644
index 0000000..045371f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/greenlet/greenlet_thread_state.hpp
@@ -0,0 +1,543 @@
+#ifndef GREENLET_THREAD_STATE_HPP
+#define GREENLET_THREAD_STATE_HPP
+
+#include <ctime>
+#include <stdexcept>