venv
parent 6c1b634a22
commit 704dedb0f4
@@ -1,247 +0,0 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.

.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.

.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.

.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').

.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.

.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.

.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.

.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.

.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:

PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser

For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170

#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)

<# Function declarations --------------------------------------------------- #>

<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.

.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.

#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove VIRTUAL_ENV_PROMPT altogether.
    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}

<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}


<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
    $env:VIRTUAL_ENV_PROMPT = $Prompt
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
@@ -1,70 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # Call hash to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    hash -r 2> /dev/null

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1:-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
    # transform D:\path\to\venv to /d/path/to/venv on MSYS
    # and to /cygdrive/d/path/to/venv on Cygwin
    export VIRTUAL_ENV=$(cygpath "/home/klaas/kasse-py/venv")
else
    # use the path as-is
    export VIRTUAL_ENV="/home/klaas/kasse-py/venv"
fi

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    PS1="(venv) ${PS1:-}"
    export PS1
    VIRTUAL_ENV_PROMPT="(venv) "
    export VIRTUAL_ENV_PROMPT
fi

# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
@@ -1,27 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.

# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/klaas/kasse-py/venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = "(venv) $prompt"
    setenv VIRTUAL_ENV_PROMPT "(venv) "
endif

alias pydoc python -m pydoc

rehash
@@ -1,69 +0,0 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        set -e _OLD_FISH_PROMPT_OVERRIDE
        # prevents error when using nested fish instances (Issue #93858)
        if functions -q _old_fish_prompt
            functions -e fish_prompt
            functions -c _old_fish_prompt fish_prompt
            functions -e _old_fish_prompt
        end
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/home/klaas/kasse-py/venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
    set -gx VIRTUAL_ENV_PROMPT "(venv) "
end
@@ -1,8 +0,0 @@
#!/home/klaas/kasse-py/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from dotenv.__main__ import cli
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(cli())

@@ -1,8 +0,0 @@
#!/home/klaas/kasse-py/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -1,8 +0,0 @@
#!/home/klaas/kasse-py/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -1,8 +0,0 @@
#!/home/klaas/kasse-py/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

@@ -1,8 +0,0 @@
#!/home/klaas/kasse-py/venv/bin/python3.12
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1 +0,0 @@
python3.12

@@ -1 +0,0 @@
python3.12

@@ -1 +0,0 @@
/home/linuxbrew/.linuxbrew/opt/python@3.12/bin/python3.12

@@ -1 +0,0 @@
pip
@ -1,28 +0,0 @@
|
|||
Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,92 +0,0 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: MarkupSafe
|
||||
Version: 3.0.2
|
||||
Summary: Safely add untrusted strings to HTML/XML markup.
|
||||
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||
License: Copyright 2010 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
Project-URL: Donate, https://palletsprojects.com/donate
|
||||
Project-URL: Documentation, https://markupsafe.palletsprojects.com/
|
||||
Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/
|
||||
Project-URL: Source, https://github.com/pallets/markupsafe/
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
||||
Classifier: Topic :: Text Processing :: Markup :: HTML
|
||||
Classifier: Typing :: Typed
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/markdown
|
||||
License-File: LICENSE.txt
|
||||
|
||||
# MarkupSafe
|
||||
|
||||
MarkupSafe implements a text object that escapes characters so it is
|
||||
safe to use in HTML and XML. Characters that have special meanings are
|
||||
replaced so that they display as the actual characters. This mitigates
|
||||
injection attacks, meaning untrusted user input can safely be displayed
|
||||
on a page.
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
```pycon
|
||||
>>> from markupsafe import Markup, escape
|
||||
|
||||
>>> # escape replaces special characters and wraps in Markup
|
||||
>>> escape("<script>alert(document.cookie);</script>")
|
||||
Markup('&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
|
||||
|
||||
>>> # wrap in Markup to mark text "safe" and prevent escaping
|
||||
>>> Markup("<strong>Hello</strong>")
|
||||
Markup('<strong>Hello</strong>')
|
||||
|
||||
>>> escape(Markup("<strong>Hello</strong>"))
|
||||
Markup('<strong>Hello</strong>')
|
||||
|
||||
>>> # Markup is a str subclass
|
||||
>>> # methods and operators escape their arguments
|
||||
>>> template = Markup("Hello <em>{name}</em>")
|
||||
>>> template.format(name='"World"')
|
||||
Markup('Hello <em>&#34;World&#34;</em>')
|
||||
```
|
||||
|
||||
## Donate
|
||||
|
||||
The Pallets organization develops and supports MarkupSafe and other
|
||||
popular packages. In order to grow the community of contributors and
|
||||
users, and allow the maintainers to devote more time to the projects,
|
||||
[please donate today][].
|
||||
|
||||
[please donate today]: https://palletsprojects.com/donate
|
|
@ -1,15 +0,0 @@
|
|||
MarkupSafe-3.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
MarkupSafe-3.0.2.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475
|
||||
MarkupSafe-3.0.2.dist-info/METADATA,sha256=aAwbZhSmXdfFuMM-rEHpeiHRkBOGESyVLJIuwzHP-nw,3975
|
||||
MarkupSafe-3.0.2.dist-info/RECORD,,
|
||||
MarkupSafe-3.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
MarkupSafe-3.0.2.dist-info/WHEEL,sha256=OVgtqZzfzIXXtylXP90gxCZ6CKBCwKYyHM8PpMEjN1M,151
|
||||
MarkupSafe-3.0.2.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
|
||||
markupsafe/__init__.py,sha256=sr-U6_27DfaSrj5jnHYxWN-pvhM27sjlDplMDPZKm7k,13214
|
||||
markupsafe/__pycache__/__init__.cpython-312.pyc,,
|
||||
markupsafe/__pycache__/_native.cpython-312.pyc,,
|
||||
markupsafe/_native.py,sha256=hSLs8Jmz5aqayuengJJ3kdT5PwNpBWpKrmQSdipndC8,210
|
||||
markupsafe/_speedups.c,sha256=O7XulmTo-epI6n2FtMVOrJXl8EAaIwD2iNYmBI5SEoQ,4149
|
||||
markupsafe/_speedups.cpython-312-x86_64-linux-gnu.so,sha256=t1DBZlpsjFA30BOOvXfXfT1wvO_4cS16VbHz1-49q5U,43432
|
||||
markupsafe/_speedups.pyi,sha256=ENd1bYe7gbBUf2ywyYWOGUpnXOHNJ-cgTNqetlW8h5k,41
|
||||
markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@ -1,6 +0,0 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.2.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
||||
Tag: cp312-cp312-manylinux2014_x86_64
|
||||
|
|
@ -1 +0,0 @@
|
|||
markupsafe
|
|
@ -1,7 +0,0 @@
|
|||
Authors
|
||||
=======
|
||||
|
||||
``pyjwt`` is currently written and maintained by `Jose Padilla <https://github.com/jpadilla>`_.
|
||||
Originally written and maintained by `Jeff Lindsay <https://github.com/progrium>`_.
|
||||
|
||||
A full list of contributors can be found on GitHub’s `overview <https://github.com/jpadilla/pyjwt/graphs/contributors>`_.
|
|
@ -1 +0,0 @@
|
|||
pip
|
|
@ -1,21 +0,0 @@
|
|||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015-2022 José Padilla
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
|
@ -1,106 +0,0 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: PyJWT
|
||||
Version: 2.10.1
|
||||
Summary: JSON Web Token implementation in Python
|
||||
Author-email: Jose Padilla <hello@jpadilla.com>
|
||||
License: MIT
|
||||
Project-URL: Homepage, https://github.com/jpadilla/pyjwt
|
||||
Keywords: json,jwt,security,signing,token,web
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Topic :: Utilities
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
License-File: AUTHORS.rst
|
||||
Provides-Extra: crypto
|
||||
Requires-Dist: cryptography>=3.4.0; extra == "crypto"
|
||||
Provides-Extra: dev
|
||||
Requires-Dist: coverage[toml]==5.0.4; extra == "dev"
|
||||
Requires-Dist: cryptography>=3.4.0; extra == "dev"
|
||||
Requires-Dist: pre-commit; extra == "dev"
|
||||
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "dev"
|
||||
Requires-Dist: sphinx; extra == "dev"
|
||||
Requires-Dist: sphinx-rtd-theme; extra == "dev"
|
||||
Requires-Dist: zope.interface; extra == "dev"
|
||||
Provides-Extra: docs
|
||||
Requires-Dist: sphinx; extra == "docs"
|
||||
Requires-Dist: sphinx-rtd-theme; extra == "docs"
|
||||
Requires-Dist: zope.interface; extra == "docs"
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: coverage[toml]==5.0.4; extra == "tests"
|
||||
Requires-Dist: pytest<7.0.0,>=6.0.0; extra == "tests"
|
||||
|
||||
PyJWT
|
||||
=====
|
||||
|
||||
.. image:: https://github.com/jpadilla/pyjwt/workflows/CI/badge.svg
|
||||
:target: https://github.com/jpadilla/pyjwt/actions?query=workflow%3ACI
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pyjwt.svg
|
||||
:target: https://pypi.python.org/pypi/pyjwt
|
||||
|
||||
.. image:: https://codecov.io/gh/jpadilla/pyjwt/branch/master/graph/badge.svg
|
||||
:target: https://codecov.io/gh/jpadilla/pyjwt
|
||||
|
||||
.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=stable
|
||||
:target: https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.
|
||||
|
||||
Sponsor
|
||||
-------
|
||||
|
||||
.. |auth0-logo| image:: https://github.com/user-attachments/assets/ee98379e-ee76-4bcb-943a-e25c4ea6d174
|
||||
:width: 160px
|
||||
|
||||
+--------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/signup <https://auth0.com/signup?utm_source=external_sites&utm_medium=pyjwt&utm_campaign=devn_signup>`_. |
|
||||
+--------------+-----------------------------------------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
|
||||
|
||||
Installing
|
||||
----------
|
||||
|
||||
Install with **pip**:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ pip install PyJWT
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> import jwt
|
||||
>>> encoded = jwt.encode({"some": "payload"}, "secret", algorithm="HS256")
|
||||
>>> print(encoded)
|
||||
eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg
|
||||
>>> jwt.decode(encoded, "secret", algorithms=["HS256"])
|
||||
{'some': 'payload'}
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
View the full docs online at https://pyjwt.readthedocs.io/en/stable/
|
||||
|
||||
|
||||
Tests
|
||||
-----
|
||||
|
||||
You can run tests from the project root after cloning with:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ tox
|
|
@ -1,33 +0,0 @@
|
|||
PyJWT-2.10.1.dist-info/AUTHORS.rst,sha256=klzkNGECnu2_VY7At89_xLBF3vUSDruXk3xwgUBxzwc,322
|
||||
PyJWT-2.10.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
PyJWT-2.10.1.dist-info/LICENSE,sha256=eXp6ICMdTEM-nxkR2xcx0GtYKLmPSZgZoDT3wPVvXOU,1085
|
||||
PyJWT-2.10.1.dist-info/METADATA,sha256=EkewF6D6KU8SGaaQzVYfxUUU1P_gs_dp1pYTkoYvAx8,3990
|
||||
PyJWT-2.10.1.dist-info/RECORD,,
|
||||
PyJWT-2.10.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
PyJWT-2.10.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
|
||||
PyJWT-2.10.1.dist-info/top_level.txt,sha256=RP5DHNyJbMq2ka0FmfTgoSaQzh7e3r5XuCWCO8a00k8,4
|
||||
jwt/__init__.py,sha256=VB2vFKuboTjcDGeZ8r-UqK_dz3NsQSQEqySSICby8Xg,1711
|
||||
jwt/__pycache__/__init__.cpython-312.pyc,,
|
||||
jwt/__pycache__/algorithms.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jwk.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jws.cpython-312.pyc,,
|
||||
jwt/__pycache__/api_jwt.cpython-312.pyc,,
|
||||
jwt/__pycache__/exceptions.cpython-312.pyc,,
|
||||
jwt/__pycache__/help.cpython-312.pyc,,
|
||||
jwt/__pycache__/jwk_set_cache.cpython-312.pyc,,
|
||||
jwt/__pycache__/jwks_client.cpython-312.pyc,,
|
||||
jwt/__pycache__/types.cpython-312.pyc,,
|
||||
jwt/__pycache__/utils.cpython-312.pyc,,
|
||||
jwt/__pycache__/warnings.cpython-312.pyc,,
|
||||
jwt/algorithms.py,sha256=cKr-XEioe0mBtqJMCaHEswqVOA1Z8Purt5Sb3Bi-5BE,30409
|
||||
jwt/api_jwk.py,sha256=6F1r7rmm8V5qEnBKA_xMjS9R7VoANe1_BL1oD2FrAjE,4451
|
||||
jwt/api_jws.py,sha256=aM8vzqQf6mRrAw7bRy-Moj_pjWsKSVQyYK896AfMjJU,11762
|
||||
jwt/api_jwt.py,sha256=OGT4hok1l5A6FH_KdcrU5g6u6EQ8B7em0r9kGM9SYgA,14512
|
||||
jwt/exceptions.py,sha256=bUIOJ-v9tjopTLS-FYOTc3kFx5WP5IZt7ksN_HE1G9Q,1211
|
||||
jwt/help.py,sha256=vFdNzjQoAch04XCMYpCkyB2blaqHAGAqQrtf9nSPkdk,1808
|
||||
jwt/jwk_set_cache.py,sha256=hBKmN-giU7-G37L_XKgc_OZu2ah4wdbj1ZNG_GkoSE8,959
|
||||
jwt/jwks_client.py,sha256=p9b-IbQqo2tEge9Zit3oSPBFNePqwho96VLbnUrHUWs,4259
|
||||
jwt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
jwt/types.py,sha256=VnhGv_VFu5a7_mrPoSCB7HaNLrJdhM8Sq1sSfEg0gLU,99
|
||||
jwt/utils.py,sha256=hxOjvDBheBYhz-RIPiEz7Q88dSUSTMzEdKE_Ww2VdJw,3640
|
||||
jwt/warnings.py,sha256=50XWOnyNsIaqzUJTk6XHNiIDykiL763GYA92MjTKmok,59
|
|
@ -1,5 +0,0 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.6.0)
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
||||
|
|
@ -1 +0,0 @@
|
|||
jwt
|
Binary file not shown.
|
@ -1 +0,0 @@
|
|||
pip
|
|
@ -1,201 +0,0 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,322 +0,0 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: bcrypt
|
||||
Version: 4.2.1
|
||||
Summary: Modern password hashing for your software and your servers
|
||||
Author-email: The Python Cryptographic Authority developers <cryptography-dev@python.org>
|
||||
License: Apache-2.0
|
||||
Project-URL: homepage, https://github.com/pyca/bcrypt/
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3 :: Only
|
||||
Classifier: Programming Language :: Python :: 3.7
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Requires-Python: >=3.7
|
||||
Description-Content-Type: text/x-rst
|
||||
License-File: LICENSE
|
||||
Provides-Extra: tests
|
||||
Requires-Dist: pytest!=3.3.0,>=3.2.1; extra == "tests"
|
||||
Provides-Extra: typecheck
|
||||
Requires-Dist: mypy; extra == "typecheck"
|
||||
|
||||
bcrypt
|
||||
======
|
||||
|
||||
.. image:: https://img.shields.io/pypi/v/bcrypt.svg
|
||||
:target: https://pypi.org/project/bcrypt/
|
||||
:alt: Latest Version
|
||||
|
||||
.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main
|
||||
:target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain
|
||||
|
||||
Acceptable password hashing for your software and your servers (but you should
|
||||
really use argon2id or scrypt)
|
||||
|
||||
|
||||
Installation
|
||||
============
|
||||
|
||||
To install bcrypt, simply:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ pip install bcrypt
|
||||
|
||||
Note that bcrypt should build very easily on Linux provided you have a C
|
||||
compiler and a Rust compiler (the minimum supported Rust version is 1.56.0).
|
||||
|
||||
For Debian and Ubuntu, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ sudo apt-get install build-essential cargo
|
||||
|
||||
For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ sudo yum install gcc cargo
|
||||
|
||||
For Alpine, the following command will ensure that the required dependencies are installed:
|
||||
|
||||
.. code:: console
|
||||
|
||||
$ apk add --update musl-dev gcc cargo
|
||||
|
||||
|
||||
Alternatives
|
||||
============
|
||||
|
||||
While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_.
|
||||
|
||||
Changelog
|
||||
=========
|
||||
|
||||
4.2.1
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions - this should resolve crashes on Python 3.13
|
||||
free-threaded builds.
|
||||
* We no longer build ``manylinux`` wheels for PyPy 3.9.
|
||||
|
||||
4.2.0
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions
|
||||
* Removed the ``BCRYPT_ALLOW_RUST_163`` environment variable.
|
||||
|
||||
4.1.3
|
||||
-----
|
||||
|
||||
* Bump Rust dependency versions
|
||||
|
||||
4.1.2
|
||||
-----
|
||||
|
||||
* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors
|
||||
relating to initializing a module multiple times per process.
|
||||
|
||||
4.1.1
|
||||
-----
|
||||
|
||||
* Fixed the type signature on the ``kdf`` method.
|
||||
* Fixed packaging bug on Windows.
|
||||
* Fixed incompatibility with passlib package detection assumptions.
|
||||
|
||||
4.1.0
|
||||
-----
|
||||
|
||||
* Dropped support for Python 3.6.
|
||||
* Bumped MSRV to 1.64. (Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable)
|
||||
|
||||
4.0.1
|
||||
-----
|
||||
|
||||
* We now build PyPy ``manylinux`` wheels.
|
||||
* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in
|
||||
a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``.
|
||||
|
||||
4.0.0
|
||||
-----
|
||||
|
||||
* ``bcrypt`` is now implemented in Rust. Users building from source will need
|
||||
to have a Rust compiler available. Nothing will change for users downloading
|
||||
wheels.
|
||||
* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest
|
||||
``pip`` to ensure this doesn’t cause issues downloading wheels on their
|
||||
platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms.
|
||||
* ``NUL`` bytes are now allowed in inputs.
|
||||
|
||||
|
||||
3.2.2
|
||||
-----
|
||||
|
||||
* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
|
||||
|
||||
3.2.1
|
||||
-----
|
||||
|
||||
* Added support for compilation on z/OS
|
||||
* The next release of ``bcrypt`` will be 4.0 and it will require Rust at
|
||||
compile time, for users building from source. There will be no additional
|
||||
requirement for users who are installing from wheels. Users on most
|
||||
platforms will be able to obtain a wheel by making sure they have an up to
|
||||
date ``pip``. The minimum supported Rust version will be 1.56.0.
|
||||
* This will be the final release for which we ship ``manylinux2010`` wheels.
|
||||
Going forward the minimum supported manylinux ABI for our wheels will be
|
||||
``manylinux2014``. The vast majority of users will continue to receive
|
||||
``manylinux`` wheels provided they have an up to date ``pip``.
|
||||
|
||||
|
||||
3.2.0
|
||||
-----
|
||||
|
||||
* Added typehints for library functions.
|
||||
* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5).
|
||||
* Shipped ``abi3`` Windows wheels (requires pip >= 20).
|
||||
|
||||
3.1.7
|
||||
-----
|
||||
|
||||
* Set a ``setuptools`` lower bound for PEP517 wheel building.
|
||||
* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce
|
||||
them was a maintenance burden.
|
||||
|
||||
3.1.6
|
||||
-----
|
||||
|
||||
* Added support for compilation on Haiku.
|
||||
|
||||
3.1.5
|
||||
-----
|
||||
|
||||
* Added support for compilation on AIX.
|
||||
* Dropped Python 2.6 and 3.3 support.
|
||||
* Switched to using ``abi3`` wheels for Python 3. If you are not getting a
|
||||
wheel on a compatible platform please upgrade your ``pip`` version.
|
||||
|
||||
3.1.4
|
||||
-----
|
||||
|
||||
* Fixed compilation with mingw and on illumos.
|
||||
|
||||
3.1.3
|
||||
-----
|
||||
* Fixed a compilation issue on Solaris.
|
||||
* Added a warning when using too few rounds with ``kdf``.
|
||||
|
||||
3.1.2
|
||||
-----
|
||||
* Fixed a compile issue affecting big endian platforms.
|
||||
* Fixed invalid escape sequence warnings on Python 3.6.
|
||||
* Fixed building in non-UTF8 environments on Python 2.
|
||||
|
||||
3.1.1
|
||||
-----
|
||||
* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3.
|
||||
|
||||
3.1.0
|
||||
-----
|
||||
* Added support for ``checkpw``, a convenience method for verifying a password.
|
||||
* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt.
|
||||
* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
|
||||
* Fixed compilation under Alpine Linux.
|
||||
|
||||
3.0.0
|
||||
-----
|
||||
* Switched the C backend to code obtained from the OpenBSD project rather than
|
||||
openwall.
|
||||
* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
|
||||
|
||||
2.0.0
|
||||
-----
|
||||
* Added support for an adjustable prefix when calling ``gensalt``.
|
||||
* Switched to CFFI 1.0+
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
Password Hashing
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
Hashing and then later checking that a password matches the previous hashed
|
||||
password is very simple:
|
||||
|
||||
.. code:: pycon
|
||||
|
||||
>>> import bcrypt
|
||||
>>> password = b"super secret password"
|
||||
>>> # Hash a password for the first time, with a randomly-generated salt
|
||||
>>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
|
||||
>>> # Check that an unhashed password matches one that has previously been
|
||||
>>> # hashed
|
||||
>>> if bcrypt.checkpw(password, hashed):
|
||||
... print("It Matches!")
|
||||
... else:
|
||||
... print("It Does not Match :(")
|
||||
|
||||
KDF
|
||||
~~~
|
||||
|
||||
As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``.
|
||||
This KDF is used in OpenSSH's newer encrypted private key format.
|
||||
|
||||
.. code:: pycon
|
||||
|
||||
>>> import bcrypt
|
||||
>>> key = bcrypt.kdf(
|
||||
... password=b'password',
|
||||
... salt=b'salt',
|
||||
... desired_key_bytes=32,
|
||||
... rounds=100)
|
||||
|
||||
|
||||
Adjustable Work Factor
|
||||
~~~~~~~~~~~~~~~~~~~~~~
|
||||
One of bcrypt's features is an adjustable logarithmic work factor. To adjust
|
||||
the work factor merely pass the desired number of rounds to
|
||||
``bcrypt.gensalt(rounds=12)``, which defaults to 12:
|
||||
|
||||
.. code:: pycon
|
||||
|
||||
>>> import bcrypt
|
||||
>>> password = b"super secret password"
|
||||
>>> # Hash a password for the first time, with a certain number of rounds
|
||||
>>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
|
||||
>>> # Check that an unhashed password matches one that has previously been
|
||||
>>> # hashed
|
||||
>>> if bcrypt.checkpw(password, hashed):
|
||||
... print("It Matches!")
|
||||
... else:
|
||||
... print("It Does not Match :(")
|
||||
|
||||
|
||||
Adjustable Prefix
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
Another one of bcrypt's features is an adjustable prefix to let you define what
|
||||
libraries you'll remain compatible with. To adjust this, pass either ``2a`` or
|
||||
``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object.
|
||||
|
||||
As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
|
||||
|
||||
Maximum Password Length
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The bcrypt algorithm only handles passwords up to 72 characters, any characters
|
||||
beyond that are ignored. To work around this, a common approach is to hash a
|
||||
password with a cryptographic hash (such as ``sha256``) and then base64
|
||||
encode it to prevent NULL byte problems before hashing the result with
|
||||
``bcrypt``:
|
||||
|
||||
.. code:: pycon
|
||||
|
||||
>>> password = b"an incredibly long password" * 10
|
||||
>>> hashed = bcrypt.hashpw(
|
||||
... base64.b64encode(hashlib.sha256(password).digest()),
|
||||
... bcrypt.gensalt()
|
||||
... )
|
||||
|
||||
Compatibility
|
||||
-------------
|
||||
|
||||
This library should be compatible with py-bcrypt and it will run on Python
|
||||
3.6+, and PyPy 3.
|
||||
|
||||
Security
|
||||
--------
|
||||
|
||||
``bcrypt`` follows the `same security policy as cryptography`_; if you
|
||||
identify a vulnerability, we ask you to contact us privately.
|
||||
|
||||
.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
|
||||
.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
|
||||
.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
|
||||
.. _`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt
|
|
@ -1,12 +0,0 @@
|
|||
bcrypt-4.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
bcrypt-4.2.1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
|
||||
bcrypt-4.2.1.dist-info/METADATA,sha256=9I1jiKhGDK7nBfaOm-rFxaqofBgxP8sPcSC8aHCjp-I,9807
|
||||
bcrypt-4.2.1.dist-info/RECORD,,
|
||||
bcrypt-4.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
bcrypt-4.2.1.dist-info/WHEEL,sha256=CWd2bcHWKTmDlOCtxYujGGtO2gFv1Z6YQy6Oqmd76wc,111
|
||||
bcrypt-4.2.1.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7
|
||||
bcrypt/__init__.py,sha256=zTtuqGGQxDgxcqm1f_0UbbPS6uCl-WxL98gSYDMSUbw,1000
|
||||
bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333
|
||||
bcrypt/__pycache__/__init__.cpython-312.pyc,,
|
||||
bcrypt/_bcrypt.abi3.so,sha256=JYDgngquj0nblGpVWSBo7z9h3uHTCWjJZ6JLUD5c5JA,624720
|
||||
bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@ -1,5 +0,0 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: setuptools (75.5.0)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp39-abi3-manylinux_2_28_x86_64
|
||||
|
|
@ -1 +0,0 @@
|
|||
bcrypt
|
|
@ -1,43 +0,0 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from ._bcrypt import (
|
||||
__author__,
|
||||
__copyright__,
|
||||
__email__,
|
||||
__license__,
|
||||
__summary__,
|
||||
__title__,
|
||||
__uri__,
|
||||
checkpw,
|
||||
gensalt,
|
||||
hashpw,
|
||||
kdf,
|
||||
)
|
||||
from ._bcrypt import (
|
||||
__version_ex__ as __version__,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"gensalt",
|
||||
"hashpw",
|
||||
"checkpw",
|
||||
"kdf",
|
||||
"__title__",
|
||||
"__summary__",
|
||||
"__uri__",
|
||||
"__version__",
|
||||
"__author__",
|
||||
"__email__",
|
||||
"__license__",
|
||||
"__copyright__",
|
||||
]
|
|
@ -1,10 +0,0 @@
|
|||
def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ...
|
||||
def hashpw(password: bytes, salt: bytes) -> bytes: ...
|
||||
def checkpw(password: bytes, hashed_password: bytes) -> bool: ...
|
||||
def kdf(
|
||||
password: bytes,
|
||||
salt: bytes,
|
||||
desired_key_bytes: int,
|
||||
rounds: int,
|
||||
ignore_few_rounds: bool = False,
|
||||
) -> bytes: ...
|
Binary file not shown.
Binary file not shown.
|
@ -1 +0,0 @@
|
|||
pip
|
|
@ -1,20 +0,0 @@
|
|||
Copyright 2010 Jason Kirtland
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a
|
||||
copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
@ -1,60 +0,0 @@
|
|||
Metadata-Version: 2.3
|
||||
Name: blinker
|
||||
Version: 1.9.0
|
||||
Summary: Fast, simple object-to-object and broadcast signaling
|
||||
Author: Jason Kirtland
|
||||
Maintainer-email: Pallets Ecosystem <contact@palletsprojects.com>
|
||||
Requires-Python: >=3.9
|
||||
Description-Content-Type: text/markdown
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Typing :: Typed
|
||||
Project-URL: Chat, https://discord.gg/pallets
|
||||
Project-URL: Documentation, https://blinker.readthedocs.io
|
||||
Project-URL: Source, https://github.com/pallets-eco/blinker/
|
||||
|
||||
# Blinker
|
||||
|
||||
Blinker provides a fast dispatching system that allows any number of
|
||||
interested parties to subscribe to events, or "signals".
|
||||
|
||||
|
||||
## Pallets Community Ecosystem
|
||||
|
||||
> [!IMPORTANT]\
|
||||
> This project is part of the Pallets Community Ecosystem. Pallets is the open
|
||||
> source organization that maintains Flask; Pallets-Eco enables community
|
||||
> maintenance of related projects. If you are interested in helping maintain
|
||||
> this project, please reach out on [the Pallets Discord server][discord].
|
||||
>
|
||||
> [discord]: https://discord.gg/pallets
|
||||
|
||||
|
||||
## Example
|
||||
|
||||
Signal receivers can subscribe to specific senders or receive signals
|
||||
sent by any sender.
|
||||
|
||||
```pycon
|
||||
>>> from blinker import signal
|
||||
>>> started = signal('round-started')
|
||||
>>> def each(round):
|
||||
...     print(f"Round {round}!")
|
||||
...
|
||||
>>> started.connect(each)
|
||||
|
||||
>>> def round_two(round):
|
||||
... print("This is round two.")
|
||||
...
|
||||
>>> started.connect(round_two, sender=2)
|
||||
|
||||
>>> for round in range(1, 4):
|
||||
... started.send(round)
|
||||
...
|
||||
Round 1!
|
||||
Round 2!
|
||||
This is round two.
|
||||
Round 3!
|
||||
```
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
blinker-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
blinker-1.9.0.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
|
||||
blinker-1.9.0.dist-info/METADATA,sha256=uIRiM8wjjbHkCtbCyTvctU37IAZk0kEe5kxAld1dvzA,1633
|
||||
blinker-1.9.0.dist-info/RECORD,,
|
||||
blinker-1.9.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
blinker-1.9.0.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
||||
blinker/__init__.py,sha256=I2EdZqpy4LyjX17Hn1yzJGWCjeLaVaPzsMgHkLfj_cQ,317
|
||||
blinker/__pycache__/__init__.cpython-312.pyc,,
|
||||
blinker/__pycache__/_utilities.cpython-312.pyc,,
|
||||
blinker/__pycache__/base.cpython-312.pyc,,
|
||||
blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
|
||||
blinker/base.py,sha256=QpDuvXXcwJF49lUBcH5BiST46Rz9wSG7VW_p7N_027M,19132
|
||||
blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@ -1,4 +0,0 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: flit 3.10.1
|
||||
Root-Is-Purelib: true
|
||||
Tag: py3-none-any
|
|
@ -1,17 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from .base import ANY
|
||||
from .base import default_namespace
|
||||
from .base import NamedSignal
|
||||
from .base import Namespace
|
||||
from .base import Signal
|
||||
from .base import signal
|
||||
|
||||
__all__ = [
|
||||
"ANY",
|
||||
"default_namespace",
|
||||
"NamedSignal",
|
||||
"Namespace",
|
||||
"Signal",
|
||||
"signal",
|
||||
]
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -1,64 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import collections.abc as c
|
||||
import inspect
|
||||
import typing as t
|
||||
from weakref import ref
|
||||
from weakref import WeakMethod
|
||||
|
||||
T = t.TypeVar("T")
|
||||
|
||||
|
||||
class Symbol:
|
||||
"""A constant symbol, nicer than ``object()``. Repeated calls return the
|
||||
same instance.
|
||||
|
||||
>>> Symbol('foo') is Symbol('foo')
|
||||
True
|
||||
>>> Symbol('foo')
|
||||
foo
|
||||
"""
|
||||
|
||||
symbols: t.ClassVar[dict[str, Symbol]] = {}
|
||||
|
||||
def __new__(cls, name: str) -> Symbol:
|
||||
if name in cls.symbols:
|
||||
return cls.symbols[name]
|
||||
|
||||
obj = super().__new__(cls)
|
||||
cls.symbols[name] = obj
|
||||
return obj
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return self.name
|
||||
|
||||
def __getnewargs__(self) -> tuple[t.Any, ...]:
|
||||
return (self.name,)
|
||||
|
||||
|
||||
def make_id(obj: object) -> c.Hashable:
|
||||
"""Get a stable identifier for a receiver or sender, to be used as a dict
|
||||
key or in a set.
|
||||
"""
|
||||
if inspect.ismethod(obj):
|
||||
# The id of a bound method is not stable, but the id of the unbound
|
||||
# function and instance are.
|
||||
return id(obj.__func__), id(obj.__self__)
|
||||
|
||||
if isinstance(obj, (str, int)):
|
||||
# Instances with the same value always compare equal and have the same
|
||||
# hash, even if the id may change.
|
||||
return obj
|
||||
|
||||
# Assume other types are not hashable but will always be the same instance.
|
||||
return id(obj)
|
||||
|
||||
|
||||
def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]:
|
||||
if inspect.ismethod(obj):
|
||||
return WeakMethod(obj, callback) # type: ignore[arg-type, return-value]
|
||||
|
||||
return ref(obj, callback)
|
|
@ -1,512 +0,0 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import collections.abc as c
|
||||
import sys
|
||||
import typing as t
|
||||
import weakref
|
||||
from collections import defaultdict
|
||||
from contextlib import contextmanager
|
||||
from functools import cached_property
|
||||
from inspect import iscoroutinefunction
|
||||
|
||||
from ._utilities import make_id
|
||||
from ._utilities import make_ref
|
||||
from ._utilities import Symbol
|
||||
|
||||
F = t.TypeVar("F", bound=c.Callable[..., t.Any])
|
||||
|
||||
ANY = Symbol("ANY")
|
||||
"""Symbol for "any sender"."""
|
||||
|
||||
ANY_ID = 0
|
||||
|
||||
|
||||
class Signal:
|
||||
"""A notification emitter.
|
||||
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
ANY = ANY
|
||||
"""An alias for the :data:`~blinker.ANY` sender symbol."""
|
||||
|
||||
set_class: type[set[t.Any]] = set
|
||||
"""The set class to use for tracking connected receivers and senders.
|
||||
Python's ``set`` is unordered. If receivers must be dispatched in the order
|
||||
they were connected, an ordered set implementation can be used.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
|
||||
@cached_property
|
||||
def receiver_connected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`connect` call.
|
||||
|
||||
The signal sender is the signal instance, and the :meth:`connect`
|
||||
arguments are passed through: ``receiver``, ``sender``, and ``weak``.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver connects.")
|
||||
|
||||
@cached_property
|
||||
def receiver_disconnected(self) -> Signal:
|
||||
"""Emitted at the end of each :meth:`disconnect` call.
|
||||
|
||||
The sender is the signal instance, and the :meth:`disconnect` arguments
|
||||
are passed through: ``receiver`` and ``sender``.
|
||||
|
||||
This signal is emitted **only** when :meth:`disconnect` is called
|
||||
explicitly. This signal cannot be emitted by an automatic disconnect
|
||||
when a weakly referenced receiver or sender goes out of scope, as the
|
||||
instance is no longer available to be used as the sender for this
|
||||
signal.
|
||||
|
||||
An alternative approach is available by subscribing to
|
||||
:attr:`receiver_connected` and setting up a custom weakref cleanup
|
||||
callback on weak receivers and senders.
|
||||
|
||||
.. versionadded:: 1.2
|
||||
"""
|
||||
return Signal(doc="Emitted after a receiver disconnects.")
|
||||
|
||||
def __init__(self, doc: str | None = None) -> None:
|
||||
if doc:
|
||||
self.__doc__ = doc
|
||||
|
||||
self.receivers: dict[
|
||||
t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
|
||||
] = {}
|
||||
"""The map of connected receivers. Useful to quickly check if any
|
||||
receivers are connected to the signal: ``if s.receivers:``. The
|
||||
structure and data is not part of the public API, but checking its
|
||||
boolean value is.
|
||||
"""
|
||||
|
||||
self.is_muted: bool = False
|
||||
self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
|
||||
self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
|
||||
|
||||
def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
|
||||
"""Connect ``receiver`` to be called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
"""
|
||||
receiver_id = make_id(receiver)
|
||||
sender_id = ANY_ID if sender is ANY else make_id(sender)
|
||||
|
||||
if weak:
|
||||
self.receivers[receiver_id] = make_ref(
|
||||
receiver, self._make_cleanup_receiver(receiver_id)
|
||||
)
|
||||
else:
|
||||
self.receivers[receiver_id] = receiver
|
||||
|
||||
self._by_sender[sender_id].add(receiver_id)
|
||||
self._by_receiver[receiver_id].add(sender_id)
|
||||
|
||||
if sender is not ANY and sender_id not in self._weak_senders:
|
||||
# store a cleanup for weakref-able senders
|
||||
try:
|
||||
self._weak_senders[sender_id] = make_ref(
|
||||
sender, self._make_cleanup_sender(sender_id)
|
||||
)
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
|
||||
try:
|
||||
self.receiver_connected.send(
|
||||
self, receiver=receiver, sender=sender, weak=weak
|
||||
)
|
||||
except TypeError:
|
||||
# TODO no explanation or test for this
|
||||
self.disconnect(receiver, sender)
|
||||
raise
|
||||
|
||||
return receiver
|
||||
|
||||
def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
|
||||
"""Connect the decorated function to be called when the signal is sent
|
||||
by ``sender``.
|
||||
|
||||
The decorated function will be called when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument along
|
||||
with any extra keyword arguments.
|
||||
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender. A receiver may be connected
|
||||
to multiple senders by calling :meth:`connect` multiple times.
|
||||
:param weak: Track the receiver with a :mod:`weakref`. The receiver will
|
||||
be automatically disconnected when it is garbage collected. When
|
||||
connecting a receiver defined within a function, set to ``False``,
|
||||
otherwise it will be disconnected when the function scope ends.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
|
||||
def decorator(fn: F) -> F:
|
||||
self.connect(fn, sender, weak)
|
||||
return fn
|
||||
|
||||
return decorator
|
||||
|
||||
@contextmanager
|
||||
def connected_to(
|
||||
self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
|
||||
) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily connects ``receiver`` to the
|
||||
signal while a ``with`` block executes. When the block exits, the
|
||||
receiver is disconnected. Useful for tests.
|
||||
|
||||
:param receiver: The callable to call when :meth:`send` is called with
|
||||
the given ``sender``, passing ``sender`` as a positional argument
|
||||
along with any extra keyword arguments.
|
||||
:param sender: Any object or :data:`ANY`. ``receiver`` will only be
|
||||
called when :meth:`send` is called with this sender. If ``ANY``, the
|
||||
receiver will be called for any sender.
|
||||
|
||||
.. versionadded:: 1.1
|
||||
"""
|
||||
self.connect(receiver, sender=sender, weak=False)
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.disconnect(receiver)
|
||||
|
||||
@contextmanager
|
||||
def muted(self) -> c.Generator[None, None, None]:
|
||||
"""A context manager that temporarily disables the signal. No receivers
|
||||
will be called if the signal is sent, until the ``with`` block exits.
|
||||
Useful for tests.
|
||||
"""
|
||||
self.is_muted = True
|
||||
|
||||
try:
|
||||
yield None
|
||||
finally:
|
||||
self.is_muted = False
|
||||
|
||||
def send(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_async_wrapper: c.Callable[
|
||||
[c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Call all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _async_wrapper: Will be called on any receivers that are async
|
||||
coroutines to turn them into sync callables. For example, could run
|
||||
the receiver with an event loop.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionchanged:: 1.7
|
||||
Added the ``_async_wrapper`` argument.
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if iscoroutinefunction(receiver):
|
||||
if _async_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a coroutine function.")
|
||||
|
||||
result = _async_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
async def send_async(
|
||||
self,
|
||||
sender: t.Any | None = None,
|
||||
/,
|
||||
*,
|
||||
_sync_wrapper: c.Callable[
|
||||
[c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]
|
||||
]
|
||||
| None = None,
|
||||
**kwargs: t.Any,
|
||||
) -> list[tuple[c.Callable[..., t.Any], t.Any]]:
|
||||
"""Await all receivers that are connected to the given ``sender``
|
||||
or :data:`ANY`. Each receiver is called with ``sender`` as a positional
|
||||
argument along with any extra keyword arguments. Return a list of
|
||||
``(receiver, return value)`` tuples.
|
||||
|
||||
The order receivers are called is undefined, but can be influenced by
|
||||
setting :attr:`set_class`.
|
||||
|
||||
If a receiver raises an exception, that exception will propagate up.
|
||||
This makes debugging straightforward, with an assumption that correctly
|
||||
implemented receivers will not raise.
|
||||
|
||||
:param sender: Call receivers connected to this sender, in addition to
|
||||
those connected to :data:`ANY`.
|
||||
:param _sync_wrapper: Will be called on any receivers that are sync
|
||||
callables to turn them into async coroutines. For example,
|
||||
could call the receiver in a thread.
|
||||
:param kwargs: Extra keyword arguments to pass to each receiver.
|
||||
|
||||
.. versionadded:: 1.7
|
||||
"""
|
||||
if self.is_muted:
|
||||
return []
|
||||
|
||||
results = []
|
||||
|
||||
for receiver in self.receivers_for(sender):
|
||||
if not iscoroutinefunction(receiver):
|
||||
if _sync_wrapper is None:
|
||||
raise RuntimeError("Cannot send to a non-coroutine function.")
|
||||
|
||||
result = await _sync_wrapper(receiver)(sender, **kwargs)
|
||||
else:
|
||||
result = await receiver(sender, **kwargs)
|
||||
|
||||
results.append((receiver, result))
|
||||
|
||||
return results
|
||||
|
||||
def has_receivers_for(self, sender: t.Any) -> bool:
|
||||
"""Check if there is at least one receiver that will be called with the
|
||||
given ``sender``. A receiver connected to :data:`ANY` will always be
|
||||
called, regardless of sender. Does not check if weakly referenced
|
||||
receivers are still live. See :meth:`receivers_for` for a stronger
|
||||
search.
|
||||
|
||||
:param sender: Check for receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
if not self.receivers:
|
||||
return False
|
||||
|
||||
if self._by_sender[ANY_ID]:
|
||||
return True
|
||||
|
||||
if sender is ANY:
|
||||
return False
|
||||
|
||||
return make_id(sender) in self._by_sender
|
||||
|
||||
def receivers_for(
|
||||
self, sender: t.Any
|
||||
) -> c.Generator[c.Callable[..., t.Any], None, None]:
|
||||
"""Yield each receiver to be called for ``sender``, in addition to those
|
||||
to be called for :data:`ANY`. Weakly referenced receivers that are not
|
||||
live will be disconnected and skipped.
|
||||
|
||||
:param sender: Yield receivers connected to this sender, in addition
|
||||
to those connected to :data:`ANY`.
|
||||
"""
|
||||
# TODO: test receivers_for(ANY)
|
||||
if not self.receivers:
|
||||
return
|
||||
|
||||
sender_id = make_id(sender)
|
||||
|
||||
if sender_id in self._by_sender:
|
||||
ids = self._by_sender[ANY_ID] | self._by_sender[sender_id]
|
||||
else:
|
||||
ids = self._by_sender[ANY_ID].copy()
|
||||
|
||||
for receiver_id in ids:
|
||||
receiver = self.receivers.get(receiver_id)
|
||||
|
||||
if receiver is None:
|
||||
continue
|
||||
|
||||
if isinstance(receiver, weakref.ref):
|
||||
strong = receiver()
|
||||
|
||||
if strong is None:
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
continue
|
||||
|
||||
yield strong
|
||||
else:
|
||||
yield receiver
|
||||
|
||||
def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None:
|
||||
"""Disconnect ``receiver`` from being called when the signal is sent by
|
||||
``sender``.
|
||||
|
||||
:param receiver: A connected receiver callable.
|
||||
:param sender: Disconnect from only this sender. By default, disconnect
|
||||
from all senders.
|
||||
"""
|
||||
sender_id: c.Hashable
|
||||
|
||||
if sender is ANY:
|
||||
sender_id = ANY_ID
|
||||
else:
|
||||
sender_id = make_id(sender)
|
||||
|
||||
receiver_id = make_id(receiver)
|
||||
self._disconnect(receiver_id, sender_id)
|
||||
|
||||
if (
|
||||
"receiver_disconnected" in self.__dict__
|
||||
and self.receiver_disconnected.receivers
|
||||
):
|
||||
self.receiver_disconnected.send(self, receiver=receiver, sender=sender)
|
||||
|
||||
def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None:
|
||||
if sender_id == ANY_ID:
|
||||
if self._by_receiver.pop(receiver_id, None) is not None:
|
||||
for bucket in self._by_sender.values():
|
||||
bucket.discard(receiver_id)
|
||||
|
||||
self.receivers.pop(receiver_id, None)
|
||||
else:
|
||||
self._by_sender[sender_id].discard(receiver_id)
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
def _make_cleanup_receiver(
|
||||
self, receiver_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]:
|
||||
"""Create a callback function to disconnect a weakly referenced
|
||||
receiver when it is garbage collected.
|
||||
"""
|
||||
|
||||
def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None:
|
||||
# If the interpreter is shutting down, disconnecting can result in a
|
||||
# weird ignored exception. Don't call it in that case.
|
||||
if not sys.is_finalizing():
|
||||
self._disconnect(receiver_id, ANY_ID)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _make_cleanup_sender(
|
||||
self, sender_id: c.Hashable
|
||||
) -> c.Callable[[weakref.ref[t.Any]], None]:
|
||||
"""Create a callback function to disconnect all receivers for a weakly
|
||||
referenced sender when it is garbage collected.
|
||||
"""
|
||||
assert sender_id != ANY_ID
|
||||
|
||||
def cleanup(ref: weakref.ref[t.Any]) -> None:
|
||||
self._weak_senders.pop(sender_id, None)
|
||||
|
||||
for receiver_id in self._by_sender.pop(sender_id, ()):
|
||||
self._by_receiver[receiver_id].discard(sender_id)
|
||||
|
||||
return cleanup
|
||||
|
||||
def _cleanup_bookkeeping(self) -> None:
|
||||
"""Prune unused sender/receiver bookkeeping. Not threadsafe.
|
||||
|
||||
Connecting & disconnecting leaves behind a small amount of bookkeeping
|
||||
data. Typical workloads using Blinker, for example in most web apps,
|
||||
Flask, CLI scripts, etc., are not adversely affected by this
|
||||
bookkeeping.
|
||||
|
||||
With a long-running process performing dynamic signal routing with high
|
||||
volume, e.g. connecting to function closures, senders are all unique
|
||||
object instances. Doing all of this over and over may cause memory usage
|
||||
to grow due to extraneous bookkeeping. (An empty ``set`` for each stale
|
||||
sender/receiver pair.)
|
||||
|
||||
This method will prune that bookkeeping away, with the caveat that such
|
||||
pruning is not threadsafe. The risk is that cleanup of a fully
|
||||
disconnected receiver/sender pair occurs while another thread is
|
||||
connecting that same pair. If you are in the highly dynamic, unique
|
||||
receiver/sender situation that has led you to this method, that failure
|
||||
mode is perhaps not a big deal for you.
|
||||
"""
|
||||
for mapping in (self._by_sender, self._by_receiver):
|
||||
for ident, bucket in list(mapping.items()):
|
||||
if not bucket:
|
||||
mapping.pop(ident, None)
|
||||
|
||||
def _clear_state(self) -> None:
|
||||
"""Disconnect all receivers and senders. Useful for tests."""
|
||||
self._weak_senders.clear()
|
||||
self.receivers.clear()
|
||||
self._by_sender.clear()
|
||||
self._by_receiver.clear()
|
||||
|
||||
|
||||
class NamedSignal(Signal):
|
||||
"""A named generic notification emitter. The name is not used by the signal
|
||||
itself, but matches the key in the :class:`Namespace` that it belongs to.
|
||||
|
||||
:param name: The name of the signal within the namespace.
|
||||
:param doc: The docstring for the signal.
|
||||
"""
|
||||
|
||||
def __init__(self, name: str, doc: str | None = None) -> None:
|
||||
super().__init__(doc)
|
||||
|
||||
#: The name of this signal.
|
||||
self.name: str = name
|
||||
|
||||
def __repr__(self) -> str:
|
||||
base = super().__repr__()
|
||||
return f"{base[:-1]}; {self.name!r}>" # noqa: E702
|
||||
|
||||
|
||||
class Namespace(dict[str, NamedSignal]):
|
||||
"""A dict mapping names to signals."""
|
||||
|
||||
def signal(self, name: str, doc: str | None = None) -> NamedSignal:
|
||||
"""Return the :class:`NamedSignal` for the given ``name``, creating it
|
||||
if required. Repeated calls with the same name return the same signal.
|
||||
|
||||
:param name: The name of the signal.
|
||||
:param doc: The docstring of the signal.
|
||||
"""
|
||||
if name not in self:
|
||||
self[name] = NamedSignal(name, doc)
|
||||
|
||||
return self[name]
|
||||
|
||||
|
||||
class _PNamespaceSignal(t.Protocol):
|
||||
def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ...
|
||||
|
||||
|
||||
default_namespace: Namespace = Namespace()
|
||||
"""A default :class:`Namespace` for creating named signals. :func:`signal`
|
||||
creates a :class:`NamedSignal` in this namespace.
|
||||
"""
|
||||
|
||||
signal: _PNamespaceSignal = default_namespace.signal
|
||||
"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given
|
||||
``name``, creating it if required. Repeated calls with the same name return the
|
||||
same signal.
|
||||
"""
|
|
@ -1 +0,0 @@
|
|||
pip
|
|
@ -1,26 +0,0 @@
|
|||
|
||||
Except when otherwise stated (look for LICENSE files in directories or
|
||||
information at the beginning of each file) all software and
|
||||
documentation is licensed as follows:
|
||||
|
||||
The MIT License
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or
|
||||
sell copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included
|
||||
in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
|
||||
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
||||
|
|
@ -1,40 +0,0 @@
|
|||
Metadata-Version: 2.1
|
||||
Name: cffi
|
||||
Version: 1.17.1
|
||||
Summary: Foreign Function Interface for Python calling C code.
|
||||
Home-page: http://cffi.readthedocs.org
|
||||
Author: Armin Rigo, Maciej Fijalkowski
|
||||
Author-email: python-cffi@googlegroups.com
|
||||
License: MIT
|
||||
Project-URL: Documentation, http://cffi.readthedocs.org/
|
||||
Project-URL: Source Code, https://github.com/python-cffi/cffi
|
||||
Project-URL: Issue Tracker, https://github.com/python-cffi/cffi/issues
|
||||
Project-URL: Changelog, https://cffi.readthedocs.io/en/latest/whatsnew.html
|
||||
Project-URL: Downloads, https://github.com/python-cffi/cffi/releases
|
||||
Project-URL: Contact, https://groups.google.com/forum/#!forum/python-cffi
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.8
|
||||
Classifier: Programming Language :: Python :: 3.9
|
||||
Classifier: Programming Language :: Python :: 3.10
|
||||
Classifier: Programming Language :: Python :: 3.11
|
||||
Classifier: Programming Language :: Python :: 3.12
|
||||
Classifier: Programming Language :: Python :: 3.13
|
||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||
Classifier: License :: OSI Approved :: MIT License
|
||||
Requires-Python: >=3.8
|
||||
License-File: LICENSE
|
||||
Requires-Dist: pycparser
|
||||
|
||||
|
||||
CFFI
|
||||
====
|
||||
|
||||
Foreign Function Interface for Python calling C code.
|
||||
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
|
||||
|
||||
Contact
|
||||
-------
|
||||
|
||||
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
|
|
@ -1,49 +0,0 @@
|
|||
_cffi_backend.cpython-312-x86_64-linux-gnu.so,sha256=-fK60bkCudr6tjAHt4dA3x_CHaOWgVs_Lb2J0JGO3Po,1114632
|
||||
cffi-1.17.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||
cffi-1.17.1.dist-info/LICENSE,sha256=BLgPWwd7vtaICM_rreteNSPyqMmpZJXFh72W3x6sKjM,1294
|
||||
cffi-1.17.1.dist-info/METADATA,sha256=u6nuvP_qPJKu2zvIbi2zkGzVu7KjnnRIYUFyIrOY3j4,1531
|
||||
cffi-1.17.1.dist-info/RECORD,,
|
||||
cffi-1.17.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
cffi-1.17.1.dist-info/WHEEL,sha256=h7F_RlbsFAwUaa98BSEEv6RQhdTqVo2FhuJDzTSKXxc,151
|
||||
cffi-1.17.1.dist-info/entry_points.txt,sha256=y6jTxnyeuLnL-XJcDv8uML3n6wyYiGRg8MTp_QGJ9Ho,75
|
||||
cffi-1.17.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19
|
||||
cffi/__init__.py,sha256=H6t_ebva6EeHpUuItFLW1gbRp94eZRNJODLaWKdbx1I,513
|
||||
cffi/__pycache__/__init__.cpython-312.pyc,,
|
||||
cffi/__pycache__/_imp_emulation.cpython-312.pyc,,
|
||||
cffi/__pycache__/_shimmed_dist_utils.cpython-312.pyc,,
|
||||
cffi/__pycache__/api.cpython-312.pyc,,
|
||||
cffi/__pycache__/backend_ctypes.cpython-312.pyc,,
|
||||
cffi/__pycache__/cffi_opcode.cpython-312.pyc,,
|
||||
cffi/__pycache__/commontypes.cpython-312.pyc,,
|
||||
cffi/__pycache__/cparser.cpython-312.pyc,,
|
||||
cffi/__pycache__/error.cpython-312.pyc,,
|
||||
cffi/__pycache__/ffiplatform.cpython-312.pyc,,
|
||||
cffi/__pycache__/lock.cpython-312.pyc,,
|
||||
cffi/__pycache__/model.cpython-312.pyc,,
|
||||
cffi/__pycache__/pkgconfig.cpython-312.pyc,,
|
||||
cffi/__pycache__/recompiler.cpython-312.pyc,,
|
||||
cffi/__pycache__/setuptools_ext.cpython-312.pyc,,
|
||||
cffi/__pycache__/vengine_cpy.cpython-312.pyc,,
|
||||
cffi/__pycache__/vengine_gen.cpython-312.pyc,,
|
||||
cffi/__pycache__/verifier.cpython-312.pyc,,
|
||||
cffi/_cffi_errors.h,sha256=zQXt7uR_m8gUW-fI2hJg0KoSkJFwXv8RGUkEDZ177dQ,3908
|
||||
cffi/_cffi_include.h,sha256=Exhmgm9qzHWzWivjfTe0D7Xp4rPUkVxdNuwGhMTMzbw,15055
|
||||
cffi/_embedding.h,sha256=EDKw5QrLvQoe3uosXB3H1xPVTYxsn33eV3A43zsA_Fw,18787
|
||||
cffi/_imp_emulation.py,sha256=RxREG8zAbI2RPGBww90u_5fi8sWdahpdipOoPzkp7C0,2960
|
||||
cffi/_shimmed_dist_utils.py,sha256=Bjj2wm8yZbvFvWEx5AEfmqaqZyZFhYfoyLLQHkXZuao,2230
|
||||
cffi/api.py,sha256=alBv6hZQkjpmZplBphdaRn2lPO9-CORs_M7ixabvZWI,42169
|
||||
cffi/backend_ctypes.py,sha256=h5ZIzLc6BFVXnGyc9xPqZWUS7qGy7yFSDqXe68Sa8z4,42454
|
||||
cffi/cffi_opcode.py,sha256=JDV5l0R0_OadBX_uE7xPPTYtMdmpp8I9UYd6av7aiDU,5731
|
||||
cffi/commontypes.py,sha256=7N6zPtCFlvxXMWhHV08psUjdYIK2XgsN3yo5dgua_v4,2805
|
||||
cffi/cparser.py,sha256=0qI3mEzZSNVcCangoyXOoAcL-RhpQL08eG8798T024s,44789
|
||||
cffi/error.py,sha256=v6xTiS4U0kvDcy4h_BDRo5v39ZQuj-IMRYLv5ETddZs,877
|
||||
cffi/ffiplatform.py,sha256=avxFjdikYGJoEtmJO7ewVmwG_VEVl6EZ_WaNhZYCqv4,3584
|
||||
cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747
|
||||
cffi/model.py,sha256=W30UFQZE73jL5Mx5N81YT77us2W2iJjTm0XYfnwz1cg,21797
|
||||
cffi/parse_c_type.h,sha256=OdwQfwM9ktq6vlCB43exFQmxDBtj2MBNdK8LYl15tjw,5976
|
||||
cffi/pkgconfig.py,sha256=LP1w7vmWvmKwyqLaU1Z243FOWGNQMrgMUZrvgFuOlco,4374
|
||||
cffi/recompiler.py,sha256=sim4Tm7lamt2Jn8uzKN0wMYp6ODByk3g7of47-h9LD4,65367
|
||||
cffi/setuptools_ext.py,sha256=-ebj79lO2_AUH-kRcaja2pKY1Z_5tloGwsJgzK8P3Cc,8871
|
||||
cffi/vengine_cpy.py,sha256=8UagT6ZEOZf6Dju7_CfNulue8CnsHLEzJYhnqUhoF04,43752
|
||||
cffi/vengine_gen.py,sha256=DUlEIrDiVin1Pnhn1sfoamnS5NLqfJcOdhRoeSNeJRg,26939
|
||||
cffi/verifier.py,sha256=oX8jpaohg2Qm3aHcznidAdvrVm5N4sQYG0a3Eo5mIl4,11182
|
|
@ -1,6 +0,0 @@
|
|||
Wheel-Version: 1.0
|
||||
Generator: setuptools (74.1.1)
|
||||
Root-Is-Purelib: false
|
||||
Tag: cp312-cp312-manylinux_2_17_x86_64
|
||||
Tag: cp312-cp312-manylinux2014_x86_64
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
[distutils.setup_keywords]
|
||||
cffi_modules = cffi.setuptools_ext:cffi_modules
|
|
@ -1,2 +0,0 @@
|
|||
_cffi_backend
|
||||
cffi
|
|
@ -1,14 +0,0 @@
|
|||
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
||||
'FFIError']
|
||||
|
||||
from .api import FFI
|
||||
from .error import CDefError, FFIError, VerificationError, VerificationMissing
|
||||
from .error import PkgConfigError
|
||||
|
||||
__version__ = "1.17.1"
|
||||
__version_info__ = (1, 17, 1)
|
||||
|
||||
# The verifier module file names are based on the CRC32 of a string that
|
||||
# contains the following version number. It may be older than __version__
|
||||
# if nothing is clearly incompatible.
|
||||
__version_verifier_modules__ = "0.8.6"
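A minimal sketch of the ABI-mode API re-exported here (``FFI``), along the lines of
the upstream overview example; it assumes a POSIX system where ``dlopen(None)``
exposes the C standard library:

.. code:: python

    from cffi import FFI

    ffi = FFI()
    ffi.cdef("int printf(const char *format, ...);")   # declare the C function
    C = ffi.dlopen(None)                                # load the standard C library
    arg = ffi.new("char[]", b"world")                   # owned char array for %s
    C.printf(b"hi there, %s.\n", arg)                   # calls the real printf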
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -1,149 +0,0 @@
|
|||
#ifndef CFFI_MESSAGEBOX
|
||||
# ifdef _MSC_VER
|
||||
# define CFFI_MESSAGEBOX 1
|
||||
# else
|
||||
# define CFFI_MESSAGEBOX 0
|
||||
# endif
|
||||
#endif
|
||||
|
||||
|
||||
#if CFFI_MESSAGEBOX
|
||||
/* Windows only: logic to take the Python-CFFI embedding logic
|
||||
initialization errors and display them in a background thread
|
||||
with MessageBox. The idea is that if the whole program closes
|
||||
as a result of this problem, then likely it is already a console
|
||||
program and you can read the stderr output in the console too.
|
||||
If it is not a console program, then it will likely show its own
|
||||
dialog to complain, or generally not abruptly close, and for this
|
||||
case the background thread should stay alive.
|
||||
*/
|
||||
static void *volatile _cffi_bootstrap_text;
|
||||
|
||||
static PyObject *_cffi_start_error_capture(void)
|
||||
{
|
||||
PyObject *result = NULL;
|
||||
PyObject *x, *m, *bi;
|
||||
|
||||
if (InterlockedCompareExchangePointer(&_cffi_bootstrap_text,
|
||||
(void *)1, NULL) != NULL)
|
||||
return (PyObject *)1;
|
||||
|
||||
m = PyImport_AddModule("_cffi_error_capture");
|
||||
if (m == NULL)
|
||||
goto error;
|
||||
|
||||
result = PyModule_GetDict(m);
|
||||
if (result == NULL)
|
||||
goto error;
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
bi = PyImport_ImportModule("builtins");
|
||||
#else
|
||||
bi = PyImport_ImportModule("__builtin__");
|
||||
#endif
|
||||
if (bi == NULL)
|
||||
goto error;
|
||||
PyDict_SetItemString(result, "__builtins__", bi);
|
||||
Py_DECREF(bi);
|
||||
|
||||
x = PyRun_String(
|
||||
"import sys\n"
|
||||
"class FileLike:\n"
|
||||
" def write(self, x):\n"
|
||||
" try:\n"
|
||||
" of.write(x)\n"
|
||||
" except: pass\n"
|
||||
" self.buf += x\n"
|
||||
" def flush(self):\n"
|
||||
" pass\n"
|
||||
"fl = FileLike()\n"
|
||||
"fl.buf = ''\n"
|
||||
"of = sys.stderr\n"
|
||||
"sys.stderr = fl\n"
|
||||
"def done():\n"
|
||||
" sys.stderr = of\n"
|
||||
" return fl.buf\n", /* make sure the returned value stays alive */
|
||||
Py_file_input,
|
||||
result, result);
|
||||
Py_XDECREF(x);
|
||||
|
||||
error:
|
||||
if (PyErr_Occurred())
|
||||
{
|
||||
PyErr_WriteUnraisable(Py_None);
|
||||
PyErr_Clear();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
#pragma comment(lib, "user32.lib")
|
||||
|
||||
static DWORD WINAPI _cffi_bootstrap_dialog(LPVOID ignored)
|
||||
{
|
||||
Sleep(666); /* may be interrupted if the whole process is closing */
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
MessageBoxW(NULL, (wchar_t *)_cffi_bootstrap_text,
|
||||
L"Python-CFFI error",
|
||||
MB_OK | MB_ICONERROR);
|
||||
#else
|
||||
MessageBoxA(NULL, (char *)_cffi_bootstrap_text,
|
||||
"Python-CFFI error",
|
||||
MB_OK | MB_ICONERROR);
|
||||
#endif
|
||||
_cffi_bootstrap_text = NULL;
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void _cffi_stop_error_capture(PyObject *ecap)
|
||||
{
|
||||
PyObject *s;
|
||||
void *text;
|
||||
|
||||
if (ecap == (PyObject *)1)
|
||||
return;
|
||||
|
||||
if (ecap == NULL)
|
||||
goto error;
|
||||
|
||||
s = PyRun_String("done()", Py_eval_input, ecap, ecap);
|
||||
if (s == NULL)
|
||||
goto error;
|
||||
|
||||
/* Show a dialog box, but in a background thread, and
|
||||
never show multiple dialog boxes at once. */
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
text = PyUnicode_AsWideCharString(s, NULL);
|
||||
#else
|
||||
text = PyString_AsString(s);
|
||||
#endif
|
||||
|
||||
_cffi_bootstrap_text = text;
|
||||
|
||||
if (text != NULL)
|
||||
{
|
||||
HANDLE h;
|
||||
h = CreateThread(NULL, 0, _cffi_bootstrap_dialog,
|
||||
NULL, 0, NULL);
|
||||
if (h != NULL)
|
||||
CloseHandle(h);
|
||||
}
|
||||
/* decref the string, but it should stay alive as 'fl.buf'
|
||||
in the small module above. It will really be freed only if
|
||||
we later get another similar error. So it's a leak of at
|
||||
most one copy of the small module. That's fine for this
|
||||
situation which is usually a "fatal error" anyway. */
|
||||
Py_DECREF(s);
|
||||
PyErr_Clear();
|
||||
return;
|
||||
|
||||
error:
|
||||
_cffi_bootstrap_text = NULL;
|
||||
PyErr_Clear();
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
static PyObject *_cffi_start_error_capture(void) { return NULL; }
|
||||
static void _cffi_stop_error_capture(PyObject *ecap) { }
|
||||
|
||||
#endif
|
|
@ -1,389 +0,0 @@
|
|||
#define _CFFI_
|
||||
|
||||
/* We try to define Py_LIMITED_API before including Python.h.
|
||||
|
||||
Mess: we can only define it if Py_DEBUG, Py_TRACE_REFS and
|
||||
Py_REF_DEBUG are not defined. This is a best-effort approximation:
|
||||
we can learn about Py_DEBUG from pyconfig.h, but it is unclear if
|
||||
the same works for the other two macros. Py_DEBUG implies them,
|
||||
but not the other way around.
|
||||
|
||||
The implementation is messy (issue #350): on Windows, with _MSC_VER,
|
||||
we have to define Py_LIMITED_API even before including pyconfig.h.
|
||||
In that case, we guess what pyconfig.h will do to the macros above,
|
||||
and check our guess after the #include.
|
||||
|
||||
Note that on Windows, with CPython 3.x, you need >= 3.5 and virtualenv
|
||||
version >= 16.0.0. With older versions of either, you don't get a
|
||||
copy of PYTHON3.DLL in the virtualenv. We can't check the version of
|
||||
CPython *before* we even include pyconfig.h. ffi.set_source() puts
|
||||
a ``#define _CFFI_NO_LIMITED_API'' at the start of this file if it is
|
||||
running on Windows < 3.5, as an attempt at fixing it, but that's
|
||||
arguably wrong because it may not be the target version of Python.
|
||||
Still better than nothing I guess. As another workaround, you can
|
||||
remove the definition of Py_LIMITED_API here.
|
||||
|
||||
See also 'py_limited_api' in cffi/setuptools_ext.py.
|
||||
*/
|
||||
#if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
|
||||
# ifdef _MSC_VER
|
||||
# if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
|
||||
# define Py_LIMITED_API
|
||||
# endif
|
||||
# include <pyconfig.h>
|
||||
/* sanity-check: Py_LIMITED_API will cause crashes if any of these
|
||||
are also defined. Normally, the Python file PC/pyconfig.h does not
|
||||
cause any of these to be defined, with the exception that _DEBUG
|
||||
causes Py_DEBUG. Double-check that. */
|
||||
# ifdef Py_LIMITED_API
|
||||
# if defined(Py_DEBUG)
|
||||
# error "pyconfig.h unexpectedly defines Py_DEBUG, but Py_LIMITED_API is set"
|
||||
# endif
|
||||
# if defined(Py_TRACE_REFS)
|
||||
# error "pyconfig.h unexpectedly defines Py_TRACE_REFS, but Py_LIMITED_API is set"
|
||||
# endif
|
||||
# if defined(Py_REF_DEBUG)
|
||||
# error "pyconfig.h unexpectedly defines Py_REF_DEBUG, but Py_LIMITED_API is set"
|
||||
# endif
|
||||
# endif
|
||||
# else
|
||||
# include <pyconfig.h>
|
||||
# if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG) && !defined(_CFFI_NO_LIMITED_API)
|
||||
# define Py_LIMITED_API
|
||||
# endif
|
||||
# endif
|
||||
#endif
|
||||
|
||||
#include <Python.h>
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
#include <stddef.h>
|
||||
#include "parse_c_type.h"
|
||||
|
||||
/* this block of #ifs should be kept exactly identical between
|
||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||
and cffi/_cffi_include.h */
|
||||
#if defined(_MSC_VER)
|
||||
# include <malloc.h> /* for alloca() */
|
||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||
typedef __int8 int8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
typedef __int8 int_least8_t;
|
||||
typedef __int16 int_least16_t;
|
||||
typedef __int32 int_least32_t;
|
||||
typedef __int64 int_least64_t;
|
||||
typedef unsigned __int8 uint_least8_t;
|
||||
typedef unsigned __int16 uint_least16_t;
|
||||
typedef unsigned __int32 uint_least32_t;
|
||||
typedef unsigned __int64 uint_least64_t;
|
||||
typedef __int8 int_fast8_t;
|
||||
typedef __int16 int_fast16_t;
|
||||
typedef __int32 int_fast32_t;
|
||||
typedef __int64 int_fast64_t;
|
||||
typedef unsigned __int8 uint_fast8_t;
|
||||
typedef unsigned __int16 uint_fast16_t;
|
||||
typedef unsigned __int32 uint_fast32_t;
|
||||
typedef unsigned __int64 uint_fast64_t;
|
||||
typedef __int64 intmax_t;
|
||||
typedef unsigned __int64 uintmax_t;
|
||||
# else
|
||||
# include <stdint.h>
|
||||
# endif
|
||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||
# ifndef __cplusplus
|
||||
typedef unsigned char _Bool;
|
||||
# endif
|
||||
# endif
|
||||
# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
|
||||
# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
|
||||
#else
|
||||
# include <stdint.h>
|
||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||
# include <alloca.h>
|
||||
# endif
|
||||
# define _cffi_float_complex_t float _Complex
|
||||
# define _cffi_double_complex_t double _Complex
|
||||
#endif
|
||||
|
||||
#ifdef __GNUC__
|
||||
# define _CFFI_UNUSED_FN __attribute__((unused))
|
||||
#else
|
||||
# define _CFFI_UNUSED_FN /* nothing */
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
# ifndef _Bool
|
||||
typedef bool _Bool; /* semi-hackish: C++ has no _Bool; bool is builtin */
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/********** CPython-specific section **********/
|
||||
#ifndef PYPY_VERSION
|
||||
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
# define PyInt_FromLong PyLong_FromLong
|
||||
#endif
|
||||
|
||||
#define _cffi_from_c_double PyFloat_FromDouble
|
||||
#define _cffi_from_c_float PyFloat_FromDouble
|
||||
#define _cffi_from_c_long PyInt_FromLong
|
||||
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
|
||||
#define _cffi_from_c_longlong PyLong_FromLongLong
|
||||
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
|
||||
#define _cffi_from_c__Bool PyBool_FromLong
|
||||
|
||||
#define _cffi_to_c_double PyFloat_AsDouble
|
||||
#define _cffi_to_c_float PyFloat_AsDouble
|
||||
|
||||
#define _cffi_from_c_int(x, type) \
|
||||
(((type)-1) > 0 ? /* unsigned */ \
|
||||
(sizeof(type) < sizeof(long) ? \
|
||||
PyInt_FromLong((long)x) : \
|
||||
sizeof(type) == sizeof(long) ? \
|
||||
PyLong_FromUnsignedLong((unsigned long)x) : \
|
||||
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
|
||||
(sizeof(type) <= sizeof(long) ? \
|
||||
PyInt_FromLong((long)x) : \
|
||||
PyLong_FromLongLong((long long)x)))
|
||||
|
||||
#define _cffi_to_c_int(o, type) \
|
||||
((type)( \
|
||||
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
|
||||
: (type)_cffi_to_c_i8(o)) : \
|
||||
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
|
||||
: (type)_cffi_to_c_i16(o)) : \
|
||||
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
|
||||
: (type)_cffi_to_c_i32(o)) : \
|
||||
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
|
||||
: (type)_cffi_to_c_i64(o)) : \
|
||||
(Py_FatalError("unsupported size for type " #type), (type)0)))
|
||||
|
||||
#define _cffi_to_c_i8 \
|
||||
((int(*)(PyObject *))_cffi_exports[1])
|
||||
#define _cffi_to_c_u8 \
|
||||
((int(*)(PyObject *))_cffi_exports[2])
|
||||
#define _cffi_to_c_i16 \
|
||||
((int(*)(PyObject *))_cffi_exports[3])
|
||||
#define _cffi_to_c_u16 \
|
||||
((int(*)(PyObject *))_cffi_exports[4])
|
||||
#define _cffi_to_c_i32 \
|
||||
((int(*)(PyObject *))_cffi_exports[5])
|
||||
#define _cffi_to_c_u32 \
|
||||
((unsigned int(*)(PyObject *))_cffi_exports[6])
|
||||
#define _cffi_to_c_i64 \
|
||||
((long long(*)(PyObject *))_cffi_exports[7])
|
||||
#define _cffi_to_c_u64 \
|
||||
((unsigned long long(*)(PyObject *))_cffi_exports[8])
|
||||
#define _cffi_to_c_char \
|
||||
((int(*)(PyObject *))_cffi_exports[9])
|
||||
#define _cffi_from_c_pointer \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[10])
|
||||
#define _cffi_to_c_pointer \
|
||||
((char *(*)(PyObject *, struct _cffi_ctypedescr *))_cffi_exports[11])
|
||||
#define _cffi_get_struct_layout \
|
||||
not used any more
|
||||
#define _cffi_restore_errno \
|
||||
((void(*)(void))_cffi_exports[13])
|
||||
#define _cffi_save_errno \
|
||||
((void(*)(void))_cffi_exports[14])
|
||||
#define _cffi_from_c_char \
|
||||
((PyObject *(*)(char))_cffi_exports[15])
|
||||
#define _cffi_from_c_deref \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[16])
|
||||
#define _cffi_to_c \
|
||||
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[17])
|
||||
#define _cffi_from_c_struct \
|
||||
((PyObject *(*)(char *, struct _cffi_ctypedescr *))_cffi_exports[18])
|
||||
#define _cffi_to_c_wchar_t \
|
||||
((_cffi_wchar_t(*)(PyObject *))_cffi_exports[19])
|
||||
#define _cffi_from_c_wchar_t \
|
||||
((PyObject *(*)(_cffi_wchar_t))_cffi_exports[20])
|
||||
#define _cffi_to_c_long_double \
|
||||
((long double(*)(PyObject *))_cffi_exports[21])
|
||||
#define _cffi_to_c__Bool \
|
||||
((_Bool(*)(PyObject *))_cffi_exports[22])
|
||||
#define _cffi_prepare_pointer_call_argument \
|
||||
((Py_ssize_t(*)(struct _cffi_ctypedescr *, \
|
||||
PyObject *, char **))_cffi_exports[23])
|
||||
#define _cffi_convert_array_from_object \
|
||||
((int(*)(char *, struct _cffi_ctypedescr *, PyObject *))_cffi_exports[24])
|
||||
#define _CFFI_CPIDX 25
|
||||
#define _cffi_call_python \
|
||||
((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
|
||||
#define _cffi_to_c_wchar3216_t \
|
||||
((int(*)(PyObject *))_cffi_exports[26])
|
||||
#define _cffi_from_c_wchar3216_t \
|
||||
((PyObject *(*)(int))_cffi_exports[27])
|
||||
#define _CFFI_NUM_EXPORTS 28
|
||||
|
||||
struct _cffi_ctypedescr;
|
||||
|
||||
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
|
||||
|
||||
#define _cffi_type(index) ( \
|
||||
assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
|
||||
(struct _cffi_ctypedescr *)_cffi_types[index])
|
||||
|
||||
static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
|
||||
const struct _cffi_type_context_s *ctx)
|
||||
{
|
||||
PyObject *module, *o_arg, *new_module;
|
||||
void *raw[] = {
|
||||
(void *)module_name,
|
||||
(void *)version,
|
||||
(void *)_cffi_exports,
|
||||
(void *)ctx,
|
||||
};
|
||||
|
||||
module = PyImport_ImportModule("_cffi_backend");
|
||||
if (module == NULL)
|
||||
goto failure;
|
||||
|
||||
o_arg = PyLong_FromVoidPtr((void *)raw);
|
||||
if (o_arg == NULL)
|
||||
goto failure;
|
||||
|
||||
new_module = PyObject_CallMethod(
|
||||
module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
|
||||
|
||||
Py_DECREF(o_arg);
|
||||
Py_DECREF(module);
|
||||
return new_module;
|
||||
|
||||
failure:
|
||||
Py_XDECREF(module);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
#ifdef HAVE_WCHAR_H
|
||||
typedef wchar_t _cffi_wchar_t;
|
||||
#else
|
||||
typedef uint16_t _cffi_wchar_t; /* same random pick as _cffi_backend.c */
|
||||
#endif
|
||||
|
||||
_CFFI_UNUSED_FN static uint16_t _cffi_to_c_char16_t(PyObject *o)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 2)
|
||||
return (uint16_t)_cffi_to_c_wchar_t(o);
|
||||
else
|
||||
return (uint16_t)_cffi_to_c_wchar3216_t(o);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char16_t(uint16_t x)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 2)
|
||||
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
|
||||
else
|
||||
return _cffi_from_c_wchar3216_t((int)x);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static int _cffi_to_c_char32_t(PyObject *o)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 4)
|
||||
return (int)_cffi_to_c_wchar_t(o);
|
||||
else
|
||||
return (int)_cffi_to_c_wchar3216_t(o);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static PyObject *_cffi_from_c_char32_t(unsigned int x)
|
||||
{
|
||||
if (sizeof(_cffi_wchar_t) == 4)
|
||||
return _cffi_from_c_wchar_t((_cffi_wchar_t)x);
|
||||
else
|
||||
return _cffi_from_c_wchar3216_t((int)x);
|
||||
}
|
||||
|
||||
union _cffi_union_alignment_u {
|
||||
unsigned char m_char;
|
||||
unsigned short m_short;
|
||||
unsigned int m_int;
|
||||
unsigned long m_long;
|
||||
unsigned long long m_longlong;
|
||||
float m_float;
|
||||
double m_double;
|
||||
long double m_longdouble;
|
||||
};
|
||||
|
||||
struct _cffi_freeme_s {
|
||||
struct _cffi_freeme_s *next;
|
||||
union _cffi_union_alignment_u alignment;
|
||||
};
|
||||
|
||||
_CFFI_UNUSED_FN static int
|
||||
_cffi_convert_array_argument(struct _cffi_ctypedescr *ctptr, PyObject *arg,
|
||||
char **output_data, Py_ssize_t datasize,
|
||||
struct _cffi_freeme_s **freeme)
|
||||
{
|
||||
char *p;
|
||||
if (datasize < 0)
|
||||
return -1;
|
||||
|
||||
p = *output_data;
|
||||
if (p == NULL) {
|
||||
struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
|
||||
offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
|
||||
if (fp == NULL)
|
||||
return -1;
|
||||
fp->next = *freeme;
|
||||
*freeme = fp;
|
||||
p = *output_data = (char *)&fp->alignment;
|
||||
}
|
||||
memset((void *)p, 0, (size_t)datasize);
|
||||
return _cffi_convert_array_from_object(p, ctptr, arg);
|
||||
}
|
||||
|
||||
_CFFI_UNUSED_FN static void
|
||||
_cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
|
||||
{
|
||||
do {
|
||||
void *p = (void *)freeme;
|
||||
freeme = freeme->next;
|
||||
PyObject_Free(p);
|
||||
} while (freeme != NULL);
|
||||
}
|
||||
|
||||
/********** end CPython-specific section **********/
|
||||
#else
|
||||
_CFFI_UNUSED_FN
|
||||
static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
|
||||
# define _cffi_call_python _cffi_call_python_org
|
||||
#endif
|
||||
|
||||
|
||||
#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
|
||||
|
||||
#define _cffi_prim_int(size, sign) \
|
||||
((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
|
||||
(size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
|
||||
(size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
|
||||
(size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
|
||||
_CFFI__UNKNOWN_PRIM)
|
||||
|
||||
#define _cffi_prim_float(size) \
|
||||
((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
|
||||
(size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
|
||||
(size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
|
||||
_CFFI__UNKNOWN_FLOAT_PRIM)
|
||||
|
||||
#define _cffi_check_int(got, got_nonpos, expected) \
|
||||
((got_nonpos) == (expected <= 0) && \
|
||||
(got) == (unsigned long long)expected)
|
||||
|
||||
#ifdef MS_WIN32
|
||||
# define _cffi_stdcall __stdcall
|
||||
#else
|
||||
# define _cffi_stdcall /* nothing */
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
|
@ -1,550 +0,0 @@
|
|||
|
||||
/***** Support code for embedding *****/
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
|
||||
#if defined(_WIN32)
|
||||
# define CFFI_DLLEXPORT __declspec(dllexport)
|
||||
#elif defined(__GNUC__)
|
||||
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
|
||||
#else
|
||||
# define CFFI_DLLEXPORT /* nothing */
|
||||
#endif
|
||||
|
||||
|
||||
/* There are two global variables of type _cffi_call_python_fnptr:
|
||||
|
||||
* _cffi_call_python, which we declare just below, is the one called
|
||||
by ``extern "Python"`` implementations.
|
||||
|
||||
* _cffi_call_python_org, which on CPython is actually part of the
|
||||
_cffi_exports[] array, is the function pointer copied from
|
||||
_cffi_backend. If _cffi_start_python() fails, then this is set
|
||||
to NULL; otherwise, it should never be NULL.
|
||||
|
||||
After initialization is complete, both are equal. However, the
|
||||
first one remains equal to &_cffi_start_and_call_python until the
|
||||
very end of initialization, when we are (or should be) sure that
|
||||
concurrent threads also see a completely initialized world, and
|
||||
only then is it changed.
|
||||
*/
|
||||
#undef _cffi_call_python
|
||||
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
|
||||
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
|
||||
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
|
||||
|
||||
|
||||
#ifndef _MSC_VER
|
||||
/* --- Assuming a GCC not infinitely old --- */
|
||||
# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
|
||||
# define cffi_write_barrier() __sync_synchronize()
|
||||
# if !defined(__amd64__) && !defined(__x86_64__) && \
|
||||
!defined(__i386__) && !defined(__i386)
|
||||
# define cffi_read_barrier() __sync_synchronize()
|
||||
# else
|
||||
# define cffi_read_barrier() (void)0
|
||||
# endif
|
||||
#else
|
||||
/* --- Windows threads version --- */
|
||||
# include <Windows.h>
|
||||
# define cffi_compare_and_swap(l,o,n) \
|
||||
(InterlockedCompareExchangePointer(l,n,o) == (o))
|
||||
# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
|
||||
# define cffi_read_barrier() (void)0
|
||||
static volatile LONG _cffi_dummy;
|
||||
#endif
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
# ifndef _MSC_VER
|
||||
# include <pthread.h>
|
||||
static pthread_mutex_t _cffi_embed_startup_lock;
|
||||
# else
|
||||
static CRITICAL_SECTION _cffi_embed_startup_lock;
|
||||
# endif
|
||||
static char _cffi_embed_startup_lock_ready = 0;
|
||||
#endif
|
||||
|
||||
static void _cffi_acquire_reentrant_mutex(void)
|
||||
{
|
||||
static void *volatile lock = NULL;
|
||||
|
||||
while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: pthread_mutex_init() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
if (!_cffi_embed_startup_lock_ready) {
|
||||
# ifndef _MSC_VER
|
||||
pthread_mutexattr_t attr;
|
||||
pthread_mutexattr_init(&attr);
|
||||
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
|
||||
pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
|
||||
# else
|
||||
InitializeCriticalSection(&_cffi_embed_startup_lock);
|
||||
# endif
|
||||
_cffi_embed_startup_lock_ready = 1;
|
||||
}
|
||||
#endif
|
||||
|
||||
while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
|
||||
;
|
||||
|
||||
#ifndef _MSC_VER
|
||||
pthread_mutex_lock(&_cffi_embed_startup_lock);
|
||||
#else
|
||||
EnterCriticalSection(&_cffi_embed_startup_lock);
|
||||
#endif
|
||||
}
|
||||
|
||||
static void _cffi_release_reentrant_mutex(void)
|
||||
{
|
||||
#ifndef _MSC_VER
|
||||
pthread_mutex_unlock(&_cffi_embed_startup_lock);
|
||||
#else
|
||||
LeaveCriticalSection(&_cffi_embed_startup_lock);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
/********** CPython-specific section **********/
|
||||
#ifndef PYPY_VERSION
|
||||
|
||||
#include "_cffi_errors.h"
|
||||
|
||||
|
||||
#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
|
||||
|
||||
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
|
||||
|
||||
static void _cffi_py_initialize(void)
|
||||
{
|
||||
/* XXX use initsigs=0, which "skips initialization registration of
|
||||
signal handlers, which might be useful when Python is
|
||||
embedded" according to the Python docs. But review and think
|
||||
if it should be a user-controllable setting.
|
||||
|
||||
XXX we should also give a way to write errors to a buffer
|
||||
instead of to stderr.
|
||||
|
||||
XXX if importing 'site' fails, CPython (any version) calls
|
||||
exit(). Should we try to work around this behavior here?
|
||||
*/
|
||||
Py_InitializeEx(0);
|
||||
}
|
||||
|
||||
static int _cffi_initialize_python(void)
|
||||
{
|
||||
/* This initializes Python, imports _cffi_backend, and then the
|
||||
present .dll/.so is set up as a CPython C extension module.
|
||||
*/
|
||||
int result;
|
||||
PyGILState_STATE state;
|
||||
PyObject *pycode=NULL, *global_dict=NULL, *x;
|
||||
PyObject *builtins;
|
||||
|
||||
state = PyGILState_Ensure();
|
||||
|
||||
/* Call the initxxx() function from the present module. It will
|
||||
create and initialize us as a CPython extension module, instead
|
||||
of letting the startup Python code do it---it might reimport
|
||||
the same .dll/.so and maybe get confused on some platforms.
|
||||
It might also have troubles locating the .dll/.so again for all
|
||||
I know.
|
||||
*/
|
||||
(void)_CFFI_PYTHON_STARTUP_FUNC();
|
||||
if (PyErr_Occurred())
|
||||
goto error;
|
||||
|
||||
/* Now run the Python code provided to ffi.embedding_init_code().
|
||||
*/
|
||||
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
|
||||
"<init code for '" _CFFI_MODULE_NAME "'>",
|
||||
Py_file_input);
|
||||
if (pycode == NULL)
|
||||
goto error;
|
||||
global_dict = PyDict_New();
|
||||
if (global_dict == NULL)
|
||||
goto error;
|
||||
builtins = PyEval_GetBuiltins();
|
||||
if (builtins == NULL)
|
||||
goto error;
|
||||
if (PyDict_SetItemString(global_dict, "__builtins__", builtins) < 0)
|
||||
goto error;
|
||||
x = PyEval_EvalCode(
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
(PyCodeObject *)
|
||||
#endif
|
||||
pycode, global_dict, global_dict);
|
||||
if (x == NULL)
|
||||
goto error;
|
||||
Py_DECREF(x);
|
||||
|
||||
/* Done! Now if we've been called from
|
||||
_cffi_start_and_call_python() in an ``extern "Python"``, we can
|
||||
only hope that the Python code did correctly set up the
|
||||
corresponding @ffi.def_extern() function. Otherwise, the
|
||||
general logic of ``extern "Python"`` functions (inside the
|
||||
_cffi_backend module) will find that the reference is still
|
||||
missing and print an error.
|
||||
*/
|
||||
result = 0;
|
||||
done:
|
||||
Py_XDECREF(pycode);
|
||||
Py_XDECREF(global_dict);
|
||||
PyGILState_Release(state);
|
||||
return result;
|
||||
|
||||
error:;
|
||||
{
|
||||
/* Print as much information as potentially useful.
|
||||
Debugging load-time failures with embedding is not fun
|
||||
*/
|
||||
PyObject *ecap;
|
||||
PyObject *exception, *v, *tb, *f, *modules, *mod;
|
||||
PyErr_Fetch(&exception, &v, &tb);
|
||||
ecap = _cffi_start_error_capture();
|
||||
f = PySys_GetObject((char *)"stderr");
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString(
|
||||
"Failed to initialize the Python-CFFI embedding logic:\n\n", f);
|
||||
}
|
||||
|
||||
if (exception != NULL) {
|
||||
PyErr_NormalizeException(&exception, &v, &tb);
|
||||
PyErr_Display(exception, v, tb);
|
||||
}
|
||||
Py_XDECREF(exception);
|
||||
Py_XDECREF(v);
|
||||
Py_XDECREF(tb);
|
||||
|
||||
if (f != NULL && f != Py_None) {
|
||||
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
|
||||
"\ncompiled with cffi version: 1.17.1"
|
||||
"\n_cffi_backend module: ", f);
|
||||
modules = PyImport_GetModuleDict();
|
||||
mod = PyDict_GetItemString(modules, "_cffi_backend");
|
||||
if (mod == NULL) {
|
||||
PyFile_WriteString("not loaded", f);
|
||||
}
|
||||
else {
|
||||
v = PyObject_GetAttrString(mod, "__file__");
|
||||
PyFile_WriteObject(v, f, 0);
|
||||
Py_XDECREF(v);
|
||||
}
|
||||
PyFile_WriteString("\nsys.path: ", f);
|
||||
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
|
||||
PyFile_WriteString("\n\n", f);
|
||||
}
|
||||
_cffi_stop_error_capture(ecap);
|
||||
}
|
||||
result = -1;
|
||||
goto done;
|
||||
}
|
||||
|
||||
#if PY_VERSION_HEX < 0x03080000
|
||||
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
|
||||
#endif
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
/* This does the basic initialization of Python. It can be called
|
||||
completely concurrently from unrelated threads. It assumes
|
||||
that we don't hold the GIL before (if it exists), and we don't
|
||||
hold it afterwards.
|
||||
|
||||
(What it really does used to be completely different in Python 2
|
||||
and Python 3, with the Python 2 solution avoiding the spin-lock
|
||||
around the Py_InitializeEx() call. However, after recent changes
|
||||
to CPython 2.7 (issue #358) it no longer works. So we use the
|
||||
Python 3 solution everywhere.)
|
||||
|
||||
This initializes Python by calling Py_InitializeEx().
|
||||
Important: this must not be called concurrently at all.
|
||||
So we use a global variable as a simple spin lock. This global
|
||||
variable must be from 'libpythonX.Y.so', not from this
|
||||
cffi-based extension module, because it must be shared from
|
||||
different cffi-based extension modules.
|
||||
|
||||
In Python < 3.8, we choose
|
||||
_PyParser_TokenNames[0] as a completely arbitrary pointer value
|
||||
that is never written to. The default is to point to the
|
||||
string "ENDMARKER". We change it temporarily to point to the
|
||||
next character in that string. (Yes, I know it's REALLY
|
||||
obscure.)
|
||||
|
||||
In Python >= 3.8, this string array is no longer writable, so
|
||||
instead we pick PyCapsuleType.tp_version_tag. We can't change
|
||||
Python < 3.8 because someone might use a mixture of cffi
|
||||
embedded modules, some of which were compiled before this file
|
||||
changed.
|
||||
|
||||
In Python >= 3.12, this stopped working because that particular
|
||||
tp_version_tag gets modified during interpreter startup. It's
|
||||
arguably a bad idea before 3.12 too, but again we can't change
|
||||
that because someone might use a mixture of cffi embedded
|
||||
modules, and no-one reported a bug so far. In Python >= 3.12
|
||||
we go instead for PyCapsuleType.tp_as_buffer, which is supposed
|
||||
to always be NULL. We write to it temporarily a pointer to
|
||||
a struct full of NULLs, which is semantically the same.
|
||||
*/
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
# if PY_VERSION_HEX < 0x03080000
|
||||
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
|
||||
char *old_value, *locked_value;
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
locked_value = old_value + 1;
|
||||
if (old_value[0] == 'E') {
|
||||
assert(old_value[1] == 'N');
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
assert(old_value[0] == 'N');
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# else
|
||||
# if PY_VERSION_HEX < 0x030C0000
|
||||
int volatile *lock = (int volatile *)&PyCapsule_Type.tp_version_tag;
|
||||
int old_value, locked_value = -42;
|
||||
assert(!(PyCapsule_Type.tp_flags & Py_TPFLAGS_HAVE_VERSION_TAG));
|
||||
# else
|
||||
static struct ebp_s { PyBufferProcs buf; int mark; } empty_buffer_procs;
|
||||
empty_buffer_procs.mark = -42;
|
||||
PyBufferProcs *volatile *lock = (PyBufferProcs *volatile *)
|
||||
&PyCapsule_Type.tp_as_buffer;
|
||||
PyBufferProcs *old_value, *locked_value = &empty_buffer_procs.buf;
|
||||
# endif
|
||||
|
||||
while (1) { /* spin loop */
|
||||
old_value = *lock;
|
||||
if (old_value == 0) {
|
||||
if (cffi_compare_and_swap(lock, old_value, locked_value))
|
||||
break;
|
||||
}
|
||||
else {
|
||||
# if PY_VERSION_HEX < 0x030C0000
|
||||
assert(old_value == locked_value);
|
||||
# else
|
||||
/* The pointer should point to a possibly different
|
||||
empty_buffer_procs from another C extension module */
|
||||
assert(((struct ebp_s *)old_value)->mark == -42);
|
||||
# endif
|
||||
/* should ideally do a spin loop instruction here, but
|
||||
hard to do it portably and doesn't really matter I
|
||||
think: PyEval_InitThreads() should be very fast, and
|
||||
this is only run at start-up anyway. */
|
||||
}
|
||||
}
|
||||
# endif
|
||||
#endif
|
||||
|
||||
/* call Py_InitializeEx() */
|
||||
if (!Py_IsInitialized()) {
|
||||
_cffi_py_initialize();
|
||||
#if PY_VERSION_HEX < 0x03070000
|
||||
PyEval_InitThreads();
|
||||
#endif
|
||||
PyEval_SaveThread(); /* release the GIL */
|
||||
/* the returned tstate must be the one that has been stored into the
|
||||
autoTLSkey by _PyGILState_Init() called from Py_Initialize(). */
|
||||
}
|
||||
else {
|
||||
#if PY_VERSION_HEX < 0x03070000
|
||||
/* PyEval_InitThreads() is always a no-op from CPython 3.7 */
|
||||
PyGILState_STATE state = PyGILState_Ensure();
|
||||
PyEval_InitThreads();
|
||||
PyGILState_Release(state);
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef WITH_THREAD
|
||||
/* release the lock */
|
||||
while (!cffi_compare_and_swap(lock, locked_value, old_value))
|
||||
;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
/********** end CPython-specific section **********/
|
||||
|
||||
|
||||
#else
|
||||
|
||||
|
||||
/********** PyPy-specific section **********/
|
||||
|
||||
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
|
||||
|
||||
static struct _cffi_pypy_init_s {
|
||||
const char *name;
|
||||
void *func; /* function pointer */
|
||||
const char *code;
|
||||
} _cffi_pypy_init = {
|
||||
_CFFI_MODULE_NAME,
|
||||
_CFFI_PYTHON_STARTUP_FUNC,
|
||||
_CFFI_PYTHON_STARTUP_CODE,
|
||||
};
|
||||
|
||||
extern int pypy_carefully_make_gil(const char *);
|
||||
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
|
||||
|
||||
static int _cffi_carefully_make_gil(void)
|
||||
{
|
||||
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
|
||||
}
|
||||
|
||||
static int _cffi_initialize_python(void)
|
||||
{
|
||||
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
|
||||
}
|
||||
|
||||
/********** end PyPy-specific section **********/
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef __GNUC__
|
||||
__attribute__((noinline))
|
||||
#endif
|
||||
static _cffi_call_python_fnptr _cffi_start_python(void)
|
||||
{
|
||||
/* Delicate logic to initialize Python. This function can be
|
||||
called multiple times concurrently, e.g. when the process calls
|
||||
its first ``extern "Python"`` functions in multiple threads at
|
||||
once. It can also be called recursively, in which case we must
|
||||
ignore it. We also have to consider what occurs if several
|
||||
different cffi-based extensions reach this code in parallel
|
||||
threads---it is a different copy of the code, then, and we
|
||||
can't have any shared global variable unless it comes from
|
||||
'libpythonX.Y.so'.
|
||||
|
||||
Idea:
|
||||
|
||||
* _cffi_carefully_make_gil(): "carefully" call
|
||||
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
|
||||
|
||||
* then we use a (local) custom lock to make sure that a call to this
|
||||
cffi-based extension will wait if another call to the *same*
|
||||
extension is running the initialization in another thread.
|
||||
It is reentrant, so that a recursive call will not block, but
|
||||
only one from a different thread.
|
||||
|
||||
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
|
||||
At this point, concurrent calls to Py_InitializeEx() are not
|
||||
possible: we have the GIL.
|
||||
|
||||
* do the rest of the specific initialization, which may
|
||||
temporarily release the GIL but not the custom lock.
|
||||
Only release the custom lock when we are done.
|
||||
*/
|
||||
static char called = 0;
|
||||
|
||||
if (_cffi_carefully_make_gil() != 0)
|
||||
return NULL;
|
||||
|
||||
_cffi_acquire_reentrant_mutex();
|
||||
|
||||
/* Here the GIL exists, but we don't have it. We're only protected
|
||||
from concurrency by the reentrant mutex. */
|
||||
|
||||
/* This file only initializes the embedded module once, the first
|
||||
time this is called, even if there are subinterpreters. */
|
||||
if (!called) {
|
||||
called = 1; /* invoke _cffi_initialize_python() only once,
|
||||
but don't set '_cffi_call_python' right now,
|
||||
otherwise concurrent threads won't call
|
||||
this function at all (we need them to wait) */
|
||||
if (_cffi_initialize_python() == 0) {
|
||||
/* now initialization is finished. Switch to the fast-path. */
|
||||
|
||||
/* We would like nobody to see the new value of
|
||||
'_cffi_call_python' without also seeing the rest of the
|
||||
data initialized. However, this is not possible. But
|
||||
the new value of '_cffi_call_python' is the function
|
||||
'cffi_call_python()' from _cffi_backend. So: */
|
||||
cffi_write_barrier();
|
||||
/* ^^^ we put a write barrier here, and a corresponding
|
||||
read barrier at the start of cffi_call_python(). This
|
||||
ensures that after that read barrier, we see everything
|
||||
done here before the write barrier.
|
||||
*/
|
||||
|
||||
assert(_cffi_call_python_org != NULL);
|
||||
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
else {
|
||||
/* initialization failed. Reset this to NULL, even if it was
|
||||
already set to some other value. Future calls to
|
||||
_cffi_start_python() are still forced to occur, and will
|
||||
always return NULL from now on. */
|
||||
_cffi_call_python_org = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
_cffi_release_reentrant_mutex();
|
||||
|
||||
return (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||
}
|
||||
|
||||
static
|
||||
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
|
||||
{
|
||||
_cffi_call_python_fnptr fnptr;
|
||||
int current_err = errno;
|
||||
#ifdef _MSC_VER
|
||||
int current_lasterr = GetLastError();
|
||||
#endif
|
||||
fnptr = _cffi_start_python();
|
||||
if (fnptr == NULL) {
|
||||
fprintf(stderr, "function %s() called, but initialization code "
|
||||
"failed. Returning 0.\n", externpy->name);
|
||||
memset(args, 0, externpy->size_of_result);
|
||||
}
|
||||
#ifdef _MSC_VER
|
||||
SetLastError(current_lasterr);
|
||||
#endif
|
||||
errno = current_err;
|
||||
|
||||
if (fnptr != NULL)
|
||||
fnptr(externpy, args);
|
||||
}
|
||||
|
||||
|
||||
/* The cffi_start_python() function makes sure Python is initialized
|
||||
and our cffi module is set up. It can be called manually from the
|
||||
user C code. The same effect is obtained automatically from any
|
||||
dll-exported ``extern "Python"`` function. This function returns
|
||||
-1 if initialization failed, 0 if all is OK. */
|
||||
_CFFI_UNUSED_FN
|
||||
static int cffi_start_python(void)
|
||||
{
|
||||
if (_cffi_call_python == &_cffi_start_and_call_python) {
|
||||
if (_cffi_start_python() == NULL)
|
||||
return -1;
|
||||
}
|
||||
cffi_read_barrier();
|
||||
return 0;
|
||||
}
|
||||
|
||||
#undef cffi_compare_and_swap
|
||||
#undef cffi_write_barrier
|
||||
#undef cffi_read_barrier
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
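The support code above is what backs cffi's embedding mode from the Python side. As a hedged illustration of the workflow that generates and uses it (the module name "my_plugin" and the do_stuff() signature are made up for the example; the API calls are the documented cffi ones):

import cffi

ffi = cffi.FFI()
# declare the functions exported from the built DLL/.so to C callers
ffi.embedding_api("""
    int do_stuff(int, int);
""")
ffi.set_source("my_plugin", "")
# Python source run by _cffi_initialize_python() on first use; it must
# register an implementation for each embedding_api() function
ffi.embedding_init_code("""
    from my_plugin import ffi

    @ffi.def_extern()
    def do_stuff(x, y):
        return x + y
""")
ffi.compile(target="libmy_plugin.*", verbose=True)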
|
|
@ -1,83 +0,0 @@
|
|||
|
||||
try:
|
||||
# this works on Python < 3.12
|
||||
from imp import *
|
||||
|
||||
except ImportError:
|
||||
# this is a limited emulation for Python >= 3.12.
|
||||
# Note that this is used only for tests or for the old ffi.verify().
|
||||
# This is copied from the source code of Python 3.11.
|
||||
|
||||
from _imp import (acquire_lock, release_lock,
|
||||
is_builtin, is_frozen)
|
||||
|
||||
from importlib._bootstrap import _load
|
||||
|
||||
from importlib import machinery
|
||||
import os
|
||||
import sys
|
||||
import tokenize
|
||||
|
||||
SEARCH_ERROR = 0
|
||||
PY_SOURCE = 1
|
||||
PY_COMPILED = 2
|
||||
C_EXTENSION = 3
|
||||
PY_RESOURCE = 4
|
||||
PKG_DIRECTORY = 5
|
||||
C_BUILTIN = 6
|
||||
PY_FROZEN = 7
|
||||
PY_CODERESOURCE = 8
|
||||
IMP_HOOK = 9
|
||||
|
||||
def get_suffixes():
|
||||
extensions = [(s, 'rb', C_EXTENSION)
|
||||
for s in machinery.EXTENSION_SUFFIXES]
|
||||
source = [(s, 'r', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES]
|
||||
bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES]
|
||||
return extensions + source + bytecode
|
||||
|
||||
def find_module(name, path=None):
|
||||
if not isinstance(name, str):
|
||||
raise TypeError("'name' must be a str, not {}".format(type(name)))
|
||||
elif not isinstance(path, (type(None), list)):
|
||||
# Backwards-compatibility
|
||||
raise RuntimeError("'path' must be None or a list, "
|
||||
"not {}".format(type(path)))
|
||||
|
||||
if path is None:
|
||||
if is_builtin(name):
|
||||
return None, None, ('', '', C_BUILTIN)
|
||||
elif is_frozen(name):
|
||||
return None, None, ('', '', PY_FROZEN)
|
||||
else:
|
||||
path = sys.path
|
||||
|
||||
for entry in path:
|
||||
package_directory = os.path.join(entry, name)
|
||||
for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]:
|
||||
package_file_name = '__init__' + suffix
|
||||
file_path = os.path.join(package_directory, package_file_name)
|
||||
if os.path.isfile(file_path):
|
||||
return None, package_directory, ('', '', PKG_DIRECTORY)
|
||||
for suffix, mode, type_ in get_suffixes():
|
||||
file_name = name + suffix
|
||||
file_path = os.path.join(entry, file_name)
|
||||
if os.path.isfile(file_path):
|
||||
break
|
||||
else:
|
||||
continue
|
||||
break # Break out of outer loop when breaking out of inner loop.
|
||||
else:
|
||||
raise ImportError(name, name=name)
|
||||
|
||||
encoding = None
|
||||
if 'b' not in mode:
|
||||
with open(file_path, 'rb') as file:
|
||||
encoding = tokenize.detect_encoding(file.readline)[0]
|
||||
file = open(file_path, mode, encoding=encoding)
|
||||
return file, file_path, (suffix, mode, type_)
|
||||
|
||||
def load_dynamic(name, path, file=None):
|
||||
loader = machinery.ExtensionFileLoader(name, path)
|
||||
spec = machinery.ModuleSpec(name=name, loader=loader, origin=path)
|
||||
return _load(spec)
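A minimal sketch of using this emulation in place of the removed imp module; the module name "example_ext" is illustrative.

file, path, (suffix, mode, type_) = find_module("example_ext")
if type_ == C_EXTENSION:
    mod = load_dynamic("example_ext", path)   # load the C extension via importlib
if file is not None:
    file.close()                              # find_module() may return an open file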
|
|
@ -1,45 +0,0 @@
|
|||
"""
|
||||
Temporary shim module to indirect the bits of distutils we need from setuptools/distutils while providing useful
|
||||
error messages beyond `No module named 'distutils'` on Python >= 3.12, or when setuptools' vendored distutils is broken.
|
||||
|
||||
This is a compromise to avoid a hard-dep on setuptools for Python >= 3.12, since many users don't need runtime compilation support from CFFI.
|
||||
"""
|
||||
import sys
|
||||
|
||||
try:
|
||||
# import setuptools first; this is the most robust way to ensure its embedded distutils is available
|
||||
# (the .pth shim should usually work, but this is even more robust)
|
||||
import setuptools
|
||||
except Exception as ex:
|
||||
if sys.version_info >= (3, 12):
|
||||
# Python 3.12 has no built-in distutils to fall back on, so any import problem is fatal
|
||||
raise Exception("This CFFI feature requires setuptools on Python >= 3.12. The setuptools module is missing or non-functional.") from ex
|
||||
|
||||
# silently ignore on older Pythons (support fallback to stdlib distutils where available)
|
||||
else:
|
||||
del setuptools
|
||||
|
||||
try:
|
||||
# bring in just the bits of distutils we need, whether they really came from setuptools or stdlib-embedded distutils
|
||||
from distutils import log, sysconfig
|
||||
from distutils.ccompiler import CCompiler
|
||||
from distutils.command.build_ext import build_ext
|
||||
from distutils.core import Distribution, Extension
|
||||
from distutils.dir_util import mkpath
|
||||
from distutils.errors import DistutilsSetupError, CompileError, LinkError
|
||||
from distutils.log import set_threshold, set_verbosity
|
||||
|
||||
if sys.platform == 'win32':
|
||||
try:
|
||||
# FUTURE: msvc9compiler module was removed in setuptools 74; consider removing, as it's only used by an ancient patch in `recompiler`
|
||||
from distutils.msvc9compiler import MSVCCompiler
|
||||
except ImportError:
|
||||
MSVCCompiler = None
|
||||
except Exception as ex:
|
||||
if sys.version_info >= (3, 12):
|
||||
raise Exception("This CFFI feature requires setuptools on Python >= 3.12. Please install the setuptools package.") from ex
|
||||
|
||||
# anything older: distutils itself is missing or broken, so report that, chaining the original import error
|
||||
raise Exception("This CFFI feature requires distutils. Please install the distutils or setuptools package.") from ex
|
||||
|
||||
del sys
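A hedged sketch of how the rest of cffi is expected to consume this shim: import the distutils pieces re-exported above instead of importing distutils directly. The extension name and source file are illustrative.

from cffi._shimmed_dist_utils import Extension, mkpath

ext = Extension(name="_example", sources=["_example.c"])
mkpath("build/tmp")   # create the build directory, parents included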
|
|
@ -1,967 +0,0 @@
|
|||
import sys, types
|
||||
from .lock import allocate_lock
|
||||
from .error import CDefError
|
||||
from . import model
|
||||
|
||||
try:
|
||||
callable
|
||||
except NameError:
|
||||
# Python 3.1
|
||||
from collections import Callable
|
||||
callable = lambda x: isinstance(x, Callable)
|
||||
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
# Python 3.x
|
||||
basestring = str
|
||||
|
||||
_unspecified = object()
|
||||
|
||||
|
||||
|
||||
class FFI(object):
|
||||
r'''
|
||||
The main top-level class that you instantiate once, or once per module.
|
||||
|
||||
Example usage:
|
||||
|
||||
ffi = FFI()
|
||||
ffi.cdef("""
|
||||
int printf(const char *, ...);
|
||||
""")
|
||||
|
||||
C = ffi.dlopen(None) # standard library
|
||||
-or-
|
||||
C = ffi.verify() # use a C compiler: verify the decl above is right
|
||||
|
||||
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
||||
'''
|
||||
|
||||
def __init__(self, backend=None):
|
||||
"""Create an FFI instance. The 'backend' argument is used to
|
||||
select a non-default backend, mostly for tests.
|
||||
"""
|
||||
if backend is None:
|
||||
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
||||
# _cffi_backend.so compiled.
|
||||
import _cffi_backend as backend
|
||||
from . import __version__
|
||||
if backend.__version__ != __version__:
|
||||
# bad version! Try to be as explicit as possible.
|
||||
if hasattr(backend, '__file__'):
|
||||
# CPython
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. When we import the top-level '_cffi_backend' extension module, we get version %s, located in %r. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__,
|
||||
backend.__version__, backend.__file__))
|
||||
else:
|
||||
# PyPy
|
||||
raise Exception("Version mismatch: this is the 'cffi' package version %s, located in %r. This interpreter comes with a built-in '_cffi_backend' module, which is version %s. The two versions should be equal; check your installation." % (
|
||||
__version__, __file__, backend.__version__))
|
||||
# (If you insist you can also try to pass the option
|
||||
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
||||
# rely on it! It's probably not going to work well.)
|
||||
|
||||
from . import cparser
|
||||
self._backend = backend
|
||||
self._lock = allocate_lock()
|
||||
self._parser = cparser.Parser()
|
||||
self._cached_btypes = {}
|
||||
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
||||
self._new_types = types.ModuleType('new_types').__dict__
|
||||
self._function_caches = []
|
||||
self._libraries = []
|
||||
self._cdefsources = []
|
||||
self._included_ffis = []
|
||||
self._windows_unicode = None
|
||||
self._init_once_cache = {}
|
||||
self._cdef_version = None
|
||||
self._embedding = None
|
||||
self._typecache = model.get_typecache(backend)
|
||||
if hasattr(backend, 'set_ffi'):
|
||||
backend.set_ffi(self)
|
||||
for name in list(backend.__dict__):
|
||||
if name.startswith('RTLD_'):
|
||||
setattr(self, name, getattr(backend, name))
|
||||
#
|
||||
with self._lock:
|
||||
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
||||
self.BCharA = self._get_cached_btype(model.char_array_type)
|
||||
if isinstance(backend, types.ModuleType):
|
||||
# _cffi_backend: attach these constants to the class
|
||||
if not hasattr(FFI, 'NULL'):
|
||||
FFI.NULL = self.cast(self.BVoidP, 0)
|
||||
FFI.CData, FFI.CType = backend._get_types()
|
||||
else:
|
||||
# ctypes backend: attach these constants to the instance
|
||||
self.NULL = self.cast(self.BVoidP, 0)
|
||||
self.CData, self.CType = backend._get_types()
|
||||
self.buffer = backend.buffer
|
||||
|
||||
def cdef(self, csource, override=False, packed=False, pack=None):
|
||||
"""Parse the given C source. This registers all declared functions,
|
||||
types, and global variables. The functions and global variables can
|
||||
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
|
||||
The types can be used in 'ffi.new()' and other functions.
|
||||
If 'packed' is specified as True, all structs declared inside this
|
||||
cdef are packed, i.e. laid out without any field alignment at all.
|
||||
Alternatively, 'pack' can be a small integer, and requests for
|
||||
alignment greater than that are ignored (pack=1 is equivalent to
|
||||
packed=True).
|
||||
"""
|
||||
self._cdef(csource, override=override, packed=packed, pack=pack)
|
||||
|
||||
def embedding_api(self, csource, packed=False, pack=None):
|
||||
self._cdef(csource, packed=packed, pack=pack, dllexport=True)
|
||||
if self._embedding is None:
|
||||
self._embedding = ''
|
||||
|
||||
def _cdef(self, csource, override=False, **options):
|
||||
if not isinstance(csource, str): # unicode, on Python 2
|
||||
if not isinstance(csource, basestring):
|
||||
raise TypeError("cdef() argument must be a string")
|
||||
csource = csource.encode('ascii')
|
||||
with self._lock:
|
||||
self._cdef_version = object()
|
||||
self._parser.parse(csource, override=override, **options)
|
||||
self._cdefsources.append(csource)
|
||||
if override:
|
||||
for cache in self._function_caches:
|
||||
cache.clear()
|
||||
finishlist = self._parser._recomplete
|
||||
if finishlist:
|
||||
self._parser._recomplete = []
|
||||
for tp in finishlist:
|
||||
tp.finish_backend_type(self, finishlist)
|
||||
|
||||
def dlopen(self, name, flags=0):
|
||||
"""Load and return a dynamic library identified by 'name'.
|
||||
The standard C library can be loaded by passing None.
|
||||
Note that functions and types declared by 'ffi.cdef()' are not
|
||||
linked to a particular library, just like C headers; in the
|
||||
library we only look for the actual (untyped) symbols.
|
||||
"""
|
||||
if not (isinstance(name, basestring) or
|
||||
name is None or
|
||||
isinstance(name, self.CData)):
|
||||
raise TypeError("dlopen(name): name must be a file name, None, "
|
||||
"or an already-opened 'void *' handle")
|
||||
with self._lock:
|
||||
lib, function_cache = _make_ffi_library(self, name, flags)
|
||||
self._function_caches.append(function_cache)
|
||||
self._libraries.append(lib)
|
||||
return lib
|
||||
|
||||
def dlclose(self, lib):
|
||||
"""Close a library obtained with ffi.dlopen(). After this call,
|
||||
access to functions or variables from the library will fail
|
||||
(possibly with a segmentation fault).
|
||||
"""
|
||||
type(lib).__cffi_close__(lib)
|
||||
|
||||
def _typeof_locked(self, cdecl):
|
||||
# call me with the lock!
|
||||
key = cdecl
|
||||
if key in self._parsed_types:
|
||||
return self._parsed_types[key]
|
||||
#
|
||||
if not isinstance(cdecl, str): # unicode, on Python 2
|
||||
cdecl = cdecl.encode('ascii')
|
||||
#
|
||||
type = self._parser.parse_type(cdecl)
|
||||
really_a_function_type = type.is_raw_function
|
||||
if really_a_function_type:
|
||||
type = type.as_function_pointer()
|
||||
btype = self._get_cached_btype(type)
|
||||
result = btype, really_a_function_type
|
||||
self._parsed_types[key] = result
|
||||
return result
|
||||
|
||||
def _typeof(self, cdecl, consider_function_as_funcptr=False):
|
||||
# string -> ctype object
|
||||
try:
|
||||
result = self._parsed_types[cdecl]
|
||||
except KeyError:
|
||||
with self._lock:
|
||||
result = self._typeof_locked(cdecl)
|
||||
#
|
||||
btype, really_a_function_type = result
|
||||
if really_a_function_type and not consider_function_as_funcptr:
|
||||
raise CDefError("the type %r is a function type, not a "
|
||||
"pointer-to-function type" % (cdecl,))
|
||||
return btype
|
||||
|
||||
def typeof(self, cdecl):
|
||||
"""Parse the C type given as a string and return the
|
||||
corresponding <ctype> object.
|
||||
It can also be used on 'cdata' instance to get its C type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
return self._typeof(cdecl)
|
||||
if isinstance(cdecl, self.CData):
|
||||
return self._backend.typeof(cdecl)
|
||||
if isinstance(cdecl, types.BuiltinFunctionType):
|
||||
res = _builtin_function_type(cdecl)
|
||||
if res is not None:
|
||||
return res
|
||||
if (isinstance(cdecl, types.FunctionType)
|
||||
and hasattr(cdecl, '_cffi_base_type')):
|
||||
with self._lock:
|
||||
return self._get_cached_btype(cdecl._cffi_base_type)
|
||||
raise TypeError(type(cdecl))
|
||||
|
||||
def sizeof(self, cdecl):
|
||||
"""Return the size in bytes of the argument. It can be a
|
||||
string naming a C type, or a 'cdata' instance.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
BType = self._typeof(cdecl)
|
||||
return self._backend.sizeof(BType)
|
||||
else:
|
||||
return self._backend.sizeof(cdecl)
|
||||
|
||||
def alignof(self, cdecl):
|
||||
"""Return the natural alignment size in bytes of the C type
|
||||
given as a string.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.alignof(cdecl)
|
||||
|
||||
def offsetof(self, cdecl, *fields_or_indexes):
|
||||
"""Return the offset of the named field inside the given
|
||||
structure or array, which must be given as a C type name.
|
||||
You can give several field names in case of nested structures.
|
||||
You can also give numeric values which correspond to array
|
||||
items, in case of an array type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
|
||||
|
||||
def new(self, cdecl, init=None):
|
||||
"""Allocate an instance according to the specified C type and
|
||||
return a pointer to it. The specified C type must be either a
|
||||
pointer or an array: ``new('X *')`` allocates an X and returns
|
||||
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
||||
n X'es and returns an array referencing it (which works
|
||||
mostly like a pointer, like in C). You can also use
|
||||
``new('X[]', n)`` to allocate an array of a non-constant
|
||||
length n.
|
||||
|
||||
The memory is initialized following the rules of declaring a
|
||||
global variable in C: by default it is zero-initialized, but
|
||||
an explicit initializer can be given which can be used to
|
||||
fill all or part of the memory.
|
||||
|
||||
When the returned <cdata> object goes out of scope, the memory
|
||||
is freed. In other words the returned <cdata> object has
|
||||
ownership of the value of type 'cdecl' that it points to. This
|
||||
means that the raw data can be used as long as this object is
|
||||
kept alive, but must not be used for a longer time. Be careful
|
||||
about that when copying the pointer to the memory somewhere
|
||||
else, e.g. into another structure.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.newp(cdecl, init)
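A short illustration of the allocation rules described in the docstring above:

import cffi
ffi = cffi.FFI()

p = ffi.new("int *")            # one int, zero-initialized
p[0] = 42
a = ffi.new("int[10]")          # array of 10 ints, all zero
s = ffi.new("char[]", b"hi")    # length taken from the initializer (3 bytes with the NUL)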
|
||||
|
||||
def new_allocator(self, alloc=None, free=None,
|
||||
should_clear_after_alloc=True):
|
||||
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
||||
but uses the provided low-level 'alloc' and 'free' functions.
|
||||
|
||||
'alloc' is called with the size as argument. If it returns NULL, a
|
||||
MemoryError is raised. 'free' is called with the result of 'alloc'
|
||||
as argument. Both can be either Python functions or directly C
|
||||
functions. If 'free' is None, then no free function is called.
|
||||
If both 'alloc' and 'free' are None, the default is used.
|
||||
|
||||
If 'should_clear_after_alloc' is set to False, then the memory
|
||||
returned by 'alloc' is assumed to be already cleared (or you are
|
||||
fine with garbage); otherwise CFFI will clear it.
|
||||
"""
|
||||
compiled_ffi = self._backend.FFI()
|
||||
allocator = compiled_ffi.new_allocator(alloc, free,
|
||||
should_clear_after_alloc)
|
||||
def allocate(cdecl, init=None):
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return allocator(cdecl, init)
|
||||
return allocate
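A hedged sketch of a custom allocator built from the C library's malloc/free; it assumes a POSIX platform where ffi.dlopen(None) works and reuses the ffi instance from the sketch above.

ffi.cdef("void *malloc(size_t size); void free(void *ptr);")
libc = ffi.dlopen(None)                    # not available on Windows with Python 3
new_raw = ffi.new_allocator(alloc=libc.malloc, free=libc.free,
                            should_clear_after_alloc=False)
buf = new_raw("char[]", 256)               # like ffi.new(), but the memory is not zeroed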
|
||||
|
||||
def cast(self, cdecl, source):
|
||||
"""Similar to a C cast: returns an instance of the named C
|
||||
type initialized with the given 'source'. The source is
|
||||
cast between integers or pointers of any type.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.cast(cdecl, source)
|
||||
|
||||
def string(self, cdata, maxlen=-1):
|
||||
"""Return a Python string (or unicode string) from the 'cdata'.
|
||||
If 'cdata' is a pointer or array of characters or bytes, returns
|
||||
the null-terminated string. The returned string extends until
|
||||
the first null character, or at most 'maxlen' characters. If
|
||||
'cdata' is an array then 'maxlen' defaults to its length.
|
||||
|
||||
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
||||
string following the same rules.
|
||||
|
||||
If 'cdata' is a single character or byte or a wchar_t, returns
|
||||
it as a string or unicode string.
|
||||
|
||||
If 'cdata' is an enum, returns the value of the enumerator as a
|
||||
string, or 'NUMBER' if the value is out of range.
|
||||
"""
|
||||
return self._backend.string(cdata, maxlen)
|
||||
|
||||
def unpack(self, cdata, length):
|
||||
"""Unpack an array of C data of the given length,
|
||||
returning a Python string/unicode/list.
|
||||
|
||||
If 'cdata' is a pointer to 'char', returns a byte string.
|
||||
It does not stop at the first null. This is equivalent to:
|
||||
ffi.buffer(cdata, length)[:]
|
||||
|
||||
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
|
||||
'length' is measured in wchar_t's; it is not the size in bytes.
|
||||
|
||||
If 'cdata' is a pointer to anything else, returns a list of
|
||||
'length' items. This is a faster equivalent to:
|
||||
[cdata[i] for i in range(length)]
|
||||
"""
|
||||
return self._backend.unpack(cdata, length)
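The difference between string() and unpack() on the same buffer, continuing the ffi instance from the sketches above:

c = ffi.new("char[]", b"hi\x00there")
assert ffi.string(c) == b"hi"              # stops at the first NUL byte
assert ffi.unpack(c, 8) == b"hi\x00there"  # exactly 8 bytes, NULs included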
|
||||
|
||||
#def buffer(self, cdata, size=-1):
|
||||
# """Return a read-write buffer object that references the raw C data
|
||||
# pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
||||
# an array. Can be passed to functions expecting a buffer, or directly
|
||||
# manipulated with:
|
||||
#
|
||||
# buf[:] get a copy of it in a regular string, or
|
||||
# buf[idx] as a single character
|
||||
# buf[:] = ...
|
||||
# buf[idx] = ... change the content
|
||||
# """
|
||||
# note that 'buffer' is a type, set on this instance by __init__
|
||||
|
||||
def from_buffer(self, cdecl, python_buffer=_unspecified,
|
||||
require_writable=False):
|
||||
"""Return a cdata of the given type pointing to the data of the
|
||||
given Python object, which must support the buffer interface.
|
||||
Note that this is not meant to be used on the built-in types
|
||||
str or unicode (you can build 'char[]' arrays explicitly)
|
||||
but only on objects containing large quantities of raw data
|
||||
in some other format, like 'array.array' or numpy arrays.
|
||||
|
||||
The first argument is optional and defaults to 'char[]'.
|
||||
"""
|
||||
if python_buffer is _unspecified:
|
||||
cdecl, python_buffer = self.BCharA, cdecl
|
||||
elif isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
return self._backend.from_buffer(cdecl, python_buffer,
|
||||
require_writable)
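A small usage sketch of from_buffer() on an array.array, which exposes its memory via the buffer interface:

import array

data = array.array("i", [1, 2, 3, 4])
view = ffi.from_buffer("int[]", data)      # zero-copy view into data's memory
assert view[0] == 1
view[0] = 99                               # writes through to the array.array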
|
||||
|
||||
def memmove(self, dest, src, n):
|
||||
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
||||
|
||||
Like the C function memmove(), the memory areas may overlap;
|
||||
apart from that it behaves like the C function memcpy().
|
||||
|
||||
'src' can be any cdata ptr or array, or any Python buffer object.
|
||||
'dest' can be any cdata ptr or array, or a writable Python buffer
|
||||
object. The size to copy, 'n', is always measured in bytes.
|
||||
|
||||
Unlike other methods, this one supports all Python buffer objects, including
|
||||
byte strings and bytearrays---but it still does not support
|
||||
non-contiguous buffers.
|
||||
"""
|
||||
return self._backend.memmove(dest, src, n)
|
||||
|
||||
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
||||
"""Return a callback object or a decorator making such a
|
||||
callback object. 'cdecl' must name a C function pointer type.
|
||||
The callback invokes the specified 'python_callable' (which may
|
||||
be provided either directly or via a decorator). Important: the
|
||||
callback object must be manually kept alive for as long as the
|
||||
callback may be invoked from the C level.
|
||||
"""
|
||||
def callback_decorator_wrap(python_callable):
|
||||
if not callable(python_callable):
|
||||
raise TypeError("the 'python_callable' argument "
|
||||
"is not callable")
|
||||
return self._backend.callback(cdecl, python_callable,
|
||||
error, onerror)
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
||||
if python_callable is None:
|
||||
return callback_decorator_wrap # decorator mode
|
||||
else:
|
||||
return callback_decorator_wrap(python_callable) # direct mode
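Decorator-mode usage of callback(), as described above; the C signature is illustrative.

@ffi.callback("int(int, int)")
def add(x, y):
    return x + y
# 'add' can now be passed where C expects an int (*)(int, int); it must be
# kept alive for as long as C code may still invoke it.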
|
||||
|
||||
def getctype(self, cdecl, replace_with=''):
|
||||
"""Return a string giving the C type 'cdecl', which may be itself
|
||||
a string or a <ctype> object. If 'replace_with' is given, it gives
|
||||
extra text to append (or insert for more complicated C types), like
|
||||
a variable name, or '*' to actually get the C type 'pointer-to-cdecl'.
|
||||
"""
|
||||
if isinstance(cdecl, basestring):
|
||||
cdecl = self._typeof(cdecl)
|
||||
replace_with = replace_with.strip()
|
||||
if (replace_with.startswith('*')
|
||||
and '&[' in self._backend.getcname(cdecl, '&')):
|
||||
replace_with = '(%s)' % replace_with
|
||||
elif replace_with and not replace_with[0] in '[(':
|
||||
replace_with = ' ' + replace_with
|
||||
return self._backend.getcname(cdecl, replace_with)
|
||||
|
||||
def gc(self, cdata, destructor, size=0):
|
||||
"""Return a new cdata object that points to the same
|
||||
data. Later, when this new cdata object is garbage-collected,
|
||||
'destructor(old_cdata_object)' will be called.
|
||||
|
||||
The optional 'size' gives an estimate of the size, used to
|
||||
trigger the garbage collection more eagerly. So far only used
|
||||
on PyPy. It tells the GC that the returned object keeps alive
|
||||
roughly 'size' bytes of external memory.
|
||||
"""
|
||||
return self._backend.gcp(cdata, destructor, size)
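Typical pattern for gc(): tie a C-level destructor to the cdata's lifetime. Here lib.make_thing and lib.free_thing are hypothetical functions from a previously opened library object.

raw = lib.make_thing()                    # hypothetical constructor returning a pointer
thing = ffi.gc(raw, lib.free_thing)       # lib.free_thing(raw) runs when 'thing' is collected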
|
||||
|
||||
def _get_cached_btype(self, type):
|
||||
assert self._lock.acquire(False) is False
|
||||
# call me with the lock!
|
||||
try:
|
||||
BType = self._cached_btypes[type]
|
||||
except KeyError:
|
||||
finishlist = []
|
||||
BType = type.get_cached_btype(self, finishlist)
|
||||
for type in finishlist:
|
||||
type.finish_backend_type(self, finishlist)
|
||||
return BType
|
||||
|
||||
def verify(self, source='', tmpdir=None, **kwargs):
|
||||
"""Verify that the current ffi signatures compile on this
|
||||
machine, and return a dynamic library object. The dynamic
|
||||
library can be used to call functions and access global
|
||||
variables declared in this 'ffi'. The library is compiled
|
||||
by the C compiler: it gives you C-level API compatibility
|
||||
(including calling macros). This is unlike 'ffi.dlopen()',
|
||||
which requires binary compatibility in the signatures.
|
||||
"""
|
||||
from .verifier import Verifier, _caller_dir_pycache
|
||||
#
|
||||
# If set_unicode(True) was called, insert the UNICODE and
|
||||
# _UNICODE macro declarations
|
||||
if self._windows_unicode:
|
||||
self._apply_windows_unicode(kwargs)
|
||||
#
|
||||
# Set the tmpdir here, and not in Verifier.__init__: it picks
|
||||
# up the caller's directory, which we want to be the caller of
|
||||
# ffi.verify(), as opposed to the caller of Verifier().
|
||||
tmpdir = tmpdir or _caller_dir_pycache()
|
||||
#
|
||||
# Make a Verifier() and use it to load the library.
|
||||
self.verifier = Verifier(self, source, tmpdir, **kwargs)
|
||||
lib = self.verifier.load_library()
|
||||
#
|
||||
# Save the loaded library for keep-alive purposes, even
|
||||
# if the caller doesn't keep it alive itself (it should).
|
||||
self._libraries.append(lib)
|
||||
return lib
|
||||
|
||||
def _get_errno(self):
|
||||
return self._backend.get_errno()
|
||||
def _set_errno(self, errno):
|
||||
self._backend.set_errno(errno)
|
||||
errno = property(_get_errno, _set_errno, None,
|
||||
"the value of 'errno' from/to the C calls")
|
||||
|
||||
def getwinerror(self, code=-1):
|
||||
return self._backend.getwinerror(code)
|
||||
|
||||
def _pointer_to(self, ctype):
|
||||
with self._lock:
|
||||
return model.pointer_cache(self, ctype)
|
||||
|
||||
def addressof(self, cdata, *fields_or_indexes):
|
||||
"""Return the address of a <cdata 'struct-or-union'>.
|
||||
If 'fields_or_indexes' are given, returns the address of that
|
||||
field or array item in the structure or array, recursively in
|
||||
case of nested structures.
|
||||
"""
|
||||
try:
|
||||
ctype = self._backend.typeof(cdata)
|
||||
except TypeError:
|
||||
if '__addressof__' in type(cdata).__dict__:
|
||||
return type(cdata).__addressof__(cdata, *fields_or_indexes)
|
||||
raise
|
||||
if fields_or_indexes:
|
||||
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
||||
else:
|
||||
if ctype.kind == "pointer":
|
||||
raise TypeError("addressof(pointer)")
|
||||
offset = 0
|
||||
ctypeptr = self._pointer_to(ctype)
|
||||
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
||||
|
||||
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
||||
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
||||
for field1 in fields_or_indexes:
|
||||
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
||||
offset += offset1
|
||||
return ctype, offset
|
||||
|
||||
def include(self, ffi_to_include):
|
||||
"""Includes the typedefs, structs, unions and enums defined
|
||||
in another FFI instance. Usage is similar to a #include in C,
|
||||
where a part of the program might include types defined in
|
||||
another part for its own usage. Note that the include()
|
||||
method has no effect on functions, constants and global
|
||||
variables, which must anyway be accessed directly from the
|
||||
lib object returned by the original FFI instance.
|
||||
"""
|
||||
if not isinstance(ffi_to_include, FFI):
|
||||
raise TypeError("ffi.include() expects an argument that is also of"
|
||||
" type cffi.FFI, not %r" % (
|
||||
type(ffi_to_include).__name__,))
|
||||
if ffi_to_include is self:
|
||||
raise ValueError("self.include(self)")
|
||||
with ffi_to_include._lock:
|
||||
with self._lock:
|
||||
self._parser.include(ffi_to_include._parser)
|
||||
self._cdefsources.append('[')
|
||||
self._cdefsources.extend(ffi_to_include._cdefsources)
|
||||
self._cdefsources.append(']')
|
||||
self._included_ffis.append(ffi_to_include)
|
||||
|
||||
def new_handle(self, x):
|
||||
return self._backend.newp_handle(self.BVoidP, x)
|
||||
|
||||
def from_handle(self, x):
|
||||
return self._backend.from_handle(x)
|
||||
|
||||
def release(self, x):
|
||||
self._backend.release(x)
|
||||
|
||||
def set_unicode(self, enabled_flag):
|
||||
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
|
||||
_UNICODE defines in C, and declare the types like TCHAR and LPCTSTR
|
||||
to be (pointers to) wchar_t. If 'enabled_flag' is False,
|
||||
declare these types to be (pointers to) plain 8-bit characters.
|
||||
This is mostly for backward compatibility; you usually want True.
|
||||
"""
|
||||
if self._windows_unicode is not None:
|
||||
raise ValueError("set_unicode() can only be called once")
|
||||
enabled_flag = bool(enabled_flag)
|
||||
if enabled_flag:
|
||||
self.cdef("typedef wchar_t TBYTE;"
|
||||
"typedef wchar_t TCHAR;"
|
||||
"typedef const wchar_t *LPCTSTR;"
|
||||
"typedef const wchar_t *PCTSTR;"
|
||||
"typedef wchar_t *LPTSTR;"
|
||||
"typedef wchar_t *PTSTR;"
|
||||
"typedef TBYTE *PTBYTE;"
|
||||
"typedef TCHAR *PTCHAR;")
|
||||
else:
|
||||
self.cdef("typedef char TBYTE;"
|
||||
"typedef char TCHAR;"
|
||||
"typedef const char *LPCTSTR;"
|
||||
"typedef const char *PCTSTR;"
|
||||
"typedef char *LPTSTR;"
|
||||
"typedef char *PTSTR;"
|
||||
"typedef TBYTE *PTBYTE;"
|
||||
"typedef TCHAR *PTCHAR;")
|
||||
self._windows_unicode = enabled_flag
|
||||
|
||||
def _apply_windows_unicode(self, kwds):
|
||||
defmacros = kwds.get('define_macros', ())
|
||||
if not isinstance(defmacros, (list, tuple)):
|
||||
raise TypeError("'define_macros' must be a list or tuple")
|
||||
defmacros = list(defmacros) + [('UNICODE', '1'),
|
||||
('_UNICODE', '1')]
|
||||
kwds['define_macros'] = defmacros
|
||||
|
||||
def _apply_embedding_fix(self, kwds):
|
||||
# must include an argument like "-lpython2.7" for the compiler
|
||||
def ensure(key, value):
|
||||
lst = kwds.setdefault(key, [])
|
||||
if value not in lst:
|
||||
lst.append(value)
|
||||
#
|
||||
if '__pypy__' in sys.builtin_module_names:
|
||||
import os
|
||||
if sys.platform == "win32":
|
||||
# we need 'libpypy-c.lib'. Current distributions of
|
||||
# pypy (>= 4.1) contain it as 'libs/python27.lib'.
|
||||
pythonlib = "python{0[0]}{0[1]}".format(sys.version_info)
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
|
||||
else:
|
||||
# we need 'libpypy-c.{so,dylib}', which should be by
|
||||
# default located in 'sys.prefix/bin' for installed
|
||||
# systems.
|
||||
if sys.version_info < (3,):
|
||||
pythonlib = "pypy-c"
|
||||
else:
|
||||
pythonlib = "pypy3-c"
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
|
||||
# On uninstalled pypy's, the libpypy-c is typically found in
|
||||
# .../pypy/goal/.
|
||||
if hasattr(sys, 'prefix'):
|
||||
ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
|
||||
else:
|
||||
if sys.platform == "win32":
|
||||
template = "python%d%d"
|
||||
if hasattr(sys, 'gettotalrefcount'):
|
||||
template += '_d'
|
||||
else:
|
||||
try:
|
||||
import sysconfig
|
||||
except ImportError: # 2.6
|
||||
from cffi._shimmed_dist_utils import sysconfig
|
||||
template = "python%d.%d"
|
||||
if sysconfig.get_config_var('DEBUG_EXT'):
|
||||
template += sysconfig.get_config_var('DEBUG_EXT')
|
||||
pythonlib = (template %
|
||||
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||
if hasattr(sys, 'abiflags'):
|
||||
pythonlib += sys.abiflags
|
||||
ensure('libraries', pythonlib)
|
||||
if sys.platform == "win32":
|
||||
ensure('extra_link_args', '/MANIFEST')
|
||||
|
||||
def set_source(self, module_name, source, source_extension='.c', **kwds):
|
||||
import os
|
||||
if hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() cannot be called several times "
|
||||
"per ffi object")
|
||||
if not isinstance(module_name, basestring):
|
||||
raise TypeError("'module_name' must be a string")
|
||||
if os.sep in module_name or (os.altsep and os.altsep in module_name):
|
||||
raise ValueError("'module_name' must not contain '/': use a dotted "
|
||||
"name to make a 'package.module' location")
|
||||
self._assigned_source = (str(module_name), source,
|
||||
source_extension, kwds)
|
||||
|
||||
def set_source_pkgconfig(self, module_name, pkgconfig_libs, source,
|
||||
source_extension='.c', **kwds):
|
||||
from . import pkgconfig
|
||||
if not isinstance(pkgconfig_libs, list):
|
||||
raise TypeError("the pkgconfig_libs argument must be a list "
|
||||
"of package names")
|
||||
kwds2 = pkgconfig.flags_from_pkgconfig(pkgconfig_libs)
|
||||
pkgconfig.merge_flags(kwds, kwds2)
|
||||
self.set_source(module_name, source, source_extension, **kwds)
|
||||
|
||||
def distutils_extension(self, tmpdir='build', verbose=True):
|
||||
from cffi._shimmed_dist_utils import mkpath
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
|
||||
return self.verifier.get_extension()
|
||||
raise ValueError("set_source() must be called before"
|
||||
" distutils_extension()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is None:
|
||||
raise TypeError("distutils_extension() is only for C extension "
|
||||
"modules, not for dlopen()-style pure Python "
|
||||
"modules")
|
||||
mkpath(tmpdir)
|
||||
ext, updated = recompile(self, module_name,
|
||||
source, tmpdir=tmpdir, extradir=tmpdir,
|
||||
source_extension=source_extension,
|
||||
call_c_compiler=False, **kwds)
|
||||
if verbose:
|
||||
if updated:
|
||||
sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
|
||||
else:
|
||||
sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
|
||||
return ext
|
||||
|
||||
def emit_c_code(self, filename):
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before emit_c_code()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is None:
|
||||
raise TypeError("emit_c_code() is only for C extension modules, "
|
||||
"not for dlopen()-style pure Python modules")
|
||||
recompile(self, module_name, source,
|
||||
c_file=filename, call_c_compiler=False,
|
||||
uses_ffiplatform=False, **kwds)
|
||||
|
||||
def emit_python_code(self, filename):
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before emit_python_code()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
if source is not None:
|
||||
raise TypeError("emit_python_code() is only for dlopen()-style "
|
||||
"pure Python modules, not for C extension modules")
|
||||
recompile(self, module_name, source,
|
||||
c_file=filename, call_c_compiler=False,
|
||||
uses_ffiplatform=False, **kwds)
|
||||
|
||||
def compile(self, tmpdir='.', verbose=0, target=None, debug=None):
|
||||
"""The 'target' argument gives the final file name of the
|
||||
compiled DLL. Use '*' to force distutils' choice, suitable for
|
||||
regular CPython C API modules. Use a file name ending in '.*'
|
||||
to ask for the system's default extension for dynamic libraries
|
||||
(.so/.dll/.dylib).
|
||||
|
||||
The default is '*' when building a non-embedded C API extension,
|
||||
and (module_name + '.*') when building an embedded library.
|
||||
"""
|
||||
from .recompiler import recompile
|
||||
#
|
||||
if not hasattr(self, '_assigned_source'):
|
||||
raise ValueError("set_source() must be called before compile()")
|
||||
module_name, source, source_extension, kwds = self._assigned_source
|
||||
return recompile(self, module_name, source, tmpdir=tmpdir,
|
||||
target=target, source_extension=source_extension,
|
||||
compiler_verbose=verbose, debug=debug, **kwds)
|
||||
|
||||
def init_once(self, func, tag):
|
||||
# Read _init_once_cache[tag], which is either (False, lock) if
|
||||
# we're calling the function now in some thread, or (True, result).
|
||||
# Don't call setdefault() in most cases, to avoid allocating and
|
||||
# immediately freeing a lock; but still use setdefault() to avoid
|
||||
# races.
|
||||
try:
|
||||
x = self._init_once_cache[tag]
|
||||
except KeyError:
|
||||
x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
|
||||
# Common case: we got (True, result), so we return the result.
|
||||
if x[0]:
|
||||
return x[1]
|
||||
# Else, it's a lock. Acquire it to serialize the following tests.
|
||||
with x[1]:
|
||||
# Read again from _init_once_cache the current status.
|
||||
x = self._init_once_cache[tag]
|
||||
if x[0]:
|
||||
return x[1]
|
||||
# Call the function and store the result back.
|
||||
result = func()
|
||||
self._init_once_cache[tag] = (True, result)
|
||||
return result
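A usage sketch of init_once(): the function runs at most once per tag, even when called from several threads, and later calls return the cached result.

def _setup():
    # hypothetical one-time initialization; any callable returning a value works
    return {"configured": True}

state = ffi.init_once(_setup, "my-setup")
state_again = ffi.init_once(_setup, "my-setup")   # _setup() is not called a second time
assert state is state_again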
|
||||
|
||||
def embedding_init_code(self, pysource):
|
||||
if self._embedding:
|
||||
raise ValueError("embedding_init_code() can only be called once")
|
||||
# fix 'pysource' before it gets dumped into the C file:
|
||||
# - remove empty lines at the beginning, so it starts at "line 1"
|
||||
# - dedent, if all non-empty lines are indented
|
||||
# - check for SyntaxErrors
|
||||
import re
|
||||
match = re.match(r'\s*\n', pysource)
|
||||
if match:
|
||||
pysource = pysource[match.end():]
|
||||
lines = pysource.splitlines() or ['']
|
||||
prefix = re.match(r'\s*', lines[0]).group()
|
||||
for i in range(1, len(lines)):
|
||||
line = lines[i]
|
||||
if line.rstrip():
|
||||
while not line.startswith(prefix):
|
||||
prefix = prefix[:-1]
|
||||
i = len(prefix)
|
||||
lines = [line[i:]+'\n' for line in lines]
|
||||
pysource = ''.join(lines)
|
||||
#
|
||||
compile(pysource, "cffi_init", "exec")
|
||||
#
|
||||
self._embedding = pysource
|
||||
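# Example sketch of embedding_init_code(): it accepts an indented
# triple-quoted string (dedented by the code above) that runs when the
# embedded module is first initialized.  'my_plugin' is a hypothetical
# module name:
#
#     ffi.embedding_init_code("""
#         from my_plugin import ffi
#         @ffi.def_extern()
#         def plugin_entry_point(arg):
#             return 42
#     """)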
|
||||
def def_extern(self, *args, **kwds):
|
||||
raise ValueError("ffi.def_extern() is only available on API-mode FFI "
|
||||
"objects")
|
||||
|
||||
def list_types(self):
|
||||
"""Returns the user type names known to this FFI instance.
|
||||
This returns a tuple containing three lists of names:
|
||||
(typedef_names, names_of_structs, names_of_unions)
|
||||
"""
|
||||
typedefs = []
|
||||
structs = []
|
||||
unions = []
|
||||
for key in self._parser._declarations:
|
||||
if key.startswith('typedef '):
|
||||
typedefs.append(key[8:])
|
||||
elif key.startswith('struct '):
|
||||
structs.append(key[7:])
|
||||
elif key.startswith('union '):
|
||||
unions.append(key[6:])
|
||||
typedefs.sort()
|
||||
structs.sort()
|
||||
unions.sort()
|
||||
return (typedefs, structs, unions)
|
||||
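# Example sketch of list_types() (assumes an FFI instance 'ffi'):
#
#     ffi.cdef("typedef int my_int; struct point { int x, y; };")
#     typedefs, structs, unions = ffi.list_types()
#     # typedefs == ['my_int'], structs == ['point'], unions == []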
|
||||
|
||||
def _load_backend_lib(backend, name, flags):
|
||||
import os
|
||||
if not isinstance(name, basestring):
|
||||
if sys.platform != "win32" or name is not None:
|
||||
return backend.load_library(name, flags)
|
||||
name = "c" # Windows: load_library(None) fails, but this works
|
||||
# on Python 2 (backward compatibility hack only)
|
||||
first_error = None
|
||||
if '.' in name or '/' in name or os.sep in name:
|
||||
try:
|
||||
return backend.load_library(name, flags)
|
||||
except OSError as e:
|
||||
first_error = e
|
||||
import ctypes.util
|
||||
path = ctypes.util.find_library(name)
|
||||
if path is None:
|
||||
if name == "c" and sys.platform == "win32" and sys.version_info >= (3,):
|
||||
raise OSError("dlopen(None) cannot work on Windows for Python 3 "
|
||||
"(see http://bugs.python.org/issue23606)")
|
||||
msg = ("ctypes.util.find_library() did not manage "
|
||||
"to locate a library called %r" % (name,))
|
||||
if first_error is not None:
|
||||
msg = "%s. Additionally, %s" % (first_error, msg)
|
||||
raise OSError(msg)
|
||||
return backend.load_library(path, flags)
|
||||
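# Sketch of the lookup order implemented above: a name that looks like a
# path ('.', '/' or os.sep in it) is passed to load_library() first;
# otherwise, or if that fails, ctypes.util.find_library() resolves the
# short name:
#
#     lib = _load_backend_lib(backend, "m", 0)            # via find_library("m")
#     lib = _load_backend_lib(backend, "./libfoo.so", 0)  # direct dlopen of a path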
|
||||
def _make_ffi_library(ffi, libname, flags):
|
||||
backend = ffi._backend
|
||||
backendlib = _load_backend_lib(backend, libname, flags)
|
||||
#
|
||||
def accessor_function(name):
|
||||
key = 'function ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
value = backendlib.load_function(BType, name)
|
||||
library.__dict__[name] = value
|
||||
#
|
||||
def accessor_variable(name):
|
||||
key = 'variable ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
read_variable = backendlib.read_variable
|
||||
write_variable = backendlib.write_variable
|
||||
setattr(FFILibrary, name, property(
|
||||
lambda self: read_variable(BType, name),
|
||||
lambda self, value: write_variable(BType, name, value)))
|
||||
#
|
||||
def addressof_var(name):
|
||||
try:
|
||||
return addr_variables[name]
|
||||
except KeyError:
|
||||
with ffi._lock:
|
||||
if name not in addr_variables:
|
||||
key = 'variable ' + name
|
||||
tp, _ = ffi._parser._declarations[key]
|
||||
BType = ffi._get_cached_btype(tp)
|
||||
if BType.kind != 'array':
|
||||
BType = model.pointer_cache(ffi, BType)
|
||||
p = backendlib.load_function(BType, name)
|
||||
addr_variables[name] = p
|
||||
return addr_variables[name]
|
||||
#
|
||||
def accessor_constant(name):
|
||||
raise NotImplementedError("non-integer constant '%s' cannot be "
|
||||
"accessed from a dlopen() library" % (name,))
|
||||
#
|
||||
def accessor_int_constant(name):
|
||||
library.__dict__[name] = ffi._parser._int_constants[name]
|
||||
#
|
||||
accessors = {}
|
||||
accessors_version = [False]
|
||||
addr_variables = {}
|
||||
#
|
||||
def update_accessors():
|
||||
if accessors_version[0] is ffi._cdef_version:
|
||||
return
|
||||
#
|
||||
for key, (tp, _) in ffi._parser._declarations.items():
|
||||
if not isinstance(tp, model.EnumType):
|
||||
tag, name = key.split(' ', 1)
|
||||
if tag == 'function':
|
||||
accessors[name] = accessor_function
|
||||
elif tag == 'variable':
|
||||
accessors[name] = accessor_variable
|
||||
elif tag == 'constant':
|
||||
accessors[name] = accessor_constant
|
||||
else:
|
||||
for i, enumname in enumerate(tp.enumerators):
|
||||
def accessor_enum(name, tp=tp, i=i):
|
||||
tp.check_not_partial()
|
||||
library.__dict__[name] = tp.enumvalues[i]
|
||||
accessors[enumname] = accessor_enum
|
||||
for name in ffi._parser._int_constants:
|
||||
accessors.setdefault(name, accessor_int_constant)
|
||||
accessors_version[0] = ffi._cdef_version
|
||||
#
|
||||
def make_accessor(name):
|
||||
with ffi._lock:
|
||||
if name in library.__dict__ or name in FFILibrary.__dict__:
|
||||
return # added by another thread while waiting for the lock
|
||||
if name not in accessors:
|
||||
update_accessors()
|
||||
if name not in accessors:
|
||||
raise AttributeError(name)
|
||||
accessors[name](name)
|
||||
#
|
||||
class FFILibrary(object):
|
||||
def __getattr__(self, name):
|
||||
make_accessor(name)
|
||||
return getattr(self, name)
|
||||
def __setattr__(self, name, value):
|
||||
try:
|
||||
property = getattr(self.__class__, name)
|
||||
except AttributeError:
|
||||
make_accessor(name)
|
||||
setattr(self, name, value)
|
||||
else:
|
||||
property.__set__(self, value)
|
||||
def __dir__(self):
|
||||
with ffi._lock:
|
||||
update_accessors()
|
||||
return accessors.keys()
|
||||
def __addressof__(self, name):
|
||||
if name in library.__dict__:
|
||||
return library.__dict__[name]
|
||||
if name in FFILibrary.__dict__:
|
||||
return addressof_var(name)
|
||||
make_accessor(name)
|
||||
if name in library.__dict__:
|
||||
return library.__dict__[name]
|
||||
if name in FFILibrary.__dict__:
|
||||
return addressof_var(name)
|
||||
raise AttributeError("cffi library has no function or "
|
||||
"global variable named '%s'" % (name,))
|
||||
def __cffi_close__(self):
|
||||
backendlib.close_lib()
|
||||
self.__dict__.clear()
|
||||
#
|
||||
if isinstance(libname, basestring):
|
||||
try:
|
||||
if not isinstance(libname, str): # unicode, on Python 2
|
||||
libname = libname.encode('utf-8')
|
||||
FFILibrary.__name__ = 'FFILibrary_%s' % libname
|
||||
except UnicodeError:
|
||||
pass
|
||||
library = FFILibrary()
|
||||
return library, library.__dict__
|
||||
|
||||
def _builtin_function_type(func):
|
||||
# a hack to make at least ffi.typeof(builtin_function) work,
|
||||
# if the builtin function was obtained by 'vengine_cpy'.
|
||||
import sys
|
||||
try:
|
||||
module = sys.modules[func.__module__]
|
||||
ffi = module._cffi_original_ffi
|
||||
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
|
||||
tp = types_of_builtin_funcs[func]
|
||||
except (KeyError, AttributeError, TypeError):
|
||||
return None
|
||||
else:
|
||||
with ffi._lock:
|
||||
return ffi._get_cached_btype(tp)
|
File diff suppressed because it is too large
|
@@ -1,187 +0,0 @@
|
|||
from .error import VerificationError
|
||||
|
||||
class CffiOp(object):
|
||||
def __init__(self, op, arg):
|
||||
self.op = op
|
||||
self.arg = arg
|
||||
|
||||
def as_c_expr(self):
|
||||
if self.op is None:
|
||||
assert isinstance(self.arg, str)
|
||||
return '(_cffi_opcode_t)(%s)' % (self.arg,)
|
||||
classname = CLASS_NAME[self.op]
|
||||
return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)
|
||||
|
||||
def as_python_bytes(self):
|
||||
if self.op is None and self.arg.isdigit():
|
||||
value = int(self.arg) # non-negative: '-' not in self.arg
|
||||
if value >= 2**31:
|
||||
raise OverflowError("cannot emit %r: limited to 2**31-1"
|
||||
% (self.arg,))
|
||||
return format_four_bytes(value)
|
||||
if isinstance(self.arg, str):
|
||||
raise VerificationError("cannot emit to Python: %r" % (self.arg,))
|
||||
return format_four_bytes((self.arg << 8) | self.op)
|
||||
|
||||
def __str__(self):
|
||||
classname = CLASS_NAME.get(self.op, self.op)
|
||||
return '(%s %s)' % (classname, self.arg)
|
||||
|
||||
def format_four_bytes(num):
|
||||
return '\\x%02X\\x%02X\\x%02X\\x%02X' % (
|
||||
(num >> 24) & 0xFF,
|
||||
(num >> 16) & 0xFF,
|
||||
(num >> 8) & 0xFF,
|
||||
(num ) & 0xFF)
|
||||
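# Worked example: format_four_bytes() emits the value big-endian as four
# escaped bytes of Python source text, e.g.
#
#     format_four_bytes(0x01020304)   # -> the 16-char text r"\x01\x02\x03\x04"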
|
||||
OP_PRIMITIVE = 1
|
||||
OP_POINTER = 3
|
||||
OP_ARRAY = 5
|
||||
OP_OPEN_ARRAY = 7
|
||||
OP_STRUCT_UNION = 9
|
||||
OP_ENUM = 11
|
||||
OP_FUNCTION = 13
|
||||
OP_FUNCTION_END = 15
|
||||
OP_NOOP = 17
|
||||
OP_BITFIELD = 19
|
||||
OP_TYPENAME = 21
|
||||
OP_CPYTHON_BLTN_V = 23 # varargs
|
||||
OP_CPYTHON_BLTN_N = 25 # noargs
|
||||
OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
|
||||
OP_CONSTANT = 29
|
||||
OP_CONSTANT_INT = 31
|
||||
OP_GLOBAL_VAR = 33
|
||||
OP_DLOPEN_FUNC = 35
|
||||
OP_DLOPEN_CONST = 37
|
||||
OP_GLOBAL_VAR_F = 39
|
||||
OP_EXTERN_PYTHON = 41
|
||||
|
||||
PRIM_VOID = 0
|
||||
PRIM_BOOL = 1
|
||||
PRIM_CHAR = 2
|
||||
PRIM_SCHAR = 3
|
||||
PRIM_UCHAR = 4
|
||||
PRIM_SHORT = 5
|
||||
PRIM_USHORT = 6
|
||||
PRIM_INT = 7
|
||||
PRIM_UINT = 8
|
||||
PRIM_LONG = 9
|
||||
PRIM_ULONG = 10
|
||||
PRIM_LONGLONG = 11
|
||||
PRIM_ULONGLONG = 12
|
||||
PRIM_FLOAT = 13
|
||||
PRIM_DOUBLE = 14
|
||||
PRIM_LONGDOUBLE = 15
|
||||
|
||||
PRIM_WCHAR = 16
|
||||
PRIM_INT8 = 17
|
||||
PRIM_UINT8 = 18
|
||||
PRIM_INT16 = 19
|
||||
PRIM_UINT16 = 20
|
||||
PRIM_INT32 = 21
|
||||
PRIM_UINT32 = 22
|
||||
PRIM_INT64 = 23
|
||||
PRIM_UINT64 = 24
|
||||
PRIM_INTPTR = 25
|
||||
PRIM_UINTPTR = 26
|
||||
PRIM_PTRDIFF = 27
|
||||
PRIM_SIZE = 28
|
||||
PRIM_SSIZE = 29
|
||||
PRIM_INT_LEAST8 = 30
|
||||
PRIM_UINT_LEAST8 = 31
|
||||
PRIM_INT_LEAST16 = 32
|
||||
PRIM_UINT_LEAST16 = 33
|
||||
PRIM_INT_LEAST32 = 34
|
||||
PRIM_UINT_LEAST32 = 35
|
||||
PRIM_INT_LEAST64 = 36
|
||||
PRIM_UINT_LEAST64 = 37
|
||||
PRIM_INT_FAST8 = 38
|
||||
PRIM_UINT_FAST8 = 39
|
||||
PRIM_INT_FAST16 = 40
|
||||
PRIM_UINT_FAST16 = 41
|
||||
PRIM_INT_FAST32 = 42
|
||||
PRIM_UINT_FAST32 = 43
|
||||
PRIM_INT_FAST64 = 44
|
||||
PRIM_UINT_FAST64 = 45
|
||||
PRIM_INTMAX = 46
|
||||
PRIM_UINTMAX = 47
|
||||
PRIM_FLOATCOMPLEX = 48
|
||||
PRIM_DOUBLECOMPLEX = 49
|
||||
PRIM_CHAR16 = 50
|
||||
PRIM_CHAR32 = 51
|
||||
|
||||
_NUM_PRIM = 52
|
||||
_UNKNOWN_PRIM = -1
|
||||
_UNKNOWN_FLOAT_PRIM = -2
|
||||
_UNKNOWN_LONG_DOUBLE = -3
|
||||
|
||||
_IO_FILE_STRUCT = -1
|
||||
|
||||
PRIMITIVE_TO_INDEX = {
|
||||
'char': PRIM_CHAR,
|
||||
'short': PRIM_SHORT,
|
||||
'int': PRIM_INT,
|
||||
'long': PRIM_LONG,
|
||||
'long long': PRIM_LONGLONG,
|
||||
'signed char': PRIM_SCHAR,
|
||||
'unsigned char': PRIM_UCHAR,
|
||||
'unsigned short': PRIM_USHORT,
|
||||
'unsigned int': PRIM_UINT,
|
||||
'unsigned long': PRIM_ULONG,
|
||||
'unsigned long long': PRIM_ULONGLONG,
|
||||
'float': PRIM_FLOAT,
|
||||
'double': PRIM_DOUBLE,
|
||||
'long double': PRIM_LONGDOUBLE,
|
||||
'_cffi_float_complex_t': PRIM_FLOATCOMPLEX,
|
||||
'_cffi_double_complex_t': PRIM_DOUBLECOMPLEX,
|
||||
'_Bool': PRIM_BOOL,
|
||||
'wchar_t': PRIM_WCHAR,
|
||||
'char16_t': PRIM_CHAR16,
|
||||
'char32_t': PRIM_CHAR32,
|
||||
'int8_t': PRIM_INT8,
|
||||
'uint8_t': PRIM_UINT8,
|
||||
'int16_t': PRIM_INT16,
|
||||
'uint16_t': PRIM_UINT16,
|
||||
'int32_t': PRIM_INT32,
|
||||
'uint32_t': PRIM_UINT32,
|
||||
'int64_t': PRIM_INT64,
|
||||
'uint64_t': PRIM_UINT64,
|
||||
'intptr_t': PRIM_INTPTR,
|
||||
'uintptr_t': PRIM_UINTPTR,
|
||||
'ptrdiff_t': PRIM_PTRDIFF,
|
||||
'size_t': PRIM_SIZE,
|
||||
'ssize_t': PRIM_SSIZE,
|
||||
'int_least8_t': PRIM_INT_LEAST8,
|
||||
'uint_least8_t': PRIM_UINT_LEAST8,
|
||||
'int_least16_t': PRIM_INT_LEAST16,
|
||||
'uint_least16_t': PRIM_UINT_LEAST16,
|
||||
'int_least32_t': PRIM_INT_LEAST32,
|
||||
'uint_least32_t': PRIM_UINT_LEAST32,
|
||||
'int_least64_t': PRIM_INT_LEAST64,
|
||||
'uint_least64_t': PRIM_UINT_LEAST64,
|
||||
'int_fast8_t': PRIM_INT_FAST8,
|
||||
'uint_fast8_t': PRIM_UINT_FAST8,
|
||||
'int_fast16_t': PRIM_INT_FAST16,
|
||||
'uint_fast16_t': PRIM_UINT_FAST16,
|
||||
'int_fast32_t': PRIM_INT_FAST32,
|
||||
'uint_fast32_t': PRIM_UINT_FAST32,
|
||||
'int_fast64_t': PRIM_INT_FAST64,
|
||||
'uint_fast64_t': PRIM_UINT_FAST64,
|
||||
'intmax_t': PRIM_INTMAX,
|
||||
'uintmax_t': PRIM_UINTMAX,
|
||||
}
|
||||
|
||||
F_UNION = 0x01
|
||||
F_CHECK_FIELDS = 0x02
|
||||
F_PACKED = 0x04
|
||||
F_EXTERNAL = 0x08
|
||||
F_OPAQUE = 0x10
|
||||
|
||||
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
|
||||
for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
|
||||
'F_EXTERNAL', 'F_OPAQUE']])
|
||||
|
||||
CLASS_NAME = {}
|
||||
for _name, _value in list(globals().items()):
|
||||
if _name.startswith('OP_') and isinstance(_value, int):
|
||||
CLASS_NAME[_value] = _name[3:]
|
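# Worked example tying this table to the C side: CffiOp.as_python_bytes()
# packs (arg << 8) | op, mirroring the _CFFI_OP macro in parse_c_type.h:
#
#     op = CffiOp(OP_POINTER, 5)
#     op.as_c_expr()        # -> '_CFFI_OP(_CFFI_OP_POINTER, 5)'
#     op.as_python_bytes()  # -> r'\x00\x00\x05\x03'   (i.e. 0x00000503)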
|
@@ -1,82 +0,0 @@
|
|||
import sys
|
||||
from . import model
|
||||
from .error import FFIError
|
||||
|
||||
|
||||
COMMON_TYPES = {}
|
||||
|
||||
try:
|
||||
# fetch "bool" and all simple Windows types
|
||||
from _cffi_backend import _get_common_types
|
||||
_get_common_types(COMMON_TYPES)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
|
||||
COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above
|
||||
COMMON_TYPES['float _Complex'] = '_cffi_float_complex_t'
|
||||
COMMON_TYPES['double _Complex'] = '_cffi_double_complex_t'
|
||||
|
||||
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
||||
if _type.endswith('_t'):
|
||||
COMMON_TYPES[_type] = _type
|
||||
del _type
|
||||
|
||||
_CACHE = {}
|
||||
|
||||
def resolve_common_type(parser, commontype):
|
||||
try:
|
||||
return _CACHE[commontype]
|
||||
except KeyError:
|
||||
cdecl = COMMON_TYPES.get(commontype, commontype)
|
||||
if not isinstance(cdecl, str):
|
||||
result, quals = cdecl, 0 # cdecl is already a BaseType
|
||||
elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
||||
result, quals = model.PrimitiveType(cdecl), 0
|
||||
elif cdecl == 'set-unicode-needed':
|
||||
raise FFIError("The Windows type %r is only available after "
|
||||
"you call ffi.set_unicode()" % (commontype,))
|
||||
else:
|
||||
if commontype == cdecl:
|
||||
raise FFIError(
|
||||
"Unsupported type: %r. Please look at "
|
||||
"http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
|
||||
"and file an issue if you think this type should really "
|
||||
"be supported." % (commontype,))
|
||||
result, quals = parser.parse_type_and_quals(cdecl) # recursive
|
||||
|
||||
assert isinstance(result, model.BaseTypeByIdentity)
|
||||
_CACHE[commontype] = result, quals
|
||||
return result, quals
|
||||
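# Example sketch: for a primitive-looking common type, the cdecl resolves
# directly to a PrimitiveType with no extra qualifiers (the 'parser'
# argument is an instance of cffi's C parser):
#
#     resolve_common_type(parser, 'int32_t')
#     # -> (model.PrimitiveType('int32_t'), 0), and the result is cached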
|
||||
|
||||
# ____________________________________________________________
|
||||
# extra types for Windows (most of them are in commontypes.c)
|
||||
|
||||
|
||||
def win_common_types():
|
||||
return {
|
||||
"UNICODE_STRING": model.StructType(
|
||||
"_UNICODE_STRING",
|
||||
["Length",
|
||||
"MaximumLength",
|
||||
"Buffer"],
|
||||
[model.PrimitiveType("unsigned short"),
|
||||
model.PrimitiveType("unsigned short"),
|
||||
model.PointerType(model.PrimitiveType("wchar_t"))],
|
||||
[-1, -1, -1]),
|
||||
"PUNICODE_STRING": "UNICODE_STRING *",
|
||||
"PCUNICODE_STRING": "const UNICODE_STRING *",
|
||||
|
||||
"TBYTE": "set-unicode-needed",
|
||||
"TCHAR": "set-unicode-needed",
|
||||
"LPCTSTR": "set-unicode-needed",
|
||||
"PCTSTR": "set-unicode-needed",
|
||||
"LPTSTR": "set-unicode-needed",
|
||||
"PTSTR": "set-unicode-needed",
|
||||
"PTBYTE": "set-unicode-needed",
|
||||
"PTCHAR": "set-unicode-needed",
|
||||
}
|
||||
|
||||
if sys.platform == 'win32':
|
||||
COMMON_TYPES.update(win_common_types())
|
File diff suppressed because it is too large
|
@@ -1,31 +0,0 @@
|
|||
|
||||
class FFIError(Exception):
|
||||
__module__ = 'cffi'
|
||||
|
||||
class CDefError(Exception):
|
||||
__module__ = 'cffi'
|
||||
def __str__(self):
|
||||
try:
|
||||
current_decl = self.args[1]
|
||||
filename = current_decl.coord.file
|
||||
linenum = current_decl.coord.line
|
||||
prefix = '%s:%d: ' % (filename, linenum)
|
||||
except (AttributeError, TypeError, IndexError):
|
||||
prefix = ''
|
||||
return '%s%s' % (prefix, self.args[0])
|
||||
|
||||
class VerificationError(Exception):
|
||||
""" An error raised when verification fails
|
||||
"""
|
||||
__module__ = 'cffi'
|
||||
|
||||
class VerificationMissing(Exception):
|
||||
""" An error raised when incomplete structures are passed into
|
||||
cdef, but no verification has been done
|
||||
"""
|
||||
__module__ = 'cffi'
|
||||
|
||||
class PkgConfigError(Exception):
|
||||
""" An error raised for missing modules in pkg-config
|
||||
"""
|
||||
__module__ = 'cffi'
|
|
@@ -1,113 +0,0 @@
|
|||
import sys, os
|
||||
from .error import VerificationError
|
||||
|
||||
|
||||
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
|
||||
'extra_objects', 'depends']
|
||||
|
||||
def get_extension(srcfilename, modname, sources=(), **kwds):
|
||||
from cffi._shimmed_dist_utils import Extension
|
||||
allsources = [srcfilename]
|
||||
for src in sources:
|
||||
allsources.append(os.path.normpath(src))
|
||||
return Extension(name=modname, sources=allsources, **kwds)
|
||||
|
||||
def compile(tmpdir, ext, compiler_verbose=0, debug=None):
|
||||
"""Compile a C extension module using distutils."""
|
||||
|
||||
saved_environ = os.environ.copy()
|
||||
try:
|
||||
outputfilename = _build(tmpdir, ext, compiler_verbose, debug)
|
||||
outputfilename = os.path.abspath(outputfilename)
|
||||
finally:
|
||||
# workaround for a distutils bug where some env vars can
|
||||
# become longer and longer every time it is used
|
||||
for key, value in saved_environ.items():
|
||||
if os.environ.get(key) != value:
|
||||
os.environ[key] = value
|
||||
return outputfilename
|
||||
|
||||
def _build(tmpdir, ext, compiler_verbose=0, debug=None):
|
||||
# XXX compact but horrible :-(
|
||||
from cffi._shimmed_dist_utils import Distribution, CompileError, LinkError, set_threshold, set_verbosity
|
||||
|
||||
dist = Distribution({'ext_modules': [ext]})
|
||||
dist.parse_config_files()
|
||||
options = dist.get_option_dict('build_ext')
|
||||
if debug is None:
|
||||
debug = sys.flags.debug
|
||||
options['debug'] = ('ffiplatform', debug)
|
||||
options['force'] = ('ffiplatform', True)
|
||||
options['build_lib'] = ('ffiplatform', tmpdir)
|
||||
options['build_temp'] = ('ffiplatform', tmpdir)
|
||||
#
|
||||
try:
|
||||
old_level = set_threshold(0) or 0
|
||||
try:
|
||||
set_verbosity(compiler_verbose)
|
||||
dist.run_command('build_ext')
|
||||
cmd_obj = dist.get_command_obj('build_ext')
|
||||
[soname] = cmd_obj.get_outputs()
|
||||
finally:
|
||||
set_threshold(old_level)
|
||||
except (CompileError, LinkError) as e:
|
||||
raise VerificationError('%s: %s' % (e.__class__.__name__, e))
|
||||
#
|
||||
return soname
|
||||
|
||||
try:
|
||||
from os.path import samefile
|
||||
except ImportError:
|
||||
def samefile(f1, f2):
|
||||
return os.path.abspath(f1) == os.path.abspath(f2)
|
||||
|
||||
def maybe_relative_path(path):
|
||||
if not os.path.isabs(path):
|
||||
return path # already relative
|
||||
dir = path
|
||||
names = []
|
||||
while True:
|
||||
prevdir = dir
|
||||
dir, name = os.path.split(prevdir)
|
||||
if dir == prevdir or not dir:
|
||||
return path # failed to make it relative
|
||||
names.append(name)
|
||||
try:
|
||||
if samefile(dir, os.curdir):
|
||||
names.reverse()
|
||||
return os.path.join(*names)
|
||||
except OSError:
|
||||
pass
|
||||
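# Worked example: with the current directory at /home/user/proj,
#
#     maybe_relative_path('/home/user/proj/src/x.c')   # -> 'src/x.c'
#     maybe_relative_path('/elsewhere/x.c')            # -> '/elsewhere/x.c' (unchanged)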
|
||||
# ____________________________________________________________
|
||||
|
||||
try:
|
||||
int_or_long = (int, long)
|
||||
import cStringIO
|
||||
except NameError:
|
||||
int_or_long = int # Python 3
|
||||
import io as cStringIO
|
||||
|
||||
def _flatten(x, f):
|
||||
if isinstance(x, str):
|
||||
f.write('%ds%s' % (len(x), x))
|
||||
elif isinstance(x, dict):
|
||||
keys = sorted(x.keys())
|
||||
f.write('%dd' % len(keys))
|
||||
for key in keys:
|
||||
_flatten(key, f)
|
||||
_flatten(x[key], f)
|
||||
elif isinstance(x, (list, tuple)):
|
||||
f.write('%dl' % len(x))
|
||||
for value in x:
|
||||
_flatten(value, f)
|
||||
elif isinstance(x, int_or_long):
|
||||
f.write('%di' % (x,))
|
||||
else:
|
||||
raise TypeError(
|
||||
"the keywords to verify() contains unsupported object %r" % (x,))
|
||||
|
||||
def flatten(x):
|
||||
f = cStringIO.StringIO()
|
||||
_flatten(x, f)
|
||||
return f.getvalue()
|
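# Worked example: flatten() produces a stable, order-independent text key
# from the keyword arguments passed to verify(), e.g.
#
#     flatten({"libraries": ["m"]})   # -> '1d9slibraries1l1sm'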
|
@@ -1,30 +0,0 @@
|
|||
import sys
|
||||
|
||||
if sys.version_info < (3,):
|
||||
try:
|
||||
from thread import allocate_lock
|
||||
except ImportError:
|
||||
from dummy_thread import allocate_lock
|
||||
else:
|
||||
try:
|
||||
from _thread import allocate_lock
|
||||
except ImportError:
|
||||
from _dummy_thread import allocate_lock
|
||||
|
||||
|
||||
##import sys
|
||||
##l1 = allocate_lock
|
||||
|
||||
##class allocate_lock(object):
|
||||
## def __init__(self):
|
||||
## self._real = l1()
|
||||
## def __enter__(self):
|
||||
## for i in range(4, 0, -1):
|
||||
## print sys._getframe(i).f_code
|
||||
## print
|
||||
## return self._real.__enter__()
|
||||
## def __exit__(self, *args):
|
||||
## return self._real.__exit__(*args)
|
||||
## def acquire(self, f):
|
||||
## assert f is False
|
||||
## return self._real.acquire(f)
|
|
@@ -1,618 +0,0 @@
|
|||
import types
|
||||
import weakref
|
||||
|
||||
from .lock import allocate_lock
|
||||
from .error import CDefError, VerificationError, VerificationMissing
|
||||
|
||||
# type qualifiers
|
||||
Q_CONST = 0x01
|
||||
Q_RESTRICT = 0x02
|
||||
Q_VOLATILE = 0x04
|
||||
|
||||
def qualify(quals, replace_with):
|
||||
if quals & Q_CONST:
|
||||
replace_with = ' const ' + replace_with.lstrip()
|
||||
if quals & Q_VOLATILE:
|
||||
replace_with = ' volatile ' + replace_with.lstrip()
|
||||
if quals & Q_RESTRICT:
|
||||
# It seems that __restrict is supported by gcc and msvc.
|
||||
# If you hit some different compiler, add a #define in
|
||||
# _cffi_include.h for it (and in its copies, documented there)
|
||||
replace_with = ' __restrict ' + replace_with.lstrip()
|
||||
return replace_with
|
||||
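# Worked example: qualifiers are prepended to the replacement text, e.g.
#
#     qualify(Q_CONST, ' *p')               # -> ' const *p'
#     qualify(Q_CONST | Q_VOLATILE, ' x')   # -> ' volatile const x'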
|
||||
|
||||
class BaseTypeByIdentity(object):
|
||||
is_array_type = False
|
||||
is_raw_function = False
|
||||
|
||||
def get_c_name(self, replace_with='', context='a C file', quals=0):
|
||||
result = self.c_name_with_marker
|
||||
assert result.count('&') == 1
|
||||
# some logic duplication with ffi.getctype()... :-(
|
||||
replace_with = replace_with.strip()
|
||||
if replace_with:
|
||||
if replace_with.startswith('*') and '&[' in result:
|
||||
replace_with = '(%s)' % replace_with
|
||||
elif not replace_with[0] in '[(':
|
||||
replace_with = ' ' + replace_with
|
||||
replace_with = qualify(quals, replace_with)
|
||||
result = result.replace('&', replace_with)
|
||||
if '$' in result:
|
||||
raise VerificationError(
|
||||
"cannot generate '%s' in %s: unknown type name"
|
||||
% (self._get_c_name(), context))
|
||||
return result
|
||||
|
||||
def _get_c_name(self):
|
||||
return self.c_name_with_marker.replace('&', '')
|
||||
|
||||
def has_c_name(self):
|
||||
return '$' not in self._get_c_name()
|
||||
|
||||
def is_integer_type(self):
|
||||
return False
|
||||
|
||||
def get_cached_btype(self, ffi, finishlist, can_delay=False):
|
||||
try:
|
||||
BType = ffi._cached_btypes[self]
|
||||
except KeyError:
|
||||
BType = self.build_backend_type(ffi, finishlist)
|
||||
BType2 = ffi._cached_btypes.setdefault(self, BType)
|
||||
assert BType2 is BType
|
||||
return BType
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s>' % (self._get_c_name(),)
|
||||
|
||||
def _get_items(self):
|
||||
return [(name, getattr(self, name)) for name in self._attrs_]
|
||||
|
||||
|
||||
class BaseType(BaseTypeByIdentity):
|
||||
|
||||
def __eq__(self, other):
|
||||
return (self.__class__ == other.__class__ and
|
||||
self._get_items() == other._get_items())
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.__class__, tuple(self._get_items())))
|
||||
|
||||
|
||||
class VoidType(BaseType):
|
||||
_attrs_ = ()
|
||||
|
||||
def __init__(self):
|
||||
self.c_name_with_marker = 'void&'
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
return global_cache(self, ffi, 'new_void_type')
|
||||
|
||||
void_type = VoidType()
|
||||
|
||||
|
||||
class BasePrimitiveType(BaseType):
|
||||
def is_complex_type(self):
|
||||
return False
|
||||
|
||||
|
||||
class PrimitiveType(BasePrimitiveType):
|
||||
_attrs_ = ('name',)
|
||||
|
||||
ALL_PRIMITIVE_TYPES = {
|
||||
'char': 'c',
|
||||
'short': 'i',
|
||||
'int': 'i',
|
||||
'long': 'i',
|
||||
'long long': 'i',
|
||||
'signed char': 'i',
|
||||
'unsigned char': 'i',
|
||||
'unsigned short': 'i',
|
||||
'unsigned int': 'i',
|
||||
'unsigned long': 'i',
|
||||
'unsigned long long': 'i',
|
||||
'float': 'f',
|
||||
'double': 'f',
|
||||
'long double': 'f',
|
||||
'_cffi_float_complex_t': 'j',
|
||||
'_cffi_double_complex_t': 'j',
|
||||
'_Bool': 'i',
|
||||
# the following types are not primitive in the C sense
|
||||
'wchar_t': 'c',
|
||||
'char16_t': 'c',
|
||||
'char32_t': 'c',
|
||||
'int8_t': 'i',
|
||||
'uint8_t': 'i',
|
||||
'int16_t': 'i',
|
||||
'uint16_t': 'i',
|
||||
'int32_t': 'i',
|
||||
'uint32_t': 'i',
|
||||
'int64_t': 'i',
|
||||
'uint64_t': 'i',
|
||||
'int_least8_t': 'i',
|
||||
'uint_least8_t': 'i',
|
||||
'int_least16_t': 'i',
|
||||
'uint_least16_t': 'i',
|
||||
'int_least32_t': 'i',
|
||||
'uint_least32_t': 'i',
|
||||
'int_least64_t': 'i',
|
||||
'uint_least64_t': 'i',
|
||||
'int_fast8_t': 'i',
|
||||
'uint_fast8_t': 'i',
|
||||
'int_fast16_t': 'i',
|
||||
'uint_fast16_t': 'i',
|
||||
'int_fast32_t': 'i',
|
||||
'uint_fast32_t': 'i',
|
||||
'int_fast64_t': 'i',
|
||||
'uint_fast64_t': 'i',
|
||||
'intptr_t': 'i',
|
||||
'uintptr_t': 'i',
|
||||
'intmax_t': 'i',
|
||||
'uintmax_t': 'i',
|
||||
'ptrdiff_t': 'i',
|
||||
'size_t': 'i',
|
||||
'ssize_t': 'i',
|
||||
}
|
||||
|
||||
def __init__(self, name):
|
||||
assert name in self.ALL_PRIMITIVE_TYPES
|
||||
self.name = name
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
def is_char_type(self):
|
||||
return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
|
||||
def is_integer_type(self):
|
||||
return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
|
||||
def is_float_type(self):
|
||||
return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'
|
||||
def is_complex_type(self):
|
||||
return self.ALL_PRIMITIVE_TYPES[self.name] == 'j'
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
return global_cache(self, ffi, 'new_primitive_type', self.name)
|
||||
|
||||
|
||||
class UnknownIntegerType(BasePrimitiveType):
|
||||
_attrs_ = ('name',)
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
def is_integer_type(self):
|
||||
return True
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
raise NotImplementedError("integer type '%s' can only be used after "
|
||||
"compilation" % self.name)
|
||||
|
||||
class UnknownFloatType(BasePrimitiveType):
|
||||
_attrs_ = ('name', )
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
raise NotImplementedError("float type '%s' can only be used after "
|
||||
"compilation" % self.name)
|
||||
|
||||
|
||||
class BaseFunctionType(BaseType):
|
||||
_attrs_ = ('args', 'result', 'ellipsis', 'abi')
|
||||
|
||||
def __init__(self, args, result, ellipsis, abi=None):
|
||||
self.args = args
|
||||
self.result = result
|
||||
self.ellipsis = ellipsis
|
||||
self.abi = abi
|
||||
#
|
||||
reprargs = [arg._get_c_name() for arg in self.args]
|
||||
if self.ellipsis:
|
||||
reprargs.append('...')
|
||||
reprargs = reprargs or ['void']
|
||||
replace_with = self._base_pattern % (', '.join(reprargs),)
|
||||
if abi is not None:
|
||||
replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
|
||||
self.c_name_with_marker = (
|
||||
self.result.c_name_with_marker.replace('&', replace_with))
|
||||
|
||||
|
||||
class RawFunctionType(BaseFunctionType):
|
||||
# Corresponds to a C type like 'int(int)', which is the C type of
|
||||
# a function, but not a pointer-to-function. The backend has no
|
||||
# notion of such a type; it's used temporarily by parsing.
|
||||
_base_pattern = '(&)(%s)'
|
||||
is_raw_function = True
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
raise CDefError("cannot render the type %r: it is a function "
|
||||
"type, not a pointer-to-function type" % (self,))
|
||||
|
||||
def as_function_pointer(self):
|
||||
return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
|
||||
|
||||
|
||||
class FunctionPtrType(BaseFunctionType):
|
||||
_base_pattern = '(*&)(%s)'
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
result = self.result.get_cached_btype(ffi, finishlist)
|
||||
args = []
|
||||
for tp in self.args:
|
||||
args.append(tp.get_cached_btype(ffi, finishlist))
|
||||
abi_args = ()
|
||||
if self.abi == "__stdcall":
|
||||
if not self.ellipsis: # __stdcall ignored for variadic funcs
|
||||
try:
|
||||
abi_args = (ffi._backend.FFI_STDCALL,)
|
||||
except AttributeError:
|
||||
pass
|
||||
return global_cache(self, ffi, 'new_function_type',
|
||||
tuple(args), result, self.ellipsis, *abi_args)
|
||||
|
||||
def as_raw_function(self):
|
||||
return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
|
||||
|
||||
|
||||
class PointerType(BaseType):
|
||||
_attrs_ = ('totype', 'quals')
|
||||
|
||||
def __init__(self, totype, quals=0):
|
||||
self.totype = totype
|
||||
self.quals = quals
|
||||
extra = " *&"
|
||||
if totype.is_array_type:
|
||||
extra = "(%s)" % (extra.lstrip(),)
|
||||
extra = qualify(quals, extra)
|
||||
self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
|
||||
return global_cache(self, ffi, 'new_pointer_type', BItem)
|
||||
|
||||
voidp_type = PointerType(void_type)
|
||||
|
||||
def ConstPointerType(totype):
|
||||
return PointerType(totype, Q_CONST)
|
||||
|
||||
const_voidp_type = ConstPointerType(void_type)
|
||||
|
||||
|
||||
class NamedPointerType(PointerType):
|
||||
_attrs_ = ('totype', 'name')
|
||||
|
||||
def __init__(self, totype, name, quals=0):
|
||||
PointerType.__init__(self, totype, quals)
|
||||
self.name = name
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
|
||||
class ArrayType(BaseType):
|
||||
_attrs_ = ('item', 'length')
|
||||
is_array_type = True
|
||||
|
||||
def __init__(self, item, length):
|
||||
self.item = item
|
||||
self.length = length
|
||||
#
|
||||
if length is None:
|
||||
brackets = '&[]'
|
||||
elif length == '...':
|
||||
brackets = '&[/*...*/]'
|
||||
else:
|
||||
brackets = '&[%s]' % length
|
||||
self.c_name_with_marker = (
|
||||
self.item.c_name_with_marker.replace('&', brackets))
|
||||
|
||||
def length_is_unknown(self):
|
||||
return isinstance(self.length, str)
|
||||
|
||||
def resolve_length(self, newlength):
|
||||
return ArrayType(self.item, newlength)
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
if self.length_is_unknown():
|
||||
raise CDefError("cannot render the type %r: unknown length" %
|
||||
(self,))
|
||||
self.item.get_cached_btype(ffi, finishlist) # force the item BType
|
||||
BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
|
||||
return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
|
||||
|
||||
char_array_type = ArrayType(PrimitiveType('char'), None)
|
||||
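# Worked examples of the '&' marker convention used throughout this file:
# the marker shows where a variable name would go in the C declaration.
#
#     PrimitiveType('int').c_name_with_marker                  # -> 'int&'
#     PointerType(PrimitiveType('int')).c_name_with_marker     # -> 'int *&'
#     ArrayType(PrimitiveType('char'), 10).c_name_with_marker  # -> 'char&[10]'
#     PrimitiveType('int').get_c_name('x')                     # -> 'int x'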
|
||||
|
||||
class StructOrUnionOrEnum(BaseTypeByIdentity):
|
||||
_attrs_ = ('name',)
|
||||
forcename = None
|
||||
|
||||
def build_c_name_with_marker(self):
|
||||
name = self.forcename or '%s %s' % (self.kind, self.name)
|
||||
self.c_name_with_marker = name + '&'
|
||||
|
||||
def force_the_name(self, forcename):
|
||||
self.forcename = forcename
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def get_official_name(self):
|
||||
assert self.c_name_with_marker.endswith('&')
|
||||
return self.c_name_with_marker[:-1]
|
||||
|
||||
|
||||
class StructOrUnion(StructOrUnionOrEnum):
|
||||
fixedlayout = None
|
||||
completed = 0
|
||||
partial = False
|
||||
packed = 0
|
||||
|
||||
def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
|
||||
self.name = name
|
||||
self.fldnames = fldnames
|
||||
self.fldtypes = fldtypes
|
||||
self.fldbitsize = fldbitsize
|
||||
self.fldquals = fldquals
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def anonymous_struct_fields(self):
|
||||
if self.fldtypes is not None:
|
||||
for name, type in zip(self.fldnames, self.fldtypes):
|
||||
if name == '' and isinstance(type, StructOrUnion):
|
||||
yield type
|
||||
|
||||
def enumfields(self, expand_anonymous_struct_union=True):
|
||||
fldquals = self.fldquals
|
||||
if fldquals is None:
|
||||
fldquals = (0,) * len(self.fldnames)
|
||||
for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
|
||||
self.fldbitsize, fldquals):
|
||||
if (name == '' and isinstance(type, StructOrUnion)
|
||||
and expand_anonymous_struct_union):
|
||||
# nested anonymous struct/union
|
||||
for result in type.enumfields():
|
||||
yield result
|
||||
else:
|
||||
yield (name, type, bitsize, quals)
|
||||
|
||||
def force_flatten(self):
|
||||
# force the struct or union to have a declaration that lists
|
||||
# directly all fields returned by enumfields(), flattening
|
||||
# nested anonymous structs/unions.
|
||||
names = []
|
||||
types = []
|
||||
bitsizes = []
|
||||
fldquals = []
|
||||
for name, type, bitsize, quals in self.enumfields():
|
||||
names.append(name)
|
||||
types.append(type)
|
||||
bitsizes.append(bitsize)
|
||||
fldquals.append(quals)
|
||||
self.fldnames = tuple(names)
|
||||
self.fldtypes = tuple(types)
|
||||
self.fldbitsize = tuple(bitsizes)
|
||||
self.fldquals = tuple(fldquals)
|
||||
|
||||
def get_cached_btype(self, ffi, finishlist, can_delay=False):
|
||||
BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
|
||||
can_delay)
|
||||
if not can_delay:
|
||||
self.finish_backend_type(ffi, finishlist)
|
||||
return BType
|
||||
|
||||
def finish_backend_type(self, ffi, finishlist):
|
||||
if self.completed:
|
||||
if self.completed != 2:
|
||||
raise NotImplementedError("recursive structure declaration "
|
||||
"for '%s'" % (self.name,))
|
||||
return
|
||||
BType = ffi._cached_btypes[self]
|
||||
#
|
||||
self.completed = 1
|
||||
#
|
||||
if self.fldtypes is None:
|
||||
pass # not completing it: it's an opaque struct
|
||||
#
|
||||
elif self.fixedlayout is None:
|
||||
fldtypes = [tp.get_cached_btype(ffi, finishlist)
|
||||
for tp in self.fldtypes]
|
||||
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
|
||||
extra_flags = ()
|
||||
if self.packed:
|
||||
if self.packed == 1:
|
||||
extra_flags = (8,) # SF_PACKED
|
||||
else:
|
||||
extra_flags = (0, self.packed)
|
||||
ffi._backend.complete_struct_or_union(BType, lst, self,
|
||||
-1, -1, *extra_flags)
|
||||
#
|
||||
else:
|
||||
fldtypes = []
|
||||
fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
|
||||
for i in range(len(self.fldnames)):
|
||||
fsize = fieldsize[i]
|
||||
ftype = self.fldtypes[i]
|
||||
#
|
||||
if isinstance(ftype, ArrayType) and ftype.length_is_unknown():
|
||||
# fix the length to match the total size
|
||||
BItemType = ftype.item.get_cached_btype(ffi, finishlist)
|
||||
nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
|
||||
if nrest != 0:
|
||||
self._verification_error(
|
||||
"field '%s.%s' has a bogus size?" % (
|
||||
self.name, self.fldnames[i] or '{}'))
|
||||
ftype = ftype.resolve_length(nlen)
|
||||
self.fldtypes = (self.fldtypes[:i] + (ftype,) +
|
||||
self.fldtypes[i+1:])
|
||||
#
|
||||
BFieldType = ftype.get_cached_btype(ffi, finishlist)
|
||||
if isinstance(ftype, ArrayType) and ftype.length is None:
|
||||
assert fsize == 0
|
||||
else:
|
||||
bitemsize = ffi.sizeof(BFieldType)
|
||||
if bitemsize != fsize:
|
||||
self._verification_error(
|
||||
"field '%s.%s' is declared as %d bytes, but is "
|
||||
"really %d bytes" % (self.name,
|
||||
self.fldnames[i] or '{}',
|
||||
bitemsize, fsize))
|
||||
fldtypes.append(BFieldType)
|
||||
#
|
||||
lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
|
||||
ffi._backend.complete_struct_or_union(BType, lst, self,
|
||||
totalsize, totalalignment)
|
||||
self.completed = 2
|
||||
|
||||
def _verification_error(self, msg):
|
||||
raise VerificationError(msg)
|
||||
|
||||
def check_not_partial(self):
|
||||
if self.partial and self.fixedlayout is None:
|
||||
raise VerificationMissing(self._get_c_name())
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
self.check_not_partial()
|
||||
finishlist.append(self)
|
||||
#
|
||||
return global_cache(self, ffi, 'new_%s_type' % self.kind,
|
||||
self.get_official_name(), key=self)
|
||||
|
||||
|
||||
class StructType(StructOrUnion):
|
||||
kind = 'struct'
|
||||
|
||||
|
||||
class UnionType(StructOrUnion):
|
||||
kind = 'union'
|
||||
|
||||
|
||||
class EnumType(StructOrUnionOrEnum):
|
||||
kind = 'enum'
|
||||
partial = False
|
||||
partial_resolved = False
|
||||
|
||||
def __init__(self, name, enumerators, enumvalues, baseinttype=None):
|
||||
self.name = name
|
||||
self.enumerators = enumerators
|
||||
self.enumvalues = enumvalues
|
||||
self.baseinttype = baseinttype
|
||||
self.build_c_name_with_marker()
|
||||
|
||||
def force_the_name(self, forcename):
|
||||
StructOrUnionOrEnum.force_the_name(self, forcename)
|
||||
if self.forcename is None:
|
||||
name = self.get_official_name()
|
||||
self.forcename = '$' + name.replace(' ', '_')
|
||||
|
||||
def check_not_partial(self):
|
||||
if self.partial and not self.partial_resolved:
|
||||
raise VerificationMissing(self._get_c_name())
|
||||
|
||||
def build_backend_type(self, ffi, finishlist):
|
||||
self.check_not_partial()
|
||||
base_btype = self.build_baseinttype(ffi, finishlist)
|
||||
return global_cache(self, ffi, 'new_enum_type',
|
||||
self.get_official_name(),
|
||||
self.enumerators, self.enumvalues,
|
||||
base_btype, key=self)
|
||||
|
||||
def build_baseinttype(self, ffi, finishlist):
|
||||
if self.baseinttype is not None:
|
||||
return self.baseinttype.get_cached_btype(ffi, finishlist)
|
||||
#
|
||||
if self.enumvalues:
|
||||
smallest_value = min(self.enumvalues)
|
||||
largest_value = max(self.enumvalues)
|
||||
else:
|
||||
import warnings
|
||||
try:
|
||||
# XXX! The goal is to ensure that the warnings.warn()
|
||||
# will not suppress the warning. We want to get it
|
||||
# several times if we reach this point several times.
|
||||
__warningregistry__.clear()
|
||||
except NameError:
|
||||
pass
|
||||
warnings.warn("%r has no values explicitly defined; "
|
||||
"guessing that it is equivalent to 'unsigned int'"
|
||||
% self._get_c_name())
|
||||
smallest_value = largest_value = 0
|
||||
if smallest_value < 0: # needs a signed type
|
||||
sign = 1
|
||||
candidate1 = PrimitiveType("int")
|
||||
candidate2 = PrimitiveType("long")
|
||||
else:
|
||||
sign = 0
|
||||
candidate1 = PrimitiveType("unsigned int")
|
||||
candidate2 = PrimitiveType("unsigned long")
|
||||
btype1 = candidate1.get_cached_btype(ffi, finishlist)
|
||||
btype2 = candidate2.get_cached_btype(ffi, finishlist)
|
||||
size1 = ffi.sizeof(btype1)
|
||||
size2 = ffi.sizeof(btype2)
|
||||
if (smallest_value >= ((-1) << (8*size1-1)) and
|
||||
largest_value < (1 << (8*size1-sign))):
|
||||
return btype1
|
||||
if (smallest_value >= ((-1) << (8*size2-1)) and
|
||||
largest_value < (1 << (8*size2-sign))):
|
||||
return btype2
|
||||
raise CDefError("%s values don't all fit into either 'long' "
|
||||
"or 'unsigned long'" % self._get_c_name())
|
||||
|
||||
def unknown_type(name, structname=None):
|
||||
if structname is None:
|
||||
structname = '$%s' % name
|
||||
tp = StructType(structname, None, None, None)
|
||||
tp.force_the_name(name)
|
||||
tp.origin = "unknown_type"
|
||||
return tp
|
||||
|
||||
def unknown_ptr_type(name, structname=None):
|
||||
if structname is None:
|
||||
structname = '$$%s' % name
|
||||
tp = StructType(structname, None, None, None)
|
||||
return NamedPointerType(tp, name)
|
||||
|
||||
|
||||
global_lock = allocate_lock()
|
||||
_typecache_cffi_backend = weakref.WeakValueDictionary()
|
||||
|
||||
def get_typecache(backend):
|
||||
# returns _typecache_cffi_backend if backend is the _cffi_backend
|
||||
# module, or type(backend).__typecache if backend is an instance of
|
||||
# CTypesBackend (or some FakeBackend class during tests)
|
||||
if isinstance(backend, types.ModuleType):
|
||||
return _typecache_cffi_backend
|
||||
with global_lock:
|
||||
if not hasattr(type(backend), '__typecache'):
|
||||
type(backend).__typecache = weakref.WeakValueDictionary()
|
||||
return type(backend).__typecache
|
||||
|
||||
def global_cache(srctype, ffi, funcname, *args, **kwds):
|
||||
key = kwds.pop('key', (funcname, args))
|
||||
assert not kwds
|
||||
try:
|
||||
return ffi._typecache[key]
|
||||
except KeyError:
|
||||
pass
|
||||
try:
|
||||
res = getattr(ffi._backend, funcname)(*args)
|
||||
except NotImplementedError as e:
|
||||
raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
|
||||
# note that setdefault() on WeakValueDictionary is not atomic
|
||||
# and contains a rare bug (http://bugs.python.org/issue19542);
|
||||
# we have to use a lock and do it ourselves
|
||||
cache = ffi._typecache
|
||||
with global_lock:
|
||||
res1 = cache.get(key)
|
||||
if res1 is None:
|
||||
cache[key] = res
|
||||
return res
|
||||
else:
|
||||
return res1
|
||||
|
||||
def pointer_cache(ffi, BType):
|
||||
return global_cache('?', ffi, 'new_pointer_type', BType)
|
||||
|
||||
def attach_exception_info(e, name):
|
||||
if e.args and type(e.args[0]) is str:
|
||||
e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
|
|
@@ -1,181 +0,0 @@
|
|||
|
||||
/* This part is from file 'cffi/parse_c_type.h'. It is copied at the
|
||||
beginning of C sources generated by CFFI's ffi.set_source(). */
|
||||
|
||||
typedef void *_cffi_opcode_t;
|
||||
|
||||
#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
|
||||
#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
|
||||
#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
|
||||
|
||||
#define _CFFI_OP_PRIMITIVE 1
|
||||
#define _CFFI_OP_POINTER 3
|
||||
#define _CFFI_OP_ARRAY 5
|
||||
#define _CFFI_OP_OPEN_ARRAY 7
|
||||
#define _CFFI_OP_STRUCT_UNION 9
|
||||
#define _CFFI_OP_ENUM 11
|
||||
#define _CFFI_OP_FUNCTION 13
|
||||
#define _CFFI_OP_FUNCTION_END 15
|
||||
#define _CFFI_OP_NOOP 17
|
||||
#define _CFFI_OP_BITFIELD 19
|
||||
#define _CFFI_OP_TYPENAME 21
|
||||
#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
|
||||
#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
|
||||
#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
|
||||
#define _CFFI_OP_CONSTANT 29
|
||||
#define _CFFI_OP_CONSTANT_INT 31
|
||||
#define _CFFI_OP_GLOBAL_VAR 33
|
||||
#define _CFFI_OP_DLOPEN_FUNC 35
|
||||
#define _CFFI_OP_DLOPEN_CONST 37
|
||||
#define _CFFI_OP_GLOBAL_VAR_F 39
|
||||
#define _CFFI_OP_EXTERN_PYTHON 41
|
||||
|
||||
#define _CFFI_PRIM_VOID 0
|
||||
#define _CFFI_PRIM_BOOL 1
|
||||
#define _CFFI_PRIM_CHAR 2
|
||||
#define _CFFI_PRIM_SCHAR 3
|
||||
#define _CFFI_PRIM_UCHAR 4
|
||||
#define _CFFI_PRIM_SHORT 5
|
||||
#define _CFFI_PRIM_USHORT 6
|
||||
#define _CFFI_PRIM_INT 7
|
||||
#define _CFFI_PRIM_UINT 8
|
||||
#define _CFFI_PRIM_LONG 9
|
||||
#define _CFFI_PRIM_ULONG 10
|
||||
#define _CFFI_PRIM_LONGLONG 11
|
||||
#define _CFFI_PRIM_ULONGLONG 12
|
||||
#define _CFFI_PRIM_FLOAT 13
|
||||
#define _CFFI_PRIM_DOUBLE 14
|
||||
#define _CFFI_PRIM_LONGDOUBLE 15
|
||||
|
||||
#define _CFFI_PRIM_WCHAR 16
|
||||
#define _CFFI_PRIM_INT8 17
|
||||
#define _CFFI_PRIM_UINT8 18
|
||||
#define _CFFI_PRIM_INT16 19
|
||||
#define _CFFI_PRIM_UINT16 20
|
||||
#define _CFFI_PRIM_INT32 21
|
||||
#define _CFFI_PRIM_UINT32 22
|
||||
#define _CFFI_PRIM_INT64 23
|
||||
#define _CFFI_PRIM_UINT64 24
|
||||
#define _CFFI_PRIM_INTPTR 25
|
||||
#define _CFFI_PRIM_UINTPTR 26
|
||||
#define _CFFI_PRIM_PTRDIFF 27
|
||||
#define _CFFI_PRIM_SIZE 28
|
||||
#define _CFFI_PRIM_SSIZE 29
|
||||
#define _CFFI_PRIM_INT_LEAST8 30
|
||||
#define _CFFI_PRIM_UINT_LEAST8 31
|
||||
#define _CFFI_PRIM_INT_LEAST16 32
|
||||
#define _CFFI_PRIM_UINT_LEAST16 33
|
||||
#define _CFFI_PRIM_INT_LEAST32 34
|
||||
#define _CFFI_PRIM_UINT_LEAST32 35
|
||||
#define _CFFI_PRIM_INT_LEAST64 36
|
||||
#define _CFFI_PRIM_UINT_LEAST64 37
|
||||
#define _CFFI_PRIM_INT_FAST8 38
|
||||
#define _CFFI_PRIM_UINT_FAST8 39
|
||||
#define _CFFI_PRIM_INT_FAST16 40
|
||||
#define _CFFI_PRIM_UINT_FAST16 41
|
||||
#define _CFFI_PRIM_INT_FAST32 42
|
||||
#define _CFFI_PRIM_UINT_FAST32 43
|
||||
#define _CFFI_PRIM_INT_FAST64 44
|
||||
#define _CFFI_PRIM_UINT_FAST64 45
|
||||
#define _CFFI_PRIM_INTMAX 46
|
||||
#define _CFFI_PRIM_UINTMAX 47
|
||||
#define _CFFI_PRIM_FLOATCOMPLEX 48
|
||||
#define _CFFI_PRIM_DOUBLECOMPLEX 49
|
||||
#define _CFFI_PRIM_CHAR16 50
|
||||
#define _CFFI_PRIM_CHAR32 51
|
||||
|
||||
#define _CFFI__NUM_PRIM 52
|
||||
#define _CFFI__UNKNOWN_PRIM (-1)
|
||||
#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
|
||||
#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
|
||||
|
||||
#define _CFFI__IO_FILE_STRUCT (-1)
|
||||
|
||||
|
||||
struct _cffi_global_s {
|
||||
const char *name;
|
||||
void *address;
|
||||
_cffi_opcode_t type_op;
|
||||
void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
|
||||
// OP_CPYTHON_BLTN_*: addr of direct function
|
||||
};
|
||||
|
||||
struct _cffi_getconst_s {
|
||||
unsigned long long value;
|
||||
const struct _cffi_type_context_s *ctx;
|
||||
int gindex;
|
||||
};
|
||||
|
||||
struct _cffi_struct_union_s {
|
||||
const char *name;
|
||||
int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
|
||||
int flags; // _CFFI_F_* flags below
|
||||
size_t size;
|
||||
int alignment;
|
||||
int first_field_index; // -> _cffi_fields array
|
||||
int num_fields;
|
||||
};
|
||||
#define _CFFI_F_UNION 0x01 // is a union, not a struct
|
||||
#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
|
||||
// "standard layout" or if some are missing
|
||||
#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
|
||||
#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
|
||||
#define _CFFI_F_OPAQUE 0x10 // opaque
|
||||
|
||||
struct _cffi_field_s {
|
||||
const char *name;
|
||||
size_t field_offset;
|
||||
size_t field_size;
|
||||
_cffi_opcode_t field_type_op;
|
||||
};
|
||||
|
||||
struct _cffi_enum_s {
|
||||
const char *name;
|
||||
int type_index; // -> _cffi_types, on a OP_ENUM
|
||||
int type_prim; // _CFFI_PRIM_xxx
|
||||
const char *enumerators; // comma-delimited string
|
||||
};
|
||||
|
||||
struct _cffi_typename_s {
|
||||
const char *name;
|
||||
int type_index; /* if opaque, points to a possibly artificial
|
||||
OP_STRUCT which is itself opaque */
|
||||
};
|
||||
|
||||
struct _cffi_type_context_s {
|
||||
_cffi_opcode_t *types;
|
||||
const struct _cffi_global_s *globals;
|
||||
const struct _cffi_field_s *fields;
|
||||
const struct _cffi_struct_union_s *struct_unions;
|
||||
const struct _cffi_enum_s *enums;
|
||||
const struct _cffi_typename_s *typenames;
|
||||
int num_globals;
|
||||
int num_struct_unions;
|
||||
int num_enums;
|
||||
int num_typenames;
|
||||
const char *const *includes;
|
||||
int num_types;
|
||||
int flags; /* future extension */
|
||||
};
|
||||
|
||||
struct _cffi_parse_info_s {
|
||||
const struct _cffi_type_context_s *ctx;
|
||||
_cffi_opcode_t *output;
|
||||
unsigned int output_size;
|
||||
size_t error_location;
|
||||
const char *error_message;
|
||||
};
|
||||
|
||||
struct _cffi_externpy_s {
|
||||
const char *name;
|
||||
size_t size_of_result;
|
||||
void *reserved1, *reserved2;
|
||||
};
|
||||
|
||||
#ifdef _CFFI_INTERNAL
|
||||
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
|
||||
static int search_in_globals(const struct _cffi_type_context_s *ctx,
|
||||
const char *search, size_t search_len);
|
||||
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
|
||||
const char *search, size_t search_len);
|
||||
#endif
|
|
@@ -1,121 +0,0 @@
|
|||
# pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi
|
||||
import sys, os, subprocess
|
||||
|
||||
from .error import PkgConfigError
|
||||
|
||||
|
||||
def merge_flags(cfg1, cfg2):
|
||||
"""Merge values from cffi config flags cfg2 to cf1
|
||||
|
||||
Example:
|
||||
merge_flags({"libraries": ["one"]}, {"libraries": ["two"]})
|
||||
{"libraries": ["one", "two"]}
|
||||
"""
|
||||
for key, value in cfg2.items():
|
||||
if key not in cfg1:
|
||||
cfg1[key] = value
|
||||
else:
|
||||
if not isinstance(cfg1[key], list):
|
||||
raise TypeError("cfg1[%r] should be a list of strings" % (key,))
|
||||
if not isinstance(value, list):
|
||||
raise TypeError("cfg2[%r] should be a list of strings" % (key,))
|
||||
cfg1[key].extend(value)
|
||||
return cfg1
|
||||
|
||||
|
||||
def call(libname, flag, encoding=sys.getfilesystemencoding()):
|
||||
"""Calls pkg-config and returns the output if found
|
||||
"""
|
||||
a = ["pkg-config", "--print-errors"]
|
||||
a.append(flag)
|
||||
a.append(libname)
|
||||
try:
|
||||
pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
except EnvironmentError as e:
|
||||
raise PkgConfigError("cannot run pkg-config: %s" % (str(e).strip(),))
|
||||
|
||||
bout, berr = pc.communicate()
|
||||
if pc.returncode != 0:
|
||||
try:
|
||||
berr = berr.decode(encoding)
|
||||
except Exception:
|
||||
pass
|
||||
raise PkgConfigError(berr.strip())
|
||||
|
||||
if sys.version_info >= (3,) and not isinstance(bout, str): # Python 3.x
|
||||
try:
|
||||
bout = bout.decode(encoding)
|
||||
except UnicodeDecodeError:
|
||||
raise PkgConfigError("pkg-config %s %s returned bytes that cannot "
|
||||
"be decoded with encoding %r:\n%r" %
|
||||
(flag, libname, encoding, bout))
|
||||
|
||||
if os.altsep != '\\' and '\\' in bout:
|
||||
raise PkgConfigError("pkg-config %s %s returned an unsupported "
|
||||
"backslash-escaped output:\n%r" %
|
||||
(flag, libname, bout))
|
||||
return bout
|
||||
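# Example sketch (requires pkg-config and the library's .pc file to be
# installed; the exact output varies by system):
#
#     call("zlib", "--cflags")   # e.g. '' or '-I/usr/include\n'
#     call("zlib", "--libs")     # e.g. '-lz\n'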
|
||||
|
||||
def flags_from_pkgconfig(libs):
|
||||
r"""Return compiler line flags for FFI.set_source based on pkg-config output
|
||||
|
||||
Usage
|
||||
...
|
||||
ffibuilder.set_source("_foo", pkgconfig = ["libfoo", "libbar >= 1.8.3"])
|
||||
|
||||
If pkg-config is installed on build machine, then arguments include_dirs,
|
||||
library_dirs, libraries, define_macros, extra_compile_args and
|
||||
extra_link_args are extended with an output of pkg-config for libfoo and
|
||||
libbar.
|
||||
|
||||
Raises PkgConfigError in case the pkg-config call fails.
|
||||
"""
|
||||
|
||||
def get_include_dirs(string):
|
||||
return [x[2:] for x in string.split() if x.startswith("-I")]
|
||||
|
||||
def get_library_dirs(string):
|
||||
return [x[2:] for x in string.split() if x.startswith("-L")]
|
||||
|
||||
def get_libraries(string):
|
||||
return [x[2:] for x in string.split() if x.startswith("-l")]
|
||||
|
||||
# convert -Dfoo=bar to list of tuples [("foo", "bar")] expected by distutils
|
||||
def get_macros(string):
|
||||
def _macro(x):
|
||||
x = x[2:] # drop "-D"
|
||||
if '=' in x:
|
||||
return tuple(x.split("=", 1)) # "-Dfoo=bar" => ("foo", "bar")
|
||||
else:
|
||||
return (x, None) # "-Dfoo" => ("foo", None)
|
||||
return [_macro(x) for x in string.split() if x.startswith("-D")]
|
||||
|
||||
def get_other_cflags(string):
|
||||
return [x for x in string.split() if not x.startswith("-I") and
|
||||
not x.startswith("-D")]
|
||||
|
||||
def get_other_libs(string):
|
||||
return [x for x in string.split() if not x.startswith("-L") and
|
||||
not x.startswith("-l")]
|
||||
|
||||
# return kwargs for given libname
|
||||
def kwargs(libname):
|
||||
fse = sys.getfilesystemencoding()
|
||||
all_cflags = call(libname, "--cflags")
|
||||
all_libs = call(libname, "--libs")
|
||||
return {
|
||||
"include_dirs": get_include_dirs(all_cflags),
|
||||
"library_dirs": get_library_dirs(all_libs),
|
||||
"libraries": get_libraries(all_libs),
|
||||
"define_macros": get_macros(all_cflags),
|
||||
"extra_compile_args": get_other_cflags(all_cflags),
|
||||
"extra_link_args": get_other_libs(all_libs),
|
||||
}
|
||||
|
||||
# merge all arguments together
|
||||
ret = {}
|
||||
for libname in libs:
|
||||
lib_flags = kwargs(libname)
|
||||
merge_flags(ret, lib_flags)
|
||||
return ret
|
File diff suppressed because it is too large
|
@@ -1,216 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
try:
|
||||
basestring
|
||||
except NameError:
|
||||
# Python 3.x
|
||||
basestring = str
|
||||
|
||||
def error(msg):
|
||||
from cffi._shimmed_dist_utils import DistutilsSetupError
|
||||
raise DistutilsSetupError(msg)
|
||||
|
||||
|
||||
def execfile(filename, glob):
|
||||
# We use execfile() (here rewritten for Python 3) instead of
|
||||
# __import__() to load the build script. The problem with
|
||||
# a normal import is that in some packages, the intermediate
|
||||
# __init__.py files may already try to import the file that
|
||||
# we are generating.
|
||||
with open(filename) as f:
|
||||
src = f.read()
|
||||
src += '\n' # Python 2.6 compatibility
|
||||
code = compile(src, filename, 'exec')
|
||||
exec(code, glob, glob)
|
||||
|
||||
|
||||
def add_cffi_module(dist, mod_spec):
|
||||
from cffi.api import FFI
|
||||
|
||||
if not isinstance(mod_spec, basestring):
|
||||
error("argument to 'cffi_modules=...' must be a str or a list of str,"
|
||||
" not %r" % (type(mod_spec).__name__,))
|
||||
mod_spec = str(mod_spec)
|
||||
try:
|
||||
build_file_name, ffi_var_name = mod_spec.split(':')
|
||||
except ValueError:
|
||||
error("%r must be of the form 'path/build.py:ffi_variable'" %
|
||||
(mod_spec,))
|
||||
if not os.path.exists(build_file_name):
|
||||
ext = ''
|
||||
rewritten = build_file_name.replace('.', '/') + '.py'
|
||||
if os.path.exists(rewritten):
|
||||
ext = ' (rewrite cffi_modules to [%r])' % (
|
||||
rewritten + ':' + ffi_var_name,)
|
||||
error("%r does not name an existing file%s" % (build_file_name, ext))
|
||||
|
||||
mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
|
||||
execfile(build_file_name, mod_vars)
|
||||
|
||||
try:
|
||||
ffi = mod_vars[ffi_var_name]
|
||||
except KeyError:
|
||||
error("%r: object %r not found in module" % (mod_spec,
|
||||
ffi_var_name))
|
||||
if not isinstance(ffi, FFI):
|
||||
ffi = ffi() # maybe it's a function instead of directly an ffi
|
||||
if not isinstance(ffi, FFI):
|
||||
error("%r is not an FFI instance (got %r)" % (mod_spec,
|
||||
type(ffi).__name__))
|
||||
if not hasattr(ffi, '_assigned_source'):
|
||||
error("%r: the set_source() method was not called" % (mod_spec,))
|
||||
module_name, source, source_extension, kwds = ffi._assigned_source
|
||||
if ffi._windows_unicode:
|
||||
kwds = kwds.copy()
|
||||
ffi._apply_windows_unicode(kwds)
|
||||
|
||||
if source is None:
|
||||
_add_py_module(dist, ffi, module_name)
|
||||
else:
|
||||
_add_c_module(dist, ffi, module_name, source, source_extension, kwds)
|
||||
|
||||
def _set_py_limited_api(Extension, kwds):
|
||||
"""
|
||||
Add py_limited_api to kwds if setuptools >= 26 is in use.
|
||||
Do not alter the setting if it already exists.
|
||||
Setuptools takes care of ignoring the flag on Python 2 and PyPy.
|
||||
|
||||
CPython itself should ignore the flag in a debugging version
|
||||
(by not listing .abi3.so in the extensions it supports), but
|
||||
it doesn't so far, creating trouble. That's why we check
|
||||
for "not hasattr(sys, 'gettotalrefcount')" (the 2.7 compatible equivalent
|
||||
of 'd' not in sys.abiflags). (http://bugs.python.org/issue28401)
|
||||
|
||||
On Windows, with CPython <= 3.4, it's better not to use py_limited_api
|
||||
because virtualenv *still* doesn't copy PYTHON3.DLL on these versions.
|
||||
Recently (2020) we started shipping only >= 3.5 wheels, though. So
|
||||
we'll give it another try and set py_limited_api on Windows >= 3.5.
|
||||
"""
|
||||
from cffi import recompiler
|
||||
|
||||
if ('py_limited_api' not in kwds and not hasattr(sys, 'gettotalrefcount')
|
||||
and recompiler.USE_LIMITED_API):
|
||||
import setuptools
|
||||
try:
|
||||
setuptools_major_version = int(setuptools.__version__.partition('.')[0])
|
||||
if setuptools_major_version >= 26:
|
||||
kwds['py_limited_api'] = True
|
||||
except ValueError: # certain development versions of setuptools
|
||||
# If we don't know the version number of setuptools, we
|
||||
# try to set 'py_limited_api' anyway. At worst, we get a
|
||||
# warning.
|
||||
kwds['py_limited_api'] = True
|
||||
return kwds
|
||||
|
||||
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
|
||||
# We are a setuptools extension. Need this build_ext for py_limited_api.
|
||||
from setuptools.command.build_ext import build_ext
|
||||
from cffi._shimmed_dist_utils import Extension, log, mkpath
|
||||
from cffi import recompiler
|
||||
|
||||
allsources = ['$PLACEHOLDER']
|
||||
allsources.extend(kwds.pop('sources', []))
|
||||
kwds = _set_py_limited_api(Extension, kwds)
|
||||
ext = Extension(name=module_name, sources=allsources, **kwds)
|
||||
|
||||
def make_mod(tmpdir, pre_run=None):
|
||||
c_file = os.path.join(tmpdir, module_name + source_extension)
|
||||
log.info("generating cffi module %r" % c_file)
|
||||
mkpath(tmpdir)
|
||||
# a setuptools-only, API-only hook: called with the "ext" and "ffi"
|
||||
# arguments just before we turn the ffi into C code. To use it,
|
||||
# subclass the 'distutils.command.build_ext.build_ext' class and
|
||||
# add a method 'def pre_run(self, ext, ffi)'.
|
||||
if pre_run is not None:
|
||||
pre_run(ext, ffi)
|
||||
updated = recompiler.make_c_source(ffi, module_name, source, c_file)
|
||||
if not updated:
|
||||
log.info("already up-to-date")
|
||||
return c_file
|
||||
|
||||
if dist.ext_modules is None:
|
||||
dist.ext_modules = []
|
||||
dist.ext_modules.append(ext)
|
||||
|
||||
base_class = dist.cmdclass.get('build_ext', build_ext)
|
||||
class build_ext_make_mod(base_class):
|
||||
def run(self):
|
||||
if ext.sources[0] == '$PLACEHOLDER':
|
||||
pre_run = getattr(self, 'pre_run', None)
|
||||
ext.sources[0] = make_mod(self.build_temp, pre_run)
|
||||
base_class.run(self)
|
||||
dist.cmdclass['build_ext'] = build_ext_make_mod
|
||||
# NB. multiple runs here will create multiple 'build_ext_make_mod'
|
||||
# classes. Even in this case the 'build_ext' command should be
|
||||
# run once; but just in case, the logic above does nothing if
|
||||
# called again.
|
||||
|
||||
|
||||
def _add_py_module(dist, ffi, module_name):
|
||||
from setuptools.command.build_py import build_py
|
||||
from setuptools.command.build_ext import build_ext
|
||||
from cffi._shimmed_dist_utils import log, mkpath
|
||||
from cffi import recompiler
|
||||
|
||||
def generate_mod(py_file):
|
||||
log.info("generating cffi module %r" % py_file)
|
||||
mkpath(os.path.dirname(py_file))
|
||||
updated = recompiler.make_py_source(ffi, module_name, py_file)
|
||||
if not updated:
|
||||
log.info("already up-to-date")
|
||||
|
||||
base_class = dist.cmdclass.get('build_py', build_py)
|
||||
class build_py_make_mod(base_class):
|
||||
def run(self):
|
||||
base_class.run(self)
|
||||
module_path = module_name.split('.')
|
||||
module_path[-1] += '.py'
|
||||
generate_mod(os.path.join(self.build_lib, *module_path))
|
||||
def get_source_files(self):
|
||||
# This is called from 'setup.py sdist' only. Exclude
|
||||
# the generate .py module in this case.
|
||||
saved_py_modules = self.py_modules
|
||||
try:
|
||||
if saved_py_modules:
|
||||
self.py_modules = [m for m in saved_py_modules
|
||||
if m != module_name]
|
||||
return base_class.get_source_files(self)
|
||||
finally:
|
||||
self.py_modules = saved_py_modules
|
||||
dist.cmdclass['build_py'] = build_py_make_mod
|
||||
|
||||
# distutils and setuptools have no notion I could find of a
|
||||
# generated python module. If we don't add module_name to
|
||||
# dist.py_modules, then things mostly work but there are some
|
||||
# combination of options (--root and --record) that will miss
|
||||
# the module. So we add it here, which gives a few apparently
|
||||
# harmless warnings about not finding the file outside the
|
||||
# build directory.
|
||||
# Then we need to hack more in get_source_files(); see above.
|
||||
if dist.py_modules is None:
|
||||
dist.py_modules = []
|
||||
dist.py_modules.append(module_name)
|
||||
|
||||
# the following is only for "build_ext -i"
|
||||
base_class_2 = dist.cmdclass.get('build_ext', build_ext)
|
||||
class build_ext_make_mod(base_class_2):
|
||||
def run(self):
|
||||
base_class_2.run(self)
|
||||
if self.inplace:
|
||||
# from get_ext_fullpath() in distutils/command/build_ext.py
|
||||
module_path = module_name.split('.')
|
||||
package = '.'.join(module_path[:-1])
|
||||
build_py = self.get_finalized_command('build_py')
|
||||
package_dir = build_py.get_package_dir(package)
|
||||
file_name = module_path[-1] + '.py'
|
||||
generate_mod(os.path.join(package_dir, file_name))
|
||||
dist.cmdclass['build_ext'] = build_ext_make_mod
|
||||
|
||||
def cffi_modules(dist, attr, value):
|
||||
assert attr == 'cffi_modules'
|
||||
if isinstance(value, basestring):
|
||||
value = [value]
|
||||
|
||||
for cffi_module in value:
|
||||
add_cffi_module(dist, cffi_module)
|
File diff suppressed because it is too large
Load Diff
|
@ -1,679 +0,0 @@
|
|||
#
|
||||
# DEPRECATED: implementation for ffi.verify()
|
||||
#
|
||||
import sys, os
|
||||
import types
|
||||
|
||||
from . import model
|
||||
from .error import VerificationError
|
||||
|
||||
|
||||
class VGenericEngine(object):
|
||||
_class_key = 'g'
|
||||
_gen_python_module = False
|
||||
|
||||
def __init__(self, verifier):
|
||||
self.verifier = verifier
|
||||
self.ffi = verifier.ffi
|
||||
self.export_symbols = []
|
||||
self._struct_pending_verification = {}
|
||||
|
||||
def patch_extension_kwds(self, kwds):
|
||||
# add 'export_symbols' to the dictionary. Note that we add the
|
||||
# list before filling it. When we fill it, it will thus also show
|
||||
# up in kwds['export_symbols'].
|
||||
kwds.setdefault('export_symbols', self.export_symbols)
|
||||
|
||||
def find_module(self, module_name, path, so_suffixes):
|
||||
for so_suffix in so_suffixes:
|
||||
basename = module_name + so_suffix
|
||||
if path is None:
|
||||
path = sys.path
|
||||
for dirname in path:
|
||||
filename = os.path.join(dirname, basename)
|
||||
if os.path.isfile(filename):
|
||||
return filename
|
||||
|
||||
def collect_types(self):
|
||||
pass # not needed in the generic engine
|
||||
|
||||
def _prnt(self, what=''):
|
||||
self._f.write(what + '\n')
|
||||
|
||||
def write_source_to_f(self):
|
||||
prnt = self._prnt
|
||||
# first paste some standard set of lines that are mostly '#include'
|
||||
prnt(cffimod_header)
|
||||
# then paste the C source given by the user, verbatim.
|
||||
prnt(self.verifier.preamble)
|
||||
#
|
||||
# call generate_gen_xxx_decl(), for every xxx found from
|
||||
# ffi._parser._declarations. This generates all the functions.
|
||||
self._generate('decl')
|
||||
#
|
||||
# on Windows, distutils insists on putting init_cffi_xyz in
|
||||
# 'export_symbols', so instead of fighting it, just give up and
|
||||
# give it one
|
||||
if sys.platform == 'win32':
|
||||
if sys.version_info >= (3,):
|
||||
prefix = 'PyInit_'
|
||||
else:
|
||||
prefix = 'init'
|
||||
modname = self.verifier.get_module_name()
|
||||
prnt("void %s%s(void) { }\n" % (prefix, modname))
|
||||
|
||||
def load_library(self, flags=0):
|
||||
# import it with the CFFI backend
|
||||
backend = self.ffi._backend
|
||||
# needs to make a path that contains '/', on Posix
|
||||
filename = os.path.join(os.curdir, self.verifier.modulefilename)
|
||||
module = backend.load_library(filename, flags)
|
||||
#
|
||||
# call loading_gen_struct() to get the struct layout inferred by
|
||||
# the C compiler
|
||||
self._load(module, 'loading')
|
||||
|
||||
# build the FFILibrary class and instance, this is a module subclass
|
||||
# because modules are expected to have usually-constant-attributes and
|
||||
# in PyPy this means the JIT is able to treat attributes as constant,
|
||||
# which we want.
|
||||
class FFILibrary(types.ModuleType):
|
||||
_cffi_generic_module = module
|
||||
_cffi_ffi = self.ffi
|
||||
_cffi_dir = []
|
||||
def __dir__(self):
|
||||
return FFILibrary._cffi_dir
|
||||
library = FFILibrary("")
|
||||
#
|
||||
# finally, call the loaded_gen_xxx() functions. This will set
|
||||
# up the 'library' object.
|
||||
self._load(module, 'loaded', library=library)
|
||||
return library
|
||||
|
||||
def _get_declarations(self):
|
||||
lst = [(key, tp) for (key, (tp, qual)) in
|
||||
self.ffi._parser._declarations.items()]
|
||||
lst.sort()
|
||||
return lst
|
||||
|
||||
def _generate(self, step_name):
|
||||
for name, tp in self._get_declarations():
|
||||
kind, realname = name.split(' ', 1)
|
||||
try:
|
||||
method = getattr(self, '_generate_gen_%s_%s' % (kind,
|
||||
step_name))
|
||||
except AttributeError:
|
||||
raise VerificationError(
|
||||
"not implemented in verify(): %r" % name)
|
||||
try:
|
||||
method(tp, realname)
|
||||
except Exception as e:
|
||||
model.attach_exception_info(e, name)
|
||||
raise
|
||||
|
||||
def _load(self, module, step_name, **kwds):
|
||||
for name, tp in self._get_declarations():
|
||||
kind, realname = name.split(' ', 1)
|
||||
method = getattr(self, '_%s_gen_%s' % (step_name, kind))
|
||||
try:
|
||||
method(tp, realname, module, **kwds)
|
||||
except Exception as e:
|
||||
model.attach_exception_info(e, name)
|
||||
raise
|
||||
|
||||
def _generate_nothing(self, tp, name):
|
||||
pass
|
||||
|
||||
def _loaded_noop(self, tp, name, module, **kwds):
|
||||
pass
|
||||
|
||||
# ----------
|
||||
# typedefs: generates no code so far
|
||||
|
||||
_generate_gen_typedef_decl = _generate_nothing
|
||||
_loading_gen_typedef = _loaded_noop
|
||||
_loaded_gen_typedef = _loaded_noop
|
||||
|
||||
# ----------
|
||||
# function declarations
|
||||
|
||||
def _generate_gen_function_decl(self, tp, name):
|
||||
assert isinstance(tp, model.FunctionPtrType)
|
||||
if tp.ellipsis:
|
||||
# cannot support vararg functions better than this: check for its
|
||||
# exact type (including the fixed arguments), and build it as a
|
||||
# constant function pointer (no _cffi_f_%s wrapper)
|
||||
self._generate_gen_const(False, name, tp)
|
||||
return
|
||||
prnt = self._prnt
|
||||
numargs = len(tp.args)
|
||||
argnames = []
|
||||
for i, type in enumerate(tp.args):
|
||||
indirection = ''
|
||||
if isinstance(type, model.StructOrUnion):
|
||||
indirection = '*'
|
||||
argnames.append('%sx%d' % (indirection, i))
|
||||
context = 'argument of %s' % name
|
||||
arglist = [type.get_c_name(' %s' % arg, context)
|
||||
for type, arg in zip(tp.args, argnames)]
|
||||
tpresult = tp.result
|
||||
if isinstance(tpresult, model.StructOrUnion):
|
||||
arglist.insert(0, tpresult.get_c_name(' *r', context))
|
||||
tpresult = model.void_type
|
||||
arglist = ', '.join(arglist) or 'void'
|
||||
wrappername = '_cffi_f_%s' % name
|
||||
self.export_symbols.append(wrappername)
|
||||
if tp.abi:
|
||||
abi = tp.abi + ' '
|
||||
else:
|
||||
abi = ''
|
||||
funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
|
||||
context = 'result of %s' % name
|
||||
prnt(tpresult.get_c_name(funcdecl, context))
|
||||
prnt('{')
|
||||
#
|
||||
if isinstance(tp.result, model.StructOrUnion):
|
||||
result_code = '*r = '
|
||||
elif not isinstance(tp.result, model.VoidType):
|
||||
result_code = 'return '
|
||||
else:
|
||||
result_code = ''
|
||||
prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
_loading_gen_function = _loaded_noop
|
||||
|
||||
def _loaded_gen_function(self, tp, name, module, library):
|
||||
assert isinstance(tp, model.FunctionPtrType)
|
||||
if tp.ellipsis:
|
||||
newfunction = self._load_constant(False, tp, name, module)
|
||||
else:
|
||||
indirections = []
|
||||
base_tp = tp
|
||||
if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
|
||||
or isinstance(tp.result, model.StructOrUnion)):
|
||||
indirect_args = []
|
||||
for i, typ in enumerate(tp.args):
|
||||
if isinstance(typ, model.StructOrUnion):
|
||||
typ = model.PointerType(typ)
|
||||
indirections.append((i, typ))
|
||||
indirect_args.append(typ)
|
||||
indirect_result = tp.result
|
||||
if isinstance(indirect_result, model.StructOrUnion):
|
||||
if indirect_result.fldtypes is None:
|
||||
raise TypeError("'%s' is used as result type, "
|
||||
"but is opaque" % (
|
||||
indirect_result._get_c_name(),))
|
||||
indirect_result = model.PointerType(indirect_result)
|
||||
indirect_args.insert(0, indirect_result)
|
||||
indirections.insert(0, ("result", indirect_result))
|
||||
indirect_result = model.void_type
|
||||
tp = model.FunctionPtrType(tuple(indirect_args),
|
||||
indirect_result, tp.ellipsis)
|
||||
BFunc = self.ffi._get_cached_btype(tp)
|
||||
wrappername = '_cffi_f_%s' % name
|
||||
newfunction = module.load_function(BFunc, wrappername)
|
||||
for i, typ in indirections:
|
||||
newfunction = self._make_struct_wrapper(newfunction, i, typ,
|
||||
base_tp)
|
||||
setattr(library, name, newfunction)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
|
||||
backend = self.ffi._backend
|
||||
BType = self.ffi._get_cached_btype(tp)
|
||||
if i == "result":
|
||||
ffi = self.ffi
|
||||
def newfunc(*args):
|
||||
res = ffi.new(BType)
|
||||
oldfunc(res, *args)
|
||||
return res[0]
|
||||
else:
|
||||
def newfunc(*args):
|
||||
args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
|
||||
return oldfunc(*args)
|
||||
newfunc._cffi_base_type = base_tp
|
||||
return newfunc
|
||||
|
||||
# ----------
|
||||
# named structs
|
||||
|
||||
def _generate_gen_struct_decl(self, tp, name):
|
||||
assert name == tp.name
|
||||
self._generate_struct_or_union_decl(tp, 'struct', name)
|
||||
|
||||
def _loading_gen_struct(self, tp, name, module):
|
||||
self._loading_struct_or_union(tp, 'struct', name, module)
|
||||
|
||||
def _loaded_gen_struct(self, tp, name, module, **kwds):
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
def _generate_gen_union_decl(self, tp, name):
|
||||
assert name == tp.name
|
||||
self._generate_struct_or_union_decl(tp, 'union', name)
|
||||
|
||||
def _loading_gen_union(self, tp, name, module):
|
||||
self._loading_struct_or_union(tp, 'union', name, module)
|
||||
|
||||
def _loaded_gen_union(self, tp, name, module, **kwds):
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
def _generate_struct_or_union_decl(self, tp, prefix, name):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
|
||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
||||
cname = ('%s %s' % (prefix, name)).strip()
|
||||
#
|
||||
prnt = self._prnt
|
||||
prnt('static void %s(%s *p)' % (checkfuncname, cname))
|
||||
prnt('{')
|
||||
prnt(' /* only to generate compile-time warnings or errors */')
|
||||
prnt(' (void)p;')
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if (isinstance(ftype, model.PrimitiveType)
|
||||
and ftype.is_integer_type()) or fbitsize >= 0:
|
||||
# accept all integers, but complain on float or double
|
||||
prnt(' (void)((p->%s) << 1);' % fname)
|
||||
else:
|
||||
# only accept exactly the type declared.
|
||||
try:
|
||||
prnt(' { %s = &p->%s; (void)tmp; }' % (
|
||||
ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
|
||||
fname))
|
||||
except VerificationError as e:
|
||||
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
|
||||
prnt('}')
|
||||
self.export_symbols.append(layoutfuncname)
|
||||
prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
|
||||
prnt('{')
|
||||
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
|
||||
prnt(' static intptr_t nums[] = {')
|
||||
prnt(' sizeof(%s),' % cname)
|
||||
prnt(' offsetof(struct _cffi_aligncheck, y),')
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if fbitsize >= 0:
|
||||
continue # xxx ignore fbitsize for now
|
||||
prnt(' offsetof(%s, %s),' % (cname, fname))
|
||||
if isinstance(ftype, model.ArrayType) and ftype.length is None:
|
||||
prnt(' 0, /* %s */' % ftype._get_c_name())
|
||||
else:
|
||||
prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
|
||||
prnt(' -1')
|
||||
prnt(' };')
|
||||
prnt(' return nums[i];')
|
||||
prnt(' /* the next line is not executed, but compiled */')
|
||||
prnt(' %s(0);' % (checkfuncname,))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _loading_struct_or_union(self, tp, prefix, name, module):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
||||
#
|
||||
BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
|
||||
function = module.load_function(BFunc, layoutfuncname)
|
||||
layout = []
|
||||
num = 0
|
||||
while True:
|
||||
x = function(num)
|
||||
if x < 0: break
|
||||
layout.append(x)
|
||||
num += 1
|
||||
if isinstance(tp, model.StructOrUnion) and tp.partial:
|
||||
# use the function()'s sizes and offsets to guide the
|
||||
# layout of the struct
|
||||
totalsize = layout[0]
|
||||
totalalignment = layout[1]
|
||||
fieldofs = layout[2::2]
|
||||
fieldsize = layout[3::2]
|
||||
tp.force_flatten()
|
||||
assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
|
||||
tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
|
||||
else:
|
||||
cname = ('%s %s' % (prefix, name)).strip()
|
||||
self._struct_pending_verification[tp] = layout, cname
|
||||
|
||||
def _loaded_struct_or_union(self, tp):
|
||||
if tp.fldnames is None:
|
||||
return # nothing to do with opaque structs
|
||||
self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
|
||||
|
||||
if tp in self._struct_pending_verification:
|
||||
# check that the layout sizes and offsets match the real ones
|
||||
def check(realvalue, expectedvalue, msg):
|
||||
if realvalue != expectedvalue:
|
||||
raise VerificationError(
|
||||
"%s (we have %d, but C compiler says %d)"
|
||||
% (msg, expectedvalue, realvalue))
|
||||
ffi = self.ffi
|
||||
BStruct = ffi._get_cached_btype(tp)
|
||||
layout, cname = self._struct_pending_verification.pop(tp)
|
||||
check(layout[0], ffi.sizeof(BStruct), "wrong total size")
|
||||
check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
|
||||
i = 2
|
||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
||||
if fbitsize >= 0:
|
||||
continue # xxx ignore fbitsize for now
|
||||
check(layout[i], ffi.offsetof(BStruct, fname),
|
||||
"wrong offset for field %r" % (fname,))
|
||||
if layout[i+1] != 0:
|
||||
BField = ffi._get_cached_btype(ftype)
|
||||
check(layout[i+1], ffi.sizeof(BField),
|
||||
"wrong size for field %r" % (fname,))
|
||||
i += 2
|
||||
assert i == len(layout)
|
||||
|
||||
# ----------
|
||||
# 'anonymous' declarations. These are produced for anonymous structs
|
||||
# or unions; the 'name' is obtained by a typedef.
|
||||
|
||||
def _generate_gen_anonymous_decl(self, tp, name):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._generate_gen_enum_decl(tp, name, '')
|
||||
else:
|
||||
self._generate_struct_or_union_decl(tp, '', name)
|
||||
|
||||
def _loading_gen_anonymous(self, tp, name, module):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._loading_gen_enum(tp, name, module, '')
|
||||
else:
|
||||
self._loading_struct_or_union(tp, '', name, module)
|
||||
|
||||
def _loaded_gen_anonymous(self, tp, name, module, **kwds):
|
||||
if isinstance(tp, model.EnumType):
|
||||
self._loaded_gen_enum(tp, name, module, **kwds)
|
||||
else:
|
||||
self._loaded_struct_or_union(tp)
|
||||
|
||||
# ----------
|
||||
# constants, likely declared with '#define'
|
||||
|
||||
def _generate_gen_const(self, is_int, name, tp=None, category='const',
|
||||
check_value=None):
|
||||
prnt = self._prnt
|
||||
funcname = '_cffi_%s_%s' % (category, name)
|
||||
self.export_symbols.append(funcname)
|
||||
if check_value is not None:
|
||||
assert is_int
|
||||
assert category == 'const'
|
||||
prnt('int %s(char *out_error)' % funcname)
|
||||
prnt('{')
|
||||
self._check_int_constant_value(name, check_value)
|
||||
prnt(' return 0;')
|
||||
prnt('}')
|
||||
elif is_int:
|
||||
assert category == 'const'
|
||||
prnt('int %s(long long *out_value)' % funcname)
|
||||
prnt('{')
|
||||
prnt(' *out_value = (long long)(%s);' % (name,))
|
||||
prnt(' return (%s) <= 0;' % (name,))
|
||||
prnt('}')
|
||||
else:
|
||||
assert tp is not None
|
||||
assert check_value is None
|
||||
if category == 'var':
|
||||
ampersand = '&'
|
||||
else:
|
||||
ampersand = ''
|
||||
extra = ''
|
||||
if category == 'const' and isinstance(tp, model.StructOrUnion):
|
||||
extra = 'const *'
|
||||
ampersand = '&'
|
||||
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
|
||||
prnt('{')
|
||||
prnt(' return (%s%s);' % (ampersand, name))
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _generate_gen_constant_decl(self, tp, name):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
self._generate_gen_const(is_int, name, tp)
|
||||
|
||||
_loading_gen_constant = _loaded_noop
|
||||
|
||||
def _load_constant(self, is_int, tp, name, module, check_value=None):
|
||||
funcname = '_cffi_const_%s' % name
|
||||
if check_value is not None:
|
||||
assert is_int
|
||||
self._load_known_int_constant(module, funcname)
|
||||
value = check_value
|
||||
elif is_int:
|
||||
BType = self.ffi._typeof_locked("long long*")[0]
|
||||
BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
p = self.ffi.new(BType)
|
||||
negative = function(p)
|
||||
value = int(p[0])
|
||||
if value < 0 and not negative:
|
||||
BLongLong = self.ffi._typeof_locked("long long")[0]
|
||||
value += (1 << (8*self.ffi.sizeof(BLongLong)))
|
||||
else:
|
||||
assert check_value is None
|
||||
fntypeextra = '(*)(void)'
|
||||
if isinstance(tp, model.StructOrUnion):
|
||||
fntypeextra = '*' + fntypeextra
|
||||
BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
value = function()
|
||||
if isinstance(tp, model.StructOrUnion):
|
||||
value = value[0]
|
||||
return value
|
||||
|
||||
def _loaded_gen_constant(self, tp, name, module, library):
|
||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
||||
value = self._load_constant(is_int, tp, name, module)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
# ----------
|
||||
# enums
|
||||
|
||||
def _check_int_constant_value(self, name, value):
|
||||
prnt = self._prnt
|
||||
if value <= 0:
|
||||
prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
|
||||
name, name, value))
|
||||
else:
|
||||
prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
|
||||
name, name, value))
|
||||
prnt(' char buf[64];')
|
||||
prnt(' if ((%s) <= 0)' % name)
|
||||
prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
|
||||
prnt(' else')
|
||||
prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
|
||||
name)
|
||||
prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
|
||||
prnt(' "%s", buf, "%d");' % (name[:100], value))
|
||||
prnt(' return -1;')
|
||||
prnt(' }')
|
||||
|
||||
def _load_known_int_constant(self, module, funcname):
|
||||
BType = self.ffi._typeof_locked("char[]")[0]
|
||||
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
p = self.ffi.new(BType, 256)
|
||||
if function(p) < 0:
|
||||
error = self.ffi.string(p)
|
||||
if sys.version_info >= (3,):
|
||||
error = str(error, 'utf-8')
|
||||
raise VerificationError(error)
|
||||
|
||||
def _enum_funcname(self, prefix, name):
|
||||
# "$enum_$1" => "___D_enum____D_1"
|
||||
name = name.replace('$', '___D_')
|
||||
return '_cffi_e_%s_%s' % (prefix, name)
|
||||
|
||||
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
|
||||
if tp.partial:
|
||||
for enumerator in tp.enumerators:
|
||||
self._generate_gen_const(True, enumerator)
|
||||
return
|
||||
#
|
||||
funcname = self._enum_funcname(prefix, name)
|
||||
self.export_symbols.append(funcname)
|
||||
prnt = self._prnt
|
||||
prnt('int %s(char *out_error)' % funcname)
|
||||
prnt('{')
|
||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
||||
self._check_int_constant_value(enumerator, enumvalue)
|
||||
prnt(' return 0;')
|
||||
prnt('}')
|
||||
prnt()
|
||||
|
||||
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
|
||||
if tp.partial:
|
||||
enumvalues = [self._load_constant(True, tp, enumerator, module)
|
||||
for enumerator in tp.enumerators]
|
||||
tp.enumvalues = tuple(enumvalues)
|
||||
tp.partial_resolved = True
|
||||
else:
|
||||
funcname = self._enum_funcname(prefix, name)
|
||||
self._load_known_int_constant(module, funcname)
|
||||
|
||||
def _loaded_gen_enum(self, tp, name, module, library):
|
||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
||||
setattr(library, enumerator, enumvalue)
|
||||
type(library)._cffi_dir.append(enumerator)
|
||||
|
||||
# ----------
|
||||
# macros: for now only for integers
|
||||
|
||||
def _generate_gen_macro_decl(self, tp, name):
|
||||
if tp == '...':
|
||||
check_value = None
|
||||
else:
|
||||
check_value = tp # an integer
|
||||
self._generate_gen_const(True, name, check_value=check_value)
|
||||
|
||||
_loading_gen_macro = _loaded_noop
|
||||
|
||||
def _loaded_gen_macro(self, tp, name, module, library):
|
||||
if tp == '...':
|
||||
check_value = None
|
||||
else:
|
||||
check_value = tp # an integer
|
||||
value = self._load_constant(True, tp, name, module,
|
||||
check_value=check_value)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
# ----------
|
||||
# global variables
|
||||
|
||||
def _generate_gen_variable_decl(self, tp, name):
|
||||
if isinstance(tp, model.ArrayType):
|
||||
if tp.length_is_unknown():
|
||||
prnt = self._prnt
|
||||
funcname = '_cffi_sizeof_%s' % (name,)
|
||||
self.export_symbols.append(funcname)
|
||||
prnt("size_t %s(void)" % funcname)
|
||||
prnt("{")
|
||||
prnt(" return sizeof(%s);" % (name,))
|
||||
prnt("}")
|
||||
tp_ptr = model.PointerType(tp.item)
|
||||
self._generate_gen_const(False, name, tp_ptr)
|
||||
else:
|
||||
tp_ptr = model.PointerType(tp)
|
||||
self._generate_gen_const(False, name, tp_ptr, category='var')
|
||||
|
||||
_loading_gen_variable = _loaded_noop
|
||||
|
||||
def _loaded_gen_variable(self, tp, name, module, library):
|
||||
if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
|
||||
# sense that "a=..." is forbidden
|
||||
if tp.length_is_unknown():
|
||||
funcname = '_cffi_sizeof_%s' % (name,)
|
||||
BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
size = function()
|
||||
BItemType = self.ffi._get_cached_btype(tp.item)
|
||||
length, rest = divmod(size, self.ffi.sizeof(BItemType))
|
||||
if rest != 0:
|
||||
raise VerificationError(
|
||||
"bad size: %r does not seem to be an array of %s" %
|
||||
(name, tp.item))
|
||||
tp = tp.resolve_length(length)
|
||||
tp_ptr = model.PointerType(tp.item)
|
||||
value = self._load_constant(False, tp_ptr, name, module)
|
||||
# 'value' is a <cdata 'type *'> which we have to replace with
|
||||
# a <cdata 'type[N]'> if the N is actually known
|
||||
if tp.length is not None:
|
||||
BArray = self.ffi._get_cached_btype(tp)
|
||||
value = self.ffi.cast(BArray, value)
|
||||
setattr(library, name, value)
|
||||
type(library)._cffi_dir.append(name)
|
||||
return
|
||||
# remove ptr=<cdata 'int *'> from the library instance, and replace
|
||||
# it by a property on the class, which reads/writes into ptr[0].
|
||||
funcname = '_cffi_var_%s' % name
|
||||
BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
|
||||
function = module.load_function(BFunc, funcname)
|
||||
ptr = function()
|
||||
def getter(library):
|
||||
return ptr[0]
|
||||
def setter(library, value):
|
||||
ptr[0] = value
|
||||
setattr(type(library), name, property(getter, setter))
|
||||
type(library)._cffi_dir.append(name)
|
||||
|
||||
cffimod_header = r'''
|
||||
#include <stdio.h>
|
||||
#include <stddef.h>
|
||||
#include <stdarg.h>
|
||||
#include <errno.h>
|
||||
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
||||
|
||||
/* this block of #ifs should be kept exactly identical between
|
||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||
and cffi/_cffi_include.h */
|
||||
#if defined(_MSC_VER)
|
||||
# include <malloc.h> /* for alloca() */
|
||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||
typedef __int8 int8_t;
|
||||
typedef __int16 int16_t;
|
||||
typedef __int32 int32_t;
|
||||
typedef __int64 int64_t;
|
||||
typedef unsigned __int8 uint8_t;
|
||||
typedef unsigned __int16 uint16_t;
|
||||
typedef unsigned __int32 uint32_t;
|
||||
typedef unsigned __int64 uint64_t;
|
||||
typedef __int8 int_least8_t;
|
||||
typedef __int16 int_least16_t;
|
||||
typedef __int32 int_least32_t;
|
||||
typedef __int64 int_least64_t;
|
||||
typedef unsigned __int8 uint_least8_t;
|
||||
typedef unsigned __int16 uint_least16_t;
|
||||
typedef unsigned __int32 uint_least32_t;
|
||||
typedef unsigned __int64 uint_least64_t;
|
||||
typedef __int8 int_fast8_t;
|
||||
typedef __int16 int_fast16_t;
|
||||
typedef __int32 int_fast32_t;
|
||||
typedef __int64 int_fast64_t;
|
||||
typedef unsigned __int8 uint_fast8_t;
|
||||
typedef unsigned __int16 uint_fast16_t;
|
||||
typedef unsigned __int32 uint_fast32_t;
|
||||
typedef unsigned __int64 uint_fast64_t;
|
||||
typedef __int64 intmax_t;
|
||||
typedef unsigned __int64 uintmax_t;
|
||||
# else
|
||||
# include <stdint.h>
|
||||
# endif
|
||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||
# ifndef __cplusplus
|
||||
typedef unsigned char _Bool;
|
||||
# endif
|
||||
# endif
|
||||
# define _cffi_float_complex_t _Fcomplex /* include <complex.h> for it */
|
||||
# define _cffi_double_complex_t _Dcomplex /* include <complex.h> for it */
|
||||
#else
|
||||
# include <stdint.h>
|
||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||
# include <alloca.h>
|
||||
# endif
|
||||
# define _cffi_float_complex_t float _Complex
|
||||
# define _cffi_double_complex_t double _Complex
|
||||
#endif
|
||||
'''
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue