fix:优化界面
This commit is contained in:
5
.env
5
.env
@@ -1,3 +1,6 @@
|
|||||||
PORT=3000
|
PORT=3000
|
||||||
WECHAT_UPSTREAM_BASE_URL=http://113.44.162.180:7006
|
WECHAT_UPSTREAM_BASE_URL=http://113.44.162.180:7006
|
||||||
|
WS_KEY=HBpEnbtj9BJZ
|
||||||
|
SLIDER_VERIFY_KEY=408449830
|
||||||
|
APIKEY=sk-85880595fc714d63bfd0b025e917bd26#千问apikey
|
||||||
|
# 962516e4-60eb-4a26-a5a3-44e21adcf7bc #豆包
|
||||||
|
|||||||
19
.env.example
19
.env.example
@@ -1,6 +1,21 @@
|
|||||||
PORT=3000
|
PORT=3000
|
||||||
WECHAT_UPSTREAM_BASE_URL=http://113.44.162.180:7006
|
WECHAT_UPSTREAM_BASE_URL=http://113.44.162.180:7006
|
||||||
|
CHECK_STATUS_BASE_URL=http://113.44.162.180:7006
|
||||||
|
# 第三方滑块(7765):iframe 加载自带预填表单页,提交到下方地址
|
||||||
|
SLIDER_VERIFY_BASE_URL=http://113.44.162.180:7765
|
||||||
|
SLIDER_VERIFY_KEY=408449830
|
||||||
|
|
||||||
|
# WS 消息同步(GetSyncMsg)必须使用与登录页一致的账号 key,否则收不到该账号的消息
|
||||||
|
# 优先 WECHAT_WS_KEY,未设置则使用 KEY(登录参数填的 key)
|
||||||
|
# WECHAT_WS_KEY=HBpEnbtj9BJZ
|
||||||
|
|
||||||
# KEY = HBpEnbtj9BJZ
|
# 千问 API Key(用于个性化问候等),优先 QWEN_API_KEY,其次 APIKEY
|
||||||
# password = 408449830
|
QWEN_API_KEY=sk-xxx
|
||||||
|
# 或使用阿里云 DashScope:DASHSCOPE_API_KEY=sk-xxx
|
||||||
|
|
||||||
|
# 发送消息上游路径(默认 /msg/SendTextMsg)
|
||||||
|
# 发送文本消息路径(与 7006 swagger 一致,默认即可)
|
||||||
|
# SEND_MSG_PATH=/message/SendTextMessage
|
||||||
|
|
||||||
|
# 豆包(Volcengine ARK)可选:在模型管理页添加豆包并填 API Key 即可,此处仅作备用
|
||||||
|
# ARK_API_KEY=xxx
|
||||||
241
.venv/bin/Activate.ps1
Normal file
241
.venv/bin/Activate.ps1
Normal file
@@ -0,0 +1,241 @@
|
|||||||
|
<#
|
||||||
|
.Synopsis
|
||||||
|
Activate a Python virtual environment for the current PowerShell session.
|
||||||
|
|
||||||
|
.Description
|
||||||
|
Pushes the python executable for a virtual environment to the front of the
|
||||||
|
$Env:PATH environment variable and sets the prompt to signify that you are
|
||||||
|
in a Python virtual environment. Makes use of the command line switches as
|
||||||
|
well as the `pyvenv.cfg` file values present in the virtual environment.
|
||||||
|
|
||||||
|
.Parameter VenvDir
|
||||||
|
Path to the directory that contains the virtual environment to activate. The
|
||||||
|
default value for this is the parent of the directory that the Activate.ps1
|
||||||
|
script is located within.
|
||||||
|
|
||||||
|
.Parameter Prompt
|
||||||
|
The prompt prefix to display when this virtual environment is activated. By
|
||||||
|
default, this prompt is the name of the virtual environment folder (VenvDir)
|
||||||
|
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -Verbose
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||||
|
and shows extra information about the activation as it executes.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
|
||||||
|
Activates the Python virtual environment located in the specified location.
|
||||||
|
|
||||||
|
.Example
|
||||||
|
Activate.ps1 -Prompt "MyPython"
|
||||||
|
Activates the Python virtual environment that contains the Activate.ps1 script,
|
||||||
|
and prefixes the current prompt with the specified string (surrounded in
|
||||||
|
parentheses) while the virtual environment is active.
|
||||||
|
|
||||||
|
.Notes
|
||||||
|
On Windows, it may be required to enable this Activate.ps1 script by setting the
|
||||||
|
execution policy for the user. You can do this by issuing the following PowerShell
|
||||||
|
command:
|
||||||
|
|
||||||
|
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
|
||||||
|
|
||||||
|
For more information on Execution Policies:
|
||||||
|
https://go.microsoft.com/fwlink/?LinkID=135170
|
||||||
|
|
||||||
|
#>
|
||||||
|
Param(
|
||||||
|
[Parameter(Mandatory = $false)]
|
||||||
|
[String]
|
||||||
|
$VenvDir,
|
||||||
|
[Parameter(Mandatory = $false)]
|
||||||
|
[String]
|
||||||
|
$Prompt
|
||||||
|
)
|
||||||
|
|
||||||
|
<# Function declarations --------------------------------------------------- #>
|
||||||
|
|
||||||
|
<#
|
||||||
|
.Synopsis
|
||||||
|
Remove all shell session elements added by the Activate script, including the
|
||||||
|
addition of the virtual environment's Python executable from the beginning of
|
||||||
|
the PATH variable.
|
||||||
|
|
||||||
|
.Parameter NonDestructive
|
||||||
|
If present, do not remove this function from the global namespace for the
|
||||||
|
session.
|
||||||
|
|
||||||
|
#>
|
||||||
|
function global:deactivate ([switch]$NonDestructive) {
|
||||||
|
# Revert to original values
|
||||||
|
|
||||||
|
# The prior prompt:
|
||||||
|
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
|
||||||
|
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
|
||||||
|
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
|
||||||
|
}
|
||||||
|
|
||||||
|
# The prior PYTHONHOME:
|
||||||
|
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
|
||||||
|
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
|
||||||
|
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
}
|
||||||
|
|
||||||
|
# The prior PATH:
|
||||||
|
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
|
||||||
|
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
|
||||||
|
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
|
||||||
|
}
|
||||||
|
|
||||||
|
# Just remove the VIRTUAL_ENV altogether:
|
||||||
|
if (Test-Path -Path Env:VIRTUAL_ENV) {
|
||||||
|
Remove-Item -Path env:VIRTUAL_ENV
|
||||||
|
}
|
||||||
|
|
||||||
|
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
|
||||||
|
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
|
||||||
|
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
|
||||||
|
}
|
||||||
|
|
||||||
|
# Leave deactivate function in the global namespace if requested:
|
||||||
|
if (-not $NonDestructive) {
|
||||||
|
Remove-Item -Path function:deactivate
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
<#
|
||||||
|
.Description
|
||||||
|
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
|
||||||
|
given folder, and returns them in a map.
|
||||||
|
|
||||||
|
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
|
||||||
|
two strings separated by `=` (with any amount of whitespace surrounding the =)
|
||||||
|
then it is considered a `key = value` line. The left hand string is the key,
|
||||||
|
the right hand is the value.
|
||||||
|
|
||||||
|
If the value starts with a `'` or a `"` then the first and last character is
|
||||||
|
stripped from the value before being captured.
|
||||||
|
|
||||||
|
.Parameter ConfigDir
|
||||||
|
Path to the directory that contains the `pyvenv.cfg` file.
|
||||||
|
#>
|
||||||
|
function Get-PyVenvConfig(
|
||||||
|
[String]
|
||||||
|
$ConfigDir
|
||||||
|
) {
|
||||||
|
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
|
||||||
|
|
||||||
|
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
|
||||||
|
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
|
||||||
|
|
||||||
|
# An empty map will be returned if no config file is found.
|
||||||
|
$pyvenvConfig = @{ }
|
||||||
|
|
||||||
|
if ($pyvenvConfigPath) {
|
||||||
|
|
||||||
|
Write-Verbose "File exists, parse `key = value` lines"
|
||||||
|
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
|
||||||
|
|
||||||
|
$pyvenvConfigContent | ForEach-Object {
|
||||||
|
$keyval = $PSItem -split "\s*=\s*", 2
|
||||||
|
if ($keyval[0] -and $keyval[1]) {
|
||||||
|
$val = $keyval[1]
|
||||||
|
|
||||||
|
# Remove extraneous quotations around a string value.
|
||||||
|
if ("'""".Contains($val.Substring(0, 1))) {
|
||||||
|
$val = $val.Substring(1, $val.Length - 2)
|
||||||
|
}
|
||||||
|
|
||||||
|
$pyvenvConfig[$keyval[0]] = $val
|
||||||
|
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return $pyvenvConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
<# Begin Activate script --------------------------------------------------- #>
|
||||||
|
|
||||||
|
# Determine the containing directory of this script
|
||||||
|
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||||
|
$VenvExecDir = Get-Item -Path $VenvExecPath
|
||||||
|
|
||||||
|
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
|
||||||
|
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
|
||||||
|
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
|
||||||
|
|
||||||
|
# Set values required in priority: CmdLine, ConfigFile, Default
|
||||||
|
# First, get the location of the virtual environment, it might not be
|
||||||
|
# VenvExecDir if specified on the command line.
|
||||||
|
if ($VenvDir) {
|
||||||
|
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
|
||||||
|
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
|
||||||
|
Write-Verbose "VenvDir=$VenvDir"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Next, read the `pyvenv.cfg` file to determine any required value such
|
||||||
|
# as `prompt`.
|
||||||
|
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
|
||||||
|
|
||||||
|
# Next, set the prompt from the command line, or the config file, or
|
||||||
|
# just use the name of the virtual environment folder.
|
||||||
|
if ($Prompt) {
|
||||||
|
Write-Verbose "Prompt specified as argument, using '$Prompt'"
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
|
||||||
|
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
|
||||||
|
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
|
||||||
|
$Prompt = $pyvenvCfg['prompt'];
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virutal environment)"
|
||||||
|
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
|
||||||
|
$Prompt = Split-Path -Path $venvDir -Leaf
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Write-Verbose "Prompt = '$Prompt'"
|
||||||
|
Write-Verbose "VenvDir='$VenvDir'"
|
||||||
|
|
||||||
|
# Deactivate any currently active virtual environment, but leave the
|
||||||
|
# deactivate function in place.
|
||||||
|
deactivate -nondestructive
|
||||||
|
|
||||||
|
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
|
||||||
|
# that there is an activated venv.
|
||||||
|
$env:VIRTUAL_ENV = $VenvDir
|
||||||
|
|
||||||
|
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
|
||||||
|
|
||||||
|
Write-Verbose "Setting prompt to '$Prompt'"
|
||||||
|
|
||||||
|
# Set the prompt to include the env name
|
||||||
|
# Make sure _OLD_VIRTUAL_PROMPT is global
|
||||||
|
function global:_OLD_VIRTUAL_PROMPT { "" }
|
||||||
|
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
|
||||||
|
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
|
||||||
|
|
||||||
|
function global:prompt {
|
||||||
|
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
|
||||||
|
_OLD_VIRTUAL_PROMPT
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Clear PYTHONHOME
|
||||||
|
if (Test-Path -Path Env:PYTHONHOME) {
|
||||||
|
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
Remove-Item -Path Env:PYTHONHOME
|
||||||
|
}
|
||||||
|
|
||||||
|
# Add the venv to the PATH
|
||||||
|
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
|
||||||
|
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
|
||||||
66
.venv/bin/activate
Normal file
66
.venv/bin/activate
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
# This file must be used with "source bin/activate" *from bash*
|
||||||
|
# you cannot run it directly
|
||||||
|
|
||||||
|
deactivate () {
|
||||||
|
# reset old environment variables
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
|
||||||
|
PATH="${_OLD_VIRTUAL_PATH:-}"
|
||||||
|
export PATH
|
||||||
|
unset _OLD_VIRTUAL_PATH
|
||||||
|
fi
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
|
||||||
|
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
|
||||||
|
export PYTHONHOME
|
||||||
|
unset _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r 2> /dev/null
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
|
||||||
|
PS1="${_OLD_VIRTUAL_PS1:-}"
|
||||||
|
export PS1
|
||||||
|
unset _OLD_VIRTUAL_PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
unset VIRTUAL_ENV
|
||||||
|
if [ ! "${1:-}" = "nondestructive" ] ; then
|
||||||
|
# Self destruct!
|
||||||
|
unset -f deactivate
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# unset irrelevant variables
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
VIRTUAL_ENV="/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv"
|
||||||
|
export VIRTUAL_ENV
|
||||||
|
|
||||||
|
_OLD_VIRTUAL_PATH="$PATH"
|
||||||
|
PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||||
|
export PATH
|
||||||
|
|
||||||
|
# unset PYTHONHOME if set
|
||||||
|
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
|
||||||
|
# could use `if (set -u; : $PYTHONHOME) ;` in bash
|
||||||
|
if [ -n "${PYTHONHOME:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
|
||||||
|
unset PYTHONHOME
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
|
||||||
|
_OLD_VIRTUAL_PS1="${PS1:-}"
|
||||||
|
PS1="(.venv) ${PS1:-}"
|
||||||
|
export PS1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# This should detect bash and zsh, which have a hash command that must
|
||||||
|
# be called to get it to forget past commands. Without forgetting
|
||||||
|
# past commands the $PATH changes we made may not be respected
|
||||||
|
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
|
||||||
|
hash -r 2> /dev/null
|
||||||
|
fi
|
||||||
25
.venv/bin/activate.csh
Normal file
25
.venv/bin/activate.csh
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# This file must be used with "source bin/activate.csh" *from csh*.
|
||||||
|
# You cannot run it directly.
|
||||||
|
# Created by Davide Di Blasi <davidedb@gmail.com>.
|
||||||
|
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
|
||||||
|
|
||||||
|
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
|
||||||
|
|
||||||
|
# Unset irrelevant variables.
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
setenv VIRTUAL_ENV "/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv"
|
||||||
|
|
||||||
|
set _OLD_VIRTUAL_PATH="$PATH"
|
||||||
|
setenv PATH "$VIRTUAL_ENV/bin:$PATH"
|
||||||
|
|
||||||
|
|
||||||
|
set _OLD_VIRTUAL_PROMPT="$prompt"
|
||||||
|
|
||||||
|
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
|
||||||
|
set prompt = "(.venv) $prompt"
|
||||||
|
endif
|
||||||
|
|
||||||
|
alias pydoc python -m pydoc
|
||||||
|
|
||||||
|
rehash
|
||||||
64
.venv/bin/activate.fish
Normal file
64
.venv/bin/activate.fish
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
|
||||||
|
# (https://fishshell.com/); you cannot run it directly.
|
||||||
|
|
||||||
|
function deactivate -d "Exit virtual environment and return to normal shell environment"
|
||||||
|
# reset old environment variables
|
||||||
|
if test -n "$_OLD_VIRTUAL_PATH"
|
||||||
|
set -gx PATH $_OLD_VIRTUAL_PATH
|
||||||
|
set -e _OLD_VIRTUAL_PATH
|
||||||
|
end
|
||||||
|
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
||||||
|
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
|
||||||
|
set -e _OLD_VIRTUAL_PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
||||||
|
functions -e fish_prompt
|
||||||
|
set -e _OLD_FISH_PROMPT_OVERRIDE
|
||||||
|
functions -c _old_fish_prompt fish_prompt
|
||||||
|
functions -e _old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -e VIRTUAL_ENV
|
||||||
|
if test "$argv[1]" != "nondestructive"
|
||||||
|
# Self-destruct!
|
||||||
|
functions -e deactivate
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
# Unset irrelevant variables.
|
||||||
|
deactivate nondestructive
|
||||||
|
|
||||||
|
set -gx VIRTUAL_ENV "/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv"
|
||||||
|
|
||||||
|
set -gx _OLD_VIRTUAL_PATH $PATH
|
||||||
|
set -gx PATH "$VIRTUAL_ENV/bin" $PATH
|
||||||
|
|
||||||
|
# Unset PYTHONHOME if set.
|
||||||
|
if set -q PYTHONHOME
|
||||||
|
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
||||||
|
set -e PYTHONHOME
|
||||||
|
end
|
||||||
|
|
||||||
|
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
||||||
|
# fish uses a function instead of an env var to generate the prompt.
|
||||||
|
|
||||||
|
# Save the current fish_prompt function as the function _old_fish_prompt.
|
||||||
|
functions -c fish_prompt _old_fish_prompt
|
||||||
|
|
||||||
|
# With the original prompt function renamed, we can override with our own.
|
||||||
|
function fish_prompt
|
||||||
|
# Save the return status of the last command.
|
||||||
|
set -l old_status $status
|
||||||
|
|
||||||
|
# Output the venv prompt; color taken from the blue of the Python logo.
|
||||||
|
printf "%s%s%s" (set_color 4B8BBE) "(.venv) " (set_color normal)
|
||||||
|
|
||||||
|
# Restore the return status of the previous command.
|
||||||
|
echo "exit $old_status" | .
|
||||||
|
# Output the original/"old" prompt.
|
||||||
|
_old_fish_prompt
|
||||||
|
end
|
||||||
|
|
||||||
|
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
||||||
|
end
|
||||||
8
.venv/bin/distro
Executable file
8
.venv/bin/distro
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from distro.distro import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/dotenv
Executable file
8
.venv/bin/dotenv
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from dotenv.__main__ import cli
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(cli())
|
||||||
8
.venv/bin/fastapi
Executable file
8
.venv/bin/fastapi
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from fastapi.cli import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/httpx
Executable file
8
.venv/bin/httpx
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from httpx import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/openai
Executable file
8
.venv/bin/openai
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from openai.cli import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/pip
Executable file
8
.venv/bin/pip
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/pip3
Executable file
8
.venv/bin/pip3
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/pip3.9
Executable file
8
.venv/bin/pip3.9
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from pip._internal.cli.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
1
.venv/bin/python
Symbolic link
1
.venv/bin/python
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
python3
|
||||||
1
.venv/bin/python3
Symbolic link
1
.venv/bin/python3
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
/Library/Developer/CommandLineTools/usr/bin/python3
|
||||||
1
.venv/bin/python3.9
Symbolic link
1
.venv/bin/python3.9
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
python3
|
||||||
8
.venv/bin/tqdm
Executable file
8
.venv/bin/tqdm
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from tqdm.cli import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/uvicorn
Executable file
8
.venv/bin/uvicorn
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from uvicorn.main import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
8
.venv/bin/watchfiles
Executable file
8
.venv/bin/watchfiles
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from watchfiles.cli import cli
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(cli())
|
||||||
8
.venv/bin/websockets
Executable file
8
.venv/bin/websockets
Executable file
@@ -0,0 +1,8 @@
|
|||||||
|
#!/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/bin/python3
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from websockets.cli import main
|
||||||
|
if __name__ == '__main__':
|
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
|
||||||
|
sys.exit(main())
|
||||||
128
.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py
Normal file
128
.venv/lib/python3.9/site-packages/_distutils_hack/__init__.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import importlib
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
|
is_pypy = '__pypy__' in sys.builtin_module_names
|
||||||
|
|
||||||
|
|
||||||
|
warnings.filterwarnings('ignore',
|
||||||
|
r'.+ distutils\b.+ deprecated',
|
||||||
|
DeprecationWarning)
|
||||||
|
|
||||||
|
|
||||||
|
def warn_distutils_present():
|
||||||
|
if 'distutils' not in sys.modules:
|
||||||
|
return
|
||||||
|
if is_pypy and sys.version_info < (3, 7):
|
||||||
|
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
||||||
|
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
||||||
|
return
|
||||||
|
warnings.warn(
|
||||||
|
"Distutils was imported before Setuptools, but importing Setuptools "
|
||||||
|
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
||||||
|
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
||||||
|
"using distutils directly, ensure that setuptools is installed in the "
|
||||||
|
"traditional way (e.g. not an editable install), and/or make sure "
|
||||||
|
"that setuptools is always imported before distutils.")
|
||||||
|
|
||||||
|
|
||||||
|
def clear_distutils():
|
||||||
|
if 'distutils' not in sys.modules:
|
||||||
|
return
|
||||||
|
warnings.warn("Setuptools is replacing distutils.")
|
||||||
|
mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
|
||||||
|
for name in mods:
|
||||||
|
del sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
def enabled():
|
||||||
|
"""
|
||||||
|
Allow selection of distutils by environment variable.
|
||||||
|
"""
|
||||||
|
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
|
||||||
|
return which == 'local'
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_local_distutils():
|
||||||
|
clear_distutils()
|
||||||
|
distutils = importlib.import_module('setuptools._distutils')
|
||||||
|
distutils.__name__ = 'distutils'
|
||||||
|
sys.modules['distutils'] = distutils
|
||||||
|
|
||||||
|
# sanity check that submodules load as expected
|
||||||
|
core = importlib.import_module('distutils.core')
|
||||||
|
assert '_distutils' in core.__file__, core.__file__
|
||||||
|
|
||||||
|
|
||||||
|
def do_override():
|
||||||
|
"""
|
||||||
|
Ensure that the local copy of distutils is preferred over stdlib.
|
||||||
|
|
||||||
|
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
||||||
|
for more motivation.
|
||||||
|
"""
|
||||||
|
if enabled():
|
||||||
|
warn_distutils_present()
|
||||||
|
ensure_local_distutils()
|
||||||
|
|
||||||
|
|
||||||
|
class DistutilsMetaFinder:
|
||||||
|
def find_spec(self, fullname, path, target=None):
|
||||||
|
if path is not None:
|
||||||
|
return
|
||||||
|
|
||||||
|
method_name = 'spec_for_{fullname}'.format(**locals())
|
||||||
|
method = getattr(self, method_name, lambda: None)
|
||||||
|
return method()
|
||||||
|
|
||||||
|
def spec_for_distutils(self):
|
||||||
|
import importlib.abc
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
|
class DistutilsLoader(importlib.abc.Loader):
|
||||||
|
|
||||||
|
def create_module(self, spec):
|
||||||
|
return importlib.import_module('setuptools._distutils')
|
||||||
|
|
||||||
|
def exec_module(self, module):
|
||||||
|
pass
|
||||||
|
|
||||||
|
return importlib.util.spec_from_loader('distutils', DistutilsLoader())
|
||||||
|
|
||||||
|
def spec_for_pip(self):
|
||||||
|
"""
|
||||||
|
Ensure stdlib distutils when running under pip.
|
||||||
|
See pypa/pip#8761 for rationale.
|
||||||
|
"""
|
||||||
|
if self.pip_imported_during_build():
|
||||||
|
return
|
||||||
|
clear_distutils()
|
||||||
|
self.spec_for_distutils = lambda: None
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def pip_imported_during_build():
|
||||||
|
"""
|
||||||
|
Detect if pip is being imported in a build script. Ref #2355.
|
||||||
|
"""
|
||||||
|
import traceback
|
||||||
|
return any(
|
||||||
|
frame.f_globals['__file__'].endswith('setup.py')
|
||||||
|
for frame, line in traceback.walk_stack(None)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
DISTUTILS_FINDER = DistutilsMetaFinder()
|
||||||
|
|
||||||
|
|
||||||
|
def add_shim():
|
||||||
|
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_shim():
|
||||||
|
try:
|
||||||
|
sys.meta_path.remove(DISTUTILS_FINDER)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
__import__('_distutils_hack').do_override()
|
||||||
33
.venv/lib/python3.9/site-packages/_yaml/__init__.py
Normal file
33
.venv/lib/python3.9/site-packages/_yaml/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
# This is a stub package designed to roughly emulate the _yaml
|
||||||
|
# extension module, which previously existed as a standalone module
|
||||||
|
# and has been moved into the `yaml` package namespace.
|
||||||
|
# It does not perfectly mimic its old counterpart, but should get
|
||||||
|
# close enough for anyone who's relying on it even when they shouldn't.
|
||||||
|
import yaml
|
||||||
|
|
||||||
|
# in some circumstances, the yaml module we imoprted may be from a different version, so we need
|
||||||
|
# to tread carefully when poking at it here (it may not have the attributes we expect)
|
||||||
|
if not getattr(yaml, '__with_libyaml__', False):
|
||||||
|
from sys import version_info
|
||||||
|
|
||||||
|
exc = ModuleNotFoundError if version_info >= (3, 6) else ImportError
|
||||||
|
raise exc("No module named '_yaml'")
|
||||||
|
else:
|
||||||
|
from yaml._yaml import *
|
||||||
|
import warnings
|
||||||
|
warnings.warn(
|
||||||
|
'The _yaml extension module is now located at yaml._yaml'
|
||||||
|
' and its location is subject to change. To use the'
|
||||||
|
' LibYAML-based parser and emitter, import from `yaml`:'
|
||||||
|
' `from yaml import CLoader as Loader, CDumper as Dumper`.',
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
||||||
|
del warnings
|
||||||
|
# Don't `del yaml` here because yaml is actually an existing
|
||||||
|
# namespace member of _yaml.
|
||||||
|
|
||||||
|
__name__ = '_yaml'
|
||||||
|
# If the module is top-level (i.e. not a part of any specific package)
|
||||||
|
# then the attribute should be set to ''.
|
||||||
|
# https://docs.python.org/3.8/library/types.html
|
||||||
|
__package__ = ''
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@@ -0,0 +1,295 @@
|
|||||||
|
Metadata-Version: 2.3
|
||||||
|
Name: annotated-types
|
||||||
|
Version: 0.7.0
|
||||||
|
Summary: Reusable constraint types to use with typing.Annotated
|
||||||
|
Project-URL: Homepage, https://github.com/annotated-types/annotated-types
|
||||||
|
Project-URL: Source, https://github.com/annotated-types/annotated-types
|
||||||
|
Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases
|
||||||
|
Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin <s@muelcolvin.com>, Zac Hatfield-Dodds <zac@zhd.dev>
|
||||||
|
License-File: LICENSE
|
||||||
|
Classifier: Development Status :: 4 - Beta
|
||||||
|
Classifier: Environment :: Console
|
||||||
|
Classifier: Environment :: MacOS X
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: Intended Audience :: Information Technology
|
||||||
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
|
Classifier: Operating System :: POSIX :: Linux
|
||||||
|
Classifier: Operating System :: Unix
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: 3.11
|
||||||
|
Classifier: Programming Language :: Python :: 3.12
|
||||||
|
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
||||||
|
Classifier: Typing :: Typed
|
||||||
|
Requires-Python: >=3.8
|
||||||
|
Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
|
||||||
|
Description-Content-Type: text/markdown
|
||||||
|
|
||||||
|
# annotated-types
|
||||||
|
|
||||||
|
[](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
|
||||||
|
[](https://pypi.python.org/pypi/annotated-types)
|
||||||
|
[](https://github.com/annotated-types/annotated-types)
|
||||||
|
[](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
|
||||||
|
|
||||||
|
[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
|
||||||
|
adding context-specific metadata to existing types, and specifies that
|
||||||
|
`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
|
||||||
|
logic for `x`.
|
||||||
|
|
||||||
|
This package provides metadata objects which can be used to represent common
|
||||||
|
constraints such as upper and lower bounds on scalar values and collection sizes,
|
||||||
|
a `Predicate` marker for runtime checks, and
|
||||||
|
descriptions of how we intend these metadata to be interpreted. In some cases,
|
||||||
|
we also note alternative representations which do not require this package.
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install annotated-types
|
||||||
|
```
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
```python
|
||||||
|
from typing import Annotated
|
||||||
|
from annotated_types import Gt, Len, Predicate
|
||||||
|
|
||||||
|
class MyClass:
|
||||||
|
age: Annotated[int, Gt(18)] # Valid: 19, 20, ...
|
||||||
|
# Invalid: 17, 18, "19", 19.0, ...
|
||||||
|
factors: list[Annotated[int, Predicate(is_prime)]] # Valid: 2, 3, 5, 7, 11, ...
|
||||||
|
# Invalid: 4, 8, -2, 5.0, "prime", ...
|
||||||
|
|
||||||
|
my_list: Annotated[list[int], Len(0, 10)] # Valid: [], [10, 20, 30, 40, 50]
|
||||||
|
# Invalid: (1, 2), ["abc"], [0] * 20
|
||||||
|
```
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
_While `annotated-types` avoids runtime checks for performance, users should not
|
||||||
|
construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
|
||||||
|
Downstream implementors may choose to raise an error, emit a warning, silently ignore
|
||||||
|
a metadata item, etc., if the metadata objects described below are used with an
|
||||||
|
incompatible type - or for any other reason!_
|
||||||
|
|
||||||
|
### Gt, Ge, Lt, Le
|
||||||
|
|
||||||
|
Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
|
||||||
|
dates, times, strings, sets, etc. Note that the boundary value need not be of the
|
||||||
|
same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
|
||||||
|
is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
|
||||||
|
|
||||||
|
We suggest that implementors may also interpret `functools.partial(operator.le, 1.5)`
|
||||||
|
as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
|
||||||
|
the `annotated-types` package.
|
||||||
|
|
||||||
|
To be explicit, these types have the following meanings:
|
||||||
|
|
||||||
|
* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
|
||||||
|
* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
|
||||||
|
* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
|
||||||
|
* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
|
||||||
|
|
||||||
|
### Interval
|
||||||
|
|
||||||
|
`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
|
||||||
|
metadata object. `None` attributes should be ignored, and non-`None` attributes
|
||||||
|
treated as per the single bounds above.
|
||||||
|
|
||||||
|
### MultipleOf
|
||||||
|
|
||||||
|
`MultipleOf(multiple_of=x)` might be interpreted in two ways:
|
||||||
|
|
||||||
|
1. Python semantics, implying `value % multiple_of == 0`, or
|
||||||
|
2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
|
||||||
|
where `int(value / multiple_of) == value / multiple_of`.
|
||||||
|
|
||||||
|
We encourage users to be aware of these two common interpretations and their
|
||||||
|
distinct behaviours, especially since very large or non-integer numbers make
|
||||||
|
it easy to cause silent data corruption due to floating-point imprecision.
|
||||||
|
|
||||||
|
We encourage libraries to carefully document which interpretation they implement.
|
||||||
|
|
||||||
|
### MinLen, MaxLen, Len
|
||||||
|
|
||||||
|
`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
|
||||||
|
|
||||||
|
As well as `Len()` which can optionally include upper and lower bounds, we also
|
||||||
|
provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
|
||||||
|
and `Len(max_length=y)` respectively.
|
||||||
|
|
||||||
|
`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
|
||||||
|
|
||||||
|
Examples of usage:
|
||||||
|
|
||||||
|
* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10))`) - list must have a length of 10 or less
|
||||||
|
* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
|
||||||
|
* `Annotated[list, MinLen(3))` (or `Annotated[list, Len(min_length=3))`) - list must have a length of 3 or more
|
||||||
|
* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
|
||||||
|
* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
|
||||||
|
|
||||||
|
#### Changed in v0.4.0
|
||||||
|
|
||||||
|
* `min_inclusive` has been renamed to `min_length`, no change in meaning
|
||||||
|
* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
|
||||||
|
* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
|
||||||
|
meaning of the upper bound in slices vs. `Len`
|
||||||
|
|
||||||
|
See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
|
||||||
|
|
||||||
|
### Timezone
|
||||||
|
|
||||||
|
`Timezone` can be used with a `datetime` or a `time` to express which timezones
|
||||||
|
are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
|
||||||
|
`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
|
||||||
|
expresses that any timezone-aware datetime is allowed. You may also pass a specific
|
||||||
|
timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
|
||||||
|
object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
|
||||||
|
allow a specific timezone, though we note that this is often a symptom of fragile design.
|
||||||
|
|
||||||
|
#### Changed in v0.x.x
|
||||||
|
|
||||||
|
* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
|
||||||
|
`timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
|
||||||
|
|
||||||
|
### Unit
|
||||||
|
|
||||||
|
`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
|
||||||
|
a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
|
||||||
|
would be a float representing a velocity in meters per second.
|
||||||
|
|
||||||
|
Please note that `annotated_types` itself makes no attempt to parse or validate
|
||||||
|
the unit string in any way. That is left entirely to downstream libraries,
|
||||||
|
such as [`pint`](https://pint.readthedocs.io) or
|
||||||
|
[`astropy.units`](https://docs.astropy.org/en/stable/units/).
|
||||||
|
|
||||||
|
An example of how a library might use this metadata:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from annotated_types import Unit
|
||||||
|
from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
|
||||||
|
|
||||||
|
# given a type annotated with a unit:
|
||||||
|
Meters = Annotated[float, Unit("m")]
|
||||||
|
|
||||||
|
|
||||||
|
# you can cast the annotation to a specific unit type with any
|
||||||
|
# callable that accepts a string and returns the desired type
|
||||||
|
T = TypeVar("T")
|
||||||
|
def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
|
||||||
|
if get_origin(tp) is Annotated:
|
||||||
|
for arg in get_args(tp):
|
||||||
|
if isinstance(arg, Unit):
|
||||||
|
return unit_cls(arg.unit)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# using `pint`
|
||||||
|
import pint
|
||||||
|
pint_unit = cast_unit(Meters, pint.Unit)
|
||||||
|
|
||||||
|
|
||||||
|
# using `astropy.units`
|
||||||
|
import astropy.units as u
|
||||||
|
astropy_unit = cast_unit(Meters, u.Unit)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Predicate
|
||||||
|
|
||||||
|
`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
|
||||||
|
Users should prefer the statically inspectable metadata above, but if you need
|
||||||
|
the full power and flexibility of arbitrary runtime predicates... here it is.
|
||||||
|
|
||||||
|
For some common constraints, we provide generic types:
|
||||||
|
|
||||||
|
* `IsLower = Annotated[T, Predicate(str.islower)]`
|
||||||
|
* `IsUpper = Annotated[T, Predicate(str.isupper)]`
|
||||||
|
* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
|
||||||
|
* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
|
||||||
|
* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
|
||||||
|
* `IsNan = Annotated[T, Predicate(math.isnan)]`
|
||||||
|
* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
|
||||||
|
* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
|
||||||
|
* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
|
||||||
|
|
||||||
|
so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
|
||||||
|
(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
|
||||||
|
|
||||||
|
Some libraries might have special logic to handle known or understandable predicates,
|
||||||
|
for example by checking for `str.isdigit` and using its presence to both call custom
|
||||||
|
logic to enforce digit-only strings, and customise some generated external schema.
|
||||||
|
Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
|
||||||
|
favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
|
||||||
|
|
||||||
|
To enable basic negation of commonly used predicates like `math.isnan` without introducing introspection that makes it impossible for implementers to introspect the predicate we provide a `Not` wrapper that simply negates the predicate in an introspectable manner. Several of the predicates listed above are created in this manner.
|
||||||
|
|
||||||
|
We do not specify what behaviour should be expected for predicates that raise
|
||||||
|
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
|
||||||
|
skip invalid constraints, or statically raise an error; or it might try calling it
|
||||||
|
and then propagate or discard the resulting
|
||||||
|
`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
|
||||||
|
exception. We encourage libraries to document the behaviour they choose.
|
||||||
|
|
||||||
|
### Doc
|
||||||
|
|
||||||
|
`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
|
||||||
|
|
||||||
|
It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
|
||||||
|
|
||||||
|
It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
|
||||||
|
|
||||||
|
This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
|
||||||
|
|
||||||
|
### Integrating downstream types with `GroupedMetadata`
|
||||||
|
|
||||||
|
Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
|
||||||
|
This can help reduce verbosity and cognitive overhead for users.
|
||||||
|
For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
|
||||||
|
|
||||||
|
```python
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Iterator
|
||||||
|
from annotated_types import GroupedMetadata, Ge
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Field(GroupedMetadata):
|
||||||
|
ge: int | None = None
|
||||||
|
description: str | None = None
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[object]:
|
||||||
|
# Iterating over a GroupedMetadata object should yield annotated-types
|
||||||
|
# constraint metadata objects which describe it as fully as possible,
|
||||||
|
# and may include other unknown objects too.
|
||||||
|
if self.ge is not None:
|
||||||
|
yield Ge(self.ge)
|
||||||
|
if self.description is not None:
|
||||||
|
yield Description(self.description)
|
||||||
|
```
|
||||||
|
|
||||||
|
Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
|
||||||
|
|
||||||
|
Libraries consuming annotated-types should also ignore any metadata they do not recongize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
|
||||||
|
|
||||||
|
Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
|
||||||
|
|
||||||
|
### Consuming metadata
|
||||||
|
|
||||||
|
We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from types annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
|
||||||
|
|
||||||
|
It is up to the implementer to determine how this metadata is used.
|
||||||
|
You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
|
||||||
|
|
||||||
|
## Design & History
|
||||||
|
|
||||||
|
This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
|
||||||
|
and Hypothesis, with the goal of making it as easy as possible for end-users to
|
||||||
|
provide more informative annotations for use by runtime libraries.
|
||||||
|
|
||||||
|
It is deliberately minimal, and following PEP-593 allows considerable downstream
|
||||||
|
discretion in what (if anything!) they choose to support. Nonetheless, we expect
|
||||||
|
that staying simple and covering _only_ the most common use-cases will give users
|
||||||
|
and maintainers the best experience we can. If you'd like more constraints for your
|
||||||
|
types - follow our lead, by defining them and documenting them downstream!
|
||||||
@@ -0,0 +1,10 @@
|
|||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/annotated_types/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/annotated_types/test_cases.cpython-39.pyc,,
|
||||||
|
annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046
|
||||||
|
annotated_types-0.7.0.dist-info/RECORD,,
|
||||||
|
annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87
|
||||||
|
annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083
|
||||||
|
annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819
|
||||||
|
annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: hatchling 1.24.2
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
@@ -0,0 +1,21 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2022 the contributors
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
432
.venv/lib/python3.9/site-packages/annotated_types/__init__.py
Normal file
432
.venv/lib/python3.9/site-packages/annotated_types/__init__.py
Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
import math
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import tzinfo
|
||||||
|
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union
|
||||||
|
|
||||||
|
if sys.version_info < (3, 8):
|
||||||
|
from typing_extensions import Protocol, runtime_checkable
|
||||||
|
else:
|
||||||
|
from typing import Protocol, runtime_checkable
|
||||||
|
|
||||||
|
if sys.version_info < (3, 9):
|
||||||
|
from typing_extensions import Annotated, Literal
|
||||||
|
else:
|
||||||
|
from typing import Annotated, Literal
|
||||||
|
|
||||||
|
if sys.version_info < (3, 10):
|
||||||
|
EllipsisType = type(Ellipsis)
|
||||||
|
KW_ONLY = {}
|
||||||
|
SLOTS = {}
|
||||||
|
else:
|
||||||
|
from types import EllipsisType
|
||||||
|
|
||||||
|
KW_ONLY = {"kw_only": True}
|
||||||
|
SLOTS = {"slots": True}
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
'BaseMetadata',
|
||||||
|
'GroupedMetadata',
|
||||||
|
'Gt',
|
||||||
|
'Ge',
|
||||||
|
'Lt',
|
||||||
|
'Le',
|
||||||
|
'Interval',
|
||||||
|
'MultipleOf',
|
||||||
|
'MinLen',
|
||||||
|
'MaxLen',
|
||||||
|
'Len',
|
||||||
|
'Timezone',
|
||||||
|
'Predicate',
|
||||||
|
'LowerCase',
|
||||||
|
'UpperCase',
|
||||||
|
'IsDigits',
|
||||||
|
'IsFinite',
|
||||||
|
'IsNotFinite',
|
||||||
|
'IsNan',
|
||||||
|
'IsNotNan',
|
||||||
|
'IsInfinite',
|
||||||
|
'IsNotInfinite',
|
||||||
|
'doc',
|
||||||
|
'DocInfo',
|
||||||
|
'__version__',
|
||||||
|
)
|
||||||
|
|
||||||
|
__version__ = '0.7.0'
|
||||||
|
|
||||||
|
|
||||||
|
T = TypeVar('T')
|
||||||
|
|
||||||
|
|
||||||
|
# arguments that start with __ are considered
|
||||||
|
# positional only
|
||||||
|
# see https://peps.python.org/pep-0484/#positional-only-arguments
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsGt(Protocol):
|
||||||
|
def __gt__(self: T, __other: T) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsGe(Protocol):
|
||||||
|
def __ge__(self: T, __other: T) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsLt(Protocol):
|
||||||
|
def __lt__(self: T, __other: T) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsLe(Protocol):
|
||||||
|
def __le__(self: T, __other: T) -> bool:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsMod(Protocol):
|
||||||
|
def __mod__(self: T, __other: T) -> T:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class SupportsDiv(Protocol):
|
||||||
|
def __div__(self: T, __other: T) -> T:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
class BaseMetadata:
|
||||||
|
"""Base class for all metadata.
|
||||||
|
|
||||||
|
This exists mainly so that implementers
|
||||||
|
can do `isinstance(..., BaseMetadata)` while traversing field annotations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = ()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Gt(BaseMetadata):
|
||||||
|
"""Gt(gt=x) implies that the value must be greater than x.
|
||||||
|
|
||||||
|
It can be used with any type that supports the ``>`` operator,
|
||||||
|
including numbers, dates and times, strings, sets, and so on.
|
||||||
|
"""
|
||||||
|
|
||||||
|
gt: SupportsGt
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Ge(BaseMetadata):
|
||||||
|
"""Ge(ge=x) implies that the value must be greater than or equal to x.
|
||||||
|
|
||||||
|
It can be used with any type that supports the ``>=`` operator,
|
||||||
|
including numbers, dates and times, strings, sets, and so on.
|
||||||
|
"""
|
||||||
|
|
||||||
|
ge: SupportsGe
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Lt(BaseMetadata):
|
||||||
|
"""Lt(lt=x) implies that the value must be less than x.
|
||||||
|
|
||||||
|
It can be used with any type that supports the ``<`` operator,
|
||||||
|
including numbers, dates and times, strings, sets, and so on.
|
||||||
|
"""
|
||||||
|
|
||||||
|
lt: SupportsLt
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Le(BaseMetadata):
|
||||||
|
"""Le(le=x) implies that the value must be less than or equal to x.
|
||||||
|
|
||||||
|
It can be used with any type that supports the ``<=`` operator,
|
||||||
|
including numbers, dates and times, strings, sets, and so on.
|
||||||
|
"""
|
||||||
|
|
||||||
|
le: SupportsLe
|
||||||
|
|
||||||
|
|
||||||
|
@runtime_checkable
|
||||||
|
class GroupedMetadata(Protocol):
|
||||||
|
"""A grouping of multiple objects, like typing.Unpack.
|
||||||
|
|
||||||
|
`GroupedMetadata` on its own is not metadata and has no meaning.
|
||||||
|
All of the constraints and metadata should be fully expressable
|
||||||
|
in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
|
||||||
|
|
||||||
|
Concrete implementations should override `GroupedMetadata.__iter__()`
|
||||||
|
to add their own metadata.
|
||||||
|
For example:
|
||||||
|
|
||||||
|
>>> @dataclass
|
||||||
|
>>> class Field(GroupedMetadata):
|
||||||
|
>>> gt: float | None = None
|
||||||
|
>>> description: str | None = None
|
||||||
|
...
|
||||||
|
>>> def __iter__(self) -> Iterable[object]:
|
||||||
|
>>> if self.gt is not None:
|
||||||
|
>>> yield Gt(self.gt)
|
||||||
|
>>> if self.description is not None:
|
||||||
|
>>> yield Description(self.gt)
|
||||||
|
|
||||||
|
Also see the implementation of `Interval` below for an example.
|
||||||
|
|
||||||
|
Parsers should recognize this and unpack it so that it can be used
|
||||||
|
both with and without unpacking:
|
||||||
|
|
||||||
|
- `Annotated[int, Field(...)]` (parser must unpack Field)
|
||||||
|
- `Annotated[int, *Field(...)]` (PEP-646)
|
||||||
|
""" # noqa: trailing-whitespace
|
||||||
|
|
||||||
|
@property
|
||||||
|
def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[object]:
|
||||||
|
...
|
||||||
|
|
||||||
|
if not TYPE_CHECKING:
|
||||||
|
__slots__ = () # allow subclasses to use slots
|
||||||
|
|
||||||
|
def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
|
||||||
|
# Basic ABC like functionality without the complexity of an ABC
|
||||||
|
super().__init_subclass__(*args, **kwargs)
|
||||||
|
if cls.__iter__ is GroupedMetadata.__iter__:
|
||||||
|
raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[object]: # noqa: F811
|
||||||
|
raise NotImplementedError # more helpful than "None has no attribute..." type errors
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **KW_ONLY, **SLOTS)
|
||||||
|
class Interval(GroupedMetadata):
|
||||||
|
"""Interval can express inclusive or exclusive bounds with a single object.
|
||||||
|
|
||||||
|
It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
|
||||||
|
are interpreted the same way as the single-bound constraints.
|
||||||
|
"""
|
||||||
|
|
||||||
|
gt: Union[SupportsGt, None] = None
|
||||||
|
ge: Union[SupportsGe, None] = None
|
||||||
|
lt: Union[SupportsLt, None] = None
|
||||||
|
le: Union[SupportsLe, None] = None
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[BaseMetadata]:
|
||||||
|
"""Unpack an Interval into zero or more single-bounds."""
|
||||||
|
if self.gt is not None:
|
||||||
|
yield Gt(self.gt)
|
||||||
|
if self.ge is not None:
|
||||||
|
yield Ge(self.ge)
|
||||||
|
if self.lt is not None:
|
||||||
|
yield Lt(self.lt)
|
||||||
|
if self.le is not None:
|
||||||
|
yield Le(self.le)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class MultipleOf(BaseMetadata):
|
||||||
|
"""MultipleOf(multiple_of=x) might be interpreted in two ways:
|
||||||
|
|
||||||
|
1. Python semantics, implying ``value % multiple_of == 0``, or
|
||||||
|
2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
|
||||||
|
|
||||||
|
We encourage users to be aware of these two common interpretations,
|
||||||
|
and libraries to carefully document which they implement.
|
||||||
|
"""
|
||||||
|
|
||||||
|
multiple_of: Union[SupportsDiv, SupportsMod]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class MinLen(BaseMetadata):
|
||||||
|
"""
|
||||||
|
MinLen() implies minimum inclusive length,
|
||||||
|
e.g. ``len(value) >= min_length``.
|
||||||
|
"""
|
||||||
|
|
||||||
|
min_length: Annotated[int, Ge(0)]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class MaxLen(BaseMetadata):
|
||||||
|
"""
|
||||||
|
MaxLen() implies maximum inclusive length,
|
||||||
|
e.g. ``len(value) <= max_length``.
|
||||||
|
"""
|
||||||
|
|
||||||
|
max_length: Annotated[int, Ge(0)]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Len(GroupedMetadata):
|
||||||
|
"""
|
||||||
|
Len() implies that ``min_length <= len(value) <= max_length``.
|
||||||
|
|
||||||
|
Upper bound may be omitted or ``None`` to indicate no upper length bound.
|
||||||
|
"""
|
||||||
|
|
||||||
|
min_length: Annotated[int, Ge(0)] = 0
|
||||||
|
max_length: Optional[Annotated[int, Ge(0)]] = None
|
||||||
|
|
||||||
|
def __iter__(self) -> Iterator[BaseMetadata]:
|
||||||
|
"""Unpack a Len into zone or more single-bounds."""
|
||||||
|
if self.min_length > 0:
|
||||||
|
yield MinLen(self.min_length)
|
||||||
|
if self.max_length is not None:
|
||||||
|
yield MaxLen(self.max_length)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Timezone(BaseMetadata):
|
||||||
|
"""Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive).
|
||||||
|
|
||||||
|
``Annotated[datetime, Timezone(None)]`` must be a naive datetime.
|
||||||
|
``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be
|
||||||
|
tz-aware but any timezone is allowed.
|
||||||
|
|
||||||
|
You may also pass a specific timezone string or tzinfo object such as
|
||||||
|
``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that
|
||||||
|
you only allow a specific timezone, though we note that this is often
|
||||||
|
a symptom of poor design.
|
||||||
|
"""
|
||||||
|
|
||||||
|
tz: Union[str, tzinfo, EllipsisType, None]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Unit(BaseMetadata):
|
||||||
|
"""Indicates that the value is a physical quantity with the specified unit.
|
||||||
|
|
||||||
|
It is intended for usage with numeric types, where the value represents the
|
||||||
|
magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]``
|
||||||
|
or ``speed: Annotated[float, Unit('m/s')]``.
|
||||||
|
|
||||||
|
Interpretation of the unit string is left to the discretion of the consumer.
|
||||||
|
It is suggested to follow conventions established by python libraries that work
|
||||||
|
with physical quantities, such as
|
||||||
|
|
||||||
|
- ``pint`` : <https://pint.readthedocs.io/en/stable/>
|
||||||
|
- ``astropy.units``: <https://docs.astropy.org/en/stable/units/>
|
||||||
|
|
||||||
|
For indicating a quantity with a certain dimensionality but without a specific unit
|
||||||
|
it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`.
|
||||||
|
Note, however, ``annotated_types`` itself makes no use of the unit string.
|
||||||
|
"""
|
||||||
|
|
||||||
|
unit: str
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class Predicate(BaseMetadata):
|
||||||
|
"""``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values.
|
||||||
|
|
||||||
|
Users should prefer statically inspectable metadata, but if you need the full
|
||||||
|
power and flexibility of arbitrary runtime predicates... here it is.
|
||||||
|
|
||||||
|
We provide a few predefined predicates for common string constraints:
|
||||||
|
``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and
|
||||||
|
``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which
|
||||||
|
can be given special handling, and avoid indirection like ``lambda s: s.lower()``.
|
||||||
|
|
||||||
|
Some libraries might have special logic to handle certain predicates, e.g. by
|
||||||
|
checking for `str.isdigit` and using its presence to both call custom logic to
|
||||||
|
enforce digit-only strings, and customise some generated external schema.
|
||||||
|
|
||||||
|
We do not specify what behaviour should be expected for predicates that raise
|
||||||
|
an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
|
||||||
|
skip invalid constraints, or statically raise an error; or it might try calling it
|
||||||
|
and then propagate or discard the resulting exception.
|
||||||
|
"""
|
||||||
|
|
||||||
|
func: Callable[[Any], bool]
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
if getattr(self.func, "__name__", "<lambda>") == "<lambda>":
|
||||||
|
return f"{self.__class__.__name__}({self.func!r})"
|
||||||
|
if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and (
|
||||||
|
namespace := getattr(self.func.__self__, "__name__", None)
|
||||||
|
):
|
||||||
|
return f"{self.__class__.__name__}({namespace}.{self.func.__name__})"
|
||||||
|
if isinstance(self.func, type(str.isascii)): # method descriptor
|
||||||
|
return f"{self.__class__.__name__}({self.func.__qualname__})"
|
||||||
|
return f"{self.__class__.__name__}({self.func.__name__})"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Not:
|
||||||
|
func: Callable[[Any], bool]
|
||||||
|
|
||||||
|
def __call__(self, __v: Any) -> bool:
|
||||||
|
return not self.func(__v)
|
||||||
|
|
||||||
|
|
||||||
|
_StrType = TypeVar("_StrType", bound=str)
|
||||||
|
|
||||||
|
LowerCase = Annotated[_StrType, Predicate(str.islower)]
|
||||||
|
"""
|
||||||
|
Return True if the string is a lowercase string, False otherwise.
|
||||||
|
|
||||||
|
A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string.
|
||||||
|
""" # noqa: E501
|
||||||
|
UpperCase = Annotated[_StrType, Predicate(str.isupper)]
|
||||||
|
"""
|
||||||
|
Return True if the string is an uppercase string, False otherwise.
|
||||||
|
|
||||||
|
A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string.
|
||||||
|
""" # noqa: E501
|
||||||
|
IsDigit = Annotated[_StrType, Predicate(str.isdigit)]
|
||||||
|
IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63
|
||||||
|
"""
|
||||||
|
Return True if the string is a digit string, False otherwise.
|
||||||
|
|
||||||
|
A string is a digit string if all characters in the string are digits and there is at least one character in the string.
|
||||||
|
""" # noqa: E501
|
||||||
|
IsAscii = Annotated[_StrType, Predicate(str.isascii)]
|
||||||
|
"""
|
||||||
|
Return True if all characters in the string are ASCII, False otherwise.
|
||||||
|
|
||||||
|
ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex])
|
||||||
|
IsFinite = Annotated[_NumericType, Predicate(math.isfinite)]
|
||||||
|
"""Return True if x is neither an infinity nor a NaN, and False otherwise."""
|
||||||
|
IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))]
|
||||||
|
"""Return True if x is one of infinity or NaN, and False otherwise"""
|
||||||
|
IsNan = Annotated[_NumericType, Predicate(math.isnan)]
|
||||||
|
"""Return True if x is a NaN (not a number), and False otherwise."""
|
||||||
|
IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))]
|
||||||
|
"""Return True if x is anything but NaN (not a number), and False otherwise."""
|
||||||
|
IsInfinite = Annotated[_NumericType, Predicate(math.isinf)]
|
||||||
|
"""Return True if x is a positive or negative infinity, and False otherwise."""
|
||||||
|
IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))]
|
||||||
|
"""Return True if x is neither a positive or negative infinity, and False otherwise."""
|
||||||
|
|
||||||
|
try:
|
||||||
|
from typing_extensions import DocInfo, doc # type: ignore [attr-defined]
|
||||||
|
except ImportError:
|
||||||
|
|
||||||
|
@dataclass(frozen=True, **SLOTS)
|
||||||
|
class DocInfo: # type: ignore [no-redef]
|
||||||
|
""" "
|
||||||
|
The return value of doc(), mainly to be used by tools that want to extract the
|
||||||
|
Annotated documentation at runtime.
|
||||||
|
"""
|
||||||
|
|
||||||
|
documentation: str
|
||||||
|
"""The documentation string passed to doc()."""
|
||||||
|
|
||||||
|
def doc(
|
||||||
|
documentation: str,
|
||||||
|
) -> DocInfo:
|
||||||
|
"""
|
||||||
|
Add documentation to a type annotation inside of Annotated.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
>>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ...
|
||||||
|
"""
|
||||||
|
return DocInfo(documentation)
|
||||||
151
.venv/lib/python3.9/site-packages/annotated_types/test_cases.py
Normal file
151
.venv/lib/python3.9/site-packages/annotated_types/test_cases.py
Normal file
@@ -0,0 +1,151 @@
|
|||||||
|
import math
|
||||||
|
import sys
|
||||||
|
from datetime import date, datetime, timedelta, timezone
|
||||||
|
from decimal import Decimal
|
||||||
|
from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple
|
||||||
|
|
||||||
|
if sys.version_info < (3, 9):
|
||||||
|
from typing_extensions import Annotated
|
||||||
|
else:
|
||||||
|
from typing import Annotated
|
||||||
|
|
||||||
|
import annotated_types as at
|
||||||
|
|
||||||
|
|
||||||
|
class Case(NamedTuple):
|
||||||
|
"""
|
||||||
|
A test case for `annotated_types`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
annotation: Any
|
||||||
|
valid_cases: Iterable[Any]
|
||||||
|
invalid_cases: Iterable[Any]
|
||||||
|
|
||||||
|
|
||||||
|
def cases() -> Iterable[Case]:
|
||||||
|
# Gt, Ge, Lt, Le
|
||||||
|
yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1))
|
||||||
|
yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1))
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Gt(datetime(2000, 1, 1))],
|
||||||
|
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||||
|
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
|
||||||
|
)
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Gt(date(2000, 1, 1))],
|
||||||
|
[date(2000, 1, 2), date(2000, 1, 3)],
|
||||||
|
[date(2000, 1, 1), date(1999, 12, 31)],
|
||||||
|
)
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Gt(Decimal('1.123'))],
|
||||||
|
[Decimal('1.1231'), Decimal('123')],
|
||||||
|
[Decimal('1.123'), Decimal('0')],
|
||||||
|
)
|
||||||
|
|
||||||
|
yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1))
|
||||||
|
yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1))
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Ge(datetime(2000, 1, 1))],
|
||||||
|
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||||
|
[datetime(1998, 1, 1), datetime(1999, 12, 31)],
|
||||||
|
)
|
||||||
|
|
||||||
|
yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4))
|
||||||
|
yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9))
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Lt(datetime(2000, 1, 1))],
|
||||||
|
[datetime(1999, 12, 31), datetime(1999, 12, 31)],
|
||||||
|
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||||
|
)
|
||||||
|
|
||||||
|
yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000))
|
||||||
|
yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9))
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Le(datetime(2000, 1, 1))],
|
||||||
|
[datetime(2000, 1, 1), datetime(1999, 12, 31)],
|
||||||
|
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Interval
|
||||||
|
yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1))
|
||||||
|
yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1))
|
||||||
|
yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1))
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))],
|
||||||
|
[datetime(2000, 1, 2), datetime(2000, 1, 3)],
|
||||||
|
[datetime(2000, 1, 1), datetime(2000, 1, 4)],
|
||||||
|
)
|
||||||
|
|
||||||
|
yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4))
|
||||||
|
yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1))
|
||||||
|
|
||||||
|
# lengths
|
||||||
|
|
||||||
|
yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
|
||||||
|
yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12'))
|
||||||
|
yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
|
||||||
|
yield Case(Annotated[List[int], at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2]))
|
||||||
|
|
||||||
|
yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10))
|
||||||
|
yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10))
|
||||||
|
yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
|
||||||
|
yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10))
|
||||||
|
|
||||||
|
yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10))
|
||||||
|
yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234'))
|
||||||
|
|
||||||
|
yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}])
|
||||||
|
yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4}))
|
||||||
|
yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4)))
|
||||||
|
|
||||||
|
# Timezone
|
||||||
|
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)]
|
||||||
|
)
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)]
|
||||||
|
)
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Timezone(timezone.utc)],
|
||||||
|
[datetime(2000, 1, 1, tzinfo=timezone.utc)],
|
||||||
|
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
|
||||||
|
)
|
||||||
|
yield Case(
|
||||||
|
Annotated[datetime, at.Timezone('Europe/London')],
|
||||||
|
[datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))],
|
||||||
|
[datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))],
|
||||||
|
)
|
||||||
|
|
||||||
|
# Quantity
|
||||||
|
|
||||||
|
yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m'))
|
||||||
|
|
||||||
|
# predicate types
|
||||||
|
|
||||||
|
yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom'])
|
||||||
|
yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC'])
|
||||||
|
yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2'])
|
||||||
|
yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀'])
|
||||||
|
|
||||||
|
yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5])
|
||||||
|
|
||||||
|
yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf])
|
||||||
|
yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23])
|
||||||
|
yield Case(at.IsNan[float], [math.nan], [1.23, math.inf])
|
||||||
|
yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan])
|
||||||
|
yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23])
|
||||||
|
yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf])
|
||||||
|
|
||||||
|
# check stacked predicates
|
||||||
|
yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan])
|
||||||
|
|
||||||
|
# doc
|
||||||
|
yield Case(Annotated[int, at.doc("A number")], [1, 2], [])
|
||||||
|
|
||||||
|
# custom GroupedMetadata
|
||||||
|
class MyCustomGroupedMetadata(at.GroupedMetadata):
|
||||||
|
def __iter__(self) -> Iterator[at.Predicate]:
|
||||||
|
yield at.Predicate(lambda x: float(x).is_integer())
|
||||||
|
|
||||||
|
yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5])
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@@ -0,0 +1,96 @@
|
|||||||
|
Metadata-Version: 2.4
|
||||||
|
Name: anyio
|
||||||
|
Version: 4.12.1
|
||||||
|
Summary: High-level concurrency and networking framework on top of asyncio or Trio
|
||||||
|
Author-email: Alex Grönholm <alex.gronholm@nextday.fi>
|
||||||
|
License-Expression: MIT
|
||||||
|
Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/
|
||||||
|
Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html
|
||||||
|
Project-URL: Source code, https://github.com/agronholm/anyio
|
||||||
|
Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: Framework :: AnyIO
|
||||||
|
Classifier: Typing :: Typed
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: 3.11
|
||||||
|
Classifier: Programming Language :: Python :: 3.12
|
||||||
|
Classifier: Programming Language :: Python :: 3.13
|
||||||
|
Classifier: Programming Language :: Python :: 3.14
|
||||||
|
Requires-Python: >=3.9
|
||||||
|
Description-Content-Type: text/x-rst
|
||||||
|
License-File: LICENSE
|
||||||
|
Requires-Dist: exceptiongroup>=1.0.2; python_version < "3.11"
|
||||||
|
Requires-Dist: idna>=2.8
|
||||||
|
Requires-Dist: typing_extensions>=4.5; python_version < "3.13"
|
||||||
|
Provides-Extra: trio
|
||||||
|
Requires-Dist: trio>=0.32.0; python_version >= "3.10" and extra == "trio"
|
||||||
|
Requires-Dist: trio>=0.31.0; python_version < "3.10" and extra == "trio"
|
||||||
|
Dynamic: license-file
|
||||||
|
|
||||||
|
.. image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg
|
||||||
|
:target: https://github.com/agronholm/anyio/actions/workflows/test.yml
|
||||||
|
:alt: Build Status
|
||||||
|
.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master
|
||||||
|
:target: https://coveralls.io/github/agronholm/anyio?branch=master
|
||||||
|
:alt: Code Coverage
|
||||||
|
.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest
|
||||||
|
:target: https://anyio.readthedocs.io/en/latest/?badge=latest
|
||||||
|
:alt: Documentation
|
||||||
|
.. image:: https://badges.gitter.im/gitterHQ/gitter.svg
|
||||||
|
:target: https://gitter.im/python-trio/AnyIO
|
||||||
|
:alt: Gitter chat
|
||||||
|
|
||||||
|
AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or
|
||||||
|
Trio_. It implements Trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony
|
||||||
|
with the native SC of Trio itself.
|
||||||
|
|
||||||
|
Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or
|
||||||
|
Trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full
|
||||||
|
refactoring necessary. It will blend in with the native libraries of your chosen backend.
|
||||||
|
|
||||||
|
To find out why you might want to use AnyIO's APIs instead of asyncio's, you can read about it
|
||||||
|
`here <https://anyio.readthedocs.io/en/stable/why.html>`_.
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
-------------
|
||||||
|
|
||||||
|
View full documentation at: https://anyio.readthedocs.io/
|
||||||
|
|
||||||
|
Features
|
||||||
|
--------
|
||||||
|
|
||||||
|
AnyIO offers the following functionality:
|
||||||
|
|
||||||
|
* Task groups (nurseries_ in trio terminology)
|
||||||
|
* High-level networking (TCP, UDP and UNIX sockets)
|
||||||
|
|
||||||
|
* `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python
|
||||||
|
3.8)
|
||||||
|
* async/await style UDP sockets (unlike asyncio where you still have to use Transports and
|
||||||
|
Protocols)
|
||||||
|
|
||||||
|
* A versatile API for byte streams and object streams
|
||||||
|
* Inter-task synchronization and communication (locks, conditions, events, semaphores, object
|
||||||
|
streams)
|
||||||
|
* Worker threads
|
||||||
|
* Subprocesses
|
||||||
|
* Subinterpreter support for code parallelization (on Python 3.13 and later)
|
||||||
|
* Asynchronous file I/O (using worker threads)
|
||||||
|
* Signal handling
|
||||||
|
* Asynchronous version of the functools_ module
|
||||||
|
|
||||||
|
AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures.
|
||||||
|
It even works with the popular Hypothesis_ library.
|
||||||
|
|
||||||
|
.. _asyncio: https://docs.python.org/3/library/asyncio.html
|
||||||
|
.. _Trio: https://github.com/python-trio/trio
|
||||||
|
.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency
|
||||||
|
.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning
|
||||||
|
.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs
|
||||||
|
.. _pytest: https://docs.pytest.org/en/latest/
|
||||||
|
.. _functools: https://docs.python.org/3/library/functools.html
|
||||||
|
.. _Hypothesis: https://hypothesis.works/
|
||||||
@@ -0,0 +1,92 @@
|
|||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_backends/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_backends/_trio.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_asyncio_selector_thread.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_contextmanagers.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_fileio.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_resources.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_signals.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_sockets.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_streams.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_subprocesses.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_synchronization.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_tasks.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_tempfile.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_testing.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_eventloop.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_resources.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_sockets.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_streams.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_tasks.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/abc/_testing.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/from_thread.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/functools.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/lowlevel.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/pytest_plugin.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/buffered.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/file.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/memory.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/stapled.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/text.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/streams/tls.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/to_interpreter.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/to_process.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/anyio/to_thread.cpython-39.pyc,,
|
||||||
|
anyio-4.12.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
anyio-4.12.1.dist-info/METADATA,sha256=DfiDab9Tmmcfy802lOLTMEHJQShkOSbopCwqCYbLuJk,4277
|
||||||
|
anyio-4.12.1.dist-info/RECORD,,
|
||||||
|
anyio-4.12.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
||||||
|
anyio-4.12.1.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39
|
||||||
|
anyio-4.12.1.dist-info/licenses/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081
|
||||||
|
anyio-4.12.1.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6
|
||||||
|
anyio/__init__.py,sha256=7iDVqMUprUuKNY91FuoKqayAhR-OY136YDPI6P78HHk,6170
|
||||||
|
anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
anyio/_backends/_asyncio.py,sha256=xG6qv60mgGnL0mK82dxjH2b8hlkMlJ-x2BqIq3qv70Y,98863
|
||||||
|
anyio/_backends/_trio.py,sha256=30Rctb7lm8g63ZHljVPVnj5aH-uK6oQvphjwUBoAzuI,41456
|
||||||
|
anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
anyio/_core/_asyncio_selector_thread.py,sha256=2PdxFM3cs02Kp6BSppbvmRT7q7asreTW5FgBxEsflBo,5626
|
||||||
|
anyio/_core/_contextmanagers.py,sha256=YInBCabiEeS-UaP_Jdxa1CaFC71ETPW8HZTHIM8Rsc8,7215
|
||||||
|
anyio/_core/_eventloop.py,sha256=c2EdcBX-xnKwxPcC4Pjn3_qG9I-x4IWFO2R9RqCGjM4,6448
|
||||||
|
anyio/_core/_exceptions.py,sha256=Y3aq-Wxd7Q2HqwSg7nZPvRsHEuGazv_qeet6gqEBdPk,4407
|
||||||
|
anyio/_core/_fileio.py,sha256=uc7t10Vb-If7GbdWM_zFf-ajUe6uek63fSt7IBLlZW0,25731
|
||||||
|
anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435
|
||||||
|
anyio/_core/_signals.py,sha256=mjTBB2hTKNPRlU0IhnijeQedpWOGERDiMjSlJQsFrug,1016
|
||||||
|
anyio/_core/_sockets.py,sha256=RBXHcUqZt5gg_-OOfgHVv8uq2FSKk1uVUzTdpjBoI1o,34977
|
||||||
|
anyio/_core/_streams.py,sha256=FczFwIgDpnkK0bODWJXMpsUJYdvAD04kaUaGzJU8DK0,1806
|
||||||
|
anyio/_core/_subprocesses.py,sha256=EXm5igL7dj55iYkPlbYVAqtbqxJxjU-6OndSTIx9SRg,8047
|
||||||
|
anyio/_core/_synchronization.py,sha256=MgVVqFzvt580tHC31LiOcq1G6aryut--xRG4Ff8KwxQ,20869
|
||||||
|
anyio/_core/_tasks.py,sha256=pVB7K6AAulzUM8YgXAeqNZG44nSyZ1bYJjH8GznC00I,5435
|
||||||
|
anyio/_core/_tempfile.py,sha256=lHb7CW4FyIlpkf5ADAf4VmLHCKwEHF9nxqNyBCFFUiA,19697
|
||||||
|
anyio/_core/_testing.py,sha256=u7MPqGXwpTxqI7hclSdNA30z2GH1Nw258uwKvy_RfBg,2340
|
||||||
|
anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508
|
||||||
|
anyio/abc/__init__.py,sha256=6mWhcl_pGXhrgZVHP_TCfMvIXIOp9mroEFM90fYCU_U,2869
|
||||||
|
anyio/abc/_eventloop.py,sha256=GlzgB3UJGgG6Kr7olpjOZ-o00PghecXuofVDQ_5611Q,10749
|
||||||
|
anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783
|
||||||
|
anyio/abc/_sockets.py,sha256=ECTY0jLEF18gryANHR3vFzXzGdZ-xPwELq1QdgOb0Jo,13258
|
||||||
|
anyio/abc/_streams.py,sha256=005GKSCXGprxnhucILboSqc2JFovECZk9m3p-qqxXVc,7640
|
||||||
|
anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067
|
||||||
|
anyio/abc/_tasks.py,sha256=KC7wrciE48AINOI-AhPutnFhe1ewfP7QnamFlDzqesQ,3721
|
||||||
|
anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821
|
||||||
|
anyio/from_thread.py,sha256=L-0w1HxJ6BSb-KuVi57k5Tkc3yzQrx3QK5tAxMPcY-0,19141
|
||||||
|
anyio/functools.py,sha256=HWj7GBEmc0Z-mZg3uok7Z7ZJn0rEC_0Pzbt0nYUDaTQ,10973
|
||||||
|
anyio/lowlevel.py,sha256=AyKLVK3LaWSoK39LkCKxE4_GDMLKZBNqTrLUgk63y80,5158
|
||||||
|
anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
anyio/pytest_plugin.py,sha256=3jAFQn0jv_pyoWE2GBBlHaj9sqXj4e8vob0_hgrsXE8,10244
|
||||||
|
anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
anyio/streams/buffered.py,sha256=2R3PeJhe4EXrdYqz44Y6-Eg9R6DrmlsYrP36Ir43-po,6263
|
||||||
|
anyio/streams/file.py,sha256=4WZ7XGz5WNu39FQHvqbe__TQ0HDP9OOhgO1mk9iVpVU,4470
|
||||||
|
anyio/streams/memory.py,sha256=F0zwzvFJKAhX_LRZGoKzzqDC2oMM-f-yyTBrEYEGOaU,10740
|
||||||
|
anyio/streams/stapled.py,sha256=T8Xqwf8K6EgURPxbt1N4i7A8BAk-gScv-GRhjLXIf_o,4390
|
||||||
|
anyio/streams/text.py,sha256=BcVAGJw1VRvtIqnv-o0Rb0pwH7p8vwlvl21xHq522ag,5765
|
||||||
|
anyio/streams/tls.py,sha256=Jpxy0Mfbcp1BxHCwE-YjSSFaLnIBbnnwur-excYThs4,15368
|
||||||
|
anyio/to_interpreter.py,sha256=_mLngrMy97TMR6VbW4Y6YzDUk9ZuPcQMPlkuyRh3C9k,7100
|
||||||
|
anyio/to_process.py,sha256=J7gAA_YOuoHqnpDAf5fm1Qu6kOmTzdFbiDNvnV755vk,9798
|
||||||
|
anyio/to_thread.py,sha256=menEgXYmUV7Fjg_9WqCV95P9MAtQS8BzPGGcWB_QnfQ,2687
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: setuptools (80.9.0)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
||||||
@@ -0,0 +1,2 @@
|
|||||||
|
[pytest11]
|
||||||
|
anyio = anyio.pytest_plugin
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2018 Alex Grönholm
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
anyio
|
||||||
111
.venv/lib/python3.9/site-packages/anyio/__init__.py
Normal file
111
.venv/lib/python3.9/site-packages/anyio/__init__.py
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
|
||||||
|
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
|
||||||
|
from ._core._eventloop import current_time as current_time
|
||||||
|
from ._core._eventloop import get_all_backends as get_all_backends
|
||||||
|
from ._core._eventloop import get_available_backends as get_available_backends
|
||||||
|
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
|
||||||
|
from ._core._eventloop import run as run
|
||||||
|
from ._core._eventloop import sleep as sleep
|
||||||
|
from ._core._eventloop import sleep_forever as sleep_forever
|
||||||
|
from ._core._eventloop import sleep_until as sleep_until
|
||||||
|
from ._core._exceptions import BrokenResourceError as BrokenResourceError
|
||||||
|
from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
|
||||||
|
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
|
||||||
|
from ._core._exceptions import BusyResourceError as BusyResourceError
|
||||||
|
from ._core._exceptions import ClosedResourceError as ClosedResourceError
|
||||||
|
from ._core._exceptions import ConnectionFailed as ConnectionFailed
|
||||||
|
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
|
||||||
|
from ._core._exceptions import EndOfStream as EndOfStream
|
||||||
|
from ._core._exceptions import IncompleteRead as IncompleteRead
|
||||||
|
from ._core._exceptions import NoEventLoopError as NoEventLoopError
|
||||||
|
from ._core._exceptions import RunFinishedError as RunFinishedError
|
||||||
|
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
|
||||||
|
from ._core._exceptions import WouldBlock as WouldBlock
|
||||||
|
from ._core._fileio import AsyncFile as AsyncFile
|
||||||
|
from ._core._fileio import Path as Path
|
||||||
|
from ._core._fileio import open_file as open_file
|
||||||
|
from ._core._fileio import wrap_file as wrap_file
|
||||||
|
from ._core._resources import aclose_forcefully as aclose_forcefully
|
||||||
|
from ._core._signals import open_signal_receiver as open_signal_receiver
|
||||||
|
from ._core._sockets import TCPConnectable as TCPConnectable
|
||||||
|
from ._core._sockets import UNIXConnectable as UNIXConnectable
|
||||||
|
from ._core._sockets import as_connectable as as_connectable
|
||||||
|
from ._core._sockets import connect_tcp as connect_tcp
|
||||||
|
from ._core._sockets import connect_unix as connect_unix
|
||||||
|
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
|
||||||
|
from ._core._sockets import (
|
||||||
|
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
|
||||||
|
)
|
||||||
|
from ._core._sockets import create_tcp_listener as create_tcp_listener
|
||||||
|
from ._core._sockets import create_udp_socket as create_udp_socket
|
||||||
|
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
|
||||||
|
from ._core._sockets import create_unix_listener as create_unix_listener
|
||||||
|
from ._core._sockets import getaddrinfo as getaddrinfo
|
||||||
|
from ._core._sockets import getnameinfo as getnameinfo
|
||||||
|
from ._core._sockets import notify_closing as notify_closing
|
||||||
|
from ._core._sockets import wait_readable as wait_readable
|
||||||
|
from ._core._sockets import wait_socket_readable as wait_socket_readable
|
||||||
|
from ._core._sockets import wait_socket_writable as wait_socket_writable
|
||||||
|
from ._core._sockets import wait_writable as wait_writable
|
||||||
|
from ._core._streams import create_memory_object_stream as create_memory_object_stream
|
||||||
|
from ._core._subprocesses import open_process as open_process
|
||||||
|
from ._core._subprocesses import run_process as run_process
|
||||||
|
from ._core._synchronization import CapacityLimiter as CapacityLimiter
|
||||||
|
from ._core._synchronization import (
|
||||||
|
CapacityLimiterStatistics as CapacityLimiterStatistics,
|
||||||
|
)
|
||||||
|
from ._core._synchronization import Condition as Condition
|
||||||
|
from ._core._synchronization import ConditionStatistics as ConditionStatistics
|
||||||
|
from ._core._synchronization import Event as Event
|
||||||
|
from ._core._synchronization import EventStatistics as EventStatistics
|
||||||
|
from ._core._synchronization import Lock as Lock
|
||||||
|
from ._core._synchronization import LockStatistics as LockStatistics
|
||||||
|
from ._core._synchronization import ResourceGuard as ResourceGuard
|
||||||
|
from ._core._synchronization import Semaphore as Semaphore
|
||||||
|
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
|
||||||
|
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
|
||||||
|
from ._core._tasks import CancelScope as CancelScope
|
||||||
|
from ._core._tasks import create_task_group as create_task_group
|
||||||
|
from ._core._tasks import current_effective_deadline as current_effective_deadline
|
||||||
|
from ._core._tasks import fail_after as fail_after
|
||||||
|
from ._core._tasks import move_on_after as move_on_after
|
||||||
|
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
|
||||||
|
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
|
||||||
|
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
|
||||||
|
from ._core._tempfile import TemporaryFile as TemporaryFile
|
||||||
|
from ._core._tempfile import gettempdir as gettempdir
|
||||||
|
from ._core._tempfile import gettempdirb as gettempdirb
|
||||||
|
from ._core._tempfile import mkdtemp as mkdtemp
|
||||||
|
from ._core._tempfile import mkstemp as mkstemp
|
||||||
|
from ._core._testing import TaskInfo as TaskInfo
|
||||||
|
from ._core._testing import get_current_task as get_current_task
|
||||||
|
from ._core._testing import get_running_tasks as get_running_tasks
|
||||||
|
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
|
||||||
|
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
|
||||||
|
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
|
||||||
|
from ._core._typedattr import typed_attribute as typed_attribute
|
||||||
|
|
||||||
|
# Re-export imports so they look like they live directly in this package
|
||||||
|
for __value in list(locals().values()):
|
||||||
|
if getattr(__value, "__module__", "").startswith("anyio."):
|
||||||
|
__value.__module__ = __name__
|
||||||
|
|
||||||
|
|
||||||
|
del __value
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
|
||||||
|
"""Support deprecated aliases."""
|
||||||
|
if attr == "BrokenWorkerIntepreter":
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
warnings.warn(
|
||||||
|
"The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
return BrokenWorkerInterpreter
|
||||||
|
|
||||||
|
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")
|
||||||
2980
.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py
Normal file
2980
.venv/lib/python3.9/site-packages/anyio/_backends/_asyncio.py
Normal file
File diff suppressed because it is too large
Load Diff
1346
.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py
Normal file
1346
.venv/lib/python3.9/site-packages/anyio/_backends/_trio.py
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,167 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import socket
|
||||||
|
import threading
|
||||||
|
from collections.abc import Callable
|
||||||
|
from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from _typeshed import FileDescriptorLike
|
||||||
|
|
||||||
|
_selector_lock = threading.Lock()
|
||||||
|
_selector: Selector | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Selector:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._thread = threading.Thread(target=self.run, name="AnyIO socket selector")
|
||||||
|
self._selector = DefaultSelector()
|
||||||
|
self._send, self._receive = socket.socketpair()
|
||||||
|
self._send.setblocking(False)
|
||||||
|
self._receive.setblocking(False)
|
||||||
|
# This somewhat reduces the amount of memory wasted queueing up data
|
||||||
|
# for wakeups. With these settings, maximum number of 1-byte sends
|
||||||
|
# before getting BlockingIOError:
|
||||||
|
# Linux 4.8: 6
|
||||||
|
# macOS (darwin 15.5): 1
|
||||||
|
# Windows 10: 525347
|
||||||
|
# Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
|
||||||
|
# blocking, even on non-blocking sockets, so don't do that.)
|
||||||
|
self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
|
||||||
|
self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
|
||||||
|
# On Windows this is a TCP socket so this might matter. On other
|
||||||
|
# platforms this fails b/c AF_UNIX sockets aren't actually TCP.
|
||||||
|
try:
|
||||||
|
self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
self._selector.register(self._receive, EVENT_READ)
|
||||||
|
self._closed = False
|
||||||
|
|
||||||
|
def start(self) -> None:
|
||||||
|
self._thread.start()
|
||||||
|
threading._register_atexit(self._stop) # type: ignore[attr-defined]
|
||||||
|
|
||||||
|
def _stop(self) -> None:
|
||||||
|
global _selector
|
||||||
|
self._closed = True
|
||||||
|
self._notify_self()
|
||||||
|
self._send.close()
|
||||||
|
self._thread.join()
|
||||||
|
self._selector.unregister(self._receive)
|
||||||
|
self._receive.close()
|
||||||
|
self._selector.close()
|
||||||
|
_selector = None
|
||||||
|
assert not self._selector.get_map(), (
|
||||||
|
"selector still has registered file descriptors after shutdown"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _notify_self(self) -> None:
|
||||||
|
try:
|
||||||
|
self._send.send(b"\x00")
|
||||||
|
except BlockingIOError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
try:
|
||||||
|
key = self._selector.get_key(fd)
|
||||||
|
except KeyError:
|
||||||
|
self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)})
|
||||||
|
else:
|
||||||
|
if EVENT_READ in key.data:
|
||||||
|
raise ValueError(
|
||||||
|
"this file descriptor is already registered for reading"
|
||||||
|
)
|
||||||
|
|
||||||
|
key.data[EVENT_READ] = loop, callback
|
||||||
|
self._selector.modify(fd, key.events | EVENT_READ, key.data)
|
||||||
|
|
||||||
|
self._notify_self()
|
||||||
|
|
||||||
|
def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
try:
|
||||||
|
key = self._selector.get_key(fd)
|
||||||
|
except KeyError:
|
||||||
|
self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)})
|
||||||
|
else:
|
||||||
|
if EVENT_WRITE in key.data:
|
||||||
|
raise ValueError(
|
||||||
|
"this file descriptor is already registered for writing"
|
||||||
|
)
|
||||||
|
|
||||||
|
key.data[EVENT_WRITE] = loop, callback
|
||||||
|
self._selector.modify(fd, key.events | EVENT_WRITE, key.data)
|
||||||
|
|
||||||
|
self._notify_self()
|
||||||
|
|
||||||
|
def remove_reader(self, fd: FileDescriptorLike) -> bool:
|
||||||
|
try:
|
||||||
|
key = self._selector.get_key(fd)
|
||||||
|
except KeyError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if new_events := key.events ^ EVENT_READ:
|
||||||
|
del key.data[EVENT_READ]
|
||||||
|
self._selector.modify(fd, new_events, key.data)
|
||||||
|
else:
|
||||||
|
self._selector.unregister(fd)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def remove_writer(self, fd: FileDescriptorLike) -> bool:
|
||||||
|
try:
|
||||||
|
key = self._selector.get_key(fd)
|
||||||
|
except KeyError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
if new_events := key.events ^ EVENT_WRITE:
|
||||||
|
del key.data[EVENT_WRITE]
|
||||||
|
self._selector.modify(fd, new_events, key.data)
|
||||||
|
else:
|
||||||
|
self._selector.unregister(fd)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
def run(self) -> None:
|
||||||
|
while not self._closed:
|
||||||
|
for key, events in self._selector.select():
|
||||||
|
if key.fileobj is self._receive:
|
||||||
|
try:
|
||||||
|
while self._receive.recv(4096):
|
||||||
|
pass
|
||||||
|
except BlockingIOError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
if events & EVENT_READ:
|
||||||
|
loop, callback = key.data[EVENT_READ]
|
||||||
|
self.remove_reader(key.fd)
|
||||||
|
try:
|
||||||
|
loop.call_soon_threadsafe(callback)
|
||||||
|
except RuntimeError:
|
||||||
|
pass # the loop was already closed
|
||||||
|
|
||||||
|
if events & EVENT_WRITE:
|
||||||
|
loop, callback = key.data[EVENT_WRITE]
|
||||||
|
self.remove_writer(key.fd)
|
||||||
|
try:
|
||||||
|
loop.call_soon_threadsafe(callback)
|
||||||
|
except RuntimeError:
|
||||||
|
pass # the loop was already closed
|
||||||
|
|
||||||
|
|
||||||
|
def get_selector() -> Selector:
|
||||||
|
global _selector
|
||||||
|
|
||||||
|
with _selector_lock:
|
||||||
|
if _selector is None:
|
||||||
|
_selector = Selector()
|
||||||
|
_selector.start()
|
||||||
|
|
||||||
|
return _selector
|
||||||
@@ -0,0 +1,200 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from abc import abstractmethod
|
||||||
|
from contextlib import AbstractAsyncContextManager, AbstractContextManager
|
||||||
|
from inspect import isasyncgen, iscoroutine, isgenerator
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import Protocol, TypeVar, cast, final
|
||||||
|
|
||||||
|
_T_co = TypeVar("_T_co", covariant=True)
|
||||||
|
_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None")
|
||||||
|
|
||||||
|
|
||||||
|
class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]):
|
||||||
|
def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ...
|
||||||
|
|
||||||
|
|
||||||
|
class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]):
|
||||||
|
def __asynccontextmanager__(
|
||||||
|
self,
|
||||||
|
) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ...
|
||||||
|
|
||||||
|
|
||||||
|
class ContextManagerMixin:
|
||||||
|
"""
|
||||||
|
Mixin class providing context manager functionality via a generator-based
|
||||||
|
implementation.
|
||||||
|
|
||||||
|
This class allows you to implement a context manager via :meth:`__contextmanager__`
|
||||||
|
which should return a generator. The mechanics are meant to mirror those of
|
||||||
|
:func:`@contextmanager <contextlib.contextmanager>`.
|
||||||
|
|
||||||
|
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
|
||||||
|
that once you enter it, you can't re-enter before first exiting it.
|
||||||
|
|
||||||
|
.. seealso:: :doc:`contextmanagers`
|
||||||
|
"""
|
||||||
|
|
||||||
|
__cm: AbstractContextManager[object, bool | None] | None = None
|
||||||
|
|
||||||
|
@final
|
||||||
|
def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co:
|
||||||
|
# Needed for mypy to assume self still has the __cm member
|
||||||
|
assert isinstance(self, ContextManagerMixin)
|
||||||
|
if self.__cm is not None:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"this {self.__class__.__qualname__} has already been entered"
|
||||||
|
)
|
||||||
|
|
||||||
|
cm = self.__contextmanager__()
|
||||||
|
if not isinstance(cm, AbstractContextManager):
|
||||||
|
if isgenerator(cm):
|
||||||
|
raise TypeError(
|
||||||
|
"__contextmanager__() returned a generator object instead of "
|
||||||
|
"a context manager. Did you forget to add the @contextmanager "
|
||||||
|
"decorator?"
|
||||||
|
)
|
||||||
|
|
||||||
|
raise TypeError(
|
||||||
|
f"__contextmanager__() did not return a context manager object, "
|
||||||
|
f"but {cm.__class__!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if cm is self:
|
||||||
|
raise TypeError(
|
||||||
|
f"{self.__class__.__qualname__}.__contextmanager__() returned "
|
||||||
|
f"self. Did you forget to add the @contextmanager decorator and a "
|
||||||
|
f"'yield' statement?"
|
||||||
|
)
|
||||||
|
|
||||||
|
value = cm.__enter__()
|
||||||
|
self.__cm = cm
|
||||||
|
return value
|
||||||
|
|
||||||
|
@final
|
||||||
|
def __exit__(
|
||||||
|
self: _SupportsCtxMgr[object, _ExitT_co],
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> _ExitT_co:
|
||||||
|
# Needed for mypy to assume self still has the __cm member
|
||||||
|
assert isinstance(self, ContextManagerMixin)
|
||||||
|
if self.__cm is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"this {self.__class__.__qualname__} has not been entered yet"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prevent circular references
|
||||||
|
cm = self.__cm
|
||||||
|
del self.__cm
|
||||||
|
|
||||||
|
return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb))
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def __contextmanager__(self) -> AbstractContextManager[object, bool | None]:
|
||||||
|
"""
|
||||||
|
Implement your context manager logic here.
|
||||||
|
|
||||||
|
This method **must** be decorated with
|
||||||
|
:func:`@contextmanager <contextlib.contextmanager>`.
|
||||||
|
|
||||||
|
.. note:: Remember that the ``yield`` will raise any exception raised in the
|
||||||
|
enclosed context block, so use a ``finally:`` block to clean up resources!
|
||||||
|
|
||||||
|
:return: a context manager object
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncContextManagerMixin:
|
||||||
|
"""
|
||||||
|
Mixin class providing async context manager functionality via a generator-based
|
||||||
|
implementation.
|
||||||
|
|
||||||
|
This class allows you to implement a context manager via
|
||||||
|
:meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of
|
||||||
|
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
|
||||||
|
|
||||||
|
.. note:: Classes using this mix-in are not reentrant as context managers, meaning
|
||||||
|
that once you enter it, you can't re-enter before first exiting it.
|
||||||
|
|
||||||
|
.. seealso:: :doc:`contextmanagers`
|
||||||
|
"""
|
||||||
|
|
||||||
|
__cm: AbstractAsyncContextManager[object, bool | None] | None = None
|
||||||
|
|
||||||
|
@final
|
||||||
|
async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co:
|
||||||
|
# Needed for mypy to assume self still has the __cm member
|
||||||
|
assert isinstance(self, AsyncContextManagerMixin)
|
||||||
|
if self.__cm is not None:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"this {self.__class__.__qualname__} has already been entered"
|
||||||
|
)
|
||||||
|
|
||||||
|
cm = self.__asynccontextmanager__()
|
||||||
|
if not isinstance(cm, AbstractAsyncContextManager):
|
||||||
|
if isasyncgen(cm):
|
||||||
|
raise TypeError(
|
||||||
|
"__asynccontextmanager__() returned an async generator instead of "
|
||||||
|
"an async context manager. Did you forget to add the "
|
||||||
|
"@asynccontextmanager decorator?"
|
||||||
|
)
|
||||||
|
elif iscoroutine(cm):
|
||||||
|
cm.close()
|
||||||
|
raise TypeError(
|
||||||
|
"__asynccontextmanager__() returned a coroutine object instead of "
|
||||||
|
"an async context manager. Did you forget to add the "
|
||||||
|
"@asynccontextmanager decorator and a 'yield' statement?"
|
||||||
|
)
|
||||||
|
|
||||||
|
raise TypeError(
|
||||||
|
f"__asynccontextmanager__() did not return an async context manager, "
|
||||||
|
f"but {cm.__class__!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if cm is self:
|
||||||
|
raise TypeError(
|
||||||
|
f"{self.__class__.__qualname__}.__asynccontextmanager__() returned "
|
||||||
|
f"self. Did you forget to add the @asynccontextmanager decorator and a "
|
||||||
|
f"'yield' statement?"
|
||||||
|
)
|
||||||
|
|
||||||
|
value = await cm.__aenter__()
|
||||||
|
self.__cm = cm
|
||||||
|
return value
|
||||||
|
|
||||||
|
@final
|
||||||
|
async def __aexit__(
|
||||||
|
self: _SupportsAsyncCtxMgr[object, _ExitT_co],
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> _ExitT_co:
|
||||||
|
assert isinstance(self, AsyncContextManagerMixin)
|
||||||
|
if self.__cm is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"this {self.__class__.__qualname__} has not been entered yet"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prevent circular references
|
||||||
|
cm = self.__cm
|
||||||
|
del self.__cm
|
||||||
|
|
||||||
|
return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb))
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def __asynccontextmanager__(
|
||||||
|
self,
|
||||||
|
) -> AbstractAsyncContextManager[object, bool | None]:
|
||||||
|
"""
|
||||||
|
Implement your async context manager logic here.
|
||||||
|
|
||||||
|
This method **must** be decorated with
|
||||||
|
:func:`@asynccontextmanager <contextlib.asynccontextmanager>`.
|
||||||
|
|
||||||
|
.. note:: Remember that the ``yield`` will raise any exception raised in the
|
||||||
|
enclosed context block, so use a ``finally:`` block to clean up resources!
|
||||||
|
|
||||||
|
:return: an async context manager object
|
||||||
|
"""
|
||||||
234
.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py
Normal file
234
.venv/lib/python3.9/site-packages/anyio/_core/_eventloop.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
import threading
|
||||||
|
from collections.abc import Awaitable, Callable, Generator
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from contextvars import Token
|
||||||
|
from importlib import import_module
|
||||||
|
from typing import TYPE_CHECKING, Any, TypeVar
|
||||||
|
|
||||||
|
from ._exceptions import NoEventLoopError
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
sniffio: Any
|
||||||
|
try:
|
||||||
|
import sniffio
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
sniffio = None
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ..abc import AsyncBackend
|
||||||
|
|
||||||
|
# This must be updated when new backends are introduced
|
||||||
|
BACKENDS = "asyncio", "trio"
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
threadlocals = threading.local()
|
||||||
|
loaded_backends: dict[str, type[AsyncBackend]] = {}
|
||||||
|
|
||||||
|
|
||||||
|
def run(
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
backend: str = "asyncio",
|
||||||
|
backend_options: dict[str, Any] | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Run the given coroutine function in an asynchronous event loop.
|
||||||
|
|
||||||
|
The current thread must not be already running an event loop.
|
||||||
|
|
||||||
|
:param func: a coroutine function
|
||||||
|
:param args: positional arguments to ``func``
|
||||||
|
:param backend: name of the asynchronous event loop implementation – currently
|
||||||
|
either ``asyncio`` or ``trio``
|
||||||
|
:param backend_options: keyword arguments to call the backend ``run()``
|
||||||
|
implementation with (documented :ref:`here <backend options>`)
|
||||||
|
:return: the return value of the coroutine function
|
||||||
|
:raises RuntimeError: if an asynchronous event loop is already running in this
|
||||||
|
thread
|
||||||
|
:raises LookupError: if the named backend is not found
|
||||||
|
|
||||||
|
"""
|
||||||
|
if asynclib_name := current_async_library():
|
||||||
|
raise RuntimeError(f"Already running {asynclib_name} in this thread")
|
||||||
|
|
||||||
|
try:
|
||||||
|
async_backend = get_async_backend(backend)
|
||||||
|
except ImportError as exc:
|
||||||
|
raise LookupError(f"No such backend: {backend}") from exc
|
||||||
|
|
||||||
|
token = None
|
||||||
|
if asynclib_name is None:
|
||||||
|
# Since we're in control of the event loop, we can cache the name of the async
|
||||||
|
# library
|
||||||
|
token = set_current_async_library(backend)
|
||||||
|
|
||||||
|
try:
|
||||||
|
backend_options = backend_options or {}
|
||||||
|
return async_backend.run(func, args, {}, backend_options)
|
||||||
|
finally:
|
||||||
|
reset_current_async_library(token)
|
||||||
|
|
||||||
|
|
||||||
|
async def sleep(delay: float) -> None:
|
||||||
|
"""
|
||||||
|
Pause the current task for the specified duration.
|
||||||
|
|
||||||
|
:param delay: the duration, in seconds
|
||||||
|
|
||||||
|
"""
|
||||||
|
return await get_async_backend().sleep(delay)
|
||||||
|
|
||||||
|
|
||||||
|
async def sleep_forever() -> None:
|
||||||
|
"""
|
||||||
|
Pause the current task until it's cancelled.
|
||||||
|
|
||||||
|
This is a shortcut for ``sleep(math.inf)``.
|
||||||
|
|
||||||
|
.. versionadded:: 3.1
|
||||||
|
|
||||||
|
"""
|
||||||
|
await sleep(math.inf)
|
||||||
|
|
||||||
|
|
||||||
|
async def sleep_until(deadline: float) -> None:
|
||||||
|
"""
|
||||||
|
Pause the current task until the given time.
|
||||||
|
|
||||||
|
:param deadline: the absolute time to wake up at (according to the internal
|
||||||
|
monotonic clock of the event loop)
|
||||||
|
|
||||||
|
.. versionadded:: 3.1
|
||||||
|
|
||||||
|
"""
|
||||||
|
now = current_time()
|
||||||
|
await sleep(max(deadline - now, 0))
|
||||||
|
|
||||||
|
|
||||||
|
def current_time() -> float:
|
||||||
|
"""
|
||||||
|
Return the current value of the event loop's internal clock.
|
||||||
|
|
||||||
|
:return: the clock value (seconds)
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return get_async_backend().current_time()
|
||||||
|
|
||||||
|
|
||||||
|
def get_all_backends() -> tuple[str, ...]:
|
||||||
|
"""Return a tuple of the names of all built-in backends."""
|
||||||
|
return BACKENDS
|
||||||
|
|
||||||
|
|
||||||
|
def get_available_backends() -> tuple[str, ...]:
|
||||||
|
"""
|
||||||
|
Test for the availability of built-in backends.
|
||||||
|
|
||||||
|
:return a tuple of the built-in backend names that were successfully imported
|
||||||
|
|
||||||
|
.. versionadded:: 4.12
|
||||||
|
|
||||||
|
"""
|
||||||
|
available_backends: list[str] = []
|
||||||
|
for backend_name in get_all_backends():
|
||||||
|
try:
|
||||||
|
get_async_backend(backend_name)
|
||||||
|
except ImportError:
|
||||||
|
continue
|
||||||
|
|
||||||
|
available_backends.append(backend_name)
|
||||||
|
|
||||||
|
return tuple(available_backends)
|
||||||
|
|
||||||
|
|
||||||
|
def get_cancelled_exc_class() -> type[BaseException]:
|
||||||
|
"""
|
||||||
|
Return the current async library's cancellation exception class.
|
||||||
|
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return get_async_backend().cancelled_exception_class()
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
|
# Private API
|
||||||
|
#
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def claim_worker_thread(
|
||||||
|
backend_class: type[AsyncBackend], token: object
|
||||||
|
) -> Generator[Any, None, None]:
|
||||||
|
from ..lowlevel import EventLoopToken
|
||||||
|
|
||||||
|
threadlocals.current_token = EventLoopToken(backend_class, token)
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
del threadlocals.current_token
|
||||||
|
|
||||||
|
|
||||||
|
def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
|
||||||
|
if asynclib_name is None:
|
||||||
|
asynclib_name = current_async_library()
|
||||||
|
if not asynclib_name:
|
||||||
|
raise NoEventLoopError(
|
||||||
|
f"Not currently running on any asynchronous event loop. "
|
||||||
|
f"Available async backends: {', '.join(get_all_backends())}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# We use our own dict instead of sys.modules to get the already imported back-end
|
||||||
|
# class because the appropriate modules in sys.modules could potentially be only
|
||||||
|
# partially initialized
|
||||||
|
try:
|
||||||
|
return loaded_backends[asynclib_name]
|
||||||
|
except KeyError:
|
||||||
|
module = import_module(f"anyio._backends._{asynclib_name}")
|
||||||
|
loaded_backends[asynclib_name] = module.backend_class
|
||||||
|
return module.backend_class
|
||||||
|
|
||||||
|
|
||||||
|
def current_async_library() -> str | None:
|
||||||
|
if sniffio is None:
|
||||||
|
# If sniffio is not installed, we assume we're either running asyncio or nothing
|
||||||
|
import asyncio
|
||||||
|
|
||||||
|
try:
|
||||||
|
asyncio.get_running_loop()
|
||||||
|
return "asyncio"
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
return sniffio.current_async_library()
|
||||||
|
except sniffio.AsyncLibraryNotFoundError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def set_current_async_library(asynclib_name: str | None) -> Token | None:
|
||||||
|
# no-op if sniffio is not installed
|
||||||
|
if sniffio is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return sniffio.current_async_library_cvar.set(asynclib_name)
|
||||||
|
|
||||||
|
|
||||||
|
def reset_current_async_library(token: Token | None) -> None:
|
||||||
|
if token is not None:
|
||||||
|
sniffio.current_async_library_cvar.reset(token)
|
||||||
156
.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py
Normal file
156
.venv/lib/python3.9/site-packages/anyio/_core/_exceptions.py
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from collections.abc import Generator
|
||||||
|
from textwrap import dedent
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
if sys.version_info < (3, 11):
|
||||||
|
from exceptiongroup import BaseExceptionGroup
|
||||||
|
|
||||||
|
|
||||||
|
class BrokenResourceError(Exception):
|
||||||
|
"""
|
||||||
|
Raised when trying to use a resource that has been rendered unusable due to external
|
||||||
|
causes (e.g. a send stream whose peer has disconnected).
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class BrokenWorkerProcess(Exception):
|
||||||
|
"""
|
||||||
|
Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or
|
||||||
|
otherwise misbehaves.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class BrokenWorkerInterpreter(Exception):
|
||||||
|
"""
|
||||||
|
Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is
|
||||||
|
raised in the subinterpreter.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, excinfo: Any):
|
||||||
|
# This was adapted from concurrent.futures.interpreter.ExecutionFailed
|
||||||
|
msg = excinfo.formatted
|
||||||
|
if not msg:
|
||||||
|
if excinfo.type and excinfo.msg:
|
||||||
|
msg = f"{excinfo.type.__name__}: {excinfo.msg}"
|
||||||
|
else:
|
||||||
|
msg = excinfo.type.__name__ or excinfo.msg
|
||||||
|
|
||||||
|
super().__init__(msg)
|
||||||
|
self.excinfo = excinfo
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
try:
|
||||||
|
formatted = self.excinfo.errdisplay
|
||||||
|
except Exception:
|
||||||
|
return super().__str__()
|
||||||
|
else:
|
||||||
|
return dedent(
|
||||||
|
f"""
|
||||||
|
{super().__str__()}
|
||||||
|
|
||||||
|
Uncaught in the interpreter:
|
||||||
|
|
||||||
|
{formatted}
|
||||||
|
""".strip()
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class BusyResourceError(Exception):
|
||||||
|
"""
|
||||||
|
Raised when two tasks are trying to read from or write to the same resource
|
||||||
|
concurrently.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, action: str):
|
||||||
|
super().__init__(f"Another task is already {action} this resource")
|
||||||
|
|
||||||
|
|
||||||
|
class ClosedResourceError(Exception):
|
||||||
|
"""Raised when trying to use a resource that has been closed."""
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectionFailed(OSError):
|
||||||
|
"""
|
||||||
|
Raised when a connection attempt fails.
|
||||||
|
|
||||||
|
.. note:: This class inherits from :exc:`OSError` for backwards compatibility.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def iterate_exceptions(
|
||||||
|
exception: BaseException,
|
||||||
|
) -> Generator[BaseException, None, None]:
|
||||||
|
if isinstance(exception, BaseExceptionGroup):
|
||||||
|
for exc in exception.exceptions:
|
||||||
|
yield from iterate_exceptions(exc)
|
||||||
|
else:
|
||||||
|
yield exception
|
||||||
|
|
||||||
|
|
||||||
|
class DelimiterNotFound(Exception):
|
||||||
|
"""
|
||||||
|
Raised during
|
||||||
|
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
|
||||||
|
maximum number of bytes has been read without the delimiter being found.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, max_bytes: int) -> None:
|
||||||
|
super().__init__(
|
||||||
|
f"The delimiter was not found among the first {max_bytes} bytes"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class EndOfStream(Exception):
|
||||||
|
"""
|
||||||
|
Raised when trying to read from a stream that has been closed from the other end.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class IncompleteRead(Exception):
|
||||||
|
"""
|
||||||
|
Raised during
|
||||||
|
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
|
||||||
|
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
|
||||||
|
connection is closed before the requested amount of bytes has been read.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__(
|
||||||
|
"The stream was closed before the read operation could be completed"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TypedAttributeLookupError(LookupError):
|
||||||
|
"""
|
||||||
|
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute
|
||||||
|
is not found and no default value has been given.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class WouldBlock(Exception):
|
||||||
|
"""Raised by ``X_nowait`` functions if ``X()`` would block."""
|
||||||
|
|
||||||
|
|
||||||
|
class NoEventLoopError(RuntimeError):
|
||||||
|
"""
|
||||||
|
Raised by several functions that require an event loop to be running in the current
|
||||||
|
thread when there is no running event loop.
|
||||||
|
|
||||||
|
This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync`
|
||||||
|
if not calling from an AnyIO worker thread, and no ``token`` was passed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class RunFinishedError(RuntimeError):
|
||||||
|
"""
|
||||||
|
Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event
|
||||||
|
loop associated with the explicitly passed token has already finished.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
super().__init__(
|
||||||
|
"The event loop associated with the given token has already finished"
|
||||||
|
)
|
||||||
797
.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py
Normal file
797
.venv/lib/python3.9/site-packages/anyio/_core/_fileio.py
Normal file
@@ -0,0 +1,797 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import sys
|
||||||
|
from collections.abc import (
|
||||||
|
AsyncIterator,
|
||||||
|
Callable,
|
||||||
|
Iterable,
|
||||||
|
Iterator,
|
||||||
|
Sequence,
|
||||||
|
)
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from functools import partial
|
||||||
|
from os import PathLike
|
||||||
|
from typing import (
|
||||||
|
IO,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
AnyStr,
|
||||||
|
ClassVar,
|
||||||
|
Final,
|
||||||
|
Generic,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .. import to_thread
|
||||||
|
from ..abc import AsyncResource
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from types import ModuleType
|
||||||
|
|
||||||
|
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
||||||
|
else:
|
||||||
|
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncFile(AsyncResource, Generic[AnyStr]):
|
||||||
|
"""
|
||||||
|
An asynchronous file object.
|
||||||
|
|
||||||
|
This class wraps a standard file object and provides async friendly versions of the
|
||||||
|
following blocking methods (where available on the original file object):
|
||||||
|
|
||||||
|
* read
|
||||||
|
* read1
|
||||||
|
* readline
|
||||||
|
* readlines
|
||||||
|
* readinto
|
||||||
|
* readinto1
|
||||||
|
* write
|
||||||
|
* writelines
|
||||||
|
* truncate
|
||||||
|
* seek
|
||||||
|
* tell
|
||||||
|
* flush
|
||||||
|
|
||||||
|
All other methods are directly passed through.
|
||||||
|
|
||||||
|
This class supports the asynchronous context manager protocol which closes the
|
||||||
|
underlying file at the end of the context block.
|
||||||
|
|
||||||
|
This class also supports asynchronous iteration::
|
||||||
|
|
||||||
|
async with await open_file(...) as f:
|
||||||
|
async for line in f:
|
||||||
|
print(line)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, fp: IO[AnyStr]) -> None:
|
||||||
|
self._fp: Any = fp
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> object:
|
||||||
|
return getattr(self._fp, name)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def wrapped(self) -> IO[AnyStr]:
|
||||||
|
"""The wrapped file object."""
|
||||||
|
return self._fp
|
||||||
|
|
||||||
|
async def __aiter__(self) -> AsyncIterator[AnyStr]:
|
||||||
|
while True:
|
||||||
|
line = await self.readline()
|
||||||
|
if line:
|
||||||
|
yield line
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
return await to_thread.run_sync(self._fp.close)
|
||||||
|
|
||||||
|
async def read(self, size: int = -1) -> AnyStr:
|
||||||
|
return await to_thread.run_sync(self._fp.read, size)
|
||||||
|
|
||||||
|
async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
|
||||||
|
return await to_thread.run_sync(self._fp.read1, size)
|
||||||
|
|
||||||
|
async def readline(self) -> AnyStr:
|
||||||
|
return await to_thread.run_sync(self._fp.readline)
|
||||||
|
|
||||||
|
async def readlines(self) -> list[AnyStr]:
|
||||||
|
return await to_thread.run_sync(self._fp.readlines)
|
||||||
|
|
||||||
|
async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.readinto, b)
|
||||||
|
|
||||||
|
async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.readinto1, b)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def write(self: AsyncFile[str], b: str) -> int: ...
|
||||||
|
|
||||||
|
async def write(self, b: ReadableBuffer | str) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.write, b)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def writelines(
|
||||||
|
self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
|
||||||
|
) -> None: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ...
|
||||||
|
|
||||||
|
async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
|
||||||
|
return await to_thread.run_sync(self._fp.writelines, lines)
|
||||||
|
|
||||||
|
async def truncate(self, size: int | None = None) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.truncate, size)
|
||||||
|
|
||||||
|
async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.seek, offset, whence)
|
||||||
|
|
||||||
|
async def tell(self) -> int:
|
||||||
|
return await to_thread.run_sync(self._fp.tell)
|
||||||
|
|
||||||
|
async def flush(self) -> None:
|
||||||
|
return await to_thread.run_sync(self._fp.flush)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def open_file(
|
||||||
|
file: str | PathLike[str] | int,
|
||||||
|
mode: OpenBinaryMode,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
errors: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
closefd: bool = ...,
|
||||||
|
opener: Callable[[str, int], int] | None = ...,
|
||||||
|
) -> AsyncFile[bytes]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def open_file(
|
||||||
|
file: str | PathLike[str] | int,
|
||||||
|
mode: OpenTextMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
errors: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
closefd: bool = ...,
|
||||||
|
opener: Callable[[str, int], int] | None = ...,
|
||||||
|
) -> AsyncFile[str]: ...
|
||||||
|
|
||||||
|
|
||||||
|
async def open_file(
|
||||||
|
file: str | PathLike[str] | int,
|
||||||
|
mode: str = "r",
|
||||||
|
buffering: int = -1,
|
||||||
|
encoding: str | None = None,
|
||||||
|
errors: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
closefd: bool = True,
|
||||||
|
opener: Callable[[str, int], int] | None = None,
|
||||||
|
) -> AsyncFile[Any]:
|
||||||
|
"""
|
||||||
|
Open a file asynchronously.
|
||||||
|
|
||||||
|
The arguments are exactly the same as for the builtin :func:`open`.
|
||||||
|
|
||||||
|
:return: an asynchronous file object
|
||||||
|
|
||||||
|
"""
|
||||||
|
fp = await to_thread.run_sync(
|
||||||
|
open, file, mode, buffering, encoding, errors, newline, closefd, opener
|
||||||
|
)
|
||||||
|
return AsyncFile(fp)
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
|
||||||
|
"""
|
||||||
|
Wrap an existing file as an asynchronous file.
|
||||||
|
|
||||||
|
:param file: an existing file-like object
|
||||||
|
:return: an asynchronous file object
|
||||||
|
|
||||||
|
"""
|
||||||
|
return AsyncFile(file)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class _PathIterator(AsyncIterator["Path"]):
|
||||||
|
iterator: Iterator[PathLike[str]]
|
||||||
|
|
||||||
|
async def __anext__(self) -> Path:
|
||||||
|
nextval = await to_thread.run_sync(
|
||||||
|
next, self.iterator, None, abandon_on_cancel=True
|
||||||
|
)
|
||||||
|
if nextval is None:
|
||||||
|
raise StopAsyncIteration from None
|
||||||
|
|
||||||
|
return Path(nextval)
|
||||||
|
|
||||||
|
|
||||||
|
class Path:
|
||||||
|
"""
|
||||||
|
An asynchronous version of :class:`pathlib.Path`.
|
||||||
|
|
||||||
|
This class cannot be substituted for :class:`pathlib.Path` or
|
||||||
|
:class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
|
||||||
|
interface.
|
||||||
|
|
||||||
|
It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for
|
||||||
|
the deprecated :meth:`~pathlib.Path.link_to` method.
|
||||||
|
|
||||||
|
Some methods may be unavailable or have limited functionality, based on the Python
|
||||||
|
version:
|
||||||
|
|
||||||
|
* :meth:`~pathlib.Path.copy` (available on Python 3.14 or later)
|
||||||
|
* :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later)
|
||||||
|
* :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later)
|
||||||
|
* :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later)
|
||||||
|
* :attr:`~pathlib.Path.info` (available on Python 3.14 or later)
|
||||||
|
* :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later)
|
||||||
|
* :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only
|
||||||
|
available on Python 3.13 or later)
|
||||||
|
* :meth:`~pathlib.Path.move` (available on Python 3.14 or later)
|
||||||
|
* :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later)
|
||||||
|
* :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available
|
||||||
|
on Python 3.12 or later)
|
||||||
|
* :meth:`~pathlib.Path.walk` (available on Python 3.12 or later)
|
||||||
|
|
||||||
|
Any methods that do disk I/O need to be awaited on. These methods are:
|
||||||
|
|
||||||
|
* :meth:`~pathlib.Path.absolute`
|
||||||
|
* :meth:`~pathlib.Path.chmod`
|
||||||
|
* :meth:`~pathlib.Path.cwd`
|
||||||
|
* :meth:`~pathlib.Path.exists`
|
||||||
|
* :meth:`~pathlib.Path.expanduser`
|
||||||
|
* :meth:`~pathlib.Path.group`
|
||||||
|
* :meth:`~pathlib.Path.hardlink_to`
|
||||||
|
* :meth:`~pathlib.Path.home`
|
||||||
|
* :meth:`~pathlib.Path.is_block_device`
|
||||||
|
* :meth:`~pathlib.Path.is_char_device`
|
||||||
|
* :meth:`~pathlib.Path.is_dir`
|
||||||
|
* :meth:`~pathlib.Path.is_fifo`
|
||||||
|
* :meth:`~pathlib.Path.is_file`
|
||||||
|
* :meth:`~pathlib.Path.is_junction`
|
||||||
|
* :meth:`~pathlib.Path.is_mount`
|
||||||
|
* :meth:`~pathlib.Path.is_socket`
|
||||||
|
* :meth:`~pathlib.Path.is_symlink`
|
||||||
|
* :meth:`~pathlib.Path.lchmod`
|
||||||
|
* :meth:`~pathlib.Path.lstat`
|
||||||
|
* :meth:`~pathlib.Path.mkdir`
|
||||||
|
* :meth:`~pathlib.Path.open`
|
||||||
|
* :meth:`~pathlib.Path.owner`
|
||||||
|
* :meth:`~pathlib.Path.read_bytes`
|
||||||
|
* :meth:`~pathlib.Path.read_text`
|
||||||
|
* :meth:`~pathlib.Path.readlink`
|
||||||
|
* :meth:`~pathlib.Path.rename`
|
||||||
|
* :meth:`~pathlib.Path.replace`
|
||||||
|
* :meth:`~pathlib.Path.resolve`
|
||||||
|
* :meth:`~pathlib.Path.rmdir`
|
||||||
|
* :meth:`~pathlib.Path.samefile`
|
||||||
|
* :meth:`~pathlib.Path.stat`
|
||||||
|
* :meth:`~pathlib.Path.symlink_to`
|
||||||
|
* :meth:`~pathlib.Path.touch`
|
||||||
|
* :meth:`~pathlib.Path.unlink`
|
||||||
|
* :meth:`~pathlib.Path.walk`
|
||||||
|
* :meth:`~pathlib.Path.write_bytes`
|
||||||
|
* :meth:`~pathlib.Path.write_text`
|
||||||
|
|
||||||
|
Additionally, the following methods return an async iterator yielding
|
||||||
|
:class:`~.Path` objects:
|
||||||
|
|
||||||
|
* :meth:`~pathlib.Path.glob`
|
||||||
|
* :meth:`~pathlib.Path.iterdir`
|
||||||
|
* :meth:`~pathlib.Path.rglob`
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = "_path", "__weakref__"
|
||||||
|
|
||||||
|
__weakref__: Any
|
||||||
|
|
||||||
|
def __init__(self, *args: str | PathLike[str]) -> None:
|
||||||
|
self._path: Final[pathlib.Path] = pathlib.Path(*args)
|
||||||
|
|
||||||
|
def __fspath__(self) -> str:
|
||||||
|
return self._path.__fspath__()
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return self._path.__str__()
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"{self.__class__.__name__}({self.as_posix()!r})"
|
||||||
|
|
||||||
|
def __bytes__(self) -> bytes:
|
||||||
|
return self._path.__bytes__()
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
|
||||||
|
return self._path.__hash__()
|
||||||
|
|
||||||
|
def __eq__(self, other: object) -> bool:
|
||||||
|
target = other._path if isinstance(other, Path) else other
|
||||||
|
return self._path.__eq__(target)
|
||||||
|
|
||||||
|
def __lt__(self, other: pathlib.PurePath | Path) -> bool:
|
||||||
|
target = other._path if isinstance(other, Path) else other
|
||||||
|
return self._path.__lt__(target)
|
||||||
|
|
||||||
|
def __le__(self, other: pathlib.PurePath | Path) -> bool:
|
||||||
|
target = other._path if isinstance(other, Path) else other
|
||||||
|
return self._path.__le__(target)
|
||||||
|
|
||||||
|
def __gt__(self, other: pathlib.PurePath | Path) -> bool:
|
||||||
|
target = other._path if isinstance(other, Path) else other
|
||||||
|
return self._path.__gt__(target)
|
||||||
|
|
||||||
|
def __ge__(self, other: pathlib.PurePath | Path) -> bool:
|
||||||
|
target = other._path if isinstance(other, Path) else other
|
||||||
|
return self._path.__ge__(target)
|
||||||
|
|
||||||
|
def __truediv__(self, other: str | PathLike[str]) -> Path:
|
||||||
|
return Path(self._path / other)
|
||||||
|
|
||||||
|
def __rtruediv__(self, other: str | PathLike[str]) -> Path:
|
||||||
|
return Path(other) / self
|
||||||
|
|
||||||
|
@property
|
||||||
|
def parts(self) -> tuple[str, ...]:
|
||||||
|
return self._path.parts
|
||||||
|
|
||||||
|
@property
|
||||||
|
def drive(self) -> str:
|
||||||
|
return self._path.drive
|
||||||
|
|
||||||
|
@property
|
||||||
|
def root(self) -> str:
|
||||||
|
return self._path.root
|
||||||
|
|
||||||
|
@property
|
||||||
|
def anchor(self) -> str:
|
||||||
|
return self._path.anchor
|
||||||
|
|
||||||
|
@property
|
||||||
|
def parents(self) -> Sequence[Path]:
|
||||||
|
return tuple(Path(p) for p in self._path.parents)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def parent(self) -> Path:
|
||||||
|
return Path(self._path.parent)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self._path.name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def suffix(self) -> str:
|
||||||
|
return self._path.suffix
|
||||||
|
|
||||||
|
@property
|
||||||
|
def suffixes(self) -> list[str]:
|
||||||
|
return self._path.suffixes
|
||||||
|
|
||||||
|
@property
|
||||||
|
def stem(self) -> str:
|
||||||
|
return self._path.stem
|
||||||
|
|
||||||
|
async def absolute(self) -> Path:
|
||||||
|
path = await to_thread.run_sync(self._path.absolute)
|
||||||
|
return Path(path)
|
||||||
|
|
||||||
|
def as_posix(self) -> str:
|
||||||
|
return self._path.as_posix()
|
||||||
|
|
||||||
|
def as_uri(self) -> str:
|
||||||
|
return self._path.as_uri()
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 13):
|
||||||
|
parser: ClassVar[ModuleType] = pathlib.Path.parser
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_uri(cls, uri: str) -> Path:
|
||||||
|
return Path(pathlib.Path.from_uri(uri))
|
||||||
|
|
||||||
|
def full_match(
|
||||||
|
self, path_pattern: str, *, case_sensitive: bool | None = None
|
||||||
|
) -> bool:
|
||||||
|
return self._path.full_match(path_pattern, case_sensitive=case_sensitive)
|
||||||
|
|
||||||
|
def match(
|
||||||
|
self, path_pattern: str, *, case_sensitive: bool | None = None
|
||||||
|
) -> bool:
|
||||||
|
return self._path.match(path_pattern, case_sensitive=case_sensitive)
|
||||||
|
else:
|
||||||
|
|
||||||
|
def match(self, path_pattern: str) -> bool:
|
||||||
|
return self._path.match(path_pattern)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 14):
|
||||||
|
|
||||||
|
@property
|
||||||
|
def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it
|
||||||
|
return self._path.info
|
||||||
|
|
||||||
|
async def copy(
|
||||||
|
self,
|
||||||
|
target: str | os.PathLike[str],
|
||||||
|
*,
|
||||||
|
follow_symlinks: bool = True,
|
||||||
|
preserve_metadata: bool = False,
|
||||||
|
) -> Path:
|
||||||
|
func = partial(
|
||||||
|
self._path.copy,
|
||||||
|
follow_symlinks=follow_symlinks,
|
||||||
|
preserve_metadata=preserve_metadata,
|
||||||
|
)
|
||||||
|
return Path(await to_thread.run_sync(func, pathlib.Path(target)))
|
||||||
|
|
||||||
|
async def copy_into(
|
||||||
|
self,
|
||||||
|
target_dir: str | os.PathLike[str],
|
||||||
|
*,
|
||||||
|
follow_symlinks: bool = True,
|
||||||
|
preserve_metadata: bool = False,
|
||||||
|
) -> Path:
|
||||||
|
func = partial(
|
||||||
|
self._path.copy_into,
|
||||||
|
follow_symlinks=follow_symlinks,
|
||||||
|
preserve_metadata=preserve_metadata,
|
||||||
|
)
|
||||||
|
return Path(await to_thread.run_sync(func, pathlib.Path(target_dir)))
|
||||||
|
|
||||||
|
async def move(self, target: str | os.PathLike[str]) -> Path:
|
||||||
|
# Upstream does not handle anyio.Path properly as a PathLike
|
||||||
|
target = pathlib.Path(target)
|
||||||
|
return Path(await to_thread.run_sync(self._path.move, target))
|
||||||
|
|
||||||
|
async def move_into(
|
||||||
|
self,
|
||||||
|
target_dir: str | os.PathLike[str],
|
||||||
|
) -> Path:
|
||||||
|
return Path(await to_thread.run_sync(self._path.move_into, target_dir))
|
||||||
|
|
||||||
|
def is_relative_to(self, other: str | PathLike[str]) -> bool:
|
||||||
|
try:
|
||||||
|
self.relative_to(other)
|
||||||
|
return True
|
||||||
|
except ValueError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
|
||||||
|
func = partial(os.chmod, follow_symlinks=follow_symlinks)
|
||||||
|
return await to_thread.run_sync(func, self._path, mode)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def cwd(cls) -> Path:
|
||||||
|
path = await to_thread.run_sync(pathlib.Path.cwd)
|
||||||
|
return cls(path)
|
||||||
|
|
||||||
|
async def exists(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def expanduser(self) -> Path:
|
||||||
|
return Path(
|
||||||
|
await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info < (3, 12):
|
||||||
|
# Python 3.11 and earlier
|
||||||
|
def glob(self, pattern: str) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.glob(pattern)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
elif (3, 12) <= sys.version_info < (3, 13):
|
||||||
|
# changed in Python 3.12:
|
||||||
|
# - The case_sensitive parameter was added.
|
||||||
|
def glob(
|
||||||
|
self,
|
||||||
|
pattern: str,
|
||||||
|
*,
|
||||||
|
case_sensitive: bool | None = None,
|
||||||
|
) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.glob(pattern, case_sensitive=case_sensitive)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
elif sys.version_info >= (3, 13):
|
||||||
|
# Changed in Python 3.13:
|
||||||
|
# - The recurse_symlinks parameter was added.
|
||||||
|
# - The pattern parameter accepts a path-like object.
|
||||||
|
def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
|
||||||
|
self,
|
||||||
|
pattern: str | PathLike[str],
|
||||||
|
*,
|
||||||
|
case_sensitive: bool | None = None,
|
||||||
|
recurse_symlinks: bool = False,
|
||||||
|
) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.glob(
|
||||||
|
pattern, # type: ignore[arg-type]
|
||||||
|
case_sensitive=case_sensitive,
|
||||||
|
recurse_symlinks=recurse_symlinks,
|
||||||
|
)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
|
||||||
|
async def group(self) -> str:
|
||||||
|
return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def hardlink_to(
|
||||||
|
self, target: str | bytes | PathLike[str] | PathLike[bytes]
|
||||||
|
) -> None:
|
||||||
|
if isinstance(target, Path):
|
||||||
|
target = target._path
|
||||||
|
|
||||||
|
await to_thread.run_sync(os.link, target, self)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def home(cls) -> Path:
|
||||||
|
home_path = await to_thread.run_sync(pathlib.Path.home)
|
||||||
|
return cls(home_path)
|
||||||
|
|
||||||
|
def is_absolute(self) -> bool:
|
||||||
|
return self._path.is_absolute()
|
||||||
|
|
||||||
|
async def is_block_device(self) -> bool:
|
||||||
|
return await to_thread.run_sync(
|
||||||
|
self._path.is_block_device, abandon_on_cancel=True
|
||||||
|
)
|
||||||
|
|
||||||
|
async def is_char_device(self) -> bool:
|
||||||
|
return await to_thread.run_sync(
|
||||||
|
self._path.is_char_device, abandon_on_cancel=True
|
||||||
|
)
|
||||||
|
|
||||||
|
async def is_dir(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def is_fifo(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def is_file(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
|
||||||
|
async def is_junction(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_junction)
|
||||||
|
|
||||||
|
async def is_mount(self) -> bool:
|
||||||
|
return await to_thread.run_sync(
|
||||||
|
os.path.ismount, self._path, abandon_on_cancel=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_reserved(self) -> bool:
|
||||||
|
return self._path.is_reserved()
|
||||||
|
|
||||||
|
async def is_socket(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def is_symlink(self) -> bool:
|
||||||
|
return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def iterdir(self) -> AsyncIterator[Path]:
|
||||||
|
gen = (
|
||||||
|
self._path.iterdir()
|
||||||
|
if sys.version_info < (3, 13)
|
||||||
|
else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True)
|
||||||
|
)
|
||||||
|
async for path in _PathIterator(gen):
|
||||||
|
yield path
|
||||||
|
|
||||||
|
def joinpath(self, *args: str | PathLike[str]) -> Path:
|
||||||
|
return Path(self._path.joinpath(*args))
|
||||||
|
|
||||||
|
async def lchmod(self, mode: int) -> None:
|
||||||
|
await to_thread.run_sync(self._path.lchmod, mode)
|
||||||
|
|
||||||
|
async def lstat(self) -> os.stat_result:
|
||||||
|
return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def mkdir(
|
||||||
|
self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
|
||||||
|
) -> None:
|
||||||
|
await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def open(
|
||||||
|
self,
|
||||||
|
mode: OpenBinaryMode,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
errors: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
) -> AsyncFile[bytes]: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def open(
|
||||||
|
self,
|
||||||
|
mode: OpenTextMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
errors: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
) -> AsyncFile[str]: ...
|
||||||
|
|
||||||
|
async def open(
|
||||||
|
self,
|
||||||
|
mode: str = "r",
|
||||||
|
buffering: int = -1,
|
||||||
|
encoding: str | None = None,
|
||||||
|
errors: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
) -> AsyncFile[Any]:
|
||||||
|
fp = await to_thread.run_sync(
|
||||||
|
self._path.open, mode, buffering, encoding, errors, newline
|
||||||
|
)
|
||||||
|
return AsyncFile(fp)
|
||||||
|
|
||||||
|
async def owner(self) -> str:
|
||||||
|
return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def read_bytes(self) -> bytes:
|
||||||
|
return await to_thread.run_sync(self._path.read_bytes)
|
||||||
|
|
||||||
|
async def read_text(
|
||||||
|
self, encoding: str | None = None, errors: str | None = None
|
||||||
|
) -> str:
|
||||||
|
return await to_thread.run_sync(self._path.read_text, encoding, errors)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
|
||||||
|
def relative_to(
|
||||||
|
self, *other: str | PathLike[str], walk_up: bool = False
|
||||||
|
) -> Path:
|
||||||
|
# relative_to() should work with any PathLike but it doesn't
|
||||||
|
others = [pathlib.Path(other) for other in other]
|
||||||
|
return Path(self._path.relative_to(*others, walk_up=walk_up))
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def relative_to(self, *other: str | PathLike[str]) -> Path:
|
||||||
|
return Path(self._path.relative_to(*other))
|
||||||
|
|
||||||
|
async def readlink(self) -> Path:
|
||||||
|
target = await to_thread.run_sync(os.readlink, self._path)
|
||||||
|
return Path(target)
|
||||||
|
|
||||||
|
async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
|
||||||
|
if isinstance(target, Path):
|
||||||
|
target = target._path
|
||||||
|
|
||||||
|
await to_thread.run_sync(self._path.rename, target)
|
||||||
|
return Path(target)
|
||||||
|
|
||||||
|
async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
|
||||||
|
if isinstance(target, Path):
|
||||||
|
target = target._path
|
||||||
|
|
||||||
|
await to_thread.run_sync(self._path.replace, target)
|
||||||
|
return Path(target)
|
||||||
|
|
||||||
|
async def resolve(self, strict: bool = False) -> Path:
|
||||||
|
func = partial(self._path.resolve, strict=strict)
|
||||||
|
return Path(await to_thread.run_sync(func, abandon_on_cancel=True))
|
||||||
|
|
||||||
|
if sys.version_info < (3, 12):
|
||||||
|
# Pre Python 3.12
|
||||||
|
def rglob(self, pattern: str) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.rglob(pattern)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
elif (3, 12) <= sys.version_info < (3, 13):
|
||||||
|
# Changed in Python 3.12:
|
||||||
|
# - The case_sensitive parameter was added.
|
||||||
|
def rglob(
|
||||||
|
self, pattern: str, *, case_sensitive: bool | None = None
|
||||||
|
) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.rglob(pattern, case_sensitive=case_sensitive)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
elif sys.version_info >= (3, 13):
|
||||||
|
# Changed in Python 3.13:
|
||||||
|
# - The recurse_symlinks parameter was added.
|
||||||
|
# - The pattern parameter accepts a path-like object.
|
||||||
|
def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block
|
||||||
|
self,
|
||||||
|
pattern: str | PathLike[str],
|
||||||
|
*,
|
||||||
|
case_sensitive: bool | None = None,
|
||||||
|
recurse_symlinks: bool = False,
|
||||||
|
) -> AsyncIterator[Path]:
|
||||||
|
gen = self._path.rglob(
|
||||||
|
pattern, # type: ignore[arg-type]
|
||||||
|
case_sensitive=case_sensitive,
|
||||||
|
recurse_symlinks=recurse_symlinks,
|
||||||
|
)
|
||||||
|
return _PathIterator(gen)
|
||||||
|
|
||||||
|
async def rmdir(self) -> None:
|
||||||
|
await to_thread.run_sync(self._path.rmdir)
|
||||||
|
|
||||||
|
async def samefile(self, other_path: str | PathLike[str]) -> bool:
|
||||||
|
if isinstance(other_path, Path):
|
||||||
|
other_path = other_path._path
|
||||||
|
|
||||||
|
return await to_thread.run_sync(
|
||||||
|
self._path.samefile, other_path, abandon_on_cancel=True
|
||||||
|
)
|
||||||
|
|
||||||
|
async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
|
||||||
|
func = partial(os.stat, follow_symlinks=follow_symlinks)
|
||||||
|
return await to_thread.run_sync(func, self._path, abandon_on_cancel=True)
|
||||||
|
|
||||||
|
async def symlink_to(
|
||||||
|
self,
|
||||||
|
target: str | bytes | PathLike[str] | PathLike[bytes],
|
||||||
|
target_is_directory: bool = False,
|
||||||
|
) -> None:
|
||||||
|
if isinstance(target, Path):
|
||||||
|
target = target._path
|
||||||
|
|
||||||
|
await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
|
||||||
|
|
||||||
|
async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
|
||||||
|
await to_thread.run_sync(self._path.touch, mode, exist_ok)
|
||||||
|
|
||||||
|
async def unlink(self, missing_ok: bool = False) -> None:
|
||||||
|
try:
|
||||||
|
await to_thread.run_sync(self._path.unlink)
|
||||||
|
except FileNotFoundError:
|
||||||
|
if not missing_ok:
|
||||||
|
raise
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
|
||||||
|
async def walk(
|
||||||
|
self,
|
||||||
|
top_down: bool = True,
|
||||||
|
on_error: Callable[[OSError], object] | None = None,
|
||||||
|
follow_symlinks: bool = False,
|
||||||
|
) -> AsyncIterator[tuple[Path, list[str], list[str]]]:
|
||||||
|
def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None:
|
||||||
|
try:
|
||||||
|
return next(gen)
|
||||||
|
except StopIteration:
|
||||||
|
return None
|
||||||
|
|
||||||
|
gen = self._path.walk(top_down, on_error, follow_symlinks)
|
||||||
|
while True:
|
||||||
|
value = await to_thread.run_sync(get_next_value)
|
||||||
|
if value is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
root, dirs, paths = value
|
||||||
|
yield Path(root), dirs, paths
|
||||||
|
|
||||||
|
def with_name(self, name: str) -> Path:
|
||||||
|
return Path(self._path.with_name(name))
|
||||||
|
|
||||||
|
def with_stem(self, stem: str) -> Path:
|
||||||
|
return Path(self._path.with_name(stem + self._path.suffix))
|
||||||
|
|
||||||
|
def with_suffix(self, suffix: str) -> Path:
|
||||||
|
return Path(self._path.with_suffix(suffix))
|
||||||
|
|
||||||
|
def with_segments(self, *pathsegments: str | PathLike[str]) -> Path:
|
||||||
|
return Path(*pathsegments)
|
||||||
|
|
||||||
|
async def write_bytes(self, data: bytes) -> int:
|
||||||
|
return await to_thread.run_sync(self._path.write_bytes, data)
|
||||||
|
|
||||||
|
async def write_text(
|
||||||
|
self,
|
||||||
|
data: str,
|
||||||
|
encoding: str | None = None,
|
||||||
|
errors: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
) -> int:
|
||||||
|
# Path.write_text() does not support the "newline" parameter before Python 3.10
|
||||||
|
def sync_write_text() -> int:
|
||||||
|
with self._path.open(
|
||||||
|
"w", encoding=encoding, errors=errors, newline=newline
|
||||||
|
) as fp:
|
||||||
|
return fp.write(data)
|
||||||
|
|
||||||
|
return await to_thread.run_sync(sync_write_text)
|
||||||
|
|
||||||
|
|
||||||
|
PathLike.register(Path)
|
||||||
18
.venv/lib/python3.9/site-packages/anyio/_core/_resources.py
Normal file
18
.venv/lib/python3.9/site-packages/anyio/_core/_resources.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from ..abc import AsyncResource
|
||||||
|
from ._tasks import CancelScope
|
||||||
|
|
||||||
|
|
||||||
|
async def aclose_forcefully(resource: AsyncResource) -> None:
    """
    Close an asynchronous resource from within an already-cancelled scope.

    Because the scope is cancelled before ``aclose()`` runs, the close cannot
    wait on anything (e.g. flushing buffers or graceful shutdown handshakes).

    :param resource: the resource to close

    """
    scope = CancelScope()
    with scope:
        scope.cancel()
        await resource.aclose()
|
||||||
29
.venv/lib/python3.9/site-packages/anyio/_core/_signals.py
Normal file
29
.venv/lib/python3.9/site-packages/anyio/_core/_signals.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import AsyncIterator
|
||||||
|
from contextlib import AbstractContextManager
|
||||||
|
from signal import Signals
|
||||||
|
|
||||||
|
from ._eventloop import get_async_backend
|
||||||
|
|
||||||
|
|
||||||
|
def open_signal_receiver(
    *signals: Signals,
) -> AbstractContextManager[AsyncIterator[Signals]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields
        signal numbers
    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
        current thread

    .. warning:: Windows does not support signals natively so it is best to avoid
        relying on this in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for
        the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`.

    """
    backend = get_async_backend()
    return backend.open_signal_receiver(*signals)
|
||||||
1003
.venv/lib/python3.9/site-packages/anyio/_core/_sockets.py
Normal file
1003
.venv/lib/python3.9/site-packages/anyio/_core/_sockets.py
Normal file
File diff suppressed because it is too large
Load Diff
52
.venv/lib/python3.9/site-packages/anyio/_core/_streams.py
Normal file
52
.venv/lib/python3.9/site-packages/anyio/_core/_streams.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
from typing import TypeVar
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
from ..streams.memory import (
|
||||||
|
MemoryObjectReceiveStream,
|
||||||
|
MemoryObjectSendStream,
|
||||||
|
_MemoryObjectStreamState,
|
||||||
|
)
|
||||||
|
|
||||||
|
T_Item = TypeVar("T_Item")
|
||||||
|
|
||||||
|
|
||||||
|
class create_memory_object_stream(
    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
):
    """
    Create a memory object stream.

    Annotate the item type by subscripting the call:
    :func:`create_memory_object_stream[T_Item]`.

    :param max_buffer_size: number of items held in the buffer until ``send()`` starts
        blocking
    :param item_type: old way of marking the streams with the right generic type for
        static typing (does nothing on AnyIO 4)

    .. deprecated:: 4.0
      Use ``create_memory_object_stream[YourItemType](...)`` instead.
    :return: a tuple of (send stream, receive stream)

    """

    def __new__(  # type: ignore[misc]
        cls, max_buffer_size: float = 0, item_type: object = None
    ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
        # accept ints and math.inf only; reject other floats and non-numbers
        if not (isinstance(max_buffer_size, int) or max_buffer_size == math.inf):
            raise ValueError("max_buffer_size must be either an integer or math.inf")
        if max_buffer_size < 0:
            raise ValueError("max_buffer_size cannot be negative")
        if item_type is not None:
            warn(
                "The item_type argument has been deprecated in AnyIO 4.0. "
                "Use create_memory_object_stream[YourItemType](...) instead.",
                DeprecationWarning,
                stacklevel=2,
            )

        shared_state = _MemoryObjectStreamState[T_Item](max_buffer_size)
        return MemoryObjectSendStream(shared_state), MemoryObjectReceiveStream(
            shared_state
        )
|
||||||
202
.venv/lib/python3.9/site-packages/anyio/_core/_subprocesses.py
Normal file
202
.venv/lib/python3.9/site-packages/anyio/_core/_subprocesses.py
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
|
||||||
|
from io import BytesIO
|
||||||
|
from os import PathLike
|
||||||
|
from subprocess import PIPE, CalledProcessError, CompletedProcess
|
||||||
|
from typing import IO, Any, Union, cast
|
||||||
|
|
||||||
|
from ..abc import Process
|
||||||
|
from ._eventloop import get_async_backend
|
||||||
|
from ._tasks import create_task_group
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeAlias
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeAlias
|
||||||
|
|
||||||
|
StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
|
||||||
|
|
||||||
|
|
||||||
|
async def run_process(
    command: StrOrBytesPath | Sequence[StrOrBytesPath],
    *,
    input: bytes | None = None,
    stdin: int | IO[Any] | None = None,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    check: bool = True,
    cwd: StrOrBytesPath | None = None,
    env: Mapping[str, str] | None = None,
    startupinfo: Any = None,
    creationflags: int = 0,
    start_new_session: bool = False,
    pass_fds: Sequence[int] = (),
    user: str | int | None = None,
    group: str | int | None = None,
    extra_groups: Iterable[str | int] | None = None,
    umask: int = -1,
) -> CompletedProcess[bytes]:
    """
    Run an external command in a subprocess and wait until it completes.

    .. seealso:: :func:`subprocess.run`

    :param command: either a string to pass to the shell, or an iterable of strings
        containing the executable name or path and its arguments
    :param input: bytes passed to the standard input of the subprocess (mutually
        exclusive with ``stdin``)
    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or `None`
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or `None`
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or `None`
    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the
        process terminates with a return code other than 0
    :param cwd: if not ``None``, change the working directory to this before running
    :param env: if not ``None``, this mapping replaces the inherited environment
    :param startupinfo: a :class:`subprocess.STARTUPINFO` for process startup
        parameters (Windows only)
    :param creationflags: flags controlling subprocess creation (see
        :class:`subprocess.Popen`)
    :param start_new_session: if true, call setsid() in the child before exec
        (POSIX only)
    :param pass_fds: file descriptors to keep open in the child (POSIX only)
    :param user: effective user to run the process as (POSIX only)
    :param group: effective group to run the process as (POSIX only)
    :param extra_groups: supplementary groups for the subprocess (POSIX only)
    :param umask: if not negative, this umask is applied in the child before exec
        (POSIX only)
    :return: an object representing the completed process
    :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process
        exits with a nonzero return code

    """
    if stdin is not None and input is not None:
        raise ValueError("only one of stdin and input is allowed")

    captured: list[bytes | None] = [None, None]

    async def collect(stream: AsyncIterable[bytes], slot: int) -> None:
        # accumulate everything the process writes so it can be attached to
        # the CompletedProcess / CalledProcessError afterwards
        sink = BytesIO()
        async for piece in stream:
            sink.write(piece)

        captured[slot] = sink.getvalue()

    async with await open_process(
        command,
        stdin=PIPE if input else stdin,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        startupinfo=startupinfo,
        creationflags=creationflags,
        start_new_session=start_new_session,
        pass_fds=pass_fds,
        user=user,
        group=group,
        extra_groups=extra_groups,
        umask=umask,
    ) as process:
        async with create_task_group() as tg:
            if process.stdout:
                tg.start_soon(collect, process.stdout, 0)

            if process.stderr:
                tg.start_soon(collect, process.stderr, 1)

            if process.stdin and input:
                await process.stdin.send(input)
                await process.stdin.aclose()

            await process.wait()

    output, errors = captured
    if check and process.returncode != 0:
        raise CalledProcessError(cast(int, process.returncode), command, output, errors)

    return CompletedProcess(command, cast(int, process.returncode), output, errors)
|
||||||
|
|
||||||
|
|
||||||
|
async def open_process(
    command: StrOrBytesPath | Sequence[StrOrBytesPath],
    *,
    stdin: int | IO[Any] | None = PIPE,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    cwd: StrOrBytesPath | None = None,
    env: Mapping[str, str] | None = None,
    startupinfo: Any = None,
    creationflags: int = 0,
    start_new_session: bool = False,
    pass_fds: Sequence[int] = (),
    user: str | int | None = None,
    group: str | int | None = None,
    extra_groups: Iterable[str | int] | None = None,
    umask: int = -1,
) -> Process:
    """
    Start an external command in a subprocess.

    .. seealso:: :class:`subprocess.Popen`

    :param command: either a string to pass to the shell, or an iterable of strings
        containing the executable name or path and its arguments
    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
        file-like object, or ``None``
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or ``None``
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or ``None``
    :param cwd: If not ``None``, the working directory is changed before executing
    :param env: If env is not ``None``, it must be a mapping that defines the
        environment variables for the new process
    :param creationflags: flags that can be used to control the creation of the
        subprocess (see :class:`subprocess.Popen` for the specifics)
    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
        to specify process startup parameters (Windows only)
    :param start_new_session: if ``true`` the setsid() system call will be made in the
        child process prior to the execution of the subprocess. (POSIX only)
    :param pass_fds: sequence of file descriptors to keep open between the parent and
        child processes. (POSIX only)
    :param user: effective user to run the process as (POSIX only)
    :param group: effective group to run the process as (POSIX only)
    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
    :param umask: if not negative, this umask is applied in the child process before
        running the given command (POSIX only)
    :return: an asynchronous process object

    """
    # Only forward the POSIX-only options that were explicitly given, so the
    # backend's Popen call works on platforms that don't accept them.
    kwargs: dict[str, Any] = {}
    if user is not None:
        kwargs["user"] = user

    if group is not None:
        kwargs["group"] = group

    if extra_groups is not None:
        # BUG FIX: previously assigned ``group`` here, silently passing the
        # wrong value (or None) as the supplementary groups
        kwargs["extra_groups"] = extra_groups

    if umask >= 0:
        kwargs["umask"] = umask

    return await get_async_backend().open_process(
        command,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        startupinfo=startupinfo,
        creationflags=creationflags,
        start_new_session=start_new_session,
        pass_fds=pass_fds,
        **kwargs,
    )
|
||||||
@@ -0,0 +1,753 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
from collections import deque
|
||||||
|
from collections.abc import Callable
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import TypeVar
|
||||||
|
|
||||||
|
from ..lowlevel import checkpoint_if_cancelled
|
||||||
|
from ._eventloop import get_async_backend
|
||||||
|
from ._exceptions import BusyResourceError, NoEventLoopError
|
||||||
|
from ._tasks import CancelScope
|
||||||
|
from ._testing import TaskInfo, get_current_task
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class EventStatistics:
    """
    :ivar int tasks_waiting: how many tasks are currently blocked in
        :meth:`~.Event.wait`
    """

    tasks_waiting: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class CapacityLimiterStatistics:
    """
    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
    :ivar float total_tokens: total number of available tokens
    :ivar tuple borrowers: the tasks (or arbitrary borrower objects) currently
        holding tokens from this limiter
    :ivar int tasks_waiting: number of tasks waiting on
        :meth:`~.CapacityLimiter.acquire` or
        :meth:`~.CapacityLimiter.acquire_on_behalf_of`
    """

    borrowed_tokens: int
    total_tokens: float
    borrowers: tuple[object, ...]
    tasks_waiting: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class LockStatistics:
    """
    :ivar bool locked: whether the lock is currently held
    :ivar ~anyio.TaskInfo owner: the task holding the lock, or ``None`` when the
        lock is free
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
    """

    locked: bool
    owner: TaskInfo | None
    tasks_waiting: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class ConditionStatistics:
    """
    :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
    :ivar ~anyio.LockStatistics lock_statistics: statistics for the condition's
        underlying :class:`~.Lock`
    """

    tasks_waiting: int
    lock_statistics: LockStatistics
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class SemaphoreStatistics:
    """
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`

    """

    tasks_waiting: int
|
||||||
|
|
||||||
|
|
||||||
|
class Event:
    """Async event flag; dispatches to the running backend's implementation,
    falling back to :class:`EventAdapter` outside an event loop."""

    def __new__(cls) -> Event:
        try:
            return get_async_backend().create_event()
        except NoEventLoopError:
            # no loop running yet: defer backend binding until first await
            return EventAdapter()

    def set(self) -> None:
        """Set the flag, notifying all listeners."""
        raise NotImplementedError

    def is_set(self) -> bool:
        """Return ``True`` if the flag is set, ``False`` if not."""
        raise NotImplementedError

    async def wait(self) -> None:
        """
        Wait until the flag has been set.

        Returns immediately if the flag was already set when called.

        """
        raise NotImplementedError

    def statistics(self) -> EventStatistics:
        """Return statistics about the current state of this event."""
        raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class EventAdapter(Event):
    """Event stand-in created without a running event loop; binds to a real
    backend event lazily, on first use that needs one."""

    _internal_event: Event | None = None
    _is_set: bool = False  # remembers set() calls made before binding

    def __new__(cls) -> EventAdapter:
        # bypass Event.__new__ so no backend lookup happens here
        return object.__new__(cls)

    @property
    def _event(self) -> Event:
        if self._internal_event is None:
            self._internal_event = get_async_backend().create_event()
            # replay a set() that happened before the backend event existed
            if self._is_set:
                self._internal_event.set()

        return self._internal_event

    def set(self) -> None:
        if self._internal_event is not None:
            self._event.set()
        else:
            self._is_set = True

    def is_set(self) -> bool:
        if self._internal_event is not None:
            return self._internal_event.is_set()

        return self._is_set

    async def wait(self) -> None:
        await self._event.wait()

    def statistics(self) -> EventStatistics:
        if self._internal_event is not None:
            return self._internal_event.statistics()

        return EventStatistics(tasks_waiting=0)
|
||||||
|
|
||||||
|
|
||||||
|
class Lock:
    """Async mutual-exclusion lock; dispatches to the running backend's
    implementation, falling back to :class:`LockAdapter` outside an event loop."""

    def __new__(cls, *, fast_acquire: bool = False) -> Lock:
        try:
            return get_async_backend().create_lock(fast_acquire=fast_acquire)
        except NoEventLoopError:
            # no loop running yet: defer backend binding until first use
            return LockAdapter(fast_acquire=fast_acquire)

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        raise NotImplementedError

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        raise NotImplementedError

    def release(self) -> None:
        """Release the lock."""
        raise NotImplementedError

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        raise NotImplementedError

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0
        """
        raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class LockAdapter(Lock):
    """Lock stand-in created without a running event loop; binds to a real
    backend lock lazily, on first use."""

    _internal_lock: Lock | None = None

    def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter:
        # bypass Lock.__new__ so no backend lookup happens here
        return object.__new__(cls)

    def __init__(self, *, fast_acquire: bool = False):
        self._fast_acquire = fast_acquire

    @property
    def _lock(self) -> Lock:
        if self._internal_lock is None:
            self._internal_lock = get_async_backend().create_lock(
                fast_acquire=self._fast_acquire
            )

        return self._internal_lock

    async def __aenter__(self) -> None:
        await self._lock.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        # only release if the backing lock was ever created
        if self._internal_lock is not None:
            self._internal_lock.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        await self._lock.acquire()

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()

    def release(self) -> None:
        """Release the lock."""
        self._lock.release()

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        return self._lock.locked()

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0

        """
        if self._internal_lock is not None:
            return self._internal_lock.statistics()

        return LockStatistics(False, None, 0)
|
||||||
|
|
||||||
|
|
||||||
|
class Condition:
    """Async condition variable built on a :class:`Lock` plus a FIFO of
    per-waiter :class:`Event` objects."""

    _owner_task: TaskInfo | None = None  # task currently holding the lock

    def __init__(self, lock: Lock | None = None):
        self._lock = lock or Lock()
        self._waiters: deque[Event] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    def _check_acquired(self) -> None:
        # notify/wait are only legal while this task holds the lock
        if self._owner_task != get_current_task():
            raise RuntimeError("The current task is not holding the underlying lock")

    async def acquire(self) -> None:
        """Acquire the underlying lock."""
        await self._lock.acquire()
        self._owner_task = get_current_task()

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()
        self._owner_task = get_current_task()

    def release(self) -> None:
        """Release the underlying lock."""
        self._lock.release()

    def locked(self) -> bool:
        """Return True if the lock is set."""
        return self._lock.locked()

    def notify(self, n: int = 1) -> None:
        """Notify exactly n listeners."""
        self._check_acquired()
        for _ in range(n):
            try:
                waiter = self._waiters.popleft()
            except IndexError:
                break

            waiter.set()

    def notify_all(self) -> None:
        """Notify all the listeners."""
        self._check_acquired()
        for waiter in self._waiters:
            waiter.set()

        self._waiters.clear()

    async def wait(self) -> None:
        """Wait for a notification."""
        await checkpoint_if_cancelled()
        self._check_acquired()
        waiter = Event()
        self._waiters.append(waiter)
        self.release()
        try:
            await waiter.wait()
        except BaseException:
            # drop our entry unless a notify already consumed it
            if not waiter.is_set():
                self._waiters.remove(waiter)

            raise
        finally:
            # always reacquire, even while being cancelled
            with CancelScope(shield=True):
                await self.acquire()

    async def wait_for(self, predicate: Callable[[], T]) -> T:
        """
        Wait until a predicate becomes true.

        :param predicate: a callable that returns a truthy value when the condition is
            met
        :return: the result of the predicate

        .. versionadded:: 4.11.0

        """
        while not (result := predicate()):
            await self.wait()

        return result

    def statistics(self) -> ConditionStatistics:
        """
        Return statistics about the current state of this condition.

        .. versionadded:: 3.0
        """
        return ConditionStatistics(len(self._waiters), self._lock.statistics())
|
||||||
|
|
||||||
|
|
||||||
|
class Semaphore:
    """Async counting semaphore; dispatches to the running backend's
    implementation, falling back to :class:`SemaphoreAdapter` outside a loop."""

    def __new__(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> Semaphore:
        try:
            return get_async_backend().create_semaphore(
                initial_value, max_value=max_value, fast_acquire=fast_acquire
            )
        except NoEventLoopError:
            # no loop running yet: defer backend binding until first use
            return SemaphoreAdapter(initial_value, max_value=max_value)

    def __init__(
        self,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ):
        # validate eagerly so misuse fails at construction, not on acquire
        if not isinstance(initial_value, int):
            raise TypeError("initial_value must be an integer")
        if initial_value < 0:
            raise ValueError("initial_value must be >= 0")
        if max_value is not None:
            if not isinstance(max_value, int):
                raise TypeError("max_value must be an integer or None")
            if max_value < initial_value:
                raise ValueError(
                    "max_value must be equal to or higher than initial_value"
                )

        self._fast_acquire = fast_acquire

    async def __aenter__(self) -> Semaphore:
        await self.acquire()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Decrement the semaphore value, blocking if necessary."""
        raise NotImplementedError

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        raise NotImplementedError

    def release(self) -> None:
        """Increment the semaphore value."""
        raise NotImplementedError

    @property
    def value(self) -> int:
        """The current value of the semaphore."""
        raise NotImplementedError

    @property
    def max_value(self) -> int | None:
        """The maximum value of the semaphore."""
        raise NotImplementedError

    def statistics(self) -> SemaphoreStatistics:
        """
        Return statistics about the current state of this semaphore.

        .. versionadded:: 3.0
        """
        raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class SemaphoreAdapter(Semaphore):
    """Semaphore stand-in created without a running event loop; binds to a real
    backend semaphore lazily, on first use."""

    _internal_semaphore: Semaphore | None = None

    def __new__(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> SemaphoreAdapter:
        # bypass Semaphore.__new__ so no backend lookup happens here
        return object.__new__(cls)

    def __init__(
        self,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> None:
        # parent __init__ performs the argument validation
        super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
        self._initial_value = initial_value
        self._max_value = max_value

    @property
    def _semaphore(self) -> Semaphore:
        if self._internal_semaphore is None:
            self._internal_semaphore = get_async_backend().create_semaphore(
                self._initial_value, max_value=self._max_value
            )

        return self._internal_semaphore

    async def acquire(self) -> None:
        await self._semaphore.acquire()

    def acquire_nowait(self) -> None:
        self._semaphore.acquire_nowait()

    def release(self) -> None:
        self._semaphore.release()

    @property
    def value(self) -> int:
        # before binding, nothing was acquired, so report the initial value
        if self._internal_semaphore is None:
            return self._initial_value

        return self._semaphore.value

    @property
    def max_value(self) -> int | None:
        return self._max_value

    def statistics(self) -> SemaphoreStatistics:
        if self._internal_semaphore is None:
            return SemaphoreStatistics(tasks_waiting=0)

        return self._semaphore.statistics()
|
||||||
|
|
||||||
|
|
||||||
|
class CapacityLimiter:
|
||||||
|
def __new__(cls, total_tokens: float) -> CapacityLimiter:
|
||||||
|
try:
|
||||||
|
return get_async_backend().create_capacity_limiter(total_tokens)
|
||||||
|
except NoEventLoopError:
|
||||||
|
return CapacityLimiterAdapter(total_tokens)
|
||||||
|
|
||||||
|
async def __aenter__(self) -> None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def total_tokens(self) -> float:
|
||||||
|
"""
|
||||||
|
The total number of tokens available for borrowing.
|
||||||
|
|
||||||
|
This is a read-write property. If the total number of tokens is increased, the
|
||||||
|
proportionate number of tasks waiting on this limiter will be granted their
|
||||||
|
tokens.
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
The property is now writable.
|
||||||
|
.. versionchanged:: 4.12
|
||||||
|
The value can now be set to 0.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@total_tokens.setter
|
||||||
|
def total_tokens(self, value: float) -> None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def borrowed_tokens(self) -> int:
|
||||||
|
"""The number of tokens that have currently been borrowed."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def available_tokens(self) -> float:
|
||||||
|
"""The number of tokens currently available to be borrowed"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def acquire_nowait(self) -> None:
|
||||||
|
"""
|
||||||
|
Acquire a token for the current task without waiting for one to become
|
||||||
|
available.
|
||||||
|
|
||||||
|
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
|
||||||
|
"""
|
||||||
|
Acquire a token without waiting for one to become available.
|
||||||
|
|
||||||
|
:param borrower: the entity borrowing a token
|
||||||
|
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def acquire(self) -> None:
|
||||||
|
"""
|
||||||
|
Acquire a token for the current task, waiting if necessary for one to become
|
||||||
|
available.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def acquire_on_behalf_of(self, borrower: object) -> None:
|
||||||
|
"""
|
||||||
|
Acquire a token, waiting if necessary for one to become available.
|
||||||
|
|
||||||
|
:param borrower: the entity borrowing a token
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def release(self) -> None:
|
||||||
|
"""
|
||||||
|
Release the token held by the current task.
|
||||||
|
|
||||||
|
:raises RuntimeError: if the current task has not borrowed a token from this
|
||||||
|
limiter.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def release_on_behalf_of(self, borrower: object) -> None:
|
||||||
|
"""
|
||||||
|
Release the token held by the given borrower.
|
||||||
|
|
||||||
|
:raises RuntimeError: if the borrower has not borrowed a token from this
|
||||||
|
limiter.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def statistics(self) -> CapacityLimiterStatistics:
|
||||||
|
"""
|
||||||
|
Return statistics about the current state of this limiter.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class CapacityLimiterAdapter(CapacityLimiter):
|
||||||
|
_internal_limiter: CapacityLimiter | None = None
|
||||||
|
|
||||||
|
def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter:
|
||||||
|
return object.__new__(cls)
|
||||||
|
|
||||||
|
def __init__(self, total_tokens: float) -> None:
|
||||||
|
self.total_tokens = total_tokens
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _limiter(self) -> CapacityLimiter:
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
self._internal_limiter = get_async_backend().create_capacity_limiter(
|
||||||
|
self._total_tokens
|
||||||
|
)
|
||||||
|
|
||||||
|
return self._internal_limiter
|
||||||
|
|
||||||
|
async def __aenter__(self) -> None:
|
||||||
|
await self._limiter.__aenter__()
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
return await self._limiter.__aexit__(exc_type, exc_val, exc_tb)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def total_tokens(self) -> float:
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
return self._total_tokens
|
||||||
|
|
||||||
|
return self._internal_limiter.total_tokens
|
||||||
|
|
||||||
|
@total_tokens.setter
|
||||||
|
def total_tokens(self, value: float) -> None:
|
||||||
|
if not isinstance(value, int) and value is not math.inf:
|
||||||
|
raise TypeError("total_tokens must be an int or math.inf")
|
||||||
|
elif value < 1:
|
||||||
|
raise ValueError("total_tokens must be >= 1")
|
||||||
|
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
self._total_tokens = value
|
||||||
|
return
|
||||||
|
|
||||||
|
self._limiter.total_tokens = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def borrowed_tokens(self) -> int:
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
return self._internal_limiter.borrowed_tokens
|
||||||
|
|
||||||
|
@property
|
||||||
|
def available_tokens(self) -> float:
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
return self._total_tokens
|
||||||
|
|
||||||
|
return self._internal_limiter.available_tokens
|
||||||
|
|
||||||
|
def acquire_nowait(self) -> None:
|
||||||
|
self._limiter.acquire_nowait()
|
||||||
|
|
||||||
|
def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
|
||||||
|
self._limiter.acquire_on_behalf_of_nowait(borrower)
|
||||||
|
|
||||||
|
async def acquire(self) -> None:
|
||||||
|
await self._limiter.acquire()
|
||||||
|
|
||||||
|
async def acquire_on_behalf_of(self, borrower: object) -> None:
|
||||||
|
await self._limiter.acquire_on_behalf_of(borrower)
|
||||||
|
|
||||||
|
def release(self) -> None:
|
||||||
|
self._limiter.release()
|
||||||
|
|
||||||
|
def release_on_behalf_of(self, borrower: object) -> None:
|
||||||
|
self._limiter.release_on_behalf_of(borrower)
|
||||||
|
|
||||||
|
def statistics(self) -> CapacityLimiterStatistics:
|
||||||
|
if self._internal_limiter is None:
|
||||||
|
return CapacityLimiterStatistics(
|
||||||
|
borrowed_tokens=0,
|
||||||
|
total_tokens=self.total_tokens,
|
||||||
|
borrowers=(),
|
||||||
|
tasks_waiting=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
return self._internal_limiter.statistics()
|
||||||
|
|
||||||
|
|
||||||
|
class ResourceGuard:
|
||||||
|
"""
|
||||||
|
A context manager for ensuring that a resource is only used by a single task at a
|
||||||
|
time.
|
||||||
|
|
||||||
|
Entering this context manager while the previous has not exited it yet will trigger
|
||||||
|
:exc:`BusyResourceError`.
|
||||||
|
|
||||||
|
:param action: the action to guard against (visible in the :exc:`BusyResourceError`
|
||||||
|
when triggered, e.g. "Another task is already {action} this resource")
|
||||||
|
|
||||||
|
.. versionadded:: 4.1
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = "action", "_guarded"
|
||||||
|
|
||||||
|
def __init__(self, action: str = "using"):
|
||||||
|
self.action: str = action
|
||||||
|
self._guarded = False
|
||||||
|
|
||||||
|
def __enter__(self) -> None:
|
||||||
|
if self._guarded:
|
||||||
|
raise BusyResourceError(self.action)
|
||||||
|
|
||||||
|
self._guarded = True
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
self._guarded = False
|
||||||
173
.venv/lib/python3.9/site-packages/anyio/_core/_tasks.py
Normal file
173
.venv/lib/python3.9/site-packages/anyio/_core/_tasks.py
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
from collections.abc import Generator
|
||||||
|
from contextlib import contextmanager
|
||||||
|
from types import TracebackType
|
||||||
|
|
||||||
|
from ..abc._tasks import TaskGroup, TaskStatus
|
||||||
|
from ._eventloop import get_async_backend
|
||||||
|
|
||||||
|
|
||||||
|
class _IgnoredTaskStatus(TaskStatus[object]):
|
||||||
|
def started(self, value: object = None) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
TASK_STATUS_IGNORED = _IgnoredTaskStatus()
|
||||||
|
|
||||||
|
|
||||||
|
class CancelScope:
|
||||||
|
"""
|
||||||
|
Wraps a unit of work that can be made separately cancellable.
|
||||||
|
|
||||||
|
:param deadline: The time (clock value) when this scope is cancelled automatically
|
||||||
|
:param shield: ``True`` to shield the cancel scope from external cancellation
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __new__(
|
||||||
|
cls, *, deadline: float = math.inf, shield: bool = False
|
||||||
|
) -> CancelScope:
|
||||||
|
return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline)
|
||||||
|
|
||||||
|
def cancel(self, reason: str | None = None) -> None:
|
||||||
|
"""
|
||||||
|
Cancel this scope immediately.
|
||||||
|
|
||||||
|
:param reason: a message describing the reason for the cancellation
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def deadline(self) -> float:
|
||||||
|
"""
|
||||||
|
The time (clock value) when this scope is cancelled automatically.
|
||||||
|
|
||||||
|
Will be ``float('inf')`` if no timeout has been set.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@deadline.setter
|
||||||
|
def deadline(self, value: float) -> None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cancel_called(self) -> bool:
|
||||||
|
"""``True`` if :meth:`cancel` has been called."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cancelled_caught(self) -> bool:
|
||||||
|
"""
|
||||||
|
``True`` if this scope suppressed a cancellation exception it itself raised.
|
||||||
|
|
||||||
|
This is typically used to check if any work was interrupted, or to see if the
|
||||||
|
scope was cancelled due to its deadline being reached. The value will, however,
|
||||||
|
only be ``True`` if the cancellation was triggered by the scope itself (and not
|
||||||
|
an outer scope).
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def shield(self) -> bool:
|
||||||
|
"""
|
||||||
|
``True`` if this scope is shielded from external cancellation.
|
||||||
|
|
||||||
|
While a scope is shielded, it will not receive cancellations from outside.
|
||||||
|
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
@shield.setter
|
||||||
|
def shield(self, value: bool) -> None:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def __enter__(self) -> CancelScope:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> bool:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def fail_after(
|
||||||
|
delay: float | None, shield: bool = False
|
||||||
|
) -> Generator[CancelScope, None, None]:
|
||||||
|
"""
|
||||||
|
Create a context manager which raises a :class:`TimeoutError` if does not finish in
|
||||||
|
time.
|
||||||
|
|
||||||
|
:param delay: maximum allowed time (in seconds) before raising the exception, or
|
||||||
|
``None`` to disable the timeout
|
||||||
|
:param shield: ``True`` to shield the cancel scope from external cancellation
|
||||||
|
:return: a context manager that yields a cancel scope
|
||||||
|
:rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
current_time = get_async_backend().current_time
|
||||||
|
deadline = (current_time() + delay) if delay is not None else math.inf
|
||||||
|
with get_async_backend().create_cancel_scope(
|
||||||
|
deadline=deadline, shield=shield
|
||||||
|
) as cancel_scope:
|
||||||
|
yield cancel_scope
|
||||||
|
|
||||||
|
if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline:
|
||||||
|
raise TimeoutError
|
||||||
|
|
||||||
|
|
||||||
|
def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
|
||||||
|
"""
|
||||||
|
Create a cancel scope with a deadline that expires after the given delay.
|
||||||
|
|
||||||
|
:param delay: maximum allowed time (in seconds) before exiting the context block, or
|
||||||
|
``None`` to disable the timeout
|
||||||
|
:param shield: ``True`` to shield the cancel scope from external cancellation
|
||||||
|
:return: a cancel scope
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
deadline = (
|
||||||
|
(get_async_backend().current_time() + delay) if delay is not None else math.inf
|
||||||
|
)
|
||||||
|
return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield)
|
||||||
|
|
||||||
|
|
||||||
|
def current_effective_deadline() -> float:
|
||||||
|
"""
|
||||||
|
Return the nearest deadline among all the cancel scopes effective for the current
|
||||||
|
task.
|
||||||
|
|
||||||
|
:return: a clock value from the event loop's internal clock (or ``float('inf')`` if
|
||||||
|
there is no deadline in effect, or ``float('-inf')`` if the current scope has
|
||||||
|
been cancelled)
|
||||||
|
:rtype: float
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return get_async_backend().current_effective_deadline()
|
||||||
|
|
||||||
|
|
||||||
|
def create_task_group() -> TaskGroup:
|
||||||
|
"""
|
||||||
|
Create a task group.
|
||||||
|
|
||||||
|
:return: a task group
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return get_async_backend().create_task_group()
|
||||||
616
.venv/lib/python3.9/site-packages/anyio/_core/_tempfile.py
Normal file
616
.venv/lib/python3.9/site-packages/anyio/_core/_tempfile.py
Normal file
@@ -0,0 +1,616 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import tempfile
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from io import BytesIO, TextIOWrapper
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import (
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
AnyStr,
|
||||||
|
Generic,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from .. import to_thread
|
||||||
|
from .._core._fileio import AsyncFile
|
||||||
|
from ..lowlevel import checkpoint_if_cancelled
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
||||||
|
|
||||||
|
|
||||||
|
class TemporaryFile(Generic[AnyStr]):
|
||||||
|
"""
|
||||||
|
An asynchronous temporary file that is automatically created and cleaned up.
|
||||||
|
|
||||||
|
This class provides an asynchronous context manager interface to a temporary file.
|
||||||
|
The file is created using Python's standard `tempfile.TemporaryFile` function in a
|
||||||
|
background thread, and is wrapped as an asynchronous file using `AsyncFile`.
|
||||||
|
|
||||||
|
:param mode: The mode in which the file is opened. Defaults to "w+b".
|
||||||
|
:param buffering: The buffering policy (-1 means the default buffering).
|
||||||
|
:param encoding: The encoding used to decode or encode the file. Only applicable in
|
||||||
|
text mode.
|
||||||
|
:param newline: Controls how universal newlines mode works (only applicable in text
|
||||||
|
mode).
|
||||||
|
:param suffix: The suffix for the temporary file name.
|
||||||
|
:param prefix: The prefix for the temporary file name.
|
||||||
|
:param dir: The directory in which the temporary file is created.
|
||||||
|
:param errors: The error handling scheme used for encoding/decoding errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_async_file: AsyncFile[AnyStr]
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: TemporaryFile[bytes],
|
||||||
|
mode: OpenBinaryMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
): ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: TemporaryFile[str],
|
||||||
|
mode: OpenTextMode,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
): ...
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
mode: OpenTextMode | OpenBinaryMode = "w+b",
|
||||||
|
buffering: int = -1,
|
||||||
|
encoding: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
suffix: str | None = None,
|
||||||
|
prefix: str | None = None,
|
||||||
|
dir: str | None = None,
|
||||||
|
*,
|
||||||
|
errors: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
self.mode = mode
|
||||||
|
self.buffering = buffering
|
||||||
|
self.encoding = encoding
|
||||||
|
self.newline = newline
|
||||||
|
self.suffix: str | None = suffix
|
||||||
|
self.prefix: str | None = prefix
|
||||||
|
self.dir: str | None = dir
|
||||||
|
self.errors = errors
|
||||||
|
|
||||||
|
async def __aenter__(self) -> AsyncFile[AnyStr]:
|
||||||
|
fp = await to_thread.run_sync(
|
||||||
|
lambda: tempfile.TemporaryFile(
|
||||||
|
self.mode,
|
||||||
|
self.buffering,
|
||||||
|
self.encoding,
|
||||||
|
self.newline,
|
||||||
|
self.suffix,
|
||||||
|
self.prefix,
|
||||||
|
self.dir,
|
||||||
|
errors=self.errors,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
self._async_file = AsyncFile(fp)
|
||||||
|
return self._async_file
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_value: BaseException | None,
|
||||||
|
traceback: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
await self._async_file.aclose()
|
||||||
|
|
||||||
|
|
||||||
|
class NamedTemporaryFile(Generic[AnyStr]):
|
||||||
|
"""
|
||||||
|
An asynchronous named temporary file that is automatically created and cleaned up.
|
||||||
|
|
||||||
|
This class provides an asynchronous context manager for a temporary file with a
|
||||||
|
visible name in the file system. It uses Python's standard
|
||||||
|
:func:`~tempfile.NamedTemporaryFile` function and wraps the file object with
|
||||||
|
:class:`AsyncFile` for asynchronous operations.
|
||||||
|
|
||||||
|
:param mode: The mode in which the file is opened. Defaults to "w+b".
|
||||||
|
:param buffering: The buffering policy (-1 means the default buffering).
|
||||||
|
:param encoding: The encoding used to decode or encode the file. Only applicable in
|
||||||
|
text mode.
|
||||||
|
:param newline: Controls how universal newlines mode works (only applicable in text
|
||||||
|
mode).
|
||||||
|
:param suffix: The suffix for the temporary file name.
|
||||||
|
:param prefix: The prefix for the temporary file name.
|
||||||
|
:param dir: The directory in which the temporary file is created.
|
||||||
|
:param delete: Whether to delete the file when it is closed.
|
||||||
|
:param errors: The error handling scheme used for encoding/decoding errors.
|
||||||
|
:param delete_on_close: (Python 3.12+) Whether to delete the file on close.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_async_file: AsyncFile[AnyStr]
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: NamedTemporaryFile[bytes],
|
||||||
|
mode: OpenBinaryMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
delete: bool = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
delete_on_close: bool = ...,
|
||||||
|
): ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: NamedTemporaryFile[str],
|
||||||
|
mode: OpenTextMode,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
delete: bool = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
delete_on_close: bool = ...,
|
||||||
|
): ...
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
mode: OpenBinaryMode | OpenTextMode = "w+b",
|
||||||
|
buffering: int = -1,
|
||||||
|
encoding: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
suffix: str | None = None,
|
||||||
|
prefix: str | None = None,
|
||||||
|
dir: str | None = None,
|
||||||
|
delete: bool = True,
|
||||||
|
*,
|
||||||
|
errors: str | None = None,
|
||||||
|
delete_on_close: bool = True,
|
||||||
|
) -> None:
|
||||||
|
self._params: dict[str, Any] = {
|
||||||
|
"mode": mode,
|
||||||
|
"buffering": buffering,
|
||||||
|
"encoding": encoding,
|
||||||
|
"newline": newline,
|
||||||
|
"suffix": suffix,
|
||||||
|
"prefix": prefix,
|
||||||
|
"dir": dir,
|
||||||
|
"delete": delete,
|
||||||
|
"errors": errors,
|
||||||
|
}
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
self._params["delete_on_close"] = delete_on_close
|
||||||
|
|
||||||
|
async def __aenter__(self) -> AsyncFile[AnyStr]:
|
||||||
|
fp = await to_thread.run_sync(
|
||||||
|
lambda: tempfile.NamedTemporaryFile(**self._params)
|
||||||
|
)
|
||||||
|
self._async_file = AsyncFile(fp)
|
||||||
|
return self._async_file
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_value: BaseException | None,
|
||||||
|
traceback: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
await self._async_file.aclose()
|
||||||
|
|
||||||
|
|
||||||
|
class SpooledTemporaryFile(AsyncFile[AnyStr]):
|
||||||
|
"""
|
||||||
|
An asynchronous spooled temporary file that starts in memory and is spooled to disk.
|
||||||
|
|
||||||
|
This class provides an asynchronous interface to a spooled temporary file, much like
|
||||||
|
Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous
|
||||||
|
write operations and provides a method to force a rollover to disk.
|
||||||
|
|
||||||
|
:param max_size: Maximum size in bytes before the file is rolled over to disk.
|
||||||
|
:param mode: The mode in which the file is opened. Defaults to "w+b".
|
||||||
|
:param buffering: The buffering policy (-1 means the default buffering).
|
||||||
|
:param encoding: The encoding used to decode or encode the file (text mode only).
|
||||||
|
:param newline: Controls how universal newlines mode works (text mode only).
|
||||||
|
:param suffix: The suffix for the temporary file name.
|
||||||
|
:param prefix: The prefix for the temporary file name.
|
||||||
|
:param dir: The directory in which the temporary file is created.
|
||||||
|
:param errors: The error handling scheme used for encoding/decoding errors.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_rolled: bool = False
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: SpooledTemporaryFile[bytes],
|
||||||
|
max_size: int = ...,
|
||||||
|
mode: OpenBinaryMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
): ...
|
||||||
|
@overload
|
||||||
|
def __init__(
|
||||||
|
self: SpooledTemporaryFile[str],
|
||||||
|
max_size: int = ...,
|
||||||
|
mode: OpenTextMode = ...,
|
||||||
|
buffering: int = ...,
|
||||||
|
encoding: str | None = ...,
|
||||||
|
newline: str | None = ...,
|
||||||
|
suffix: str | None = ...,
|
||||||
|
prefix: str | None = ...,
|
||||||
|
dir: str | None = ...,
|
||||||
|
*,
|
||||||
|
errors: str | None = ...,
|
||||||
|
): ...
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
max_size: int = 0,
|
||||||
|
mode: OpenBinaryMode | OpenTextMode = "w+b",
|
||||||
|
buffering: int = -1,
|
||||||
|
encoding: str | None = None,
|
||||||
|
newline: str | None = None,
|
||||||
|
suffix: str | None = None,
|
||||||
|
prefix: str | None = None,
|
||||||
|
dir: str | None = None,
|
||||||
|
*,
|
||||||
|
errors: str | None = None,
|
||||||
|
) -> None:
|
||||||
|
self._tempfile_params: dict[str, Any] = {
|
||||||
|
"mode": mode,
|
||||||
|
"buffering": buffering,
|
||||||
|
"encoding": encoding,
|
||||||
|
"newline": newline,
|
||||||
|
"suffix": suffix,
|
||||||
|
"prefix": prefix,
|
||||||
|
"dir": dir,
|
||||||
|
"errors": errors,
|
||||||
|
}
|
||||||
|
self._max_size = max_size
|
||||||
|
if "b" in mode:
|
||||||
|
super().__init__(BytesIO()) # type: ignore[arg-type]
|
||||||
|
else:
|
||||||
|
super().__init__(
|
||||||
|
TextIOWrapper( # type: ignore[arg-type]
|
||||||
|
BytesIO(),
|
||||||
|
encoding=encoding,
|
||||||
|
errors=errors,
|
||||||
|
newline=newline,
|
||||||
|
write_through=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
if not self._rolled:
|
||||||
|
self._fp.close()
|
||||||
|
return
|
||||||
|
|
||||||
|
await super().aclose()
|
||||||
|
|
||||||
|
async def _check(self) -> None:
|
||||||
|
if self._rolled or self._fp.tell() <= self._max_size:
|
||||||
|
return
|
||||||
|
|
||||||
|
await self.rollover()
|
||||||
|
|
||||||
|
async def rollover(self) -> None:
|
||||||
|
if self._rolled:
|
||||||
|
return
|
||||||
|
|
||||||
|
self._rolled = True
|
||||||
|
buffer = self._fp
|
||||||
|
buffer.seek(0)
|
||||||
|
self._fp = await to_thread.run_sync(
|
||||||
|
lambda: tempfile.TemporaryFile(**self._tempfile_params)
|
||||||
|
)
|
||||||
|
await self.write(buffer.read())
|
||||||
|
buffer.close()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def closed(self) -> bool:
|
||||||
|
return self._fp.closed
|
||||||
|
|
||||||
|
async def read(self, size: int = -1) -> AnyStr:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.read(size)
|
||||||
|
|
||||||
|
return await super().read(size) # type: ignore[return-value]
|
||||||
|
|
||||||
|
async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.read1(size)
|
||||||
|
|
||||||
|
return await super().read1(size)
|
||||||
|
|
||||||
|
async def readline(self) -> AnyStr:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.readline()
|
||||||
|
|
||||||
|
return await super().readline() # type: ignore[return-value]
|
||||||
|
|
||||||
|
async def readlines(self) -> list[AnyStr]:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.readlines()
|
||||||
|
|
||||||
|
return await super().readlines() # type: ignore[return-value]
|
||||||
|
|
||||||
|
async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
self._fp.readinto(b)
|
||||||
|
|
||||||
|
return await super().readinto(b)
|
||||||
|
|
||||||
|
async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
self._fp.readinto(b)
|
||||||
|
|
||||||
|
return await super().readinto1(b)
|
||||||
|
|
||||||
|
async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.seek(offset, whence)
|
||||||
|
|
||||||
|
return await super().seek(offset, whence)
|
||||||
|
|
||||||
|
async def tell(self) -> int:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.tell()
|
||||||
|
|
||||||
|
return await super().tell()
|
||||||
|
|
||||||
|
async def truncate(self, size: int | None = None) -> int:
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
return self._fp.truncate(size)
|
||||||
|
|
||||||
|
return await super().truncate(size)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
|
||||||
|
@overload
|
||||||
|
async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...
|
||||||
|
|
||||||
|
async def write(self, b: ReadableBuffer | str) -> int:
|
||||||
|
"""
|
||||||
|
Asynchronously write data to the spooled temporary file.
|
||||||
|
|
||||||
|
If the file has not yet been rolled over, the data is written synchronously,
|
||||||
|
and a rollover is triggered if the size exceeds the maximum size.
|
||||||
|
|
||||||
|
:param s: The data to write.
|
||||||
|
:return: The number of bytes written.
|
||||||
|
:raises RuntimeError: If the underlying file is not initialized.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not self._rolled:
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
result = self._fp.write(b)
|
||||||
|
await self._check()
|
||||||
|
return result
|
||||||
|
|
||||||
|
return await super().write(b) # type: ignore[misc]
|
||||||
|
|
||||||
|
@overload
async def writelines(
    self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
) -> None: ...
@overload
async def writelines(
    self: SpooledTemporaryFile[str], lines: Iterable[str]
) -> None: ...

async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
    """
    Asynchronously write a list of lines to the spooled temporary file.

    While the file has not yet been rolled over, the lines are written
    synchronously after a cancellation check, and a rollover is triggered if
    the size exceeds the maximum size.

    :param lines: An iterable of lines to write.
    :raises RuntimeError: If the underlying file is not initialized.
    """
    if self._rolled:
        return await super().writelines(lines)  # type: ignore[misc]

    await checkpoint_if_cancelled()
    outcome = self._fp.writelines(lines)
    await self._check()
    return outcome
class TemporaryDirectory(Generic[AnyStr]):
    """
    An asynchronous temporary directory that is created and cleaned up automatically.

    This class provides an asynchronous context manager for creating a temporary
    directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
    perform directory creation and cleanup operations in a background thread.

    :param suffix: Suffix to be added to the temporary directory name.
    :param prefix: Prefix to be added to the temporary directory name.
    :param dir: The parent directory where the temporary directory is created.
    :param ignore_cleanup_errors: Whether to ignore errors during cleanup
        (Python 3.10+).
    :param delete: Whether to delete the directory upon closing (Python 3.12+).
    """

    def __init__(
        self,
        suffix: AnyStr | None = None,
        prefix: AnyStr | None = None,
        dir: AnyStr | None = None,
        *,
        ignore_cleanup_errors: bool = False,
        delete: bool = True,
    ) -> None:
        self.suffix: AnyStr | None = suffix
        self.prefix: AnyStr | None = prefix
        self.dir: AnyStr | None = dir
        self.ignore_cleanup_errors = ignore_cleanup_errors
        self.delete = delete

        # The wrapped stdlib TemporaryDirectory; created lazily in __aenter__.
        self._tempdir: tempfile.TemporaryDirectory | None = None

    async def __aenter__(self) -> str:
        kwargs: dict[str, Any] = dict(
            suffix=self.suffix,
            prefix=self.prefix,
            dir=self.dir,
        )
        # These keyword arguments only exist on newer Pythons, so pass them
        # conditionally to keep older versions working.
        if sys.version_info >= (3, 10):
            kwargs["ignore_cleanup_errors"] = self.ignore_cleanup_errors
        if sys.version_info >= (3, 12):
            kwargs["delete"] = self.delete

        self._tempdir = await to_thread.run_sync(
            lambda: tempfile.TemporaryDirectory(**kwargs)
        )
        return await to_thread.run_sync(self._tempdir.__enter__)

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        if self._tempdir is None:
            return

        await to_thread.run_sync(
            self._tempdir.__exit__, exc_type, exc_value, traceback
        )

    async def cleanup(self) -> None:
        if self._tempdir is None:
            return

        await to_thread.run_sync(self._tempdir.cleanup)
@overload
async def mkstemp(
    suffix: str | None = None,
    prefix: str | None = None,
    dir: str | None = None,
    text: bool = False,
) -> tuple[int, str]: ...


@overload
async def mkstemp(
    suffix: bytes | None = None,
    prefix: bytes | None = None,
    dir: bytes | None = None,
    text: bool = False,
) -> tuple[int, bytes]: ...


async def mkstemp(
    suffix: AnyStr | None = None,
    prefix: AnyStr | None = None,
    dir: AnyStr | None = None,
    text: bool = False,
) -> tuple[int, str | bytes]:
    """
    Asynchronously create a temporary file and return an OS-level handle and the
    file name.

    This function wraps `tempfile.mkstemp` and executes it in a background thread.

    :param suffix: Suffix to be added to the file name.
    :param prefix: Prefix to be added to the file name.
    :param dir: Directory in which the temporary file is created.
    :param text: Whether the file is opened in text mode.
    :return: A tuple containing the file descriptor and the file name.
    """
    creator = tempfile.mkstemp
    return await to_thread.run_sync(creator, suffix, prefix, dir, text)
@overload
async def mkdtemp(
    suffix: str | None = None,
    prefix: str | None = None,
    dir: str | None = None,
) -> str: ...


@overload
async def mkdtemp(
    suffix: bytes | None = None,
    prefix: bytes | None = None,
    dir: bytes | None = None,
) -> bytes: ...


async def mkdtemp(
    suffix: AnyStr | None = None,
    prefix: AnyStr | None = None,
    dir: AnyStr | None = None,
) -> str | bytes:
    """
    Asynchronously create a temporary directory and return its path.

    This function wraps `tempfile.mkdtemp` and executes it in a background thread.

    :param suffix: Suffix to be added to the directory name.
    :param prefix: Prefix to be added to the directory name.
    :param dir: Parent directory where the temporary directory is created.
    :return: The path of the created temporary directory.
    """
    creator = tempfile.mkdtemp
    return await to_thread.run_sync(creator, suffix, prefix, dir)
async def gettempdir() -> str:
    """
    Asynchronously return the name of the directory used for temporary files.

    The lookup is delegated to :func:`tempfile.gettempdir` running in a
    background thread so the event loop is never blocked.

    :return: The path of the temporary directory as a string.
    """
    result: str = await to_thread.run_sync(tempfile.gettempdir)
    return result
async def gettempdirb() -> bytes:
    """
    Asynchronously return the name of the temporary-file directory, in bytes.

    The lookup is delegated to :func:`tempfile.gettempdirb` running in a
    background thread so the event loop is never blocked.

    :return: The path of the temporary directory as bytes.
    """
    result: bytes = await to_thread.run_sync(tempfile.gettempdirb)
    return result
82
.venv/lib/python3.9/site-packages/anyio/_core/_testing.py
Normal file
82
.venv/lib/python3.9/site-packages/anyio/_core/_testing.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Awaitable, Generator
|
||||||
|
from typing import Any, cast
|
||||||
|
|
||||||
|
from ._eventloop import get_async_backend
|
||||||
|
|
||||||
|
|
||||||
|
class TaskInfo:
    """
    Represents an asynchronous task.

    :ivar int id: the unique identifier of the task
    :ivar parent_id: the identifier of the parent task, if any
    :vartype parent_id: Optional[int]
    :ivar str name: the description of the task (if any)
    :ivar ~collections.abc.Coroutine coro: the coroutine object of the task
    """

    __slots__ = "_name", "id", "parent_id", "name", "coro"

    def __init__(
        self,
        id: int,
        parent_id: int | None,
        name: str | None,
        coro: Generator[Any, Any, Any] | Awaitable[Any],
    ):
        # Record the qualified name of the accessor for diagnostics.
        owner = get_current_task
        self._name = f"{owner.__module__}.{owner.__qualname__}"
        self.id: int = id
        self.parent_id: int | None = parent_id
        self.name: str | None = name
        self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro

    def __eq__(self, other: object) -> bool:
        # Tasks compare equal solely on their id.
        if not isinstance(other, TaskInfo):
            return NotImplemented

        return self.id == other.id

    def __hash__(self) -> int:
        return hash(self.id)

    def __repr__(self) -> str:
        return f"{type(self).__name__}(id={self.id!r}, name={self.name!r})"

    def has_pending_cancellation(self) -> bool:
        """
        Return ``True`` if the task has a cancellation pending, ``False`` otherwise.
        """
        # Base implementation: backends override this with real detection.
        return False
def get_current_task() -> TaskInfo:
    """
    Return the current task.

    :return: a representation of the current task
    :raises NoEventLoopError: if no supported asynchronous event loop is running
        in the current thread
    """
    backend = get_async_backend()
    return backend.get_current_task()
def get_running_tasks() -> list[TaskInfo]:
    """
    Return a list of running tasks in the current event loop.

    :return: a list of task info objects
    :raises NoEventLoopError: if no supported asynchronous event loop is running
        in the current thread
    """
    tasks = get_async_backend().get_running_tasks()
    return cast("list[TaskInfo]", tasks)
async def wait_all_tasks_blocked() -> None:
    """Wait until all other tasks are waiting for something."""
    backend = get_async_backend()
    await backend.wait_all_tasks_blocked()
81
.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py
Normal file
81
.venv/lib/python3.9/site-packages/anyio/_core/_typedattr.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Callable, Mapping
|
||||||
|
from typing import Any, TypeVar, final, overload
|
||||||
|
|
||||||
|
from ._exceptions import TypedAttributeLookupError
|
||||||
|
|
||||||
|
T_Attr = TypeVar("T_Attr")
|
||||||
|
T_Default = TypeVar("T_Default")
|
||||||
|
undefined = object()
|
||||||
|
|
||||||
|
|
||||||
|
def typed_attribute() -> Any:
    """Return a unique object, used to mark typed attributes."""
    # A fresh object() is unique by identity, which is all a marker needs.
    marker = object()
    return marker
class TypedAttributeSet:
    """
    Superclass for typed attribute collections.

    Checks that every public attribute of every subclass has a type annotation.
    """

    def __init_subclass__(cls) -> None:
        declared: dict[str, Any] = getattr(cls, "__annotations__", {})
        for name in dir(cls):
            # Private names are exempt; everything else must be annotated.
            if name.startswith("_") or name in declared:
                continue

            raise TypeError(f"Attribute {name!r} is missing its type annotation")

        super().__init_subclass__()
class TypedAttributeProvider:
    """Base class for classes that wish to provide typed extra attributes."""

    @property
    def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
        """
        A mapping of the extra attributes to callables that return the
        corresponding values.

        If the provider wraps another provider, the attributes from that wrapper
        should also be included in the returned mapping (but the wrapper may
        override the callables from the wrapped instance).
        """
        # No extra attributes by default; subclasses override this.
        return {}

    @overload
    def extra(self, attribute: T_Attr) -> T_Attr: ...

    @overload
    def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ...

    @final
    def extra(self, attribute: Any, default: object = undefined) -> object:
        """
        extra(attribute, default=undefined)

        Return the value of the given typed extra attribute.

        :param attribute: the attribute (member of a :class:`~TypedAttributeSet`)
            to look for
        :param default: the value that should be returned if no value is found
            for the attribute
        :raises ~anyio.TypedAttributeLookupError: if the search failed and no
            default value was given
        """
        try:
            getter = self.extra_attributes[attribute]
        except KeyError:
            if default is not undefined:
                return default

            raise TypedAttributeLookupError("Attribute not found") from None

        return getter()
58
.venv/lib/python3.9/site-packages/anyio/abc/__init__.py
Normal file
58
.venv/lib/python3.9/site-packages/anyio/abc/__init__.py
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from ._eventloop import AsyncBackend as AsyncBackend
|
||||||
|
from ._resources import AsyncResource as AsyncResource
|
||||||
|
from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket
|
||||||
|
from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket
|
||||||
|
from ._sockets import IPAddressType as IPAddressType
|
||||||
|
from ._sockets import IPSockAddrType as IPSockAddrType
|
||||||
|
from ._sockets import SocketAttribute as SocketAttribute
|
||||||
|
from ._sockets import SocketListener as SocketListener
|
||||||
|
from ._sockets import SocketStream as SocketStream
|
||||||
|
from ._sockets import UDPPacketType as UDPPacketType
|
||||||
|
from ._sockets import UDPSocket as UDPSocket
|
||||||
|
from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType
|
||||||
|
from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket
|
||||||
|
from ._sockets import UNIXSocketStream as UNIXSocketStream
|
||||||
|
from ._streams import AnyByteReceiveStream as AnyByteReceiveStream
|
||||||
|
from ._streams import AnyByteSendStream as AnyByteSendStream
|
||||||
|
from ._streams import AnyByteStream as AnyByteStream
|
||||||
|
from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable
|
||||||
|
from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream
|
||||||
|
from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream
|
||||||
|
from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream
|
||||||
|
from ._streams import ByteReceiveStream as ByteReceiveStream
|
||||||
|
from ._streams import ByteSendStream as ByteSendStream
|
||||||
|
from ._streams import ByteStream as ByteStream
|
||||||
|
from ._streams import ByteStreamConnectable as ByteStreamConnectable
|
||||||
|
from ._streams import Listener as Listener
|
||||||
|
from ._streams import ObjectReceiveStream as ObjectReceiveStream
|
||||||
|
from ._streams import ObjectSendStream as ObjectSendStream
|
||||||
|
from ._streams import ObjectStream as ObjectStream
|
||||||
|
from ._streams import ObjectStreamConnectable as ObjectStreamConnectable
|
||||||
|
from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream
|
||||||
|
from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream
|
||||||
|
from ._streams import UnreliableObjectStream as UnreliableObjectStream
|
||||||
|
from ._subprocesses import Process as Process
|
||||||
|
from ._tasks import TaskGroup as TaskGroup
|
||||||
|
from ._tasks import TaskStatus as TaskStatus
|
||||||
|
from ._testing import TestRunner as TestRunner
|
||||||
|
|
||||||
|
# Re-exported here, for backwards compatibility
|
||||||
|
# isort: off
|
||||||
|
from .._core._synchronization import (
|
||||||
|
CapacityLimiter as CapacityLimiter,
|
||||||
|
Condition as Condition,
|
||||||
|
Event as Event,
|
||||||
|
Lock as Lock,
|
||||||
|
Semaphore as Semaphore,
|
||||||
|
)
|
||||||
|
from .._core._tasks import CancelScope as CancelScope
|
||||||
|
from ..from_thread import BlockingPortal as BlockingPortal
|
||||||
|
|
||||||
|
# Re-export imports so they look like they live directly in this package
for __member in list(locals().values()):
    if getattr(__member, "__module__", "").startswith("anyio.abc."):
        __member.__module__ = __name__

del __member
||||||
414
.venv/lib/python3.9/site-packages/anyio/abc/_eventloop.py
Normal file
414
.venv/lib/python3.9/site-packages/anyio/abc/_eventloop.py
Normal file
@@ -0,0 +1,414 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import math
|
||||||
|
import sys
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
|
||||||
|
from contextlib import AbstractContextManager
|
||||||
|
from os import PathLike
|
||||||
|
from signal import Signals
|
||||||
|
from socket import AddressFamily, SocketKind, socket
|
||||||
|
from typing import (
|
||||||
|
IO,
|
||||||
|
TYPE_CHECKING,
|
||||||
|
Any,
|
||||||
|
TypeVar,
|
||||||
|
Union,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeAlias
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeAlias
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from _typeshed import FileDescriptorLike
|
||||||
|
|
||||||
|
from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
|
||||||
|
from .._core._tasks import CancelScope
|
||||||
|
from .._core._testing import TaskInfo
|
||||||
|
from ._sockets import (
|
||||||
|
ConnectedUDPSocket,
|
||||||
|
ConnectedUNIXDatagramSocket,
|
||||||
|
IPSockAddrType,
|
||||||
|
SocketListener,
|
||||||
|
SocketStream,
|
||||||
|
UDPSocket,
|
||||||
|
UNIXDatagramSocket,
|
||||||
|
UNIXSocketStream,
|
||||||
|
)
|
||||||
|
from ._subprocesses import Process
|
||||||
|
from ._tasks import TaskGroup
|
||||||
|
from ._testing import TestRunner
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncBackend(metaclass=ABCMeta):
    """
    Abstract interface implemented by each async framework backend that AnyIO
    can run on top of.
    """

    @classmethod
    @abstractmethod
    def run(
        cls,
        func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
        args: tuple[Unpack[PosArgsT]],
        kwargs: dict[str, Any],
        options: dict[str, Any],
    ) -> T_Retval:
        """
        Run the given coroutine function in an asynchronous event loop.

        The current thread must not be already running an event loop.

        :param func: a coroutine function
        :param args: positional arguments to ``func``
        :param kwargs: keyword arguments to ``func``
        :param options: keyword arguments to call the backend ``run()`` implementation
            with
        :return: the return value of the coroutine function
        """

    @classmethod
    @abstractmethod
    def current_token(cls) -> object:
        """
        Return an object that allows other threads to run code inside the event loop.

        :return: a token object, specific to the event loop running in the current
            thread
        """

    @classmethod
    @abstractmethod
    def current_time(cls) -> float:
        """
        Return the current value of the event loop's internal clock.

        :return: the clock value (seconds)
        """

    @classmethod
    @abstractmethod
    def cancelled_exception_class(cls) -> type[BaseException]:
        """Return the exception class that is raised in a task if it's cancelled."""

    @classmethod
    @abstractmethod
    async def checkpoint(cls) -> None:
        """
        Check if the task has been cancelled, and allow rescheduling of other tasks.

        This is effectively the same as running :meth:`checkpoint_if_cancelled` and
        then :meth:`cancel_shielded_checkpoint`.
        """

    @classmethod
    async def checkpoint_if_cancelled(cls) -> None:
        """
        Check if the current task group has been cancelled.

        This will check if the task has been cancelled, but will not allow other
        tasks to be scheduled if not.
        """
        # -inf is the effective deadline of a scope that has been cancelled
        if cls.current_effective_deadline() == -math.inf:
            await cls.checkpoint()

    @classmethod
    async def cancel_shielded_checkpoint(cls) -> None:
        """
        Allow the rescheduling of other tasks.

        This will give other tasks the opportunity to run, but without checking if
        the current task group has been cancelled, unlike with :meth:`checkpoint`.
        """
        with cls.create_cancel_scope(shield=True):
            await cls.sleep(0)

    @classmethod
    @abstractmethod
    async def sleep(cls, delay: float) -> None:
        """
        Pause the current task for the specified duration.

        :param delay: the duration, in seconds
        """

    @classmethod
    @abstractmethod
    def create_cancel_scope(
        cls, *, deadline: float = math.inf, shield: bool = False
    ) -> CancelScope:
        """Create a cancel scope with the given deadline and shielding."""

    @classmethod
    @abstractmethod
    def current_effective_deadline(cls) -> float:
        """
        Return the nearest deadline among all the cancel scopes effective for the
        current task.

        :return:
            - a clock value from the event loop's internal clock
            - ``inf`` if there is no deadline in effect
            - ``-inf`` if the current scope has been cancelled
        :rtype: float
        """

    @classmethod
    @abstractmethod
    def create_task_group(cls) -> TaskGroup:
        """Create a new task group."""

    @classmethod
    @abstractmethod
    def create_event(cls) -> Event:
        """Create a backend-native event object."""

    @classmethod
    @abstractmethod
    def create_lock(cls, *, fast_acquire: bool) -> Lock:
        """Create a backend-native lock object."""

    @classmethod
    @abstractmethod
    def create_semaphore(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> Semaphore:
        """Create a backend-native semaphore object."""

    @classmethod
    @abstractmethod
    def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter:
        """Create a backend-native capacity limiter."""

    @classmethod
    @abstractmethod
    async def run_sync_in_worker_thread(
        cls,
        func: Callable[[Unpack[PosArgsT]], T_Retval],
        args: tuple[Unpack[PosArgsT]],
        abandon_on_cancel: bool = False,
        limiter: CapacityLimiter | None = None,
    ) -> T_Retval:
        """Run ``func(*args)`` in a worker thread and return its result."""

    @classmethod
    @abstractmethod
    def check_cancelled(cls) -> None:
        """Raise the backend's cancellation exception if cancellation is pending."""

    @classmethod
    @abstractmethod
    def run_async_from_thread(
        cls,
        func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
        args: tuple[Unpack[PosArgsT]],
        token: object,
    ) -> T_Retval:
        """Run a coroutine function in the event loop from an external thread."""

    @classmethod
    @abstractmethod
    def run_sync_from_thread(
        cls,
        func: Callable[[Unpack[PosArgsT]], T_Retval],
        args: tuple[Unpack[PosArgsT]],
        token: object,
    ) -> T_Retval:
        """Run a synchronous callable in the event loop from an external thread."""

    @classmethod
    @abstractmethod
    async def open_process(
        cls,
        command: StrOrBytesPath | Sequence[StrOrBytesPath],
        *,
        stdin: int | IO[Any] | None,
        stdout: int | IO[Any] | None,
        stderr: int | IO[Any] | None,
        **kwargs: Any,
    ) -> Process:
        """Start a subprocess and return a handle to it."""

    @classmethod
    @abstractmethod
    def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None:
        """Arrange for the given worker processes to exit at interpreter shutdown."""

    @classmethod
    @abstractmethod
    async def connect_tcp(
        cls, host: str, port: int, local_address: IPSockAddrType | None = None
    ) -> SocketStream:
        """Open a TCP connection to the given host/port."""

    @classmethod
    @abstractmethod
    async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream:
        """Open a connection to the given UNIX socket path."""

    @classmethod
    @abstractmethod
    def create_tcp_listener(cls, sock: socket) -> SocketListener:
        """Wrap an already-bound TCP socket in a listener."""

    @classmethod
    @abstractmethod
    def create_unix_listener(cls, sock: socket) -> SocketListener:
        """Wrap an already-bound UNIX socket in a listener."""

    @classmethod
    @abstractmethod
    async def create_udp_socket(
        cls,
        family: AddressFamily,
        local_address: IPSockAddrType | None,
        remote_address: IPSockAddrType | None,
        reuse_port: bool,
    ) -> UDPSocket | ConnectedUDPSocket:
        """Create a (possibly connected) UDP socket."""

    @classmethod
    @overload
    async def create_unix_datagram_socket(
        cls, raw_socket: socket, remote_path: None
    ) -> UNIXDatagramSocket: ...

    @classmethod
    @overload
    async def create_unix_datagram_socket(
        cls, raw_socket: socket, remote_path: str | bytes
    ) -> ConnectedUNIXDatagramSocket: ...

    @classmethod
    @abstractmethod
    async def create_unix_datagram_socket(
        cls, raw_socket: socket, remote_path: str | bytes | None
    ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket:
        """Wrap a raw UNIX datagram socket, optionally connected to ``remote_path``."""

    @classmethod
    @abstractmethod
    async def getaddrinfo(
        cls,
        host: bytes | str | None,
        port: str | int | None,
        *,
        family: int | AddressFamily = 0,
        type: int | SocketKind = 0,
        proto: int = 0,
        flags: int = 0,
    ) -> Sequence[
        tuple[
            AddressFamily,
            SocketKind,
            int,
            str,
            tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes],
        ]
    ]:
        """Resolve host/port to a sequence of socket address tuples."""

    @classmethod
    @abstractmethod
    async def getnameinfo(
        cls, sockaddr: IPSockAddrType, flags: int = 0
    ) -> tuple[str, str]:
        """Reverse-resolve a socket address to a (host, service) pair."""

    @classmethod
    @abstractmethod
    async def wait_readable(cls, obj: FileDescriptorLike) -> None:
        """Wait until the given object/descriptor is readable."""

    @classmethod
    @abstractmethod
    async def wait_writable(cls, obj: FileDescriptorLike) -> None:
        """Wait until the given object/descriptor is writable."""

    @classmethod
    @abstractmethod
    def notify_closing(cls, obj: FileDescriptorLike) -> None:
        """Notify the backend that the given object/descriptor is about to close."""

    @classmethod
    @abstractmethod
    async def wrap_listener_socket(cls, sock: socket) -> SocketListener:
        """Wrap an existing listening socket."""

    @classmethod
    @abstractmethod
    async def wrap_stream_socket(cls, sock: socket) -> SocketStream:
        """Wrap an existing connected stream socket."""

    @classmethod
    @abstractmethod
    async def wrap_unix_stream_socket(cls, sock: socket) -> UNIXSocketStream:
        """Wrap an existing connected UNIX stream socket."""

    @classmethod
    @abstractmethod
    async def wrap_udp_socket(cls, sock: socket) -> UDPSocket:
        """Wrap an existing unconnected UDP socket."""

    @classmethod
    @abstractmethod
    async def wrap_connected_udp_socket(cls, sock: socket) -> ConnectedUDPSocket:
        """Wrap an existing connected UDP socket."""

    @classmethod
    @abstractmethod
    async def wrap_unix_datagram_socket(cls, sock: socket) -> UNIXDatagramSocket:
        """Wrap an existing unconnected UNIX datagram socket."""

    @classmethod
    @abstractmethod
    async def wrap_connected_unix_datagram_socket(
        cls, sock: socket
    ) -> ConnectedUNIXDatagramSocket:
        """Wrap an existing connected UNIX datagram socket."""

    @classmethod
    @abstractmethod
    def current_default_thread_limiter(cls) -> CapacityLimiter:
        """Return the default capacity limiter used for worker threads."""

    @classmethod
    @abstractmethod
    def open_signal_receiver(
        cls, *signals: Signals
    ) -> AbstractContextManager[AsyncIterator[Signals]]:
        """Open a context manager yielding an async iterator of received signals."""

    @classmethod
    @abstractmethod
    def get_current_task(cls) -> TaskInfo:
        """Return information about the currently running task."""

    @classmethod
    @abstractmethod
    def get_running_tasks(cls) -> Sequence[TaskInfo]:
        """Return information about all running tasks in this event loop."""

    @classmethod
    @abstractmethod
    async def wait_all_tasks_blocked(cls) -> None:
        """Wait until all other tasks are waiting for something."""

    @classmethod
    @abstractmethod
    def create_test_runner(cls, options: dict[str, Any]) -> TestRunner:
        """Create a test runner for this backend with the given options."""
33
.venv/lib/python3.9/site-packages/anyio/abc/_resources.py
Normal file
33
.venv/lib/python3.9/site-packages/anyio/abc/_resources.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import TypeVar
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncResource(metaclass=ABCMeta):
    """
    Abstract base class for all closeable asynchronous resources.

    Works as an asynchronous context manager which returns the instance itself on
    enter, and calls :meth:`aclose` on exit.
    """

    # No instance state of its own; subclasses declare their own slots/dict.
    __slots__ = ()

    async def __aenter__(self: T) -> T:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        # Always close on exit, regardless of whether an exception occurred.
        await self.aclose()

    @abstractmethod
    async def aclose(self) -> None:
        """Close the resource."""
405
.venv/lib/python3.9/site-packages/anyio/abc/_sockets.py
Normal file
405
.venv/lib/python3.9/site-packages/anyio/abc/_sockets.py
Normal file
@@ -0,0 +1,405 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
from abc import abstractmethod
|
||||||
|
from collections.abc import Callable, Collection, Mapping
|
||||||
|
from contextlib import AsyncExitStack
|
||||||
|
from io import IOBase
|
||||||
|
from ipaddress import IPv4Address, IPv6Address
|
||||||
|
from socket import AddressFamily
|
||||||
|
from typing import Any, TypeVar, Union
|
||||||
|
|
||||||
|
from .._core._eventloop import get_async_backend
|
||||||
|
from .._core._typedattr import (
|
||||||
|
TypedAttributeProvider,
|
||||||
|
TypedAttributeSet,
|
||||||
|
typed_attribute,
|
||||||
|
)
|
||||||
|
from ._streams import ByteStream, Listener, UnreliableObjectStream
|
||||||
|
from ._tasks import TaskGroup
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeAlias
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeAlias
|
||||||
|
|
||||||
|
IPAddressType: TypeAlias = Union[str, IPv4Address, IPv6Address]
|
||||||
|
IPSockAddrType: TypeAlias = tuple[str, int]
|
||||||
|
SockAddrType: TypeAlias = Union[IPSockAddrType, str]
|
||||||
|
UDPPacketType: TypeAlias = tuple[bytes, IPSockAddrType]
|
||||||
|
UNIXDatagramPacketType: TypeAlias = tuple[bytes, str]
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_socket(
|
||||||
|
sock_or_fd: socket.socket | int,
|
||||||
|
sock_type: socket.SocketKind,
|
||||||
|
addr_family: socket.AddressFamily = socket.AF_UNSPEC,
|
||||||
|
*,
|
||||||
|
require_connected: bool = False,
|
||||||
|
require_bound: bool = False,
|
||||||
|
) -> socket.socket:
|
||||||
|
if isinstance(sock_or_fd, int):
|
||||||
|
try:
|
||||||
|
sock = socket.socket(fileno=sock_or_fd)
|
||||||
|
except OSError as exc:
|
||||||
|
if exc.errno == errno.ENOTSOCK:
|
||||||
|
raise ValueError(
|
||||||
|
"the file descriptor does not refer to a socket"
|
||||||
|
) from exc
|
||||||
|
elif require_connected:
|
||||||
|
raise ValueError("the socket must be connected") from exc
|
||||||
|
elif require_bound:
|
||||||
|
raise ValueError("the socket must be bound to a local address") from exc
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
elif isinstance(sock_or_fd, socket.socket):
|
||||||
|
sock = sock_or_fd
|
||||||
|
else:
|
||||||
|
raise TypeError(
|
||||||
|
f"expected an int or socket, got {type(sock_or_fd).__qualname__} instead"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if require_connected:
|
||||||
|
try:
|
||||||
|
sock.getpeername()
|
||||||
|
except OSError as exc:
|
||||||
|
raise ValueError("the socket must be connected") from exc
|
||||||
|
|
||||||
|
if require_bound:
|
||||||
|
try:
|
||||||
|
if sock.family in (socket.AF_INET, socket.AF_INET6):
|
||||||
|
bound_addr = sock.getsockname()[1]
|
||||||
|
else:
|
||||||
|
bound_addr = sock.getsockname()
|
||||||
|
except OSError:
|
||||||
|
bound_addr = None
|
||||||
|
|
||||||
|
if not bound_addr:
|
||||||
|
raise ValueError("the socket must be bound to a local address")
|
||||||
|
|
||||||
|
if addr_family != socket.AF_UNSPEC and sock.family != addr_family:
|
||||||
|
raise ValueError(
|
||||||
|
f"address family mismatch: expected {addr_family.name}, got "
|
||||||
|
f"{sock.family.name}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if sock.type != sock_type:
|
||||||
|
raise ValueError(
|
||||||
|
f"socket type mismatch: expected {sock_type.name}, got {sock.type.name}"
|
||||||
|
)
|
||||||
|
except BaseException:
|
||||||
|
# Avoid ResourceWarning from the locally constructed socket object
|
||||||
|
if isinstance(sock_or_fd, int):
|
||||||
|
sock.detach()
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
sock.setblocking(False)
|
||||||
|
return sock
|
||||||
|
|
||||||
|
|
||||||
|
class SocketAttribute(TypedAttributeSet):
|
||||||
|
"""
|
||||||
|
.. attribute:: family
|
||||||
|
:type: socket.AddressFamily
|
||||||
|
|
||||||
|
the address family of the underlying socket
|
||||||
|
|
||||||
|
.. attribute:: local_address
|
||||||
|
:type: tuple[str, int] | str
|
||||||
|
|
||||||
|
the local address the underlying socket is connected to
|
||||||
|
|
||||||
|
.. attribute:: local_port
|
||||||
|
:type: int
|
||||||
|
|
||||||
|
for IP based sockets, the local port the underlying socket is bound to
|
||||||
|
|
||||||
|
.. attribute:: raw_socket
|
||||||
|
:type: socket.socket
|
||||||
|
|
||||||
|
the underlying stdlib socket object
|
||||||
|
|
||||||
|
.. attribute:: remote_address
|
||||||
|
:type: tuple[str, int] | str
|
||||||
|
|
||||||
|
the remote address the underlying socket is connected to
|
||||||
|
|
||||||
|
.. attribute:: remote_port
|
||||||
|
:type: int
|
||||||
|
|
||||||
|
for IP based sockets, the remote port the underlying socket is connected to
|
||||||
|
"""
|
||||||
|
|
||||||
|
family: AddressFamily = typed_attribute()
|
||||||
|
local_address: SockAddrType = typed_attribute()
|
||||||
|
local_port: int = typed_attribute()
|
||||||
|
raw_socket: socket.socket = typed_attribute()
|
||||||
|
remote_address: SockAddrType = typed_attribute()
|
||||||
|
remote_port: int = typed_attribute()
|
||||||
|
|
||||||
|
|
||||||
|
class _SocketProvider(TypedAttributeProvider):
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
from .._core._sockets import convert_ipv6_sockaddr as convert
|
||||||
|
|
||||||
|
attributes: dict[Any, Callable[[], Any]] = {
|
||||||
|
SocketAttribute.family: lambda: self._raw_socket.family,
|
||||||
|
SocketAttribute.local_address: lambda: convert(
|
||||||
|
self._raw_socket.getsockname()
|
||||||
|
),
|
||||||
|
SocketAttribute.raw_socket: lambda: self._raw_socket,
|
||||||
|
}
|
||||||
|
try:
|
||||||
|
peername: tuple[str, int] | None = convert(self._raw_socket.getpeername())
|
||||||
|
except OSError:
|
||||||
|
peername = None
|
||||||
|
|
||||||
|
# Provide the remote address for connected sockets
|
||||||
|
if peername is not None:
|
||||||
|
attributes[SocketAttribute.remote_address] = lambda: peername
|
||||||
|
|
||||||
|
# Provide local and remote ports for IP based sockets
|
||||||
|
if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
|
||||||
|
attributes[SocketAttribute.local_port] = (
|
||||||
|
lambda: self._raw_socket.getsockname()[1]
|
||||||
|
)
|
||||||
|
if peername is not None:
|
||||||
|
remote_port = peername[1]
|
||||||
|
attributes[SocketAttribute.remote_port] = lambda: remote_port
|
||||||
|
|
||||||
|
return attributes
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def _raw_socket(self) -> socket.socket:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class SocketStream(ByteStream, _SocketProvider):
|
||||||
|
"""
|
||||||
|
Transports bytes over a socket.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(cls, sock_or_fd: socket.socket | int) -> SocketStream:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a socket stream.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
The existing socket must already be connected.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a socket stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_connected=True)
|
||||||
|
return await get_async_backend().wrap_stream_socket(sock)
|
||||||
|
|
||||||
|
|
||||||
|
class UNIXSocketStream(SocketStream):
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(cls, sock_or_fd: socket.socket | int) -> UNIXSocketStream:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a UNIX socket stream.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
The existing socket must already be connected.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a UNIX socket stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(
|
||||||
|
sock_or_fd, socket.SOCK_STREAM, socket.AF_UNIX, require_connected=True
|
||||||
|
)
|
||||||
|
return await get_async_backend().wrap_unix_stream_socket(sock)
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
|
||||||
|
"""
|
||||||
|
Send file descriptors along with a message to the peer.
|
||||||
|
|
||||||
|
:param message: a non-empty bytestring
|
||||||
|
:param fds: a collection of files (either numeric file descriptors or open file
|
||||||
|
or socket objects)
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
|
||||||
|
"""
|
||||||
|
Receive file descriptors along with a message from the peer.
|
||||||
|
|
||||||
|
:param msglen: length of the message to expect from the peer
|
||||||
|
:param maxfds: maximum number of file descriptors to expect from the peer
|
||||||
|
:return: a tuple of (message, file descriptors)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class SocketListener(Listener[SocketStream], _SocketProvider):
|
||||||
|
"""
|
||||||
|
Listens to incoming socket connections.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(
|
||||||
|
cls,
|
||||||
|
sock_or_fd: socket.socket | int,
|
||||||
|
) -> SocketListener:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a socket listener.
|
||||||
|
|
||||||
|
The newly created listener takes ownership of the socket being passed in.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a socket listener
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_bound=True)
|
||||||
|
return await get_async_backend().wrap_listener_socket(sock)
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def accept(self) -> SocketStream:
|
||||||
|
"""Accept an incoming connection."""
|
||||||
|
|
||||||
|
async def serve(
|
||||||
|
self,
|
||||||
|
handler: Callable[[SocketStream], Any],
|
||||||
|
task_group: TaskGroup | None = None,
|
||||||
|
) -> None:
|
||||||
|
from .. import create_task_group
|
||||||
|
|
||||||
|
async with AsyncExitStack() as stack:
|
||||||
|
if task_group is None:
|
||||||
|
task_group = await stack.enter_async_context(create_task_group())
|
||||||
|
|
||||||
|
while True:
|
||||||
|
stream = await self.accept()
|
||||||
|
task_group.start_soon(handler, stream)
|
||||||
|
|
||||||
|
|
||||||
|
class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
|
||||||
|
"""
|
||||||
|
Represents an unconnected UDP socket.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(cls, sock_or_fd: socket.socket | int) -> UDPSocket:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a UDP socket.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
The existing socket must be bound to a local address.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a UDP socket
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, require_bound=True)
|
||||||
|
return await get_async_backend().wrap_udp_socket(sock)
|
||||||
|
|
||||||
|
async def sendto(self, data: bytes, host: str, port: int) -> None:
|
||||||
|
"""
|
||||||
|
Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).
|
||||||
|
|
||||||
|
"""
|
||||||
|
return await self.send((data, (host, port)))
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
|
||||||
|
"""
|
||||||
|
Represents an connected UDP socket.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(cls, sock_or_fd: socket.socket | int) -> ConnectedUDPSocket:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a connected UDP socket.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
The existing socket must already be connected.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a connected UDP socket
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(
|
||||||
|
sock_or_fd,
|
||||||
|
socket.SOCK_DGRAM,
|
||||||
|
require_connected=True,
|
||||||
|
)
|
||||||
|
return await get_async_backend().wrap_connected_udp_socket(sock)
|
||||||
|
|
||||||
|
|
||||||
|
class UNIXDatagramSocket(
|
||||||
|
UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Represents an unconnected Unix datagram socket.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(
|
||||||
|
cls,
|
||||||
|
sock_or_fd: socket.socket | int,
|
||||||
|
) -> UNIXDatagramSocket:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a UNIX datagram
|
||||||
|
socket.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a UNIX datagram socket
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX)
|
||||||
|
return await get_async_backend().wrap_unix_datagram_socket(sock)
|
||||||
|
|
||||||
|
async def sendto(self, data: bytes, path: str) -> None:
|
||||||
|
"""Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path))."""
|
||||||
|
return await self.send((data, path))
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider):
|
||||||
|
"""
|
||||||
|
Represents a connected Unix datagram socket.
|
||||||
|
|
||||||
|
Supports all relevant extra attributes from :class:`~SocketAttribute`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_socket(
|
||||||
|
cls,
|
||||||
|
sock_or_fd: socket.socket | int,
|
||||||
|
) -> ConnectedUNIXDatagramSocket:
|
||||||
|
"""
|
||||||
|
Wrap an existing socket object or file descriptor as a connected UNIX datagram
|
||||||
|
socket.
|
||||||
|
|
||||||
|
The newly created socket wrapper takes ownership of the socket being passed in.
|
||||||
|
The existing socket must already be connected.
|
||||||
|
|
||||||
|
:param sock_or_fd: a socket object or file descriptor
|
||||||
|
:return: a connected UNIX datagram socket
|
||||||
|
|
||||||
|
"""
|
||||||
|
sock = _validate_socket(
|
||||||
|
sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX, require_connected=True
|
||||||
|
)
|
||||||
|
return await get_async_backend().wrap_connected_unix_datagram_socket(sock)
|
||||||
239
.venv/lib/python3.9/site-packages/anyio/abc/_streams.py
Normal file
239
.venv/lib/python3.9/site-packages/anyio/abc/_streams.py
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from collections.abc import Callable
|
||||||
|
from typing import Any, Generic, TypeVar, Union
|
||||||
|
|
||||||
|
from .._core._exceptions import EndOfStream
|
||||||
|
from .._core._typedattr import TypedAttributeProvider
|
||||||
|
from ._resources import AsyncResource
|
||||||
|
from ._tasks import TaskGroup
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeAlias
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeAlias
|
||||||
|
|
||||||
|
T_Item = TypeVar("T_Item")
|
||||||
|
T_co = TypeVar("T_co", covariant=True)
|
||||||
|
T_contra = TypeVar("T_contra", contravariant=True)
|
||||||
|
|
||||||
|
|
||||||
|
class UnreliableObjectReceiveStream(
|
||||||
|
Generic[T_co], AsyncResource, TypedAttributeProvider
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
An interface for receiving objects.
|
||||||
|
|
||||||
|
This interface makes no guarantees that the received messages arrive in the order in
|
||||||
|
which they were sent, or that no messages are missed.
|
||||||
|
|
||||||
|
Asynchronously iterating over objects of this type will yield objects matching the
|
||||||
|
given type parameter.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]:
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __anext__(self) -> T_co:
|
||||||
|
try:
|
||||||
|
return await self.receive()
|
||||||
|
except EndOfStream:
|
||||||
|
raise StopAsyncIteration from None
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def receive(self) -> T_co:
|
||||||
|
"""
|
||||||
|
Receive the next item.
|
||||||
|
|
||||||
|
:raises ~anyio.ClosedResourceError: if the receive stream has been explicitly
|
||||||
|
closed
|
||||||
|
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
|
||||||
|
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
|
||||||
|
due to external causes
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class UnreliableObjectSendStream(
|
||||||
|
Generic[T_contra], AsyncResource, TypedAttributeProvider
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
An interface for sending objects.
|
||||||
|
|
||||||
|
This interface makes no guarantees that the messages sent will reach the
|
||||||
|
recipient(s) in the same order in which they were sent, or at all.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def send(self, item: T_contra) -> None:
|
||||||
|
"""
|
||||||
|
Send an item to the peer(s).
|
||||||
|
|
||||||
|
:param item: the item to send
|
||||||
|
:raises ~anyio.ClosedResourceError: if the send stream has been explicitly
|
||||||
|
closed
|
||||||
|
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
|
||||||
|
due to external causes
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class UnreliableObjectStream(
|
||||||
|
UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item]
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A bidirectional message stream which does not guarantee the order or reliability of
|
||||||
|
message delivery.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]):
|
||||||
|
"""
|
||||||
|
A receive message stream which guarantees that messages are received in the same
|
||||||
|
order in which they were sent, and that no messages are missed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectSendStream(UnreliableObjectSendStream[T_contra]):
|
||||||
|
"""
|
||||||
|
A send message stream which guarantees that messages are delivered in the same order
|
||||||
|
in which they were sent, without missing any messages in the middle.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectStream(
|
||||||
|
ObjectReceiveStream[T_Item],
|
||||||
|
ObjectSendStream[T_Item],
|
||||||
|
UnreliableObjectStream[T_Item],
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
A bidirectional message stream which guarantees the order and reliability of message
|
||||||
|
delivery.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
"""
|
||||||
|
Send an end-of-file indication to the peer.
|
||||||
|
|
||||||
|
You should not try to send any further data to this stream after calling this
|
||||||
|
method. This method is idempotent (does nothing on successive calls).
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ByteReceiveStream(AsyncResource, TypedAttributeProvider):
|
||||||
|
"""
|
||||||
|
An interface for receiving bytes from a single peer.
|
||||||
|
|
||||||
|
Iterating this byte stream will yield a byte string of arbitrary length, but no more
|
||||||
|
than 65536 bytes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __aiter__(self) -> ByteReceiveStream:
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __anext__(self) -> bytes:
|
||||||
|
try:
|
||||||
|
return await self.receive()
|
||||||
|
except EndOfStream:
|
||||||
|
raise StopAsyncIteration from None
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def receive(self, max_bytes: int = 65536) -> bytes:
|
||||||
|
"""
|
||||||
|
Receive at most ``max_bytes`` bytes from the peer.
|
||||||
|
|
||||||
|
.. note:: Implementers of this interface should not return an empty
|
||||||
|
:class:`bytes` object, and users should ignore them.
|
||||||
|
|
||||||
|
:param max_bytes: maximum number of bytes to receive
|
||||||
|
:return: the received bytes
|
||||||
|
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ByteSendStream(AsyncResource, TypedAttributeProvider):
|
||||||
|
"""An interface for sending bytes to a single peer."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def send(self, item: bytes) -> None:
|
||||||
|
"""
|
||||||
|
Send the given bytes to the peer.
|
||||||
|
|
||||||
|
:param item: the bytes to send
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ByteStream(ByteReceiveStream, ByteSendStream):
|
||||||
|
"""A bidirectional byte stream."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
"""
|
||||||
|
Send an end-of-file indication to the peer.
|
||||||
|
|
||||||
|
You should not try to send any further data to this stream after calling this
|
||||||
|
method. This method is idempotent (does nothing on successive calls).
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
#: Type alias for all unreliable bytes-oriented receive streams.
|
||||||
|
AnyUnreliableByteReceiveStream: TypeAlias = Union[
|
||||||
|
UnreliableObjectReceiveStream[bytes], ByteReceiveStream
|
||||||
|
]
|
||||||
|
#: Type alias for all unreliable bytes-oriented send streams.
|
||||||
|
AnyUnreliableByteSendStream: TypeAlias = Union[
|
||||||
|
UnreliableObjectSendStream[bytes], ByteSendStream
|
||||||
|
]
|
||||||
|
#: Type alias for all unreliable bytes-oriented streams.
|
||||||
|
AnyUnreliableByteStream: TypeAlias = Union[UnreliableObjectStream[bytes], ByteStream]
|
||||||
|
#: Type alias for all bytes-oriented receive streams.
|
||||||
|
AnyByteReceiveStream: TypeAlias = Union[ObjectReceiveStream[bytes], ByteReceiveStream]
|
||||||
|
#: Type alias for all bytes-oriented send streams.
|
||||||
|
AnyByteSendStream: TypeAlias = Union[ObjectSendStream[bytes], ByteSendStream]
|
||||||
|
#: Type alias for all bytes-oriented streams.
|
||||||
|
AnyByteStream: TypeAlias = Union[ObjectStream[bytes], ByteStream]
|
||||||
|
|
||||||
|
|
||||||
|
class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider):
|
||||||
|
"""An interface for objects that let you accept incoming connections."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def serve(
|
||||||
|
self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Accept incoming connections as they come in and start tasks to handle them.
|
||||||
|
|
||||||
|
:param handler: a callable that will be used to handle each accepted connection
|
||||||
|
:param task_group: the task group that will be used to start tasks for handling
|
||||||
|
each accepted connection (if omitted, an ad-hoc task group will be created)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ObjectStreamConnectable(Generic[T_co], metaclass=ABCMeta):
|
||||||
|
@abstractmethod
|
||||||
|
async def connect(self) -> ObjectStream[T_co]:
|
||||||
|
"""
|
||||||
|
Connect to the remote endpoint.
|
||||||
|
|
||||||
|
:return: an object stream connected to the remote end
|
||||||
|
:raises ConnectionFailed: if the connection fails
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class ByteStreamConnectable(metaclass=ABCMeta):
|
||||||
|
@abstractmethod
|
||||||
|
async def connect(self) -> ByteStream:
|
||||||
|
"""
|
||||||
|
Connect to the remote endpoint.
|
||||||
|
|
||||||
|
:return: a bytestream connected to the remote end
|
||||||
|
:raises ConnectionFailed: if the connection fails
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
#: Type alias for all connectables returning bytestreams or bytes-oriented object streams
|
||||||
|
AnyByteStreamConnectable: TypeAlias = Union[
|
||||||
|
ObjectStreamConnectable[bytes], ByteStreamConnectable
|
||||||
|
]
|
||||||
79
.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py
Normal file
79
.venv/lib/python3.9/site-packages/anyio/abc/_subprocesses.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from abc import abstractmethod
|
||||||
|
from signal import Signals
|
||||||
|
|
||||||
|
from ._resources import AsyncResource
|
||||||
|
from ._streams import ByteReceiveStream, ByteSendStream
|
||||||
|
|
||||||
|
|
||||||
|
class Process(AsyncResource):
|
||||||
|
"""An asynchronous version of :class:`subprocess.Popen`."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def wait(self) -> int:
|
||||||
|
"""
|
||||||
|
Wait until the process exits.
|
||||||
|
|
||||||
|
:return: the exit code of the process
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def terminate(self) -> None:
|
||||||
|
"""
|
||||||
|
Terminates the process, gracefully if possible.
|
||||||
|
|
||||||
|
On Windows, this calls ``TerminateProcess()``.
|
||||||
|
On POSIX systems, this sends ``SIGTERM`` to the process.
|
||||||
|
|
||||||
|
.. seealso:: :meth:`subprocess.Popen.terminate`
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def kill(self) -> None:
|
||||||
|
"""
|
||||||
|
Kills the process.
|
||||||
|
|
||||||
|
On Windows, this calls ``TerminateProcess()``.
|
||||||
|
On POSIX systems, this sends ``SIGKILL`` to the process.
|
||||||
|
|
||||||
|
.. seealso:: :meth:`subprocess.Popen.kill`
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def send_signal(self, signal: Signals) -> None:
|
||||||
|
"""
|
||||||
|
Send a signal to the subprocess.
|
||||||
|
|
||||||
|
.. seealso:: :meth:`subprocess.Popen.send_signal`
|
||||||
|
|
||||||
|
:param signal: the signal number (e.g. :data:`signal.SIGHUP`)
|
||||||
|
"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def pid(self) -> int:
|
||||||
|
"""The process ID of the process."""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def returncode(self) -> int | None:
|
||||||
|
"""
|
||||||
|
The return code of the process. If the process has not yet terminated, this will
|
||||||
|
be ``None``.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def stdin(self) -> ByteSendStream | None:
|
||||||
|
"""The stream for the standard input of the process."""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def stdout(self) -> ByteReceiveStream | None:
|
||||||
|
"""The stream for the standard output of the process."""
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def stderr(self) -> ByteReceiveStream | None:
|
||||||
|
"""The stream for the standard error output of the process."""
|
||||||
117
.venv/lib/python3.9/site-packages/anyio/abc/_tasks.py
Normal file
117
.venv/lib/python3.9/site-packages/anyio/abc/_tasks.py
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from collections.abc import Awaitable, Callable
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import TYPE_CHECKING, Any, Protocol, overload
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 13):
|
||||||
|
from typing import TypeVar
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVar
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .._core._tasks import CancelScope
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
T_contra = TypeVar("T_contra", contravariant=True, default=None)
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
|
||||||
|
class TaskStatus(Protocol[T_contra]):
|
||||||
|
@overload
|
||||||
|
def started(self: TaskStatus[None]) -> None: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def started(self, value: T_contra) -> None: ...
|
||||||
|
|
||||||
|
def started(self, value: T_contra | None = None) -> None:
|
||||||
|
"""
|
||||||
|
Signal that the task has started.
|
||||||
|
|
||||||
|
:param value: object passed back to the starter of the task
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class TaskGroup(metaclass=ABCMeta):
|
||||||
|
"""
|
||||||
|
Groups several asynchronous tasks together.
|
||||||
|
|
||||||
|
:ivar cancel_scope: the cancel scope inherited by all child tasks
|
||||||
|
:vartype cancel_scope: CancelScope
|
||||||
|
|
||||||
|
.. note:: On asyncio, support for eager task factories is considered to be
|
||||||
|
**experimental**. In particular, they don't follow the usual semantics of new
|
||||||
|
tasks being scheduled on the next iteration of the event loop, and may thus
|
||||||
|
cause unexpected behavior in code that wasn't written with such semantics in
|
||||||
|
mind.
|
||||||
|
"""
|
||||||
|
|
||||||
|
cancel_scope: CancelScope
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def start_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[Any]],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
name: object = None,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Start a new task in this task group.
|
||||||
|
|
||||||
|
:param func: a coroutine function
|
||||||
|
:param args: positional arguments to call the function with
|
||||||
|
:param name: name of the task, for the purposes of introspection and debugging
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def start(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[Any]],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> Any:
|
||||||
|
"""
|
||||||
|
Start a new task and wait until it signals for readiness.
|
||||||
|
|
||||||
|
The target callable must accept a keyword argument ``task_status`` (of type
|
||||||
|
:class:`TaskStatus`). Awaiting on this method will return whatever was passed to
|
||||||
|
``task_status.started()`` (``None`` by default).
|
||||||
|
|
||||||
|
.. note:: The :class:`TaskStatus` class is generic, and the type argument should
|
||||||
|
indicate the type of the value that will be passed to
|
||||||
|
``task_status.started()``.
|
||||||
|
|
||||||
|
:param func: a coroutine function that accepts the ``task_status`` keyword
|
||||||
|
argument
|
||||||
|
:param args: positional arguments to call the function with
|
||||||
|
:param name: an optional name for the task, for introspection and debugging
|
||||||
|
:return: the value passed to ``task_status.started()``
|
||||||
|
:raises RuntimeError: if the task finishes without calling
|
||||||
|
``task_status.started()``
|
||||||
|
|
||||||
|
.. seealso:: :ref:`start_initialize`
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def __aenter__(self) -> TaskGroup:
|
||||||
|
"""Enter the task group context and allow starting new tasks."""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> bool:
|
||||||
|
"""Exit the task group context waiting for all tasks to finish."""
|
||||||
65
.venv/lib/python3.9/site-packages/anyio/abc/_testing.py
Normal file
65
.venv/lib/python3.9/site-packages/anyio/abc/_testing.py
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import types
|
||||||
|
from abc import ABCMeta, abstractmethod
|
||||||
|
from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable
|
||||||
|
from typing import Any, TypeVar
|
||||||
|
|
||||||
|
_T = TypeVar("_T")
|
||||||
|
|
||||||
|
|
||||||
|
class TestRunner(metaclass=ABCMeta):
|
||||||
|
"""
|
||||||
|
Encapsulates a running event loop. Every call made through this object will use the
|
||||||
|
same event loop.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __enter__(self) -> TestRunner:
|
||||||
|
return self
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: types.TracebackType | None,
|
||||||
|
) -> bool | None: ...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def run_asyncgen_fixture(
|
||||||
|
self,
|
||||||
|
fixture_func: Callable[..., AsyncGenerator[_T, Any]],
|
||||||
|
kwargs: dict[str, Any],
|
||||||
|
) -> Iterable[_T]:
|
||||||
|
"""
|
||||||
|
Run an async generator fixture.
|
||||||
|
|
||||||
|
:param fixture_func: the fixture function
|
||||||
|
:param kwargs: keyword arguments to call the fixture function with
|
||||||
|
:return: an iterator yielding the value yielded from the async generator
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def run_fixture(
|
||||||
|
self,
|
||||||
|
fixture_func: Callable[..., Coroutine[Any, Any, _T]],
|
||||||
|
kwargs: dict[str, Any],
|
||||||
|
) -> _T:
|
||||||
|
"""
|
||||||
|
Run an async fixture.
|
||||||
|
|
||||||
|
:param fixture_func: the fixture function
|
||||||
|
:param kwargs: keyword arguments to call the fixture function with
|
||||||
|
:return: the return value of the fixture function
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def run_test(
|
||||||
|
self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Run an async test function.
|
||||||
|
|
||||||
|
:param test_func: the test function
|
||||||
|
:param kwargs: keyword arguments to call the test function with
|
||||||
|
"""
|
||||||
578
.venv/lib/python3.9/site-packages/anyio/from_thread.py
Normal file
578
.venv/lib/python3.9/site-packages/anyio/from_thread.py
Normal file
@@ -0,0 +1,578 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"BlockingPortal",
|
||||||
|
"BlockingPortalProvider",
|
||||||
|
"check_cancelled",
|
||||||
|
"run",
|
||||||
|
"run_sync",
|
||||||
|
"start_blocking_portal",
|
||||||
|
)
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from collections.abc import Awaitable, Callable, Generator
|
||||||
|
from concurrent.futures import Future
|
||||||
|
from contextlib import (
|
||||||
|
AbstractAsyncContextManager,
|
||||||
|
AbstractContextManager,
|
||||||
|
contextmanager,
|
||||||
|
)
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from functools import partial
|
||||||
|
from inspect import isawaitable
|
||||||
|
from threading import Lock, Thread, current_thread, get_ident
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Generic,
|
||||||
|
TypeVar,
|
||||||
|
cast,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
|
||||||
|
from ._core._eventloop import (
|
||||||
|
get_cancelled_exc_class,
|
||||||
|
threadlocals,
|
||||||
|
)
|
||||||
|
from ._core._eventloop import run as run_eventloop
|
||||||
|
from ._core._exceptions import NoEventLoopError
|
||||||
|
from ._core._synchronization import Event
|
||||||
|
from ._core._tasks import CancelScope, create_task_group
|
||||||
|
from .abc._tasks import TaskStatus
|
||||||
|
from .lowlevel import EventLoopToken, current_token
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
T_co = TypeVar("T_co", covariant=True)
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
|
||||||
|
def _token_or_error(token: EventLoopToken | None) -> EventLoopToken:
|
||||||
|
if token is not None:
|
||||||
|
return token
|
||||||
|
|
||||||
|
try:
|
||||||
|
return threadlocals.current_token
|
||||||
|
except AttributeError:
|
||||||
|
raise NoEventLoopError(
|
||||||
|
"Not running inside an AnyIO worker thread, and no event loop token was "
|
||||||
|
"provided"
|
||||||
|
) from None
|
||||||
|
|
||||||
|
|
||||||
|
def run(
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
token: EventLoopToken | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call a coroutine function from a worker thread.
|
||||||
|
|
||||||
|
:param func: a coroutine function
|
||||||
|
:param args: positional arguments for the callable
|
||||||
|
:param token: an event loop token to use to get back to the event loop thread
|
||||||
|
(required if calling this function from outside an AnyIO worker thread)
|
||||||
|
:return: the return value of the coroutine function
|
||||||
|
:raises MissingTokenError: if no token was provided and called from outside an
|
||||||
|
AnyIO worker thread
|
||||||
|
:raises RunFinishedError: if the event loop tied to ``token`` is no longer running
|
||||||
|
|
||||||
|
.. versionchanged:: 4.11.0
|
||||||
|
Added the ``token`` parameter.
|
||||||
|
|
||||||
|
"""
|
||||||
|
explicit_token = token is not None
|
||||||
|
token = _token_or_error(token)
|
||||||
|
return token.backend_class.run_async_from_thread(
|
||||||
|
func, args, token=token.native_token if explicit_token else None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def run_sync(
|
||||||
|
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
token: EventLoopToken | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call a function in the event loop thread from a worker thread.
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: positional arguments for the callable
|
||||||
|
:param token: an event loop token to use to get back to the event loop thread
|
||||||
|
(required if calling this function from outside an AnyIO worker thread)
|
||||||
|
:return: the return value of the callable
|
||||||
|
:raises MissingTokenError: if no token was provided and called from outside an
|
||||||
|
AnyIO worker thread
|
||||||
|
:raises RunFinishedError: if the event loop tied to ``token`` is no longer running
|
||||||
|
|
||||||
|
.. versionchanged:: 4.11.0
|
||||||
|
Added the ``token`` parameter.
|
||||||
|
|
||||||
|
"""
|
||||||
|
explicit_token = token is not None
|
||||||
|
token = _token_or_error(token)
|
||||||
|
return token.backend_class.run_sync_from_thread(
|
||||||
|
func, args, token=token.native_token if explicit_token else None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager):
|
||||||
|
_enter_future: Future[T_co]
|
||||||
|
_exit_future: Future[bool | None]
|
||||||
|
_exit_event: Event
|
||||||
|
_exit_exc_info: tuple[
|
||||||
|
type[BaseException] | None, BaseException | None, TracebackType | None
|
||||||
|
] = (None, None, None)
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal
|
||||||
|
):
|
||||||
|
self._async_cm = async_cm
|
||||||
|
self._portal = portal
|
||||||
|
|
||||||
|
async def run_async_cm(self) -> bool | None:
|
||||||
|
try:
|
||||||
|
self._exit_event = Event()
|
||||||
|
value = await self._async_cm.__aenter__()
|
||||||
|
except BaseException as exc:
|
||||||
|
self._enter_future.set_exception(exc)
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
self._enter_future.set_result(value)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Wait for the sync context manager to exit.
|
||||||
|
# This next statement can raise `get_cancelled_exc_class()` if
|
||||||
|
# something went wrong in a task group in this async context
|
||||||
|
# manager.
|
||||||
|
await self._exit_event.wait()
|
||||||
|
finally:
|
||||||
|
# In case of cancellation, it could be that we end up here before
|
||||||
|
# `_BlockingAsyncContextManager.__exit__` is called, and an
|
||||||
|
# `_exit_exc_info` has been set.
|
||||||
|
result = await self._async_cm.__aexit__(*self._exit_exc_info)
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
def __enter__(self) -> T_co:
|
||||||
|
self._enter_future = Future()
|
||||||
|
self._exit_future = self._portal.start_task_soon(self.run_async_cm)
|
||||||
|
return self._enter_future.result()
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
__exc_type: type[BaseException] | None,
|
||||||
|
__exc_value: BaseException | None,
|
||||||
|
__traceback: TracebackType | None,
|
||||||
|
) -> bool | None:
|
||||||
|
self._exit_exc_info = __exc_type, __exc_value, __traceback
|
||||||
|
self._portal.call(self._exit_event.set)
|
||||||
|
return self._exit_future.result()
|
||||||
|
|
||||||
|
|
||||||
|
class _BlockingPortalTaskStatus(TaskStatus):
|
||||||
|
def __init__(self, future: Future):
|
||||||
|
self._future = future
|
||||||
|
|
||||||
|
def started(self, value: object = None) -> None:
|
||||||
|
self._future.set_result(value)
|
||||||
|
|
||||||
|
|
||||||
|
class BlockingPortal:
|
||||||
|
"""
|
||||||
|
An object that lets external threads run code in an asynchronous event loop.
|
||||||
|
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._token = current_token()
|
||||||
|
self._event_loop_thread_id: int | None = get_ident()
|
||||||
|
self._stop_event = Event()
|
||||||
|
self._task_group = create_task_group()
|
||||||
|
|
||||||
|
async def __aenter__(self) -> BlockingPortal:
|
||||||
|
await self._task_group.__aenter__()
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> bool:
|
||||||
|
await self.stop()
|
||||||
|
return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
|
||||||
|
|
||||||
|
def _check_running(self) -> None:
|
||||||
|
if self._event_loop_thread_id is None:
|
||||||
|
raise RuntimeError("This portal is not running")
|
||||||
|
if self._event_loop_thread_id == get_ident():
|
||||||
|
raise RuntimeError(
|
||||||
|
"This method cannot be called from the event loop thread"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def sleep_until_stopped(self) -> None:
|
||||||
|
"""Sleep until :meth:`stop` is called."""
|
||||||
|
await self._stop_event.wait()
|
||||||
|
|
||||||
|
async def stop(self, cancel_remaining: bool = False) -> None:
|
||||||
|
"""
|
||||||
|
Signal the portal to shut down.
|
||||||
|
|
||||||
|
This marks the portal as no longer accepting new calls and exits from
|
||||||
|
:meth:`sleep_until_stopped`.
|
||||||
|
|
||||||
|
:param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False``
|
||||||
|
to let them finish before returning
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._event_loop_thread_id = None
|
||||||
|
self._stop_event.set()
|
||||||
|
if cancel_remaining:
|
||||||
|
self._task_group.cancel_scope.cancel("the blocking portal is shutting down")
|
||||||
|
|
||||||
|
async def _call_func(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
|
||||||
|
args: tuple[Unpack[PosArgsT]],
|
||||||
|
kwargs: dict[str, Any],
|
||||||
|
future: Future[T_Retval],
|
||||||
|
) -> None:
|
||||||
|
def callback(f: Future[T_Retval]) -> None:
|
||||||
|
if f.cancelled():
|
||||||
|
if self._event_loop_thread_id == get_ident():
|
||||||
|
scope.cancel("the future was cancelled")
|
||||||
|
elif self._event_loop_thread_id is not None:
|
||||||
|
self.call(scope.cancel, "the future was cancelled")
|
||||||
|
|
||||||
|
try:
|
||||||
|
retval_or_awaitable = func(*args, **kwargs)
|
||||||
|
if isawaitable(retval_or_awaitable):
|
||||||
|
with CancelScope() as scope:
|
||||||
|
future.add_done_callback(callback)
|
||||||
|
retval = await retval_or_awaitable
|
||||||
|
else:
|
||||||
|
retval = retval_or_awaitable
|
||||||
|
except get_cancelled_exc_class():
|
||||||
|
future.cancel()
|
||||||
|
future.set_running_or_notify_cancel()
|
||||||
|
except BaseException as exc:
|
||||||
|
if not future.cancelled():
|
||||||
|
future.set_exception(exc)
|
||||||
|
|
||||||
|
# Let base exceptions fall through
|
||||||
|
if not isinstance(exc, Exception):
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
if not future.cancelled():
|
||||||
|
future.set_result(retval)
|
||||||
|
finally:
|
||||||
|
scope = None # type: ignore[assignment]
|
||||||
|
|
||||||
|
def _spawn_task_from_thread(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
|
||||||
|
args: tuple[Unpack[PosArgsT]],
|
||||||
|
kwargs: dict[str, Any],
|
||||||
|
name: object,
|
||||||
|
future: Future[T_Retval],
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Spawn a new task using the given callable.
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: positional arguments to be passed to the callable
|
||||||
|
:param kwargs: keyword arguments to be passed to the callable
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:param future: a future that will resolve to the return value of the callable,
|
||||||
|
or the exception raised during its execution
|
||||||
|
|
||||||
|
"""
|
||||||
|
run_sync(
|
||||||
|
partial(self._task_group.start_soon, name=name),
|
||||||
|
self._call_func,
|
||||||
|
func,
|
||||||
|
args,
|
||||||
|
kwargs,
|
||||||
|
future,
|
||||||
|
token=self._token,
|
||||||
|
)
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def call(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
) -> T_Retval: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def call(
|
||||||
|
self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT]
|
||||||
|
) -> T_Retval: ...
|
||||||
|
|
||||||
|
def call(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call the given function in the event loop thread.
|
||||||
|
|
||||||
|
If the callable returns a coroutine object, it is awaited on.
|
||||||
|
|
||||||
|
:param func: any callable
|
||||||
|
:raises RuntimeError: if the portal is not running or if this method is called
|
||||||
|
from within the event loop thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return cast(T_Retval, self.start_task_soon(func, *args).result())
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def start_task_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def start_task_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]: ...
|
||||||
|
|
||||||
|
def start_task_soon(
|
||||||
|
self,
|
||||||
|
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
name: object = None,
|
||||||
|
) -> Future[T_Retval]:
|
||||||
|
"""
|
||||||
|
Start a task in the portal's task group.
|
||||||
|
|
||||||
|
The task will be run inside a cancel scope which can be cancelled by cancelling
|
||||||
|
the returned future.
|
||||||
|
|
||||||
|
:param func: the target function
|
||||||
|
:param args: positional arguments passed to ``func``
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:return: a future that resolves with the return value of the callable if the
|
||||||
|
task completes successfully, or with the exception raised in the task
|
||||||
|
:raises RuntimeError: if the portal is not running or if this method is called
|
||||||
|
from within the event loop thread
|
||||||
|
:rtype: concurrent.futures.Future[T_Retval]
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._check_running()
|
||||||
|
f: Future[T_Retval] = Future()
|
||||||
|
self._spawn_task_from_thread(func, args, {}, name, f)
|
||||||
|
return f
|
||||||
|
|
||||||
|
def start_task(
|
||||||
|
self,
|
||||||
|
func: Callable[..., Awaitable[T_Retval]],
|
||||||
|
*args: object,
|
||||||
|
name: object = None,
|
||||||
|
) -> tuple[Future[T_Retval], Any]:
|
||||||
|
"""
|
||||||
|
Start a task in the portal's task group and wait until it signals for readiness.
|
||||||
|
|
||||||
|
This method works the same way as :meth:`.abc.TaskGroup.start`.
|
||||||
|
|
||||||
|
:param func: the target function
|
||||||
|
:param args: positional arguments passed to ``func``
|
||||||
|
:param name: name of the task (will be coerced to a string if not ``None``)
|
||||||
|
:return: a tuple of (future, task_status_value) where the ``task_status_value``
|
||||||
|
is the value passed to ``task_status.started()`` from within the target
|
||||||
|
function
|
||||||
|
:rtype: tuple[concurrent.futures.Future[T_Retval], Any]
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def task_done(future: Future[T_Retval]) -> None:
|
||||||
|
if not task_status_future.done():
|
||||||
|
if future.cancelled():
|
||||||
|
task_status_future.cancel()
|
||||||
|
elif future.exception():
|
||||||
|
task_status_future.set_exception(future.exception())
|
||||||
|
else:
|
||||||
|
exc = RuntimeError(
|
||||||
|
"Task exited without calling task_status.started()"
|
||||||
|
)
|
||||||
|
task_status_future.set_exception(exc)
|
||||||
|
|
||||||
|
self._check_running()
|
||||||
|
task_status_future: Future = Future()
|
||||||
|
task_status = _BlockingPortalTaskStatus(task_status_future)
|
||||||
|
f: Future = Future()
|
||||||
|
f.add_done_callback(task_done)
|
||||||
|
self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f)
|
||||||
|
return f, task_status_future.result()
|
||||||
|
|
||||||
|
def wrap_async_context_manager(
|
||||||
|
self, cm: AbstractAsyncContextManager[T_co]
|
||||||
|
) -> AbstractContextManager[T_co]:
|
||||||
|
"""
|
||||||
|
Wrap an async context manager as a synchronous context manager via this portal.
|
||||||
|
|
||||||
|
Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping
|
||||||
|
in the middle until the synchronous context manager exits.
|
||||||
|
|
||||||
|
:param cm: an asynchronous context manager
|
||||||
|
:return: a synchronous context manager
|
||||||
|
|
||||||
|
.. versionadded:: 2.1
|
||||||
|
|
||||||
|
"""
|
||||||
|
return _BlockingAsyncContextManager(cm, self)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class BlockingPortalProvider:
|
||||||
|
"""
|
||||||
|
A manager for a blocking portal. Used as a context manager. The first thread to
|
||||||
|
enter this context manager causes a blocking portal to be started with the specific
|
||||||
|
parameters, and the last thread to exit causes the portal to be shut down. Thus,
|
||||||
|
there will be exactly one blocking portal running in this context as long as at
|
||||||
|
least one thread has entered this context manager.
|
||||||
|
|
||||||
|
The parameters are the same as for :func:`~anyio.run`.
|
||||||
|
|
||||||
|
:param backend: name of the backend
|
||||||
|
:param backend_options: backend options
|
||||||
|
|
||||||
|
.. versionadded:: 4.4
|
||||||
|
"""
|
||||||
|
|
||||||
|
backend: str = "asyncio"
|
||||||
|
backend_options: dict[str, Any] | None = None
|
||||||
|
_lock: Lock = field(init=False, default_factory=Lock)
|
||||||
|
_leases: int = field(init=False, default=0)
|
||||||
|
_portal: BlockingPortal = field(init=False)
|
||||||
|
_portal_cm: AbstractContextManager[BlockingPortal] | None = field(
|
||||||
|
init=False, default=None
|
||||||
|
)
|
||||||
|
|
||||||
|
def __enter__(self) -> BlockingPortal:
|
||||||
|
with self._lock:
|
||||||
|
if self._portal_cm is None:
|
||||||
|
self._portal_cm = start_blocking_portal(
|
||||||
|
self.backend, self.backend_options
|
||||||
|
)
|
||||||
|
self._portal = self._portal_cm.__enter__()
|
||||||
|
|
||||||
|
self._leases += 1
|
||||||
|
return self._portal
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
portal_cm: AbstractContextManager[BlockingPortal] | None = None
|
||||||
|
with self._lock:
|
||||||
|
assert self._portal_cm
|
||||||
|
assert self._leases > 0
|
||||||
|
self._leases -= 1
|
||||||
|
if not self._leases:
|
||||||
|
portal_cm = self._portal_cm
|
||||||
|
self._portal_cm = None
|
||||||
|
del self._portal
|
||||||
|
|
||||||
|
if portal_cm:
|
||||||
|
portal_cm.__exit__(None, None, None)
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def start_blocking_portal(
|
||||||
|
backend: str = "asyncio",
|
||||||
|
backend_options: dict[str, Any] | None = None,
|
||||||
|
*,
|
||||||
|
name: str | None = None,
|
||||||
|
) -> Generator[BlockingPortal, Any, None]:
|
||||||
|
"""
|
||||||
|
Start a new event loop in a new thread and run a blocking portal in its main task.
|
||||||
|
|
||||||
|
The parameters are the same as for :func:`~anyio.run`.
|
||||||
|
|
||||||
|
:param backend: name of the backend
|
||||||
|
:param backend_options: backend options
|
||||||
|
:param name: name of the thread
|
||||||
|
:return: a context manager that yields a blocking portal
|
||||||
|
|
||||||
|
.. versionchanged:: 3.0
|
||||||
|
Usage as a context manager is now required.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def run_portal() -> None:
|
||||||
|
async with BlockingPortal() as portal_:
|
||||||
|
if name is None:
|
||||||
|
current_thread().name = f"{backend}-portal-{id(portal_):x}"
|
||||||
|
|
||||||
|
future.set_result(portal_)
|
||||||
|
await portal_.sleep_until_stopped()
|
||||||
|
|
||||||
|
def run_blocking_portal() -> None:
|
||||||
|
if future.set_running_or_notify_cancel():
|
||||||
|
try:
|
||||||
|
run_eventloop(
|
||||||
|
run_portal, backend=backend, backend_options=backend_options
|
||||||
|
)
|
||||||
|
except BaseException as exc:
|
||||||
|
if not future.done():
|
||||||
|
future.set_exception(exc)
|
||||||
|
|
||||||
|
future: Future[BlockingPortal] = Future()
|
||||||
|
thread = Thread(target=run_blocking_portal, daemon=True, name=name)
|
||||||
|
thread.start()
|
||||||
|
try:
|
||||||
|
cancel_remaining_tasks = False
|
||||||
|
portal = future.result()
|
||||||
|
try:
|
||||||
|
yield portal
|
||||||
|
except BaseException:
|
||||||
|
cancel_remaining_tasks = True
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
try:
|
||||||
|
portal.call(portal.stop, cancel_remaining_tasks)
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
finally:
|
||||||
|
thread.join()
|
||||||
|
|
||||||
|
|
||||||
|
def check_cancelled() -> None:
|
||||||
|
"""
|
||||||
|
Check if the cancel scope of the host task's running the current worker thread has
|
||||||
|
been cancelled.
|
||||||
|
|
||||||
|
If the host task's current cancel scope has indeed been cancelled, the
|
||||||
|
backend-specific cancellation exception will be raised.
|
||||||
|
|
||||||
|
:raises RuntimeError: if the current thread was not spawned by
|
||||||
|
:func:`.to_thread.run_sync`
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
token: EventLoopToken = threadlocals.current_token
|
||||||
|
except AttributeError:
|
||||||
|
raise NoEventLoopError(
|
||||||
|
"This function can only be called inside an AnyIO worker thread"
|
||||||
|
) from None
|
||||||
|
|
||||||
|
token.backend_class.check_cancelled()
|
||||||
375
.venv/lib/python3.9/site-packages/anyio/functools.py
Normal file
375
.venv/lib/python3.9/site-packages/anyio/functools.py
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"AsyncCacheInfo",
|
||||||
|
"AsyncCacheParameters",
|
||||||
|
"AsyncLRUCacheWrapper",
|
||||||
|
"cache",
|
||||||
|
"lru_cache",
|
||||||
|
"reduce",
|
||||||
|
)
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import sys
|
||||||
|
from collections import OrderedDict
|
||||||
|
from collections.abc import (
|
||||||
|
AsyncIterable,
|
||||||
|
Awaitable,
|
||||||
|
Callable,
|
||||||
|
Coroutine,
|
||||||
|
Hashable,
|
||||||
|
Iterable,
|
||||||
|
)
|
||||||
|
from functools import update_wrapper
|
||||||
|
from inspect import iscoroutinefunction
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Generic,
|
||||||
|
NamedTuple,
|
||||||
|
TypedDict,
|
||||||
|
TypeVar,
|
||||||
|
cast,
|
||||||
|
final,
|
||||||
|
overload,
|
||||||
|
)
|
||||||
|
from weakref import WeakKeyDictionary
|
||||||
|
|
||||||
|
from ._core._synchronization import Lock
|
||||||
|
from .lowlevel import RunVar, checkpoint
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import ParamSpec
|
||||||
|
else:
|
||||||
|
from typing_extensions import ParamSpec
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
S = TypeVar("S")
|
||||||
|
P = ParamSpec("P")
|
||||||
|
lru_cache_items: RunVar[
|
||||||
|
WeakKeyDictionary[
|
||||||
|
AsyncLRUCacheWrapper[Any, Any],
|
||||||
|
OrderedDict[Hashable, tuple[_InitialMissingType, Lock] | tuple[Any, None]],
|
||||||
|
]
|
||||||
|
] = RunVar("lru_cache_items")
|
||||||
|
|
||||||
|
|
||||||
|
class _InitialMissingType:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
initial_missing: _InitialMissingType = _InitialMissingType()
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncCacheInfo(NamedTuple):
|
||||||
|
hits: int
|
||||||
|
misses: int
|
||||||
|
maxsize: int | None
|
||||||
|
currsize: int
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncCacheParameters(TypedDict):
|
||||||
|
maxsize: int | None
|
||||||
|
typed: bool
|
||||||
|
always_checkpoint: bool
|
||||||
|
|
||||||
|
|
||||||
|
class _LRUMethodWrapper(Generic[T]):
|
||||||
|
def __init__(self, wrapper: AsyncLRUCacheWrapper[..., T], instance: object):
|
||||||
|
self.__wrapper = wrapper
|
||||||
|
self.__instance = instance
|
||||||
|
|
||||||
|
def cache_info(self) -> AsyncCacheInfo:
|
||||||
|
return self.__wrapper.cache_info()
|
||||||
|
|
||||||
|
def cache_parameters(self) -> AsyncCacheParameters:
|
||||||
|
return self.__wrapper.cache_parameters()
|
||||||
|
|
||||||
|
def cache_clear(self) -> None:
|
||||||
|
self.__wrapper.cache_clear()
|
||||||
|
|
||||||
|
async def __call__(self, *args: Any, **kwargs: Any) -> T:
|
||||||
|
if self.__instance is None:
|
||||||
|
return await self.__wrapper(*args, **kwargs)
|
||||||
|
|
||||||
|
return await self.__wrapper(self.__instance, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
@final
|
||||||
|
class AsyncLRUCacheWrapper(Generic[P, T]):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
func: Callable[P, Awaitable[T]],
|
||||||
|
maxsize: int | None,
|
||||||
|
typed: bool,
|
||||||
|
always_checkpoint: bool,
|
||||||
|
):
|
||||||
|
self.__wrapped__ = func
|
||||||
|
self._hits: int = 0
|
||||||
|
self._misses: int = 0
|
||||||
|
self._maxsize = max(maxsize, 0) if maxsize is not None else None
|
||||||
|
self._currsize: int = 0
|
||||||
|
self._typed = typed
|
||||||
|
self._always_checkpoint = always_checkpoint
|
||||||
|
update_wrapper(self, func)
|
||||||
|
|
||||||
|
def cache_info(self) -> AsyncCacheInfo:
|
||||||
|
return AsyncCacheInfo(self._hits, self._misses, self._maxsize, self._currsize)
|
||||||
|
|
||||||
|
def cache_parameters(self) -> AsyncCacheParameters:
|
||||||
|
return {
|
||||||
|
"maxsize": self._maxsize,
|
||||||
|
"typed": self._typed,
|
||||||
|
"always_checkpoint": self._always_checkpoint,
|
||||||
|
}
|
||||||
|
|
||||||
|
def cache_clear(self) -> None:
|
||||||
|
if cache := lru_cache_items.get(None):
|
||||||
|
cache.pop(self, None)
|
||||||
|
self._hits = self._misses = self._currsize = 0
|
||||||
|
|
||||||
|
async def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T:
|
||||||
|
# Easy case first: if maxsize == 0, no caching is done
|
||||||
|
if self._maxsize == 0:
|
||||||
|
value = await self.__wrapped__(*args, **kwargs)
|
||||||
|
self._misses += 1
|
||||||
|
return value
|
||||||
|
|
||||||
|
# The key is constructed as a flat tuple to avoid memory overhead
|
||||||
|
key: tuple[Any, ...] = args
|
||||||
|
if kwargs:
|
||||||
|
# initial_missing is used as a separator
|
||||||
|
key += (initial_missing,) + sum(kwargs.items(), ())
|
||||||
|
|
||||||
|
if self._typed:
|
||||||
|
key += tuple(type(arg) for arg in args)
|
||||||
|
if kwargs:
|
||||||
|
key += (initial_missing,) + tuple(type(val) for val in kwargs.values())
|
||||||
|
|
||||||
|
try:
|
||||||
|
cache = lru_cache_items.get()
|
||||||
|
except LookupError:
|
||||||
|
cache = WeakKeyDictionary()
|
||||||
|
lru_cache_items.set(cache)
|
||||||
|
|
||||||
|
try:
|
||||||
|
cache_entry = cache[self]
|
||||||
|
except KeyError:
|
||||||
|
cache_entry = cache[self] = OrderedDict()
|
||||||
|
|
||||||
|
cached_value: T | _InitialMissingType
|
||||||
|
try:
|
||||||
|
cached_value, lock = cache_entry[key]
|
||||||
|
except KeyError:
|
||||||
|
# We're the first task to call this function
|
||||||
|
cached_value, lock = (
|
||||||
|
initial_missing,
|
||||||
|
Lock(fast_acquire=not self._always_checkpoint),
|
||||||
|
)
|
||||||
|
cache_entry[key] = cached_value, lock
|
||||||
|
|
||||||
|
if lock is None:
|
||||||
|
# The value was already cached
|
||||||
|
self._hits += 1
|
||||||
|
cache_entry.move_to_end(key)
|
||||||
|
if self._always_checkpoint:
|
||||||
|
await checkpoint()
|
||||||
|
|
||||||
|
return cast(T, cached_value)
|
||||||
|
|
||||||
|
async with lock:
|
||||||
|
# Check if another task filled the cache while we acquired the lock
|
||||||
|
if (cached_value := cache_entry[key][0]) is initial_missing:
|
||||||
|
self._misses += 1
|
||||||
|
if self._maxsize is not None and self._currsize >= self._maxsize:
|
||||||
|
cache_entry.popitem(last=False)
|
||||||
|
else:
|
||||||
|
self._currsize += 1
|
||||||
|
|
||||||
|
value = await self.__wrapped__(*args, **kwargs)
|
||||||
|
cache_entry[key] = value, None
|
||||||
|
else:
|
||||||
|
# Another task filled the cache while we were waiting for the lock
|
||||||
|
self._hits += 1
|
||||||
|
cache_entry.move_to_end(key)
|
||||||
|
value = cast(T, cached_value)
|
||||||
|
|
||||||
|
return value
|
||||||
|
|
||||||
|
def __get__(
|
||||||
|
self, instance: object, owner: type | None = None
|
||||||
|
) -> _LRUMethodWrapper[T]:
|
||||||
|
wrapper = _LRUMethodWrapper(self, instance)
|
||||||
|
update_wrapper(wrapper, self.__wrapped__)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
class _LRUCacheWrapper(Generic[T]):
|
||||||
|
def __init__(self, maxsize: int | None, typed: bool, always_checkpoint: bool):
|
||||||
|
self._maxsize = maxsize
|
||||||
|
self._typed = typed
|
||||||
|
self._always_checkpoint = always_checkpoint
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __call__( # type: ignore[overload-overlap]
|
||||||
|
self, func: Callable[P, Coroutine[Any, Any, T]], /
|
||||||
|
) -> AsyncLRUCacheWrapper[P, T]: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def __call__(
|
||||||
|
self, func: Callable[..., T], /
|
||||||
|
) -> functools._lru_cache_wrapper[T]: ...
|
||||||
|
|
||||||
|
def __call__(
|
||||||
|
self, f: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T], /
|
||||||
|
) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]:
|
||||||
|
if iscoroutinefunction(f):
|
||||||
|
return AsyncLRUCacheWrapper(
|
||||||
|
f, self._maxsize, self._typed, self._always_checkpoint
|
||||||
|
)
|
||||||
|
|
||||||
|
return functools.lru_cache(maxsize=self._maxsize, typed=self._typed)(f) # type: ignore[arg-type]
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def cache( # type: ignore[overload-overlap]
|
||||||
|
func: Callable[P, Coroutine[Any, Any, T]], /
|
||||||
|
) -> AsyncLRUCacheWrapper[P, T]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ...
|
||||||
|
|
||||||
|
|
||||||
|
def cache(
|
||||||
|
func: Callable[..., T] | Callable[P, Coroutine[Any, Any, T]], /
|
||||||
|
) -> AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T]:
|
||||||
|
"""
|
||||||
|
A convenient shortcut for :func:`lru_cache` with ``maxsize=None``.
|
||||||
|
|
||||||
|
This is the asynchronous equivalent to :func:`functools.cache`.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return lru_cache(maxsize=None)(func)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def lru_cache(
|
||||||
|
*, maxsize: int | None = ..., typed: bool = ..., always_checkpoint: bool = ...
|
||||||
|
) -> _LRUCacheWrapper[Any]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def lru_cache( # type: ignore[overload-overlap]
|
||||||
|
func: Callable[P, Coroutine[Any, Any, T]], /
|
||||||
|
) -> AsyncLRUCacheWrapper[P, T]: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def lru_cache(func: Callable[..., T], /) -> functools._lru_cache_wrapper[T]: ...
|
||||||
|
|
||||||
|
|
||||||
|
def lru_cache(
|
||||||
|
func: Callable[P, Coroutine[Any, Any, T]] | Callable[..., T] | None = None,
|
||||||
|
/,
|
||||||
|
*,
|
||||||
|
maxsize: int | None = 128,
|
||||||
|
typed: bool = False,
|
||||||
|
always_checkpoint: bool = False,
|
||||||
|
) -> (
|
||||||
|
AsyncLRUCacheWrapper[P, T] | functools._lru_cache_wrapper[T] | _LRUCacheWrapper[Any]
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
An asynchronous version of :func:`functools.lru_cache`.
|
||||||
|
|
||||||
|
If a synchronous function is passed, the standard library
|
||||||
|
:func:`functools.lru_cache` is applied instead.
|
||||||
|
|
||||||
|
:param always_checkpoint: if ``True``, every call to the cached function will be
|
||||||
|
guaranteed to yield control to the event loop at least once
|
||||||
|
|
||||||
|
.. note:: Caches and locks are managed on a per-event loop basis.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if func is None:
|
||||||
|
return _LRUCacheWrapper[Any](maxsize, typed, always_checkpoint)
|
||||||
|
|
||||||
|
if not callable(func):
|
||||||
|
raise TypeError("the first argument must be callable")
|
||||||
|
|
||||||
|
return _LRUCacheWrapper[T](maxsize, typed, always_checkpoint)(func)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def reduce(
|
||||||
|
function: Callable[[T, S], Awaitable[T]],
|
||||||
|
iterable: Iterable[S] | AsyncIterable[S],
|
||||||
|
/,
|
||||||
|
initial: T,
|
||||||
|
) -> T: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def reduce(
|
||||||
|
function: Callable[[T, T], Awaitable[T]],
|
||||||
|
iterable: Iterable[T] | AsyncIterable[T],
|
||||||
|
/,
|
||||||
|
) -> T: ...
|
||||||
|
|
||||||
|
|
||||||
|
async def reduce( # type: ignore[misc]
|
||||||
|
function: Callable[[T, T], Awaitable[T]] | Callable[[T, S], Awaitable[T]],
|
||||||
|
iterable: Iterable[T] | Iterable[S] | AsyncIterable[T] | AsyncIterable[S],
|
||||||
|
/,
|
||||||
|
initial: T | _InitialMissingType = initial_missing,
|
||||||
|
) -> T:
|
||||||
|
"""
|
||||||
|
Asynchronous version of :func:`functools.reduce`.
|
||||||
|
|
||||||
|
:param function: a coroutine function that takes two arguments: the accumulated
|
||||||
|
value and the next element from the iterable
|
||||||
|
:param iterable: an iterable or async iterable
|
||||||
|
:param initial: the initial value (if missing, the first element of the iterable is
|
||||||
|
used as the initial value)
|
||||||
|
|
||||||
|
"""
|
||||||
|
element: Any
|
||||||
|
function_called = False
|
||||||
|
if isinstance(iterable, AsyncIterable):
|
||||||
|
async_it = iterable.__aiter__()
|
||||||
|
if initial is initial_missing:
|
||||||
|
try:
|
||||||
|
value = cast(T, await async_it.__anext__())
|
||||||
|
except StopAsyncIteration:
|
||||||
|
raise TypeError(
|
||||||
|
"reduce() of empty sequence with no initial value"
|
||||||
|
) from None
|
||||||
|
else:
|
||||||
|
value = cast(T, initial)
|
||||||
|
|
||||||
|
async for element in async_it:
|
||||||
|
value = await function(value, element)
|
||||||
|
function_called = True
|
||||||
|
elif isinstance(iterable, Iterable):
|
||||||
|
it = iter(iterable)
|
||||||
|
if initial is initial_missing:
|
||||||
|
try:
|
||||||
|
value = cast(T, next(it))
|
||||||
|
except StopIteration:
|
||||||
|
raise TypeError(
|
||||||
|
"reduce() of empty sequence with no initial value"
|
||||||
|
) from None
|
||||||
|
else:
|
||||||
|
value = cast(T, initial)
|
||||||
|
|
||||||
|
for element in it:
|
||||||
|
value = await function(value, element)
|
||||||
|
function_called = True
|
||||||
|
else:
|
||||||
|
raise TypeError("reduce() argument 2 must be an iterable or async iterable")
|
||||||
|
|
||||||
|
# Make sure there is at least one checkpoint, even if an empty iterable and an
|
||||||
|
# initial value were given
|
||||||
|
if not function_called:
|
||||||
|
await checkpoint()
|
||||||
|
|
||||||
|
return value
|
||||||
196
.venv/lib/python3.9/site-packages/anyio/lowlevel.py
Normal file
196
.venv/lib/python3.9/site-packages/anyio/lowlevel.py
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"EventLoopToken",
|
||||||
|
"RunvarToken",
|
||||||
|
"RunVar",
|
||||||
|
"checkpoint",
|
||||||
|
"checkpoint_if_cancelled",
|
||||||
|
"cancel_shielded_checkpoint",
|
||||||
|
"current_token",
|
||||||
|
)
|
||||||
|
|
||||||
|
import enum
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import Any, Generic, Literal, TypeVar, final, overload
|
||||||
|
from weakref import WeakKeyDictionary
|
||||||
|
|
||||||
|
from ._core._eventloop import get_async_backend
|
||||||
|
from .abc import AsyncBackend
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
D = TypeVar("D")
|
||||||
|
|
||||||
|
|
||||||
|
async def checkpoint() -> None:
|
||||||
|
"""
|
||||||
|
Check for cancellation and allow the scheduler to switch to another task.
|
||||||
|
|
||||||
|
Equivalent to (but more efficient than)::
|
||||||
|
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
await cancel_shielded_checkpoint()
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
await get_async_backend().checkpoint()
|
||||||
|
|
||||||
|
|
||||||
|
async def checkpoint_if_cancelled() -> None:
|
||||||
|
"""
|
||||||
|
Enter a checkpoint if the enclosing cancel scope has been cancelled.
|
||||||
|
|
||||||
|
This does not allow the scheduler to switch to a different task.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
await get_async_backend().checkpoint_if_cancelled()
|
||||||
|
|
||||||
|
|
||||||
|
async def cancel_shielded_checkpoint() -> None:
|
||||||
|
"""
|
||||||
|
Allow the scheduler to switch to another task but without checking for cancellation.
|
||||||
|
|
||||||
|
Equivalent to (but potentially more efficient than)::
|
||||||
|
|
||||||
|
with CancelScope(shield=True):
|
||||||
|
await checkpoint()
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
await get_async_backend().cancel_shielded_checkpoint()
|
||||||
|
|
||||||
|
|
||||||
|
@final
|
||||||
|
@dataclass(frozen=True, repr=False)
|
||||||
|
class EventLoopToken:
|
||||||
|
"""
|
||||||
|
An opaque object that holds a reference to an event loop.
|
||||||
|
|
||||||
|
.. versionadded:: 4.11.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
backend_class: type[AsyncBackend]
|
||||||
|
native_token: object
|
||||||
|
|
||||||
|
|
||||||
|
def current_token() -> EventLoopToken:
|
||||||
|
"""
|
||||||
|
Return a token object that can be used to call code in the current event loop from
|
||||||
|
another thread.
|
||||||
|
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
.. versionadded:: 4.11.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
backend_class = get_async_backend()
|
||||||
|
raw_token = backend_class.current_token()
|
||||||
|
return EventLoopToken(backend_class, raw_token)
|
||||||
|
|
||||||
|
|
||||||
|
_run_vars: WeakKeyDictionary[object, dict[RunVar[Any], Any]] = WeakKeyDictionary()
|
||||||
|
|
||||||
|
|
||||||
|
class _NoValueSet(enum.Enum):
|
||||||
|
NO_VALUE_SET = enum.auto()
|
||||||
|
|
||||||
|
|
||||||
|
class RunvarToken(Generic[T]):
|
||||||
|
__slots__ = "_var", "_value", "_redeemed"
|
||||||
|
|
||||||
|
def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]):
|
||||||
|
self._var = var
|
||||||
|
self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value
|
||||||
|
self._redeemed = False
|
||||||
|
|
||||||
|
def __enter__(self) -> RunvarToken[T]:
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
self._var.reset(self)
|
||||||
|
|
||||||
|
|
||||||
|
class RunVar(Generic[T]):
|
||||||
|
"""
|
||||||
|
Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop.
|
||||||
|
|
||||||
|
Can be used as a context manager, Just like :class:`~contextvars.ContextVar`, that
|
||||||
|
will reset the variable to its previous value when the context block is exited.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__slots__ = "_name", "_default"
|
||||||
|
|
||||||
|
NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
|
||||||
|
):
|
||||||
|
self._name = name
|
||||||
|
self._default = default
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _current_vars(self) -> dict[RunVar[T], T]:
|
||||||
|
native_token = current_token().native_token
|
||||||
|
try:
|
||||||
|
return _run_vars[native_token]
|
||||||
|
except KeyError:
|
||||||
|
run_vars = _run_vars[native_token] = {}
|
||||||
|
return run_vars
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def get(self, default: D) -> T | D: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def get(self) -> T: ...
|
||||||
|
|
||||||
|
def get(
|
||||||
|
self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
|
||||||
|
) -> T | D:
|
||||||
|
try:
|
||||||
|
return self._current_vars[self]
|
||||||
|
except KeyError:
|
||||||
|
if default is not RunVar.NO_VALUE_SET:
|
||||||
|
return default
|
||||||
|
elif self._default is not RunVar.NO_VALUE_SET:
|
||||||
|
return self._default
|
||||||
|
|
||||||
|
raise LookupError(
|
||||||
|
f'Run variable "{self._name}" has no value and no default set'
|
||||||
|
)
|
||||||
|
|
||||||
|
def set(self, value: T) -> RunvarToken[T]:
|
||||||
|
current_vars = self._current_vars
|
||||||
|
token = RunvarToken(self, current_vars.get(self, RunVar.NO_VALUE_SET))
|
||||||
|
current_vars[self] = value
|
||||||
|
return token
|
||||||
|
|
||||||
|
def reset(self, token: RunvarToken[T]) -> None:
|
||||||
|
if token._var is not self:
|
||||||
|
raise ValueError("This token does not belong to this RunVar")
|
||||||
|
|
||||||
|
if token._redeemed:
|
||||||
|
raise ValueError("This token has already been used")
|
||||||
|
|
||||||
|
if token._value is _NoValueSet.NO_VALUE_SET:
|
||||||
|
try:
|
||||||
|
del self._current_vars[self]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
self._current_vars[self] = token._value
|
||||||
|
|
||||||
|
token._redeemed = True
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return f"<RunVar name={self._name!r}>"
|
||||||
0
.venv/lib/python3.9/site-packages/anyio/py.typed
Normal file
0
.venv/lib/python3.9/site-packages/anyio/py.typed
Normal file
302
.venv/lib/python3.9/site-packages/anyio/pytest_plugin.py
Normal file
302
.venv/lib/python3.9/site-packages/anyio/pytest_plugin.py
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import socket
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable, Generator, Iterator
|
||||||
|
from contextlib import ExitStack, contextmanager
|
||||||
|
from inspect import isasyncgenfunction, iscoroutinefunction, ismethod
|
||||||
|
from typing import Any, cast
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from _pytest.fixtures import SubRequest
|
||||||
|
from _pytest.outcomes import Exit
|
||||||
|
|
||||||
|
from . import get_available_backends
|
||||||
|
from ._core._eventloop import (
|
||||||
|
current_async_library,
|
||||||
|
get_async_backend,
|
||||||
|
reset_current_async_library,
|
||||||
|
set_current_async_library,
|
||||||
|
)
|
||||||
|
from ._core._exceptions import iterate_exceptions
|
||||||
|
from .abc import TestRunner
|
||||||
|
|
||||||
|
if sys.version_info < (3, 11):
|
||||||
|
from exceptiongroup import ExceptionGroup
|
||||||
|
|
||||||
|
_current_runner: TestRunner | None = None
|
||||||
|
_runner_stack: ExitStack | None = None
|
||||||
|
_runner_leases = 0
|
||||||
|
|
||||||
|
|
||||||
|
def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
|
||||||
|
if isinstance(backend, str):
|
||||||
|
return backend, {}
|
||||||
|
elif isinstance(backend, tuple) and len(backend) == 2:
|
||||||
|
if isinstance(backend[0], str) and isinstance(backend[1], dict):
|
||||||
|
return cast(tuple[str, dict[str, Any]], backend)
|
||||||
|
|
||||||
|
raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def get_runner(
|
||||||
|
backend_name: str, backend_options: dict[str, Any]
|
||||||
|
) -> Iterator[TestRunner]:
|
||||||
|
global _current_runner, _runner_leases, _runner_stack
|
||||||
|
if _current_runner is None:
|
||||||
|
asynclib = get_async_backend(backend_name)
|
||||||
|
_runner_stack = ExitStack()
|
||||||
|
if current_async_library() is None:
|
||||||
|
# Since we're in control of the event loop, we can cache the name of the
|
||||||
|
# async library
|
||||||
|
token = set_current_async_library(backend_name)
|
||||||
|
_runner_stack.callback(reset_current_async_library, token)
|
||||||
|
|
||||||
|
backend_options = backend_options or {}
|
||||||
|
_current_runner = _runner_stack.enter_context(
|
||||||
|
asynclib.create_test_runner(backend_options)
|
||||||
|
)
|
||||||
|
|
||||||
|
_runner_leases += 1
|
||||||
|
try:
|
||||||
|
yield _current_runner
|
||||||
|
finally:
|
||||||
|
_runner_leases -= 1
|
||||||
|
if not _runner_leases:
|
||||||
|
assert _runner_stack is not None
|
||||||
|
_runner_stack.close()
|
||||||
|
_runner_stack = _current_runner = None
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_addoption(parser: pytest.Parser) -> None:
|
||||||
|
parser.addini(
|
||||||
|
"anyio_mode",
|
||||||
|
default="strict",
|
||||||
|
help='AnyIO plugin mode (either "strict" or "auto")',
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_configure(config: pytest.Config) -> None:
|
||||||
|
config.addinivalue_line(
|
||||||
|
"markers",
|
||||||
|
"anyio: mark the (coroutine function) test to be run asynchronously via anyio.",
|
||||||
|
)
|
||||||
|
if (
|
||||||
|
config.getini("anyio_mode") == "auto"
|
||||||
|
and config.pluginmanager.has_plugin("asyncio")
|
||||||
|
and config.getini("asyncio_mode") == "auto"
|
||||||
|
):
|
||||||
|
config.issue_config_time_warning(
|
||||||
|
pytest.PytestConfigWarning(
|
||||||
|
"AnyIO auto mode has been enabled together with pytest-asyncio auto "
|
||||||
|
"mode. This may cause unexpected behavior."
|
||||||
|
),
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.hookimpl(hookwrapper=True)
|
||||||
|
def pytest_fixture_setup(fixturedef: Any, request: Any) -> Generator[Any]:
|
||||||
|
def wrapper(anyio_backend: Any, request: SubRequest, **kwargs: Any) -> Any:
|
||||||
|
# Rebind any fixture methods to the request instance
|
||||||
|
if (
|
||||||
|
request.instance
|
||||||
|
and ismethod(func)
|
||||||
|
and type(func.__self__) is type(request.instance)
|
||||||
|
):
|
||||||
|
local_func = func.__func__.__get__(request.instance)
|
||||||
|
else:
|
||||||
|
local_func = func
|
||||||
|
|
||||||
|
backend_name, backend_options = extract_backend_and_options(anyio_backend)
|
||||||
|
if has_backend_arg:
|
||||||
|
kwargs["anyio_backend"] = anyio_backend
|
||||||
|
|
||||||
|
if has_request_arg:
|
||||||
|
kwargs["request"] = request
|
||||||
|
|
||||||
|
with get_runner(backend_name, backend_options) as runner:
|
||||||
|
if isasyncgenfunction(local_func):
|
||||||
|
yield from runner.run_asyncgen_fixture(local_func, kwargs)
|
||||||
|
else:
|
||||||
|
yield runner.run_fixture(local_func, kwargs)
|
||||||
|
|
||||||
|
# Only apply this to coroutine functions and async generator functions in requests
|
||||||
|
# that involve the anyio_backend fixture
|
||||||
|
func = fixturedef.func
|
||||||
|
if isasyncgenfunction(func) or iscoroutinefunction(func):
|
||||||
|
if "anyio_backend" in request.fixturenames:
|
||||||
|
fixturedef.func = wrapper
|
||||||
|
original_argname = fixturedef.argnames
|
||||||
|
|
||||||
|
if not (has_backend_arg := "anyio_backend" in fixturedef.argnames):
|
||||||
|
fixturedef.argnames += ("anyio_backend",)
|
||||||
|
|
||||||
|
if not (has_request_arg := "request" in fixturedef.argnames):
|
||||||
|
fixturedef.argnames += ("request",)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return (yield)
|
||||||
|
finally:
|
||||||
|
fixturedef.func = func
|
||||||
|
fixturedef.argnames = original_argname
|
||||||
|
|
||||||
|
return (yield)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.hookimpl(tryfirst=True)
|
||||||
|
def pytest_pycollect_makeitem(
|
||||||
|
collector: pytest.Module | pytest.Class, name: str, obj: object
|
||||||
|
) -> None:
|
||||||
|
if collector.istestfunction(obj, name):
|
||||||
|
inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj
|
||||||
|
if iscoroutinefunction(inner_func):
|
||||||
|
anyio_auto_mode = collector.config.getini("anyio_mode") == "auto"
|
||||||
|
marker = collector.get_closest_marker("anyio")
|
||||||
|
own_markers = getattr(obj, "pytestmark", ())
|
||||||
|
if (
|
||||||
|
anyio_auto_mode
|
||||||
|
or marker
|
||||||
|
or any(marker.name == "anyio" for marker in own_markers)
|
||||||
|
):
|
||||||
|
pytest.mark.usefixtures("anyio_backend")(obj)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.hookimpl(tryfirst=True)
|
||||||
|
def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None:
|
||||||
|
def run_with_hypothesis(**kwargs: Any) -> None:
|
||||||
|
with get_runner(backend_name, backend_options) as runner:
|
||||||
|
runner.run_test(original_func, kwargs)
|
||||||
|
|
||||||
|
backend = pyfuncitem.funcargs.get("anyio_backend")
|
||||||
|
if backend:
|
||||||
|
backend_name, backend_options = extract_backend_and_options(backend)
|
||||||
|
|
||||||
|
if hasattr(pyfuncitem.obj, "hypothesis"):
|
||||||
|
# Wrap the inner test function unless it's already wrapped
|
||||||
|
original_func = pyfuncitem.obj.hypothesis.inner_test
|
||||||
|
if original_func.__qualname__ != run_with_hypothesis.__qualname__:
|
||||||
|
if iscoroutinefunction(original_func):
|
||||||
|
pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
if iscoroutinefunction(pyfuncitem.obj):
|
||||||
|
funcargs = pyfuncitem.funcargs
|
||||||
|
testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames}
|
||||||
|
with get_runner(backend_name, backend_options) as runner:
|
||||||
|
try:
|
||||||
|
runner.run_test(pyfuncitem.obj, testargs)
|
||||||
|
except ExceptionGroup as excgrp:
|
||||||
|
for exc in iterate_exceptions(excgrp):
|
||||||
|
if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)):
|
||||||
|
raise exc from excgrp
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module", params=get_available_backends())
|
||||||
|
def anyio_backend(request: Any) -> Any:
|
||||||
|
return request.param
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def anyio_backend_name(anyio_backend: Any) -> str:
|
||||||
|
if isinstance(anyio_backend, str):
|
||||||
|
return anyio_backend
|
||||||
|
else:
|
||||||
|
return anyio_backend[0]
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]:
|
||||||
|
if isinstance(anyio_backend, str):
|
||||||
|
return {}
|
||||||
|
else:
|
||||||
|
return anyio_backend[1]
|
||||||
|
|
||||||
|
|
||||||
|
class FreePortFactory:
|
||||||
|
"""
|
||||||
|
Manages port generation based on specified socket kind, ensuring no duplicate
|
||||||
|
ports are generated.
|
||||||
|
|
||||||
|
This class provides functionality for generating available free ports on the
|
||||||
|
system. It is initialized with a specific socket kind and can generate ports
|
||||||
|
for given address families while avoiding reuse of previously generated ports.
|
||||||
|
|
||||||
|
Users should not instantiate this class directly, but use the
|
||||||
|
``free_tcp_port_factory`` and ``free_udp_port_factory`` fixtures instead. For simple
|
||||||
|
uses cases, ``free_tcp_port`` and ``free_udp_port`` can be used instead.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, kind: socket.SocketKind) -> None:
|
||||||
|
self._kind = kind
|
||||||
|
self._generated = set[int]()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def kind(self) -> socket.SocketKind:
|
||||||
|
"""
|
||||||
|
The type of socket connection (e.g., :data:`~socket.SOCK_STREAM` or
|
||||||
|
:data:`~socket.SOCK_DGRAM`) used to bind for checking port availability
|
||||||
|
|
||||||
|
"""
|
||||||
|
return self._kind
|
||||||
|
|
||||||
|
def __call__(self, family: socket.AddressFamily | None = None) -> int:
|
||||||
|
"""
|
||||||
|
Return an unbound port for the given address family.
|
||||||
|
|
||||||
|
:param family: if omitted, both IPv4 and IPv6 addresses will be tried
|
||||||
|
:return: a port number
|
||||||
|
|
||||||
|
"""
|
||||||
|
if family is not None:
|
||||||
|
families = [family]
|
||||||
|
else:
|
||||||
|
families = [socket.AF_INET]
|
||||||
|
if socket.has_ipv6:
|
||||||
|
families.append(socket.AF_INET6)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
port = 0
|
||||||
|
with ExitStack() as stack:
|
||||||
|
for family in families:
|
||||||
|
sock = stack.enter_context(socket.socket(family, self._kind))
|
||||||
|
addr = "::1" if family == socket.AF_INET6 else "127.0.0.1"
|
||||||
|
try:
|
||||||
|
sock.bind((addr, port))
|
||||||
|
except OSError:
|
||||||
|
break
|
||||||
|
|
||||||
|
if not port:
|
||||||
|
port = sock.getsockname()[1]
|
||||||
|
else:
|
||||||
|
if port not in self._generated:
|
||||||
|
self._generated.add(port)
|
||||||
|
return port
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def free_tcp_port_factory() -> FreePortFactory:
|
||||||
|
return FreePortFactory(socket.SOCK_STREAM)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="session")
|
||||||
|
def free_udp_port_factory() -> FreePortFactory:
|
||||||
|
return FreePortFactory(socket.SOCK_DGRAM)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def free_tcp_port(free_tcp_port_factory: Callable[[], int]) -> int:
|
||||||
|
return free_tcp_port_factory()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def free_udp_port(free_udp_port_factory: Callable[[], int]) -> int:
|
||||||
|
return free_udp_port_factory()
|
||||||
188
.venv/lib/python3.9/site-packages/anyio/streams/buffered.py
Normal file
188
.venv/lib/python3.9/site-packages/anyio/streams/buffered.py
Normal file
@@ -0,0 +1,188 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"BufferedByteReceiveStream",
|
||||||
|
"BufferedByteStream",
|
||||||
|
"BufferedConnectable",
|
||||||
|
)
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable, Iterable, Mapping
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from typing import Any, SupportsIndex
|
||||||
|
|
||||||
|
from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead
|
||||||
|
from ..abc import (
|
||||||
|
AnyByteReceiveStream,
|
||||||
|
AnyByteStream,
|
||||||
|
AnyByteStreamConnectable,
|
||||||
|
ByteReceiveStream,
|
||||||
|
ByteStream,
|
||||||
|
ByteStreamConnectable,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
from typing import override
|
||||||
|
else:
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class BufferedByteReceiveStream(ByteReceiveStream):
|
||||||
|
"""
|
||||||
|
Wraps any bytes-based receive stream and uses a buffer to provide sophisticated
|
||||||
|
receiving capabilities in the form of a byte stream.
|
||||||
|
"""
|
||||||
|
|
||||||
|
receive_stream: AnyByteReceiveStream
|
||||||
|
_buffer: bytearray = field(init=False, default_factory=bytearray)
|
||||||
|
_closed: bool = field(init=False, default=False)
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.receive_stream.aclose()
|
||||||
|
self._closed = True
|
||||||
|
|
||||||
|
@property
|
||||||
|
def buffer(self) -> bytes:
|
||||||
|
"""The bytes currently in the buffer."""
|
||||||
|
return bytes(self._buffer)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return self.receive_stream.extra_attributes
|
||||||
|
|
||||||
|
def feed_data(self, data: Iterable[SupportsIndex], /) -> None:
|
||||||
|
"""
|
||||||
|
Append data directly into the buffer.
|
||||||
|
|
||||||
|
Any data in the buffer will be consumed by receive operations before receiving
|
||||||
|
anything from the wrapped stream.
|
||||||
|
|
||||||
|
:param data: the data to append to the buffer (can be bytes or anything else
|
||||||
|
that supports ``__index__()``)
|
||||||
|
|
||||||
|
"""
|
||||||
|
self._buffer.extend(data)
|
||||||
|
|
||||||
|
async def receive(self, max_bytes: int = 65536) -> bytes:
|
||||||
|
if self._closed:
|
||||||
|
raise ClosedResourceError
|
||||||
|
|
||||||
|
if self._buffer:
|
||||||
|
chunk = bytes(self._buffer[:max_bytes])
|
||||||
|
del self._buffer[:max_bytes]
|
||||||
|
return chunk
|
||||||
|
elif isinstance(self.receive_stream, ByteReceiveStream):
|
||||||
|
return await self.receive_stream.receive(max_bytes)
|
||||||
|
else:
|
||||||
|
# With a bytes-oriented object stream, we need to handle any surplus bytes
|
||||||
|
# we get from the receive() call
|
||||||
|
chunk = await self.receive_stream.receive()
|
||||||
|
if len(chunk) > max_bytes:
|
||||||
|
# Save the surplus bytes in the buffer
|
||||||
|
self._buffer.extend(chunk[max_bytes:])
|
||||||
|
return chunk[:max_bytes]
|
||||||
|
else:
|
||||||
|
return chunk
|
||||||
|
|
||||||
|
async def receive_exactly(self, nbytes: int) -> bytes:
|
||||||
|
"""
|
||||||
|
Read exactly the given amount of bytes from the stream.
|
||||||
|
|
||||||
|
:param nbytes: the number of bytes to read
|
||||||
|
:return: the bytes read
|
||||||
|
:raises ~anyio.IncompleteRead: if the stream was closed before the requested
|
||||||
|
amount of bytes could be read from the stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
while True:
|
||||||
|
remaining = nbytes - len(self._buffer)
|
||||||
|
if remaining <= 0:
|
||||||
|
retval = self._buffer[:nbytes]
|
||||||
|
del self._buffer[:nbytes]
|
||||||
|
return bytes(retval)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if isinstance(self.receive_stream, ByteReceiveStream):
|
||||||
|
chunk = await self.receive_stream.receive(remaining)
|
||||||
|
else:
|
||||||
|
chunk = await self.receive_stream.receive()
|
||||||
|
except EndOfStream as exc:
|
||||||
|
raise IncompleteRead from exc
|
||||||
|
|
||||||
|
self._buffer.extend(chunk)
|
||||||
|
|
||||||
|
async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
|
||||||
|
"""
|
||||||
|
Read from the stream until the delimiter is found or max_bytes have been read.
|
||||||
|
|
||||||
|
:param delimiter: the marker to look for in the stream
|
||||||
|
:param max_bytes: maximum number of bytes that will be read before raising
|
||||||
|
:exc:`~anyio.DelimiterNotFound`
|
||||||
|
:return: the bytes read (not including the delimiter)
|
||||||
|
:raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
|
||||||
|
was found
|
||||||
|
:raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
|
||||||
|
bytes read up to the maximum allowed
|
||||||
|
|
||||||
|
"""
|
||||||
|
delimiter_size = len(delimiter)
|
||||||
|
offset = 0
|
||||||
|
while True:
|
||||||
|
# Check if the delimiter can be found in the current buffer
|
||||||
|
index = self._buffer.find(delimiter, offset)
|
||||||
|
if index >= 0:
|
||||||
|
found = self._buffer[:index]
|
||||||
|
del self._buffer[: index + len(delimiter) :]
|
||||||
|
return bytes(found)
|
||||||
|
|
||||||
|
# Check if the buffer is already at or over the limit
|
||||||
|
if len(self._buffer) >= max_bytes:
|
||||||
|
raise DelimiterNotFound(max_bytes)
|
||||||
|
|
||||||
|
# Read more data into the buffer from the socket
|
||||||
|
try:
|
||||||
|
data = await self.receive_stream.receive()
|
||||||
|
except EndOfStream as exc:
|
||||||
|
raise IncompleteRead from exc
|
||||||
|
|
||||||
|
# Move the offset forward and add the new data to the buffer
|
||||||
|
offset = max(len(self._buffer) - delimiter_size + 1, 0)
|
||||||
|
self._buffer.extend(data)
|
||||||
|
|
||||||
|
|
||||||
|
class BufferedByteStream(BufferedByteReceiveStream, ByteStream):
|
||||||
|
"""
|
||||||
|
A full-duplex variant of :class:`BufferedByteReceiveStream`. All writes are passed
|
||||||
|
through to the wrapped stream as-is.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, stream: AnyByteStream):
|
||||||
|
"""
|
||||||
|
:param stream: the stream to be wrapped
|
||||||
|
|
||||||
|
"""
|
||||||
|
super().__init__(stream)
|
||||||
|
self._stream = stream
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
await self._stream.send_eof()
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def send(self, item: bytes) -> None:
|
||||||
|
await self._stream.send(item)
|
||||||
|
|
||||||
|
|
||||||
|
class BufferedConnectable(ByteStreamConnectable):
|
||||||
|
def __init__(self, connectable: AnyByteStreamConnectable):
|
||||||
|
"""
|
||||||
|
:param connectable: the connectable to wrap
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.connectable = connectable
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def connect(self) -> BufferedByteStream:
|
||||||
|
stream = await self.connectable.connect()
|
||||||
|
return BufferedByteStream(stream)
|
||||||
154
.venv/lib/python3.9/site-packages/anyio/streams/file.py
Normal file
154
.venv/lib/python3.9/site-packages/anyio/streams/file.py
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"FileReadStream",
|
||||||
|
"FileStreamAttribute",
|
||||||
|
"FileWriteStream",
|
||||||
|
)
|
||||||
|
|
||||||
|
from collections.abc import Callable, Mapping
|
||||||
|
from io import SEEK_SET, UnsupportedOperation
|
||||||
|
from os import PathLike
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, BinaryIO, cast
|
||||||
|
|
||||||
|
from .. import (
|
||||||
|
BrokenResourceError,
|
||||||
|
ClosedResourceError,
|
||||||
|
EndOfStream,
|
||||||
|
TypedAttributeSet,
|
||||||
|
to_thread,
|
||||||
|
typed_attribute,
|
||||||
|
)
|
||||||
|
from ..abc import ByteReceiveStream, ByteSendStream
|
||||||
|
|
||||||
|
|
||||||
|
class FileStreamAttribute(TypedAttributeSet):
|
||||||
|
#: the open file descriptor
|
||||||
|
file: BinaryIO = typed_attribute()
|
||||||
|
#: the path of the file on the file system, if available (file must be a real file)
|
||||||
|
path: Path = typed_attribute()
|
||||||
|
#: the file number, if available (file must be a real file or a TTY)
|
||||||
|
fileno: int = typed_attribute()
|
||||||
|
|
||||||
|
|
||||||
|
class _BaseFileStream:
|
||||||
|
def __init__(self, file: BinaryIO):
|
||||||
|
self._file = file
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await to_thread.run_sync(self._file.close)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
attributes: dict[Any, Callable[[], Any]] = {
|
||||||
|
FileStreamAttribute.file: lambda: self._file,
|
||||||
|
}
|
||||||
|
|
||||||
|
if hasattr(self._file, "name"):
|
||||||
|
attributes[FileStreamAttribute.path] = lambda: Path(self._file.name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._file.fileno()
|
||||||
|
except UnsupportedOperation:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno()
|
||||||
|
|
||||||
|
return attributes
|
||||||
|
|
||||||
|
|
||||||
|
class FileReadStream(_BaseFileStream, ByteReceiveStream):
|
||||||
|
"""
|
||||||
|
A byte stream that reads from a file in the file system.
|
||||||
|
|
||||||
|
:param file: a file that has been opened for reading in binary mode
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_path(cls, path: str | PathLike[str]) -> FileReadStream:
|
||||||
|
"""
|
||||||
|
Create a file read stream by opening the given file.
|
||||||
|
|
||||||
|
:param path: path of the file to read from
|
||||||
|
|
||||||
|
"""
|
||||||
|
file = await to_thread.run_sync(Path(path).open, "rb")
|
||||||
|
return cls(cast(BinaryIO, file))
|
||||||
|
|
||||||
|
async def receive(self, max_bytes: int = 65536) -> bytes:
|
||||||
|
try:
|
||||||
|
data = await to_thread.run_sync(self._file.read, max_bytes)
|
||||||
|
except ValueError:
|
||||||
|
raise ClosedResourceError from None
|
||||||
|
except OSError as exc:
|
||||||
|
raise BrokenResourceError from exc
|
||||||
|
|
||||||
|
if data:
|
||||||
|
return data
|
||||||
|
else:
|
||||||
|
raise EndOfStream
|
||||||
|
|
||||||
|
async def seek(self, position: int, whence: int = SEEK_SET) -> int:
|
||||||
|
"""
|
||||||
|
Seek the file to the given position.
|
||||||
|
|
||||||
|
.. seealso:: :meth:`io.IOBase.seek`
|
||||||
|
|
||||||
|
.. note:: Not all file descriptors are seekable.
|
||||||
|
|
||||||
|
:param position: position to seek the file to
|
||||||
|
:param whence: controls how ``position`` is interpreted
|
||||||
|
:return: the new absolute position
|
||||||
|
:raises OSError: if the file is not seekable
|
||||||
|
|
||||||
|
"""
|
||||||
|
return await to_thread.run_sync(self._file.seek, position, whence)
|
||||||
|
|
||||||
|
async def tell(self) -> int:
|
||||||
|
"""
|
||||||
|
Return the current stream position.
|
||||||
|
|
||||||
|
.. note:: Not all file descriptors are seekable.
|
||||||
|
|
||||||
|
:return: the current absolute position
|
||||||
|
:raises OSError: if the file is not seekable
|
||||||
|
|
||||||
|
"""
|
||||||
|
return await to_thread.run_sync(self._file.tell)
|
||||||
|
|
||||||
|
|
||||||
|
class FileWriteStream(_BaseFileStream, ByteSendStream):
|
||||||
|
"""
|
||||||
|
A byte stream that writes to a file in the file system.
|
||||||
|
|
||||||
|
:param file: a file that has been opened for writing in binary mode
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def from_path(
|
||||||
|
cls, path: str | PathLike[str], append: bool = False
|
||||||
|
) -> FileWriteStream:
|
||||||
|
"""
|
||||||
|
Create a file write stream by opening the given file for writing.
|
||||||
|
|
||||||
|
:param path: path of the file to write to
|
||||||
|
:param append: if ``True``, open the file for appending; if ``False``, any
|
||||||
|
existing file at the given path will be truncated
|
||||||
|
|
||||||
|
"""
|
||||||
|
mode = "ab" if append else "wb"
|
||||||
|
file = await to_thread.run_sync(Path(path).open, mode)
|
||||||
|
return cls(cast(BinaryIO, file))
|
||||||
|
|
||||||
|
async def send(self, item: bytes) -> None:
|
||||||
|
try:
|
||||||
|
await to_thread.run_sync(self._file.write, item)
|
||||||
|
except ValueError:
|
||||||
|
raise ClosedResourceError from None
|
||||||
|
except OSError as exc:
|
||||||
|
raise BrokenResourceError from exc
|
||||||
325
.venv/lib/python3.9/site-packages/anyio/streams/memory.py
Normal file
325
.venv/lib/python3.9/site-packages/anyio/streams/memory.py
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"MemoryObjectReceiveStream",
|
||||||
|
"MemoryObjectSendStream",
|
||||||
|
"MemoryObjectStreamStatistics",
|
||||||
|
)
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
from collections import OrderedDict, deque
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from types import TracebackType
|
||||||
|
from typing import Generic, NamedTuple, TypeVar
|
||||||
|
|
||||||
|
from .. import (
|
||||||
|
BrokenResourceError,
|
||||||
|
ClosedResourceError,
|
||||||
|
EndOfStream,
|
||||||
|
WouldBlock,
|
||||||
|
)
|
||||||
|
from .._core._testing import TaskInfo, get_current_task
|
||||||
|
from ..abc import Event, ObjectReceiveStream, ObjectSendStream
|
||||||
|
from ..lowlevel import checkpoint
|
||||||
|
|
||||||
|
T_Item = TypeVar("T_Item")
|
||||||
|
T_co = TypeVar("T_co", covariant=True)
|
||||||
|
T_contra = TypeVar("T_contra", contravariant=True)
|
||||||
|
|
||||||
|
|
||||||
|
class MemoryObjectStreamStatistics(NamedTuple):
|
||||||
|
current_buffer_used: int #: number of items stored in the buffer
|
||||||
|
#: maximum number of items that can be stored on this stream (or :data:`math.inf`)
|
||||||
|
max_buffer_size: float
|
||||||
|
open_send_streams: int #: number of unclosed clones of the send stream
|
||||||
|
open_receive_streams: int #: number of unclosed clones of the receive stream
|
||||||
|
#: number of tasks blocked on :meth:`MemoryObjectSendStream.send`
|
||||||
|
tasks_waiting_send: int
|
||||||
|
#: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive`
|
||||||
|
tasks_waiting_receive: int
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class _MemoryObjectItemReceiver(Generic[T_Item]):
|
||||||
|
task_info: TaskInfo = field(init=False, default_factory=get_current_task)
|
||||||
|
item: T_Item = field(init=False)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
# When item is not defined, we get following error with default __repr__:
|
||||||
|
# AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item'
|
||||||
|
item = getattr(self, "item", None)
|
||||||
|
return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class _MemoryObjectStreamState(Generic[T_Item]):
|
||||||
|
max_buffer_size: float = field()
|
||||||
|
buffer: deque[T_Item] = field(init=False, default_factory=deque)
|
||||||
|
open_send_channels: int = field(init=False, default=0)
|
||||||
|
open_receive_channels: int = field(init=False, default=0)
|
||||||
|
waiting_receivers: OrderedDict[Event, _MemoryObjectItemReceiver[T_Item]] = field(
|
||||||
|
init=False, default_factory=OrderedDict
|
||||||
|
)
|
||||||
|
waiting_senders: OrderedDict[Event, T_Item] = field(
|
||||||
|
init=False, default_factory=OrderedDict
|
||||||
|
)
|
||||||
|
|
||||||
|
def statistics(self) -> MemoryObjectStreamStatistics:
|
||||||
|
return MemoryObjectStreamStatistics(
|
||||||
|
len(self.buffer),
|
||||||
|
self.max_buffer_size,
|
||||||
|
self.open_send_channels,
|
||||||
|
self.open_receive_channels,
|
||||||
|
len(self.waiting_senders),
|
||||||
|
len(self.waiting_receivers),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]):
|
||||||
|
_state: _MemoryObjectStreamState[T_co]
|
||||||
|
_closed: bool = field(init=False, default=False)
|
||||||
|
|
||||||
|
def __post_init__(self) -> None:
|
||||||
|
self._state.open_receive_channels += 1
|
||||||
|
|
||||||
|
def receive_nowait(self) -> T_co:
|
||||||
|
"""
|
||||||
|
Receive the next item if it can be done without waiting.
|
||||||
|
|
||||||
|
:return: the received item
|
||||||
|
:raises ~anyio.ClosedResourceError: if this send stream has been closed
|
||||||
|
:raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
|
||||||
|
closed from the sending end
|
||||||
|
:raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
|
||||||
|
waiting to send
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self._closed:
|
||||||
|
raise ClosedResourceError
|
||||||
|
|
||||||
|
if self._state.waiting_senders:
|
||||||
|
# Get the item from the next sender
|
||||||
|
send_event, item = self._state.waiting_senders.popitem(last=False)
|
||||||
|
self._state.buffer.append(item)
|
||||||
|
send_event.set()
|
||||||
|
|
||||||
|
if self._state.buffer:
|
||||||
|
return self._state.buffer.popleft()
|
||||||
|
elif not self._state.open_send_channels:
|
||||||
|
raise EndOfStream
|
||||||
|
|
||||||
|
raise WouldBlock
|
||||||
|
|
||||||
|
async def receive(self) -> T_co:
|
||||||
|
await checkpoint()
|
||||||
|
try:
|
||||||
|
return self.receive_nowait()
|
||||||
|
except WouldBlock:
|
||||||
|
# Add ourselves in the queue
|
||||||
|
receive_event = Event()
|
||||||
|
receiver = _MemoryObjectItemReceiver[T_co]()
|
||||||
|
self._state.waiting_receivers[receive_event] = receiver
|
||||||
|
|
||||||
|
try:
|
||||||
|
await receive_event.wait()
|
||||||
|
finally:
|
||||||
|
self._state.waiting_receivers.pop(receive_event, None)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return receiver.item
|
||||||
|
except AttributeError:
|
||||||
|
raise EndOfStream from None
|
||||||
|
|
||||||
|
def clone(self) -> MemoryObjectReceiveStream[T_co]:
|
||||||
|
"""
|
||||||
|
Create a clone of this receive stream.
|
||||||
|
|
||||||
|
Each clone can be closed separately. Only when all clones have been closed will
|
||||||
|
the receiving end of the memory stream be considered closed by the sending ends.
|
||||||
|
|
||||||
|
:return: the cloned stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self._closed:
|
||||||
|
raise ClosedResourceError
|
||||||
|
|
||||||
|
return MemoryObjectReceiveStream(_state=self._state)
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
"""
|
||||||
|
Close the stream.
|
||||||
|
|
||||||
|
This works the exact same way as :meth:`aclose`, but is provided as a special
|
||||||
|
case for the benefit of synchronous callbacks.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not self._closed:
|
||||||
|
self._closed = True
|
||||||
|
self._state.open_receive_channels -= 1
|
||||||
|
if self._state.open_receive_channels == 0:
|
||||||
|
send_events = list(self._state.waiting_senders.keys())
|
||||||
|
for event in send_events:
|
||||||
|
event.set()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def statistics(self) -> MemoryObjectStreamStatistics:
|
||||||
|
"""
|
||||||
|
Return statistics about the current state of this stream.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
return self._state.statistics()
|
||||||
|
|
||||||
|
def __enter__(self) -> MemoryObjectReceiveStream[T_co]:
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def __del__(self) -> None:
|
||||||
|
if not self._closed:
|
||||||
|
warnings.warn(
|
||||||
|
f"Unclosed <{self.__class__.__name__} at {id(self):x}>",
|
||||||
|
ResourceWarning,
|
||||||
|
stacklevel=1,
|
||||||
|
source=self,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
|
||||||
|
_state: _MemoryObjectStreamState[T_contra]
|
||||||
|
_closed: bool = field(init=False, default=False)
|
||||||
|
|
||||||
|
def __post_init__(self) -> None:
|
||||||
|
self._state.open_send_channels += 1
|
||||||
|
|
||||||
|
def send_nowait(self, item: T_contra) -> None:
|
||||||
|
"""
|
||||||
|
Send an item immediately if it can be done without waiting.
|
||||||
|
|
||||||
|
:param item: the item to send
|
||||||
|
:raises ~anyio.ClosedResourceError: if this send stream has been closed
|
||||||
|
:raises ~anyio.BrokenResourceError: if the stream has been closed from the
|
||||||
|
receiving end
|
||||||
|
:raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
|
||||||
|
to receive
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self._closed:
|
||||||
|
raise ClosedResourceError
|
||||||
|
if not self._state.open_receive_channels:
|
||||||
|
raise BrokenResourceError
|
||||||
|
|
||||||
|
while self._state.waiting_receivers:
|
||||||
|
receive_event, receiver = self._state.waiting_receivers.popitem(last=False)
|
||||||
|
if not receiver.task_info.has_pending_cancellation():
|
||||||
|
receiver.item = item
|
||||||
|
receive_event.set()
|
||||||
|
return
|
||||||
|
|
||||||
|
if len(self._state.buffer) < self._state.max_buffer_size:
|
||||||
|
self._state.buffer.append(item)
|
||||||
|
else:
|
||||||
|
raise WouldBlock
|
||||||
|
|
||||||
|
async def send(self, item: T_contra) -> None:
|
||||||
|
"""
|
||||||
|
Send an item to the stream.
|
||||||
|
|
||||||
|
If the buffer is full, this method blocks until there is again room in the
|
||||||
|
buffer or the item can be sent directly to a receiver.
|
||||||
|
|
||||||
|
:param item: the item to send
|
||||||
|
:raises ~anyio.ClosedResourceError: if this send stream has been closed
|
||||||
|
:raises ~anyio.BrokenResourceError: if the stream has been closed from the
|
||||||
|
receiving end
|
||||||
|
|
||||||
|
"""
|
||||||
|
await checkpoint()
|
||||||
|
try:
|
||||||
|
self.send_nowait(item)
|
||||||
|
except WouldBlock:
|
||||||
|
# Wait until there's someone on the receiving end
|
||||||
|
send_event = Event()
|
||||||
|
self._state.waiting_senders[send_event] = item
|
||||||
|
try:
|
||||||
|
await send_event.wait()
|
||||||
|
except BaseException:
|
||||||
|
self._state.waiting_senders.pop(send_event, None)
|
||||||
|
raise
|
||||||
|
|
||||||
|
if send_event in self._state.waiting_senders:
|
||||||
|
del self._state.waiting_senders[send_event]
|
||||||
|
raise BrokenResourceError from None
|
||||||
|
|
||||||
|
def clone(self) -> MemoryObjectSendStream[T_contra]:
|
||||||
|
"""
|
||||||
|
Create a clone of this send stream.
|
||||||
|
|
||||||
|
Each clone can be closed separately. Only when all clones have been closed will
|
||||||
|
the sending end of the memory stream be considered closed by the receiving ends.
|
||||||
|
|
||||||
|
:return: the cloned stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
if self._closed:
|
||||||
|
raise ClosedResourceError
|
||||||
|
|
||||||
|
return MemoryObjectSendStream(_state=self._state)
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
"""
|
||||||
|
Close the stream.
|
||||||
|
|
||||||
|
This works the exact same way as :meth:`aclose`, but is provided as a special
|
||||||
|
case for the benefit of synchronous callbacks.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not self._closed:
|
||||||
|
self._closed = True
|
||||||
|
self._state.open_send_channels -= 1
|
||||||
|
if self._state.open_send_channels == 0:
|
||||||
|
receive_events = list(self._state.waiting_receivers.keys())
|
||||||
|
self._state.waiting_receivers.clear()
|
||||||
|
for event in receive_events:
|
||||||
|
event.set()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def statistics(self) -> MemoryObjectStreamStatistics:
|
||||||
|
"""
|
||||||
|
Return statistics about the current state of this stream.
|
||||||
|
|
||||||
|
.. versionadded:: 3.0
|
||||||
|
"""
|
||||||
|
return self._state.statistics()
|
||||||
|
|
||||||
|
def __enter__(self) -> MemoryObjectSendStream[T_contra]:
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: type[BaseException] | None,
|
||||||
|
exc_val: BaseException | None,
|
||||||
|
exc_tb: TracebackType | None,
|
||||||
|
) -> None:
|
||||||
|
self.close()
|
||||||
|
|
||||||
|
def __del__(self) -> None:
|
||||||
|
if not self._closed:
|
||||||
|
warnings.warn(
|
||||||
|
f"Unclosed <{self.__class__.__name__} at {id(self):x}>",
|
||||||
|
ResourceWarning,
|
||||||
|
stacklevel=1,
|
||||||
|
source=self,
|
||||||
|
)
|
||||||
147
.venv/lib/python3.9/site-packages/anyio/streams/stapled.py
Normal file
147
.venv/lib/python3.9/site-packages/anyio/streams/stapled.py
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"MultiListener",
|
||||||
|
"StapledByteStream",
|
||||||
|
"StapledObjectStream",
|
||||||
|
)
|
||||||
|
|
||||||
|
from collections.abc import Callable, Mapping, Sequence
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Any, Generic, TypeVar
|
||||||
|
|
||||||
|
from ..abc import (
|
||||||
|
ByteReceiveStream,
|
||||||
|
ByteSendStream,
|
||||||
|
ByteStream,
|
||||||
|
Listener,
|
||||||
|
ObjectReceiveStream,
|
||||||
|
ObjectSendStream,
|
||||||
|
ObjectStream,
|
||||||
|
TaskGroup,
|
||||||
|
)
|
||||||
|
|
||||||
|
T_Item = TypeVar("T_Item")
|
||||||
|
T_Stream = TypeVar("T_Stream")
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class StapledByteStream(ByteStream):
|
||||||
|
"""
|
||||||
|
Combines two byte streams into a single, bidirectional byte stream.
|
||||||
|
|
||||||
|
Extra attributes will be provided from both streams, with the receive stream
|
||||||
|
providing the values in case of a conflict.
|
||||||
|
|
||||||
|
:param ByteSendStream send_stream: the sending byte stream
|
||||||
|
:param ByteReceiveStream receive_stream: the receiving byte stream
|
||||||
|
"""
|
||||||
|
|
||||||
|
send_stream: ByteSendStream
|
||||||
|
receive_stream: ByteReceiveStream
|
||||||
|
|
||||||
|
async def receive(self, max_bytes: int = 65536) -> bytes:
|
||||||
|
return await self.receive_stream.receive(max_bytes)
|
||||||
|
|
||||||
|
async def send(self, item: bytes) -> None:
|
||||||
|
await self.send_stream.send(item)
|
||||||
|
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
await self.send_stream.aclose()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.send_stream.aclose()
|
||||||
|
await self.receive_stream.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return {
|
||||||
|
**self.send_stream.extra_attributes,
|
||||||
|
**self.receive_stream.extra_attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
|
||||||
|
"""
|
||||||
|
Combines two object streams into a single, bidirectional object stream.
|
||||||
|
|
||||||
|
Extra attributes will be provided from both streams, with the receive stream
|
||||||
|
providing the values in case of a conflict.
|
||||||
|
|
||||||
|
:param ObjectSendStream send_stream: the sending object stream
|
||||||
|
:param ObjectReceiveStream receive_stream: the receiving object stream
|
||||||
|
"""
|
||||||
|
|
||||||
|
send_stream: ObjectSendStream[T_Item]
|
||||||
|
receive_stream: ObjectReceiveStream[T_Item]
|
||||||
|
|
||||||
|
async def receive(self) -> T_Item:
|
||||||
|
return await self.receive_stream.receive()
|
||||||
|
|
||||||
|
async def send(self, item: T_Item) -> None:
|
||||||
|
await self.send_stream.send(item)
|
||||||
|
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
await self.send_stream.aclose()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.send_stream.aclose()
|
||||||
|
await self.receive_stream.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return {
|
||||||
|
**self.send_stream.extra_attributes,
|
||||||
|
**self.receive_stream.extra_attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class MultiListener(Generic[T_Stream], Listener[T_Stream]):
|
||||||
|
"""
|
||||||
|
Combines multiple listeners into one, serving connections from all of them at once.
|
||||||
|
|
||||||
|
Any MultiListeners in the given collection of listeners will have their listeners
|
||||||
|
moved into this one.
|
||||||
|
|
||||||
|
Extra attributes are provided from each listener, with each successive listener
|
||||||
|
overriding any conflicting attributes from the previous one.
|
||||||
|
|
||||||
|
:param listeners: listeners to serve
|
||||||
|
:type listeners: Sequence[Listener[T_Stream]]
|
||||||
|
"""
|
||||||
|
|
||||||
|
listeners: Sequence[Listener[T_Stream]]
|
||||||
|
|
||||||
|
def __post_init__(self) -> None:
|
||||||
|
listeners: list[Listener[T_Stream]] = []
|
||||||
|
for listener in self.listeners:
|
||||||
|
if isinstance(listener, MultiListener):
|
||||||
|
listeners.extend(listener.listeners)
|
||||||
|
del listener.listeners[:] # type: ignore[attr-defined]
|
||||||
|
else:
|
||||||
|
listeners.append(listener)
|
||||||
|
|
||||||
|
self.listeners = listeners
|
||||||
|
|
||||||
|
async def serve(
|
||||||
|
self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None
|
||||||
|
) -> None:
|
||||||
|
from .. import create_task_group
|
||||||
|
|
||||||
|
async with create_task_group() as tg:
|
||||||
|
for listener in self.listeners:
|
||||||
|
tg.start_soon(listener.serve, handler, task_group)
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
for listener in self.listeners:
|
||||||
|
await listener.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
attributes: dict = {}
|
||||||
|
for listener in self.listeners:
|
||||||
|
attributes.update(listener.extra_attributes)
|
||||||
|
|
||||||
|
return attributes
|
||||||
176
.venv/lib/python3.9/site-packages/anyio/streams/text.py
Normal file
176
.venv/lib/python3.9/site-packages/anyio/streams/text.py
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"TextConnectable",
|
||||||
|
"TextReceiveStream",
|
||||||
|
"TextSendStream",
|
||||||
|
"TextStream",
|
||||||
|
)
|
||||||
|
|
||||||
|
import codecs
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable, Mapping
|
||||||
|
from dataclasses import InitVar, dataclass, field
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from ..abc import (
|
||||||
|
AnyByteReceiveStream,
|
||||||
|
AnyByteSendStream,
|
||||||
|
AnyByteStream,
|
||||||
|
AnyByteStreamConnectable,
|
||||||
|
ObjectReceiveStream,
|
||||||
|
ObjectSendStream,
|
||||||
|
ObjectStream,
|
||||||
|
ObjectStreamConnectable,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
from typing import override
|
||||||
|
else:
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TextReceiveStream(ObjectReceiveStream[str]):
|
||||||
|
"""
|
||||||
|
Stream wrapper that decodes bytes to strings using the given encoding.
|
||||||
|
|
||||||
|
Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any
|
||||||
|
completely received unicode characters as soon as they come in.
|
||||||
|
|
||||||
|
:param transport_stream: any bytes-based receive stream
|
||||||
|
:param encoding: character encoding to use for decoding bytes to strings (defaults
|
||||||
|
to ``utf-8``)
|
||||||
|
:param errors: handling scheme for decoding errors (defaults to ``strict``; see the
|
||||||
|
`codecs module documentation`_ for a comprehensive list of options)
|
||||||
|
|
||||||
|
.. _codecs module documentation:
|
||||||
|
https://docs.python.org/3/library/codecs.html#codec-objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
transport_stream: AnyByteReceiveStream
|
||||||
|
encoding: InitVar[str] = "utf-8"
|
||||||
|
errors: InitVar[str] = "strict"
|
||||||
|
_decoder: codecs.IncrementalDecoder = field(init=False)
|
||||||
|
|
||||||
|
def __post_init__(self, encoding: str, errors: str) -> None:
|
||||||
|
decoder_class = codecs.getincrementaldecoder(encoding)
|
||||||
|
self._decoder = decoder_class(errors=errors)
|
||||||
|
|
||||||
|
async def receive(self) -> str:
|
||||||
|
while True:
|
||||||
|
chunk = await self.transport_stream.receive()
|
||||||
|
decoded = self._decoder.decode(chunk)
|
||||||
|
if decoded:
|
||||||
|
return decoded
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.transport_stream.aclose()
|
||||||
|
self._decoder.reset()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return self.transport_stream.extra_attributes
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TextSendStream(ObjectSendStream[str]):
|
||||||
|
"""
|
||||||
|
Sends strings to the wrapped stream as bytes using the given encoding.
|
||||||
|
|
||||||
|
:param AnyByteSendStream transport_stream: any bytes-based send stream
|
||||||
|
:param str encoding: character encoding to use for encoding strings to bytes
|
||||||
|
(defaults to ``utf-8``)
|
||||||
|
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see
|
||||||
|
the `codecs module documentation`_ for a comprehensive list of options)
|
||||||
|
|
||||||
|
.. _codecs module documentation:
|
||||||
|
https://docs.python.org/3/library/codecs.html#codec-objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
transport_stream: AnyByteSendStream
|
||||||
|
encoding: InitVar[str] = "utf-8"
|
||||||
|
errors: str = "strict"
|
||||||
|
_encoder: Callable[..., tuple[bytes, int]] = field(init=False)
|
||||||
|
|
||||||
|
def __post_init__(self, encoding: str) -> None:
|
||||||
|
self._encoder = codecs.getencoder(encoding)
|
||||||
|
|
||||||
|
async def send(self, item: str) -> None:
|
||||||
|
encoded = self._encoder(item, self.errors)[0]
|
||||||
|
await self.transport_stream.send(encoded)
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.transport_stream.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return self.transport_stream.extra_attributes
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TextStream(ObjectStream[str]):
|
||||||
|
"""
|
||||||
|
A bidirectional stream that decodes bytes to strings on receive and encodes strings
|
||||||
|
to bytes on send.
|
||||||
|
|
||||||
|
Extra attributes will be provided from both streams, with the receive stream
|
||||||
|
providing the values in case of a conflict.
|
||||||
|
|
||||||
|
:param AnyByteStream transport_stream: any bytes-based stream
|
||||||
|
:param str encoding: character encoding to use for encoding/decoding strings to/from
|
||||||
|
bytes (defaults to ``utf-8``)
|
||||||
|
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see
|
||||||
|
the `codecs module documentation`_ for a comprehensive list of options)
|
||||||
|
|
||||||
|
.. _codecs module documentation:
|
||||||
|
https://docs.python.org/3/library/codecs.html#codec-objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
transport_stream: AnyByteStream
|
||||||
|
encoding: InitVar[str] = "utf-8"
|
||||||
|
errors: InitVar[str] = "strict"
|
||||||
|
_receive_stream: TextReceiveStream = field(init=False)
|
||||||
|
_send_stream: TextSendStream = field(init=False)
|
||||||
|
|
||||||
|
def __post_init__(self, encoding: str, errors: str) -> None:
|
||||||
|
self._receive_stream = TextReceiveStream(
|
||||||
|
self.transport_stream, encoding=encoding, errors=errors
|
||||||
|
)
|
||||||
|
self._send_stream = TextSendStream(
|
||||||
|
self.transport_stream, encoding=encoding, errors=errors
|
||||||
|
)
|
||||||
|
|
||||||
|
async def receive(self) -> str:
|
||||||
|
return await self._receive_stream.receive()
|
||||||
|
|
||||||
|
async def send(self, item: str) -> None:
|
||||||
|
await self._send_stream.send(item)
|
||||||
|
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
await self.transport_stream.send_eof()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self._send_stream.aclose()
|
||||||
|
await self._receive_stream.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return {
|
||||||
|
**self._send_stream.extra_attributes,
|
||||||
|
**self._receive_stream.extra_attributes,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TextConnectable(ObjectStreamConnectable[str]):
|
||||||
|
def __init__(self, connectable: AnyByteStreamConnectable):
|
||||||
|
"""
|
||||||
|
:param connectable: the bytestream endpoint to wrap
|
||||||
|
|
||||||
|
"""
|
||||||
|
self.connectable = connectable
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def connect(self) -> TextStream:
|
||||||
|
stream = await self.connectable.connect()
|
||||||
|
return TextStream(stream)
|
||||||
424
.venv/lib/python3.9/site-packages/anyio/streams/tls.py
Normal file
424
.venv/lib/python3.9/site-packages/anyio/streams/tls.py
Normal file
@@ -0,0 +1,424 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"TLSAttribute",
|
||||||
|
"TLSConnectable",
|
||||||
|
"TLSListener",
|
||||||
|
"TLSStream",
|
||||||
|
)
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
import ssl
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable, Mapping
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from functools import wraps
|
||||||
|
from ssl import SSLContext
|
||||||
|
from typing import Any, TypeVar
|
||||||
|
|
||||||
|
from .. import (
|
||||||
|
BrokenResourceError,
|
||||||
|
EndOfStream,
|
||||||
|
aclose_forcefully,
|
||||||
|
get_cancelled_exc_class,
|
||||||
|
to_thread,
|
||||||
|
)
|
||||||
|
from .._core._typedattr import TypedAttributeSet, typed_attribute
|
||||||
|
from ..abc import (
|
||||||
|
AnyByteStream,
|
||||||
|
AnyByteStreamConnectable,
|
||||||
|
ByteStream,
|
||||||
|
ByteStreamConnectable,
|
||||||
|
Listener,
|
||||||
|
TaskGroup,
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 10):
|
||||||
|
from typing import TypeAlias
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeAlias
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 12):
|
||||||
|
from typing import override
|
||||||
|
else:
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
_PCTRTT: TypeAlias = tuple[tuple[str, str], ...]
|
||||||
|
_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...]
|
||||||
|
|
||||||
|
|
||||||
|
class TLSAttribute(TypedAttributeSet):
|
||||||
|
"""Contains Transport Layer Security related attributes."""
|
||||||
|
|
||||||
|
#: the selected ALPN protocol
|
||||||
|
alpn_protocol: str | None = typed_attribute()
|
||||||
|
#: the channel binding for type ``tls-unique``
|
||||||
|
channel_binding_tls_unique: bytes = typed_attribute()
|
||||||
|
#: the selected cipher
|
||||||
|
cipher: tuple[str, str, int] = typed_attribute()
|
||||||
|
#: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert`
|
||||||
|
# for more information)
|
||||||
|
peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute()
|
||||||
|
#: the peer certificate in binary form
|
||||||
|
peer_certificate_binary: bytes | None = typed_attribute()
|
||||||
|
#: ``True`` if this is the server side of the connection
|
||||||
|
server_side: bool = typed_attribute()
|
||||||
|
#: ciphers shared by the client during the TLS handshake (``None`` if this is the
|
||||||
|
#: client side)
|
||||||
|
shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute()
|
||||||
|
#: the :class:`~ssl.SSLObject` used for encryption
|
||||||
|
ssl_object: ssl.SSLObject = typed_attribute()
|
||||||
|
#: ``True`` if this stream does (and expects) a closing TLS handshake when the
|
||||||
|
#: stream is being closed
|
||||||
|
standard_compatible: bool = typed_attribute()
|
||||||
|
#: the TLS protocol version (e.g. ``TLSv1.2``)
|
||||||
|
tls_version: str = typed_attribute()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TLSStream(ByteStream):
|
||||||
|
"""
|
||||||
|
A stream wrapper that encrypts all sent data and decrypts received data.
|
||||||
|
|
||||||
|
This class has no public initializer; use :meth:`wrap` instead.
|
||||||
|
All extra attributes from :class:`~TLSAttribute` are supported.
|
||||||
|
|
||||||
|
:var AnyByteStream transport_stream: the wrapped stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
transport_stream: AnyByteStream
|
||||||
|
standard_compatible: bool
|
||||||
|
_ssl_object: ssl.SSLObject
|
||||||
|
_read_bio: ssl.MemoryBIO
|
||||||
|
_write_bio: ssl.MemoryBIO
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def wrap(
|
||||||
|
cls,
|
||||||
|
transport_stream: AnyByteStream,
|
||||||
|
*,
|
||||||
|
server_side: bool | None = None,
|
||||||
|
hostname: str | None = None,
|
||||||
|
ssl_context: ssl.SSLContext | None = None,
|
||||||
|
standard_compatible: bool = True,
|
||||||
|
) -> TLSStream:
|
||||||
|
"""
|
||||||
|
Wrap an existing stream with Transport Layer Security.
|
||||||
|
|
||||||
|
This performs a TLS handshake with the peer.
|
||||||
|
|
||||||
|
:param transport_stream: a bytes-transporting stream to wrap
|
||||||
|
:param server_side: ``True`` if this is the server side of the connection,
|
||||||
|
``False`` if this is the client side (if omitted, will be set to ``False``
|
||||||
|
if ``hostname`` has been provided, ``False`` otherwise). Used only to create
|
||||||
|
a default context when an explicit context has not been provided.
|
||||||
|
:param hostname: host name of the peer (if host name checking is desired)
|
||||||
|
:param ssl_context: the SSLContext object to use (if not provided, a secure
|
||||||
|
default will be created)
|
||||||
|
:param standard_compatible: if ``False``, skip the closing handshake when
|
||||||
|
closing the connection, and don't raise an exception if the peer does the
|
||||||
|
same
|
||||||
|
:raises ~ssl.SSLError: if the TLS handshake fails
|
||||||
|
|
||||||
|
"""
|
||||||
|
if server_side is None:
|
||||||
|
server_side = not hostname
|
||||||
|
|
||||||
|
if not ssl_context:
|
||||||
|
purpose = (
|
||||||
|
ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
|
||||||
|
)
|
||||||
|
ssl_context = ssl.create_default_context(purpose)
|
||||||
|
|
||||||
|
# Re-enable detection of unexpected EOFs if it was disabled by Python
|
||||||
|
if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"):
|
||||||
|
ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF
|
||||||
|
|
||||||
|
bio_in = ssl.MemoryBIO()
|
||||||
|
bio_out = ssl.MemoryBIO()
|
||||||
|
|
||||||
|
# External SSLContext implementations may do blocking I/O in wrap_bio(),
|
||||||
|
# but the standard library implementation won't
|
||||||
|
if type(ssl_context) is ssl.SSLContext:
|
||||||
|
ssl_object = ssl_context.wrap_bio(
|
||||||
|
bio_in, bio_out, server_side=server_side, server_hostname=hostname
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
ssl_object = await to_thread.run_sync(
|
||||||
|
ssl_context.wrap_bio,
|
||||||
|
bio_in,
|
||||||
|
bio_out,
|
||||||
|
server_side,
|
||||||
|
hostname,
|
||||||
|
None,
|
||||||
|
)
|
||||||
|
|
||||||
|
wrapper = cls(
|
||||||
|
transport_stream=transport_stream,
|
||||||
|
standard_compatible=standard_compatible,
|
||||||
|
_ssl_object=ssl_object,
|
||||||
|
_read_bio=bio_in,
|
||||||
|
_write_bio=bio_out,
|
||||||
|
)
|
||||||
|
await wrapper._call_sslobject_method(ssl_object.do_handshake)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
async def _call_sslobject_method(
|
||||||
|
self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT]
|
||||||
|
) -> T_Retval:
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
result = func(*args)
|
||||||
|
except ssl.SSLWantReadError:
|
||||||
|
try:
|
||||||
|
# Flush any pending writes first
|
||||||
|
if self._write_bio.pending:
|
||||||
|
await self.transport_stream.send(self._write_bio.read())
|
||||||
|
|
||||||
|
data = await self.transport_stream.receive()
|
||||||
|
except EndOfStream:
|
||||||
|
self._read_bio.write_eof()
|
||||||
|
except OSError as exc:
|
||||||
|
self._read_bio.write_eof()
|
||||||
|
self._write_bio.write_eof()
|
||||||
|
raise BrokenResourceError from exc
|
||||||
|
else:
|
||||||
|
self._read_bio.write(data)
|
||||||
|
except ssl.SSLWantWriteError:
|
||||||
|
await self.transport_stream.send(self._write_bio.read())
|
||||||
|
except ssl.SSLSyscallError as exc:
|
||||||
|
self._read_bio.write_eof()
|
||||||
|
self._write_bio.write_eof()
|
||||||
|
raise BrokenResourceError from exc
|
||||||
|
except ssl.SSLError as exc:
|
||||||
|
self._read_bio.write_eof()
|
||||||
|
self._write_bio.write_eof()
|
||||||
|
if isinstance(exc, ssl.SSLEOFError) or (
|
||||||
|
exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror
|
||||||
|
):
|
||||||
|
if self.standard_compatible:
|
||||||
|
raise BrokenResourceError from exc
|
||||||
|
else:
|
||||||
|
raise EndOfStream from None
|
||||||
|
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
# Flush any pending writes first
|
||||||
|
if self._write_bio.pending:
|
||||||
|
await self.transport_stream.send(self._write_bio.read())
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def unwrap(self) -> tuple[AnyByteStream, bytes]:
|
||||||
|
"""
|
||||||
|
Does the TLS closing handshake.
|
||||||
|
|
||||||
|
:return: a tuple of (wrapped byte stream, bytes left in the read buffer)
|
||||||
|
|
||||||
|
"""
|
||||||
|
await self._call_sslobject_method(self._ssl_object.unwrap)
|
||||||
|
self._read_bio.write_eof()
|
||||||
|
self._write_bio.write_eof()
|
||||||
|
return self.transport_stream, self._read_bio.read()
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
if self.standard_compatible:
|
||||||
|
try:
|
||||||
|
await self.unwrap()
|
||||||
|
except BaseException:
|
||||||
|
await aclose_forcefully(self.transport_stream)
|
||||||
|
raise
|
||||||
|
|
||||||
|
await self.transport_stream.aclose()
|
||||||
|
|
||||||
|
async def receive(self, max_bytes: int = 65536) -> bytes:
|
||||||
|
data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
|
||||||
|
if not data:
|
||||||
|
raise EndOfStream
|
||||||
|
|
||||||
|
return data
|
||||||
|
|
||||||
|
async def send(self, item: bytes) -> None:
|
||||||
|
await self._call_sslobject_method(self._ssl_object.write, item)
|
||||||
|
|
||||||
|
async def send_eof(self) -> None:
|
||||||
|
tls_version = self.extra(TLSAttribute.tls_version)
|
||||||
|
match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
|
||||||
|
if match:
|
||||||
|
major, minor = int(match.group(1)), int(match.group(2) or 0)
|
||||||
|
if (major, minor) < (1, 3):
|
||||||
|
raise NotImplementedError(
|
||||||
|
f"send_eof() requires at least TLSv1.3; current "
|
||||||
|
f"session uses {tls_version}"
|
||||||
|
)
|
||||||
|
|
||||||
|
raise NotImplementedError(
|
||||||
|
"send_eof() has not yet been implemented for TLS streams"
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return {
|
||||||
|
**self.transport_stream.extra_attributes,
|
||||||
|
TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
|
||||||
|
TLSAttribute.channel_binding_tls_unique: (
|
||||||
|
self._ssl_object.get_channel_binding
|
||||||
|
),
|
||||||
|
TLSAttribute.cipher: self._ssl_object.cipher,
|
||||||
|
TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
|
||||||
|
TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(
|
||||||
|
True
|
||||||
|
),
|
||||||
|
TLSAttribute.server_side: lambda: self._ssl_object.server_side,
|
||||||
|
TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers()
|
||||||
|
if self._ssl_object.server_side
|
||||||
|
else None,
|
||||||
|
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
|
||||||
|
TLSAttribute.ssl_object: lambda: self._ssl_object,
|
||||||
|
TLSAttribute.tls_version: self._ssl_object.version,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(eq=False)
|
||||||
|
class TLSListener(Listener[TLSStream]):
|
||||||
|
"""
|
||||||
|
A convenience listener that wraps another listener and auto-negotiates a TLS session
|
||||||
|
on every accepted connection.
|
||||||
|
|
||||||
|
If the TLS handshake times out or raises an exception,
|
||||||
|
:meth:`handle_handshake_error` is called to do whatever post-mortem processing is
|
||||||
|
deemed necessary.
|
||||||
|
|
||||||
|
Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.
|
||||||
|
|
||||||
|
:param Listener listener: the listener to wrap
|
||||||
|
:param ssl_context: the SSL context object
|
||||||
|
:param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
|
||||||
|
:param handshake_timeout: time limit for the TLS handshake
|
||||||
|
(passed to :func:`~anyio.fail_after`)
|
||||||
|
"""
|
||||||
|
|
||||||
|
listener: Listener[Any]
|
||||||
|
ssl_context: ssl.SSLContext
|
||||||
|
standard_compatible: bool = True
|
||||||
|
handshake_timeout: float = 30
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
|
||||||
|
"""
|
||||||
|
Handle an exception raised during the TLS handshake.
|
||||||
|
|
||||||
|
This method does 3 things:
|
||||||
|
|
||||||
|
#. Forcefully closes the original stream
|
||||||
|
#. Logs the exception (unless it was a cancellation exception) using the
|
||||||
|
``anyio.streams.tls`` logger
|
||||||
|
#. Reraises the exception if it was a base exception or a cancellation exception
|
||||||
|
|
||||||
|
:param exc: the exception
|
||||||
|
:param stream: the original stream
|
||||||
|
|
||||||
|
"""
|
||||||
|
await aclose_forcefully(stream)
|
||||||
|
|
||||||
|
# Log all except cancellation exceptions
|
||||||
|
if not isinstance(exc, get_cancelled_exc_class()):
|
||||||
|
# CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using
|
||||||
|
# any asyncio implementation, so we explicitly pass the exception to log
|
||||||
|
# (https://github.com/python/cpython/issues/108668). Trio does not have this
|
||||||
|
# issue because it works around the CPython bug.
|
||||||
|
logging.getLogger(__name__).exception(
|
||||||
|
"Error during TLS handshake", exc_info=exc
|
||||||
|
)
|
||||||
|
|
||||||
|
# Only reraise base exceptions and cancellation exceptions
|
||||||
|
if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def serve(
|
||||||
|
self,
|
||||||
|
handler: Callable[[TLSStream], Any],
|
||||||
|
task_group: TaskGroup | None = None,
|
||||||
|
) -> None:
|
||||||
|
@wraps(handler)
|
||||||
|
async def handler_wrapper(stream: AnyByteStream) -> None:
|
||||||
|
from .. import fail_after
|
||||||
|
|
||||||
|
try:
|
||||||
|
with fail_after(self.handshake_timeout):
|
||||||
|
wrapped_stream = await TLSStream.wrap(
|
||||||
|
stream,
|
||||||
|
ssl_context=self.ssl_context,
|
||||||
|
standard_compatible=self.standard_compatible,
|
||||||
|
)
|
||||||
|
except BaseException as exc:
|
||||||
|
await self.handle_handshake_error(exc, stream)
|
||||||
|
else:
|
||||||
|
await handler(wrapped_stream)
|
||||||
|
|
||||||
|
await self.listener.serve(handler_wrapper, task_group)
|
||||||
|
|
||||||
|
async def aclose(self) -> None:
|
||||||
|
await self.listener.aclose()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
|
||||||
|
return {
|
||||||
|
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class TLSConnectable(ByteStreamConnectable):
|
||||||
|
"""
|
||||||
|
Wraps another connectable and does TLS negotiation after a successful connection.
|
||||||
|
|
||||||
|
:param connectable: the connectable to wrap
|
||||||
|
:param hostname: host name of the server (if host name checking is desired)
|
||||||
|
:param ssl_context: the SSLContext object to use (if not provided, a secure default
|
||||||
|
will be created)
|
||||||
|
:param standard_compatible: if ``False``, skip the closing handshake when closing
|
||||||
|
the connection, and don't raise an exception if the server does the same
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
connectable: AnyByteStreamConnectable,
|
||||||
|
*,
|
||||||
|
hostname: str | None = None,
|
||||||
|
ssl_context: ssl.SSLContext | None = None,
|
||||||
|
standard_compatible: bool = True,
|
||||||
|
) -> None:
|
||||||
|
self.connectable = connectable
|
||||||
|
self.ssl_context: SSLContext = ssl_context or ssl.create_default_context(
|
||||||
|
ssl.Purpose.SERVER_AUTH
|
||||||
|
)
|
||||||
|
if not isinstance(self.ssl_context, ssl.SSLContext):
|
||||||
|
raise TypeError(
|
||||||
|
"ssl_context must be an instance of ssl.SSLContext, not "
|
||||||
|
f"{type(self.ssl_context).__name__}"
|
||||||
|
)
|
||||||
|
self.hostname = hostname
|
||||||
|
self.standard_compatible = standard_compatible
|
||||||
|
|
||||||
|
@override
|
||||||
|
async def connect(self) -> TLSStream:
|
||||||
|
stream = await self.connectable.connect()
|
||||||
|
try:
|
||||||
|
return await TLSStream.wrap(
|
||||||
|
stream,
|
||||||
|
hostname=self.hostname,
|
||||||
|
ssl_context=self.ssl_context,
|
||||||
|
standard_compatible=self.standard_compatible,
|
||||||
|
)
|
||||||
|
except BaseException:
|
||||||
|
await aclose_forcefully(stream)
|
||||||
|
raise
|
||||||
246
.venv/lib/python3.9/site-packages/anyio/to_interpreter.py
Normal file
246
.venv/lib/python3.9/site-packages/anyio/to_interpreter.py
Normal file
@@ -0,0 +1,246 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"run_sync",
|
||||||
|
"current_default_interpreter_limiter",
|
||||||
|
)
|
||||||
|
|
||||||
|
import atexit
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from collections import deque
|
||||||
|
from collections.abc import Callable
|
||||||
|
from typing import Any, Final, TypeVar
|
||||||
|
|
||||||
|
from . import current_time, to_thread
|
||||||
|
from ._core._exceptions import BrokenWorkerInterpreter
|
||||||
|
from ._core._synchronization import CapacityLimiter
|
||||||
|
from .lowlevel import RunVar
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 14):
|
||||||
|
from concurrent.interpreters import ExecutionFailed, create
|
||||||
|
|
||||||
|
def _interp_call(
|
||||||
|
func: Callable[..., Any], args: tuple[Any, ...]
|
||||||
|
) -> tuple[Any, bool]:
|
||||||
|
try:
|
||||||
|
retval = func(*args)
|
||||||
|
except BaseException as exc:
|
||||||
|
return exc, True
|
||||||
|
else:
|
||||||
|
return retval, False
|
||||||
|
|
||||||
|
class _Worker:
|
||||||
|
last_used: float = 0
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._interpreter = create()
|
||||||
|
|
||||||
|
def destroy(self) -> None:
|
||||||
|
self._interpreter.close()
|
||||||
|
|
||||||
|
def call(
|
||||||
|
self,
|
||||||
|
func: Callable[..., T_Retval],
|
||||||
|
args: tuple[Any, ...],
|
||||||
|
) -> T_Retval:
|
||||||
|
try:
|
||||||
|
res, is_exception = self._interpreter.call(_interp_call, func, args)
|
||||||
|
except ExecutionFailed as exc:
|
||||||
|
raise BrokenWorkerInterpreter(exc.excinfo) from exc
|
||||||
|
|
||||||
|
if is_exception:
|
||||||
|
raise res
|
||||||
|
|
||||||
|
return res
|
||||||
|
elif sys.version_info >= (3, 13):
|
||||||
|
import _interpqueues
|
||||||
|
import _interpreters
|
||||||
|
|
||||||
|
UNBOUND: Final = 2 # I have no clue how this works, but it was used in the stdlib
|
||||||
|
FMT_UNPICKLED: Final = 0
|
||||||
|
FMT_PICKLED: Final = 1
|
||||||
|
QUEUE_PICKLE_ARGS: Final = (FMT_PICKLED, UNBOUND)
|
||||||
|
QUEUE_UNPICKLE_ARGS: Final = (FMT_UNPICKLED, UNBOUND)
|
||||||
|
|
||||||
|
_run_func = compile(
|
||||||
|
"""
|
||||||
|
import _interpqueues
|
||||||
|
from _interpreters import NotShareableError
|
||||||
|
from pickle import loads, dumps, HIGHEST_PROTOCOL
|
||||||
|
|
||||||
|
QUEUE_PICKLE_ARGS = (1, 2)
|
||||||
|
QUEUE_UNPICKLE_ARGS = (0, 2)
|
||||||
|
|
||||||
|
item = _interpqueues.get(queue_id)[0]
|
||||||
|
try:
|
||||||
|
func, args = loads(item)
|
||||||
|
retval = func(*args)
|
||||||
|
except BaseException as exc:
|
||||||
|
is_exception = True
|
||||||
|
retval = exc
|
||||||
|
else:
|
||||||
|
is_exception = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
_interpqueues.put(queue_id, (retval, is_exception), *QUEUE_UNPICKLE_ARGS)
|
||||||
|
except NotShareableError:
|
||||||
|
retval = dumps(retval, HIGHEST_PROTOCOL)
|
||||||
|
_interpqueues.put(queue_id, (retval, is_exception), *QUEUE_PICKLE_ARGS)
|
||||||
|
""",
|
||||||
|
"<string>",
|
||||||
|
"exec",
|
||||||
|
)
|
||||||
|
|
||||||
|
class _Worker:
|
||||||
|
last_used: float = 0
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self._interpreter_id = _interpreters.create()
|
||||||
|
self._queue_id = _interpqueues.create(1, *QUEUE_UNPICKLE_ARGS)
|
||||||
|
_interpreters.set___main___attrs(
|
||||||
|
self._interpreter_id, {"queue_id": self._queue_id}
|
||||||
|
)
|
||||||
|
|
||||||
|
def destroy(self) -> None:
|
||||||
|
_interpqueues.destroy(self._queue_id)
|
||||||
|
_interpreters.destroy(self._interpreter_id)
|
||||||
|
|
||||||
|
def call(
|
||||||
|
self,
|
||||||
|
func: Callable[..., T_Retval],
|
||||||
|
args: tuple[Any, ...],
|
||||||
|
) -> T_Retval:
|
||||||
|
import pickle
|
||||||
|
|
||||||
|
item = pickle.dumps((func, args), pickle.HIGHEST_PROTOCOL)
|
||||||
|
_interpqueues.put(self._queue_id, item, *QUEUE_PICKLE_ARGS)
|
||||||
|
exc_info = _interpreters.exec(self._interpreter_id, _run_func)
|
||||||
|
if exc_info:
|
||||||
|
raise BrokenWorkerInterpreter(exc_info)
|
||||||
|
|
||||||
|
res = _interpqueues.get(self._queue_id)
|
||||||
|
(res, is_exception), fmt = res[:2]
|
||||||
|
if fmt == FMT_PICKLED:
|
||||||
|
res = pickle.loads(res)
|
||||||
|
|
||||||
|
if is_exception:
|
||||||
|
raise res
|
||||||
|
|
||||||
|
return res
|
||||||
|
else:
|
||||||
|
|
||||||
|
class _Worker:
|
||||||
|
last_used: float = 0
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
raise RuntimeError("subinterpreters require at least Python 3.13")
|
||||||
|
|
||||||
|
def call(
|
||||||
|
self,
|
||||||
|
func: Callable[..., T_Retval],
|
||||||
|
args: tuple[Any, ...],
|
||||||
|
) -> T_Retval:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def destroy(self) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_CPU_COUNT: Final = 8 # this is just an arbitrarily selected value
|
||||||
|
MAX_WORKER_IDLE_TIME = (
|
||||||
|
30 # seconds a subinterpreter can be idle before becoming eligible for pruning
|
||||||
|
)
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
_idle_workers = RunVar[deque[_Worker]]("_available_workers")
|
||||||
|
_default_interpreter_limiter = RunVar[CapacityLimiter]("_default_interpreter_limiter")
|
||||||
|
|
||||||
|
|
||||||
|
def _stop_workers(workers: deque[_Worker]) -> None:
|
||||||
|
for worker in workers:
|
||||||
|
worker.destroy()
|
||||||
|
|
||||||
|
workers.clear()
|
||||||
|
|
||||||
|
|
||||||
|
async def run_sync(
|
||||||
|
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
limiter: CapacityLimiter | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call the given function with the given arguments in a subinterpreter.
|
||||||
|
|
||||||
|
.. warning:: On Python 3.13, the :mod:`concurrent.interpreters` module was not yet
|
||||||
|
available, so the code path for that Python version relies on an undocumented,
|
||||||
|
private API. As such, it is recommended to not rely on this function for anything
|
||||||
|
mission-critical on Python 3.13.
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: the positional arguments for the callable
|
||||||
|
:param limiter: capacity limiter to use to limit the total number of subinterpreters
|
||||||
|
running (if omitted, the default limiter is used)
|
||||||
|
:return: the result of the call
|
||||||
|
:raises BrokenWorkerInterpreter: if there's an internal error in a subinterpreter
|
||||||
|
|
||||||
|
"""
|
||||||
|
if limiter is None:
|
||||||
|
limiter = current_default_interpreter_limiter()
|
||||||
|
|
||||||
|
try:
|
||||||
|
idle_workers = _idle_workers.get()
|
||||||
|
except LookupError:
|
||||||
|
idle_workers = deque()
|
||||||
|
_idle_workers.set(idle_workers)
|
||||||
|
atexit.register(_stop_workers, idle_workers)
|
||||||
|
|
||||||
|
async with limiter:
|
||||||
|
try:
|
||||||
|
worker = idle_workers.pop()
|
||||||
|
except IndexError:
|
||||||
|
worker = _Worker()
|
||||||
|
|
||||||
|
try:
|
||||||
|
return await to_thread.run_sync(
|
||||||
|
worker.call,
|
||||||
|
func,
|
||||||
|
args,
|
||||||
|
limiter=limiter,
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
# Prune workers that have been idle for too long
|
||||||
|
now = current_time()
|
||||||
|
while idle_workers:
|
||||||
|
if now - idle_workers[0].last_used <= MAX_WORKER_IDLE_TIME:
|
||||||
|
break
|
||||||
|
|
||||||
|
await to_thread.run_sync(idle_workers.popleft().destroy, limiter=limiter)
|
||||||
|
|
||||||
|
worker.last_used = current_time()
|
||||||
|
idle_workers.append(worker)
|
||||||
|
|
||||||
|
|
||||||
|
def current_default_interpreter_limiter() -> CapacityLimiter:
|
||||||
|
"""
|
||||||
|
Return the capacity limiter used by default to limit the number of concurrently
|
||||||
|
running subinterpreters.
|
||||||
|
|
||||||
|
Defaults to the number of CPU cores.
|
||||||
|
|
||||||
|
:return: a capacity limiter object
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return _default_interpreter_limiter.get()
|
||||||
|
except LookupError:
|
||||||
|
limiter = CapacityLimiter(os.cpu_count() or DEFAULT_CPU_COUNT)
|
||||||
|
_default_interpreter_limiter.set(limiter)
|
||||||
|
return limiter
|
||||||
266
.venv/lib/python3.9/site-packages/anyio/to_process.py
Normal file
266
.venv/lib/python3.9/site-packages/anyio/to_process.py
Normal file
@@ -0,0 +1,266 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"current_default_process_limiter",
|
||||||
|
"process_worker",
|
||||||
|
"run_sync",
|
||||||
|
)
|
||||||
|
|
||||||
|
import os
|
||||||
|
import pickle
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
from collections import deque
|
||||||
|
from collections.abc import Callable
|
||||||
|
from importlib.util import module_from_spec, spec_from_file_location
|
||||||
|
from typing import TypeVar, cast
|
||||||
|
|
||||||
|
from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class
|
||||||
|
from ._core._exceptions import BrokenWorkerProcess
|
||||||
|
from ._core._subprocesses import open_process
|
||||||
|
from ._core._synchronization import CapacityLimiter
|
||||||
|
from ._core._tasks import CancelScope, fail_after
|
||||||
|
from .abc import ByteReceiveStream, ByteSendStream, Process
|
||||||
|
from .lowlevel import RunVar, checkpoint_if_cancelled
|
||||||
|
from .streams.buffered import BufferedByteReceiveStream
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
WORKER_MAX_IDLE_TIME = 300 # 5 minutes
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers")
|
||||||
|
_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar(
|
||||||
|
"_process_pool_idle_workers"
|
||||||
|
)
|
||||||
|
_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter")
|
||||||
|
|
||||||
|
|
||||||
|
async def run_sync( # type: ignore[return]
|
||||||
|
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
cancellable: bool = False,
|
||||||
|
limiter: CapacityLimiter | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call the given function with the given arguments in a worker process.
|
||||||
|
|
||||||
|
If the ``cancellable`` option is enabled and the task waiting for its completion is
|
||||||
|
cancelled, the worker process running it will be abruptly terminated using SIGKILL
|
||||||
|
(or ``terminateProcess()`` on Windows).
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: positional arguments for the callable
|
||||||
|
:param cancellable: ``True`` to allow cancellation of the operation while it's
|
||||||
|
running
|
||||||
|
:param limiter: capacity limiter to use to limit the total amount of processes
|
||||||
|
running (if omitted, the default limiter is used)
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
:return: an awaitable that yields the return value of the function.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def send_raw_command(pickled_cmd: bytes) -> object:
|
||||||
|
try:
|
||||||
|
await stdin.send(pickled_cmd)
|
||||||
|
response = await buffered.receive_until(b"\n", 50)
|
||||||
|
status, length = response.split(b" ")
|
||||||
|
if status not in (b"RETURN", b"EXCEPTION"):
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Worker process returned unexpected response: {response!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
pickled_response = await buffered.receive_exactly(int(length))
|
||||||
|
except BaseException as exc:
|
||||||
|
workers.discard(process)
|
||||||
|
try:
|
||||||
|
process.kill()
|
||||||
|
with CancelScope(shield=True):
|
||||||
|
await process.aclose()
|
||||||
|
except ProcessLookupError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if isinstance(exc, get_cancelled_exc_class()):
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
raise BrokenWorkerProcess from exc
|
||||||
|
|
||||||
|
retval = pickle.loads(pickled_response)
|
||||||
|
if status == b"EXCEPTION":
|
||||||
|
assert isinstance(retval, BaseException)
|
||||||
|
raise retval
|
||||||
|
else:
|
||||||
|
return retval
|
||||||
|
|
||||||
|
# First pickle the request before trying to reserve a worker process
|
||||||
|
await checkpoint_if_cancelled()
|
||||||
|
request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL)
|
||||||
|
|
||||||
|
# If this is the first run in this event loop thread, set up the necessary variables
|
||||||
|
try:
|
||||||
|
workers = _process_pool_workers.get()
|
||||||
|
idle_workers = _process_pool_idle_workers.get()
|
||||||
|
except LookupError:
|
||||||
|
workers = set()
|
||||||
|
idle_workers = deque()
|
||||||
|
_process_pool_workers.set(workers)
|
||||||
|
_process_pool_idle_workers.set(idle_workers)
|
||||||
|
get_async_backend().setup_process_pool_exit_at_shutdown(workers)
|
||||||
|
|
||||||
|
async with limiter or current_default_process_limiter():
|
||||||
|
# Pop processes from the pool (starting from the most recently used) until we
|
||||||
|
# find one that hasn't exited yet
|
||||||
|
process: Process
|
||||||
|
while idle_workers:
|
||||||
|
process, idle_since = idle_workers.pop()
|
||||||
|
if process.returncode is None:
|
||||||
|
stdin = cast(ByteSendStream, process.stdin)
|
||||||
|
buffered = BufferedByteReceiveStream(
|
||||||
|
cast(ByteReceiveStream, process.stdout)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME
|
||||||
|
# seconds or longer
|
||||||
|
now = current_time()
|
||||||
|
killed_processes: list[Process] = []
|
||||||
|
while idle_workers:
|
||||||
|
if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME:
|
||||||
|
break
|
||||||
|
|
||||||
|
process_to_kill, idle_since = idle_workers.popleft()
|
||||||
|
process_to_kill.kill()
|
||||||
|
workers.remove(process_to_kill)
|
||||||
|
killed_processes.append(process_to_kill)
|
||||||
|
|
||||||
|
with CancelScope(shield=True):
|
||||||
|
for killed_process in killed_processes:
|
||||||
|
await killed_process.aclose()
|
||||||
|
|
||||||
|
break
|
||||||
|
|
||||||
|
workers.remove(process)
|
||||||
|
else:
|
||||||
|
command = [sys.executable, "-u", "-m", __name__]
|
||||||
|
process = await open_process(
|
||||||
|
command, stdin=subprocess.PIPE, stdout=subprocess.PIPE
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
stdin = cast(ByteSendStream, process.stdin)
|
||||||
|
buffered = BufferedByteReceiveStream(
|
||||||
|
cast(ByteReceiveStream, process.stdout)
|
||||||
|
)
|
||||||
|
with fail_after(20):
|
||||||
|
message = await buffered.receive(6)
|
||||||
|
|
||||||
|
if message != b"READY\n":
|
||||||
|
raise BrokenWorkerProcess(
|
||||||
|
f"Worker process returned unexpected response: {message!r}"
|
||||||
|
)
|
||||||
|
|
||||||
|
main_module_path = getattr(sys.modules["__main__"], "__file__", None)
|
||||||
|
pickled = pickle.dumps(
|
||||||
|
("init", sys.path, main_module_path),
|
||||||
|
protocol=pickle.HIGHEST_PROTOCOL,
|
||||||
|
)
|
||||||
|
await send_raw_command(pickled)
|
||||||
|
except (BrokenWorkerProcess, get_cancelled_exc_class()):
|
||||||
|
raise
|
||||||
|
except BaseException as exc:
|
||||||
|
process.kill()
|
||||||
|
raise BrokenWorkerProcess(
|
||||||
|
"Error during worker process initialization"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
workers.add(process)
|
||||||
|
|
||||||
|
with CancelScope(shield=not cancellable):
|
||||||
|
try:
|
||||||
|
return cast(T_Retval, await send_raw_command(request))
|
||||||
|
finally:
|
||||||
|
if process in workers:
|
||||||
|
idle_workers.append((process, current_time()))
|
||||||
|
|
||||||
|
|
||||||
|
def current_default_process_limiter() -> CapacityLimiter:
|
||||||
|
"""
|
||||||
|
Return the capacity limiter that is used by default to limit the number of worker
|
||||||
|
processes.
|
||||||
|
|
||||||
|
:return: a capacity limiter object
|
||||||
|
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return _default_process_limiter.get()
|
||||||
|
except LookupError:
|
||||||
|
limiter = CapacityLimiter(os.cpu_count() or 2)
|
||||||
|
_default_process_limiter.set(limiter)
|
||||||
|
return limiter
|
||||||
|
|
||||||
|
|
||||||
|
def process_worker() -> None:
|
||||||
|
# Redirect standard streams to os.devnull so that user code won't interfere with the
|
||||||
|
# parent-worker communication
|
||||||
|
stdin = sys.stdin
|
||||||
|
stdout = sys.stdout
|
||||||
|
sys.stdin = open(os.devnull)
|
||||||
|
sys.stdout = open(os.devnull, "w")
|
||||||
|
|
||||||
|
stdout.buffer.write(b"READY\n")
|
||||||
|
while True:
|
||||||
|
retval = exception = None
|
||||||
|
try:
|
||||||
|
command, *args = pickle.load(stdin.buffer)
|
||||||
|
except EOFError:
|
||||||
|
return
|
||||||
|
except BaseException as exc:
|
||||||
|
exception = exc
|
||||||
|
else:
|
||||||
|
if command == "run":
|
||||||
|
func, args = args
|
||||||
|
try:
|
||||||
|
retval = func(*args)
|
||||||
|
except BaseException as exc:
|
||||||
|
exception = exc
|
||||||
|
elif command == "init":
|
||||||
|
main_module_path: str | None
|
||||||
|
sys.path, main_module_path = args
|
||||||
|
del sys.modules["__main__"]
|
||||||
|
if main_module_path and os.path.isfile(main_module_path):
|
||||||
|
# Load the parent's main module but as __mp_main__ instead of
|
||||||
|
# __main__ (like multiprocessing does) to avoid infinite recursion
|
||||||
|
try:
|
||||||
|
spec = spec_from_file_location("__mp_main__", main_module_path)
|
||||||
|
if spec and spec.loader:
|
||||||
|
main = module_from_spec(spec)
|
||||||
|
spec.loader.exec_module(main)
|
||||||
|
sys.modules["__main__"] = main
|
||||||
|
except BaseException as exc:
|
||||||
|
exception = exc
|
||||||
|
try:
|
||||||
|
if exception is not None:
|
||||||
|
status = b"EXCEPTION"
|
||||||
|
pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL)
|
||||||
|
else:
|
||||||
|
status = b"RETURN"
|
||||||
|
pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL)
|
||||||
|
except BaseException as exc:
|
||||||
|
exception = exc
|
||||||
|
status = b"EXCEPTION"
|
||||||
|
pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL)
|
||||||
|
|
||||||
|
stdout.buffer.write(b"%s %d\n" % (status, len(pickled)))
|
||||||
|
stdout.buffer.write(pickled)
|
||||||
|
|
||||||
|
# Respect SIGTERM
|
||||||
|
if isinstance(exception, SystemExit):
|
||||||
|
raise exception
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
process_worker()
|
||||||
78
.venv/lib/python3.9/site-packages/anyio/to_thread.py
Normal file
78
.venv/lib/python3.9/site-packages/anyio/to_thread.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
__all__ = (
|
||||||
|
"run_sync",
|
||||||
|
"current_default_thread_limiter",
|
||||||
|
)
|
||||||
|
|
||||||
|
import sys
|
||||||
|
from collections.abc import Callable
|
||||||
|
from typing import TypeVar
|
||||||
|
from warnings import warn
|
||||||
|
|
||||||
|
from ._core._eventloop import get_async_backend
|
||||||
|
from .abc import CapacityLimiter
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
from typing import TypeVarTuple, Unpack
|
||||||
|
else:
|
||||||
|
from typing_extensions import TypeVarTuple, Unpack
|
||||||
|
|
||||||
|
T_Retval = TypeVar("T_Retval")
|
||||||
|
PosArgsT = TypeVarTuple("PosArgsT")
|
||||||
|
|
||||||
|
|
||||||
|
async def run_sync(
|
||||||
|
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
||||||
|
*args: Unpack[PosArgsT],
|
||||||
|
abandon_on_cancel: bool = False,
|
||||||
|
cancellable: bool | None = None,
|
||||||
|
limiter: CapacityLimiter | None = None,
|
||||||
|
) -> T_Retval:
|
||||||
|
"""
|
||||||
|
Call the given function with the given arguments in a worker thread.
|
||||||
|
|
||||||
|
If the ``cancellable`` option is enabled and the task waiting for its completion is
|
||||||
|
cancelled, the thread will still run its course but its return value (or any raised
|
||||||
|
exception) will be ignored.
|
||||||
|
|
||||||
|
:param func: a callable
|
||||||
|
:param args: positional arguments for the callable
|
||||||
|
:param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run
|
||||||
|
unchecked on own) if the host task is cancelled, ``False`` to ignore
|
||||||
|
cancellations in the host task until the operation has completed in the worker
|
||||||
|
thread
|
||||||
|
:param cancellable: deprecated alias of ``abandon_on_cancel``; will override
|
||||||
|
``abandon_on_cancel`` if both parameters are passed
|
||||||
|
:param limiter: capacity limiter to use to limit the total amount of threads running
|
||||||
|
(if omitted, the default limiter is used)
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
:return: an awaitable that yields the return value of the function.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if cancellable is not None:
|
||||||
|
abandon_on_cancel = cancellable
|
||||||
|
warn(
|
||||||
|
"The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is "
|
||||||
|
"deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2,
|
||||||
|
)
|
||||||
|
|
||||||
|
return await get_async_backend().run_sync_in_worker_thread(
|
||||||
|
func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def current_default_thread_limiter() -> CapacityLimiter:
|
||||||
|
"""
|
||||||
|
Return the capacity limiter that is used by default to limit the number of
|
||||||
|
concurrent threads.
|
||||||
|
|
||||||
|
:return: a capacity limiter object
|
||||||
|
:raises NoEventLoopError: if no supported asynchronous event loop is running in the
|
||||||
|
current thread
|
||||||
|
|
||||||
|
"""
|
||||||
|
return get_async_backend().current_default_thread_limiter()
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@@ -0,0 +1,78 @@
|
|||||||
|
Metadata-Version: 2.4
|
||||||
|
Name: certifi
|
||||||
|
Version: 2026.2.25
|
||||||
|
Summary: Python package for providing Mozilla's CA Bundle.
|
||||||
|
Home-page: https://github.com/certifi/python-certifi
|
||||||
|
Author: Kenneth Reitz
|
||||||
|
Author-email: me@kennethreitz.com
|
||||||
|
License: MPL-2.0
|
||||||
|
Project-URL: Source, https://github.com/certifi/python-certifi
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
|
||||||
|
Classifier: Natural Language :: English
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3 :: Only
|
||||||
|
Classifier: Programming Language :: Python :: 3.7
|
||||||
|
Classifier: Programming Language :: Python :: 3.8
|
||||||
|
Classifier: Programming Language :: Python :: 3.9
|
||||||
|
Classifier: Programming Language :: Python :: 3.10
|
||||||
|
Classifier: Programming Language :: Python :: 3.11
|
||||||
|
Classifier: Programming Language :: Python :: 3.12
|
||||||
|
Classifier: Programming Language :: Python :: 3.13
|
||||||
|
Classifier: Programming Language :: Python :: 3.14
|
||||||
|
Requires-Python: >=3.7
|
||||||
|
License-File: LICENSE
|
||||||
|
Dynamic: author
|
||||||
|
Dynamic: author-email
|
||||||
|
Dynamic: classifier
|
||||||
|
Dynamic: description
|
||||||
|
Dynamic: home-page
|
||||||
|
Dynamic: license
|
||||||
|
Dynamic: license-file
|
||||||
|
Dynamic: project-url
|
||||||
|
Dynamic: requires-python
|
||||||
|
Dynamic: summary
|
||||||
|
|
||||||
|
Certifi: Python SSL Certificates
|
||||||
|
================================
|
||||||
|
|
||||||
|
Certifi provides Mozilla's carefully curated collection of Root Certificates for
|
||||||
|
validating the trustworthiness of SSL certificates while verifying the identity
|
||||||
|
of TLS hosts. It has been extracted from the `Requests`_ project.
|
||||||
|
|
||||||
|
Installation
|
||||||
|
------------
|
||||||
|
|
||||||
|
``certifi`` is available on PyPI. Simply install it with ``pip``::
|
||||||
|
|
||||||
|
$ pip install certifi
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
To reference the installed certificate authority (CA) bundle, you can use the
|
||||||
|
built-in function::
|
||||||
|
|
||||||
|
>>> import certifi
|
||||||
|
|
||||||
|
>>> certifi.where()
|
||||||
|
'/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
|
||||||
|
|
||||||
|
Or from the command line::
|
||||||
|
|
||||||
|
$ python -m certifi
|
||||||
|
/usr/local/lib/python3.7/site-packages/certifi/cacert.pem
|
||||||
|
|
||||||
|
Enjoy!
|
||||||
|
|
||||||
|
.. _`Requests`: https://requests.readthedocs.io/en/master/
|
||||||
|
|
||||||
|
Addition/Removal of Certificates
|
||||||
|
--------------------------------
|
||||||
|
|
||||||
|
Certifi does not support any addition/removal or other modification of the
|
||||||
|
CA trust store content. This project is intended to provide a reliable and
|
||||||
|
highly portable root of trust to python deployments. Look to upstream projects
|
||||||
|
for methods to use alternate trust.
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/certifi/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/certifi/__main__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/certifi/core.cpython-39.pyc,,
|
||||||
|
certifi-2026.2.25.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
certifi-2026.2.25.dist-info/METADATA,sha256=4NMuGXdg_hBiRA3paKVXYcDmE3VXEBWxTvCL2xlDyPU,2474
|
||||||
|
certifi-2026.2.25.dist-info/RECORD,,
|
||||||
|
certifi-2026.2.25.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
||||||
|
certifi-2026.2.25.dist-info/licenses/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
|
||||||
|
certifi-2026.2.25.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
|
||||||
|
certifi/__init__.py,sha256=c9eaYufv1pSLl0Q8QNcMiMLLH4WquDcxdPyKjmI4opY,94
|
||||||
|
certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
|
||||||
|
certifi/cacert.pem,sha256=_JFloSQDJj5-v72te-ej6sD6XTJdPHBGXyjTaQByyig,272441
|
||||||
|
certifi/core.py,sha256=XFXycndG5pf37ayeF8N32HUuDafsyhkVMbO4BAPWHa0,3394
|
||||||
|
certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
@@ -0,0 +1,5 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: setuptools (82.0.0)
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
|
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
This package contains a modified version of ca-bundle.crt:
|
||||||
|
|
||||||
|
ca-bundle.crt -- Bundle of CA Root Certificates
|
||||||
|
|
||||||
|
This is a bundle of X.509 certificates of public Certificate Authorities
|
||||||
|
(CA). These were automatically extracted from Mozilla's root certificates
|
||||||
|
file (certdata.txt). This file can be found in the mozilla source tree:
|
||||||
|
https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
|
||||||
|
It contains the certificates in PEM format and therefore
|
||||||
|
can be directly used with curl / libcurl / php_curl, or with
|
||||||
|
an Apache+mod_ssl webserver for SSL client authentication.
|
||||||
|
Just configure this file as the SSLCACertificateFile.#
|
||||||
|
|
||||||
|
***** BEGIN LICENSE BLOCK *****
|
||||||
|
This Source Code Form is subject to the terms of the Mozilla Public License,
|
||||||
|
v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
|
||||||
|
one at http://mozilla.org/MPL/2.0/.
|
||||||
|
|
||||||
|
***** END LICENSE BLOCK *****
|
||||||
|
@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
certifi
|
||||||
4
.venv/lib/python3.9/site-packages/certifi/__init__.py
Normal file
4
.venv/lib/python3.9/site-packages/certifi/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from .core import contents, where
|
||||||
|
|
||||||
|
__all__ = ["contents", "where"]
|
||||||
|
__version__ = "2026.02.25"
|
||||||
12
.venv/lib/python3.9/site-packages/certifi/__main__.py
Normal file
12
.venv/lib/python3.9/site-packages/certifi/__main__.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import argparse
|
||||||
|
|
||||||
|
from certifi import contents, where
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("-c", "--contents", action="store_true")
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
if args.contents:
|
||||||
|
print(contents())
|
||||||
|
else:
|
||||||
|
print(where())
|
||||||
4494
.venv/lib/python3.9/site-packages/certifi/cacert.pem
Normal file
4494
.venv/lib/python3.9/site-packages/certifi/cacert.pem
Normal file
File diff suppressed because it is too large
Load Diff
83
.venv/lib/python3.9/site-packages/certifi/core.py
Normal file
83
.venv/lib/python3.9/site-packages/certifi/core.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
"""
|
||||||
|
certifi.py
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
This module returns the installation location of cacert.pem or its contents.
|
||||||
|
"""
|
||||||
|
import sys
|
||||||
|
import atexit
|
||||||
|
|
||||||
|
def exit_cacert_ctx() -> None:
|
||||||
|
_CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info >= (3, 11):
|
||||||
|
|
||||||
|
from importlib.resources import as_file, files
|
||||||
|
|
||||||
|
_CACERT_CTX = None
|
||||||
|
_CACERT_PATH = None
|
||||||
|
|
||||||
|
def where() -> str:
|
||||||
|
# This is slightly terrible, but we want to delay extracting the file
|
||||||
|
# in cases where we're inside of a zipimport situation until someone
|
||||||
|
# actually calls where(), but we don't want to re-extract the file
|
||||||
|
# on every call of where(), so we'll do it once then store it in a
|
||||||
|
# global variable.
|
||||||
|
global _CACERT_CTX
|
||||||
|
global _CACERT_PATH
|
||||||
|
if _CACERT_PATH is None:
|
||||||
|
# This is slightly janky, the importlib.resources API wants you to
|
||||||
|
# manage the cleanup of this file, so it doesn't actually return a
|
||||||
|
# path, it returns a context manager that will give you the path
|
||||||
|
# when you enter it and will do any cleanup when you leave it. In
|
||||||
|
# the common case of not needing a temporary file, it will just
|
||||||
|
# return the file system location and the __exit__() is a no-op.
|
||||||
|
#
|
||||||
|
# We also have to hold onto the actual context manager, because
|
||||||
|
# it will do the cleanup whenever it gets garbage collected, so
|
||||||
|
# we will also store that at the global level as well.
|
||||||
|
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
|
||||||
|
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||||
|
atexit.register(exit_cacert_ctx)
|
||||||
|
|
||||||
|
return _CACERT_PATH
|
||||||
|
|
||||||
|
def contents() -> str:
|
||||||
|
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
from importlib.resources import path as get_path, read_text
|
||||||
|
|
||||||
|
_CACERT_CTX = None
|
||||||
|
_CACERT_PATH = None
|
||||||
|
|
||||||
|
def where() -> str:
|
||||||
|
# This is slightly terrible, but we want to delay extracting the
|
||||||
|
# file in cases where we're inside of a zipimport situation until
|
||||||
|
# someone actually calls where(), but we don't want to re-extract
|
||||||
|
# the file on every call of where(), so we'll do it once then store
|
||||||
|
# it in a global variable.
|
||||||
|
global _CACERT_CTX
|
||||||
|
global _CACERT_PATH
|
||||||
|
if _CACERT_PATH is None:
|
||||||
|
# This is slightly janky, the importlib.resources API wants you
|
||||||
|
# to manage the cleanup of this file, so it doesn't actually
|
||||||
|
# return a path, it returns a context manager that will give
|
||||||
|
# you the path when you enter it and will do any cleanup when
|
||||||
|
# you leave it. In the common case of not needing a temporary
|
||||||
|
# file, it will just return the file system location and the
|
||||||
|
# __exit__() is a no-op.
|
||||||
|
#
|
||||||
|
# We also have to hold onto the actual context manager, because
|
||||||
|
# it will do the cleanup whenever it gets garbage collected, so
|
||||||
|
# we will also store that at the global level as well.
|
||||||
|
_CACERT_CTX = get_path("certifi", "cacert.pem")
|
||||||
|
_CACERT_PATH = str(_CACERT_CTX.__enter__())
|
||||||
|
atexit.register(exit_cacert_ctx)
|
||||||
|
|
||||||
|
return _CACERT_PATH
|
||||||
|
|
||||||
|
def contents() -> str:
|
||||||
|
return read_text("certifi", "cacert.pem", encoding="ascii")
|
||||||
0
.venv/lib/python3.9/site-packages/certifi/py.typed
Normal file
0
.venv/lib/python3.9/site-packages/certifi/py.typed
Normal file
@@ -0,0 +1 @@
|
|||||||
|
pip
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
Copyright 2014 Pallets
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
1. Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
|
||||||
|
2. Redistributions in binary form must reproduce the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer in the
|
||||||
|
documentation and/or other materials provided with the distribution.
|
||||||
|
|
||||||
|
3. Neither the name of the copyright holder nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||||
|
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||||
|
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||||
|
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||||
|
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||||
|
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
@@ -0,0 +1,74 @@
|
|||||||
|
Metadata-Version: 2.3
|
||||||
|
Name: click
|
||||||
|
Version: 8.1.8
|
||||||
|
Summary: Composable command line interface toolkit
|
||||||
|
Maintainer-email: Pallets <contact@palletsprojects.com>
|
||||||
|
Requires-Python: >=3.7
|
||||||
|
Description-Content-Type: text/markdown
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: BSD License
|
||||||
|
Classifier: Operating System :: OS Independent
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Typing :: Typed
|
||||||
|
Requires-Dist: colorama; platform_system == 'Windows'
|
||||||
|
Requires-Dist: importlib-metadata; python_version < '3.8'
|
||||||
|
Project-URL: Changes, https://click.palletsprojects.com/changes/
|
||||||
|
Project-URL: Chat, https://discord.gg/pallets
|
||||||
|
Project-URL: Documentation, https://click.palletsprojects.com/
|
||||||
|
Project-URL: Donate, https://palletsprojects.com/donate
|
||||||
|
Project-URL: Source, https://github.com/pallets/click/
|
||||||
|
|
||||||
|
# $ click_
|
||||||
|
|
||||||
|
Click is a Python package for creating beautiful command line interfaces
|
||||||
|
in a composable way with as little code as necessary. It's the "Command
|
||||||
|
Line Interface Creation Kit". It's highly configurable but comes with
|
||||||
|
sensible defaults out of the box.
|
||||||
|
|
||||||
|
It aims to make the process of writing command line tools quick and fun
|
||||||
|
while also preventing any frustration caused by the inability to
|
||||||
|
implement an intended CLI API.
|
||||||
|
|
||||||
|
Click in three points:
|
||||||
|
|
||||||
|
- Arbitrary nesting of commands
|
||||||
|
- Automatic help page generation
|
||||||
|
- Supports lazy loading of subcommands at runtime
|
||||||
|
|
||||||
|
|
||||||
|
## A Simple Example
|
||||||
|
|
||||||
|
```python
|
||||||
|
import click
|
||||||
|
|
||||||
|
@click.command()
|
||||||
|
@click.option("--count", default=1, help="Number of greetings.")
|
||||||
|
@click.option("--name", prompt="Your name", help="The person to greet.")
|
||||||
|
def hello(count, name):
|
||||||
|
"""Simple program that greets NAME for a total of COUNT times."""
|
||||||
|
for _ in range(count):
|
||||||
|
click.echo(f"Hello, {name}!")
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
hello()
|
||||||
|
```
|
||||||
|
|
||||||
|
```
|
||||||
|
$ python hello.py --count=3
|
||||||
|
Your name: Click
|
||||||
|
Hello, Click!
|
||||||
|
Hello, Click!
|
||||||
|
Hello, Click!
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Donate
|
||||||
|
|
||||||
|
The Pallets organization develops and supports Click and other popular
|
||||||
|
packages. In order to grow the community of contributors and users, and
|
||||||
|
allow the maintainers to devote more time to the projects, [please
|
||||||
|
donate today][].
|
||||||
|
|
||||||
|
[please donate today]: https://palletsprojects.com/donate
|
||||||
|
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/__init__.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/_compat.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/_termui_impl.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/_textwrap.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/_winconsole.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/core.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/decorators.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/exceptions.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/formatting.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/globals.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/parser.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/shell_completion.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/termui.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/testing.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/types.cpython-39.pyc,,
|
||||||
|
../../../../../../../../Library/Caches/com.apple.python/Users/dannier/Desktop/living/AICLW/wechatAiclaw/.venv/lib/python3.9/site-packages/click/utils.cpython-39.pyc,,
|
||||||
|
click-8.1.8.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
||||||
|
click-8.1.8.dist-info/LICENSE.txt,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475
|
||||||
|
click-8.1.8.dist-info/METADATA,sha256=WJtQ6uGS2ybLfvUE4vC0XIhIBr4yFGwjrMBR2fiCQ-Q,2263
|
||||||
|
click-8.1.8.dist-info/RECORD,,
|
||||||
|
click-8.1.8.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
||||||
|
click/__init__.py,sha256=j1DJeCbga4ribkv5uyvIAzI0oFN13fW9mevDKShFelo,3188
|
||||||
|
click/_compat.py,sha256=IGKh_J5QdfKELitnRfTGHneejWxoCw_NX9tfMbdcg3w,18730
|
||||||
|
click/_termui_impl.py,sha256=a5z7I9gOFeMmu7Gb6_RPyQ8GPuVP1EeblixcWSPSQPk,24783
|
||||||
|
click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353
|
||||||
|
click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860
|
||||||
|
click/core.py,sha256=Q1nEVdctZwvIPOlt4vfHko0TYnHCeE40UEEul8Wpyvs,114748
|
||||||
|
click/decorators.py,sha256=7t6F-QWowtLh6F_6l-4YV4Y4yNTcqFQEu9i37zIz68s,18925
|
||||||
|
click/exceptions.py,sha256=V7zDT6emqJ8iNl0kF1P5kpFmLMWQ1T1L7aNNKM4YR0w,9600
|
||||||
|
click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706
|
||||||
|
click/globals.py,sha256=cuJ6Bbo073lgEEmhjr394PeM-QFmXM-Ci-wmfsd7H5g,1954
|
||||||
|
click/parser.py,sha256=h4sndcpF5OHrZQN8vD8IWb5OByvW7ABbhRToxovrqS8,19067
|
||||||
|
click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||||
|
click/shell_completion.py,sha256=TR0dXEGcvWb9Eo3aaQEXGhnvNS3FF4H4QcuLnvAvYo4,18636
|
||||||
|
click/termui.py,sha256=dLxiS70UOvIYBda_nEEZaPAFOVDVmRs1sEPMuLDowQo,28310
|
||||||
|
click/testing.py,sha256=3RA8anCf7TZ8-5RAF5it2Te-aWXBAL5VLasQnMiC2ZQ,16282
|
||||||
|
click/types.py,sha256=BD5Qqq4h-8kawBmOIzJlmq4xzThAf4wCvaOLZSBDNx0,36422
|
||||||
|
click/utils.py,sha256=ce-IrO9ilII76LGkU354pOdHbepM8UftfNH7SfMU_28,20330
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
Wheel-Version: 1.0
|
||||||
|
Generator: flit 3.10.1
|
||||||
|
Root-Is-Purelib: true
|
||||||
|
Tag: py3-none-any
|
||||||
75
.venv/lib/python3.9/site-packages/click/__init__.py
Normal file
75
.venv/lib/python3.9/site-packages/click/__init__.py
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
"""
|
||||||
|
Click is a simple Python module inspired by the stdlib optparse to make
|
||||||
|
writing command line scripts fun. Unlike other modules, it's based
|
||||||
|
around a simple API that does not come with too much magic and is
|
||||||
|
composable.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from .core import Argument as Argument
|
||||||
|
from .core import BaseCommand as BaseCommand
|
||||||
|
from .core import Command as Command
|
||||||
|
from .core import CommandCollection as CommandCollection
|
||||||
|
from .core import Context as Context
|
||||||
|
from .core import Group as Group
|
||||||
|
from .core import MultiCommand as MultiCommand
|
||||||
|
from .core import Option as Option
|
||||||
|
from .core import Parameter as Parameter
|
||||||
|
from .decorators import argument as argument
|
||||||
|
from .decorators import command as command
|
||||||
|
from .decorators import confirmation_option as confirmation_option
|
||||||
|
from .decorators import group as group
|
||||||
|
from .decorators import help_option as help_option
|
||||||
|
from .decorators import HelpOption as HelpOption
|
||||||
|
from .decorators import make_pass_decorator as make_pass_decorator
|
||||||
|
from .decorators import option as option
|
||||||
|
from .decorators import pass_context as pass_context
|
||||||
|
from .decorators import pass_obj as pass_obj
|
||||||
|
from .decorators import password_option as password_option
|
||||||
|
from .decorators import version_option as version_option
|
||||||
|
from .exceptions import Abort as Abort
|
||||||
|
from .exceptions import BadArgumentUsage as BadArgumentUsage
|
||||||
|
from .exceptions import BadOptionUsage as BadOptionUsage
|
||||||
|
from .exceptions import BadParameter as BadParameter
|
||||||
|
from .exceptions import ClickException as ClickException
|
||||||
|
from .exceptions import FileError as FileError
|
||||||
|
from .exceptions import MissingParameter as MissingParameter
|
||||||
|
from .exceptions import NoSuchOption as NoSuchOption
|
||||||
|
from .exceptions import UsageError as UsageError
|
||||||
|
from .formatting import HelpFormatter as HelpFormatter
|
||||||
|
from .formatting import wrap_text as wrap_text
|
||||||
|
from .globals import get_current_context as get_current_context
|
||||||
|
from .parser import OptionParser as OptionParser
|
||||||
|
from .termui import clear as clear
|
||||||
|
from .termui import confirm as confirm
|
||||||
|
from .termui import echo_via_pager as echo_via_pager
|
||||||
|
from .termui import edit as edit
|
||||||
|
from .termui import getchar as getchar
|
||||||
|
from .termui import launch as launch
|
||||||
|
from .termui import pause as pause
|
||||||
|
from .termui import progressbar as progressbar
|
||||||
|
from .termui import prompt as prompt
|
||||||
|
from .termui import secho as secho
|
||||||
|
from .termui import style as style
|
||||||
|
from .termui import unstyle as unstyle
|
||||||
|
from .types import BOOL as BOOL
|
||||||
|
from .types import Choice as Choice
|
||||||
|
from .types import DateTime as DateTime
|
||||||
|
from .types import File as File
|
||||||
|
from .types import FLOAT as FLOAT
|
||||||
|
from .types import FloatRange as FloatRange
|
||||||
|
from .types import INT as INT
|
||||||
|
from .types import IntRange as IntRange
|
||||||
|
from .types import ParamType as ParamType
|
||||||
|
from .types import Path as Path
|
||||||
|
from .types import STRING as STRING
|
||||||
|
from .types import Tuple as Tuple
|
||||||
|
from .types import UNPROCESSED as UNPROCESSED
|
||||||
|
from .types import UUID as UUID
|
||||||
|
from .utils import echo as echo
|
||||||
|
from .utils import format_filename as format_filename
|
||||||
|
from .utils import get_app_dir as get_app_dir
|
||||||
|
from .utils import get_binary_stream as get_binary_stream
|
||||||
|
from .utils import get_text_stream as get_text_stream
|
||||||
|
from .utils import open_file as open_file
|
||||||
|
|
||||||
|
__version__ = "8.1.8"
|
||||||
623
.venv/lib/python3.9/site-packages/click/_compat.py
Normal file
623
.venv/lib/python3.9/site-packages/click/_compat.py
Normal file
@@ -0,0 +1,623 @@
|
|||||||
|
import codecs
|
||||||
|
import io
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import typing as t
|
||||||
|
from weakref import WeakKeyDictionary
|
||||||
|
|
||||||
|
CYGWIN = sys.platform.startswith("cygwin")
|
||||||
|
WIN = sys.platform.startswith("win")
|
||||||
|
auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None
|
||||||
|
_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]")
|
||||||
|
|
||||||
|
|
||||||
|
def _make_text_stream(
|
||||||
|
stream: t.BinaryIO,
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
force_readable: bool = False,
|
||||||
|
force_writable: bool = False,
|
||||||
|
) -> t.TextIO:
|
||||||
|
if encoding is None:
|
||||||
|
encoding = get_best_encoding(stream)
|
||||||
|
if errors is None:
|
||||||
|
errors = "replace"
|
||||||
|
return _NonClosingTextIOWrapper(
|
||||||
|
stream,
|
||||||
|
encoding,
|
||||||
|
errors,
|
||||||
|
line_buffering=True,
|
||||||
|
force_readable=force_readable,
|
||||||
|
force_writable=force_writable,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def is_ascii_encoding(encoding: str) -> bool:
|
||||||
|
"""Checks if a given encoding is ascii."""
|
||||||
|
try:
|
||||||
|
return codecs.lookup(encoding).name == "ascii"
|
||||||
|
except LookupError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_best_encoding(stream: t.IO[t.Any]) -> str:
|
||||||
|
"""Returns the default stream encoding if not found."""
|
||||||
|
rv = getattr(stream, "encoding", None) or sys.getdefaultencoding()
|
||||||
|
if is_ascii_encoding(rv):
|
||||||
|
return "utf-8"
|
||||||
|
return rv
|
||||||
|
|
||||||
|
|
||||||
|
class _NonClosingTextIOWrapper(io.TextIOWrapper):
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
stream: t.BinaryIO,
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
force_readable: bool = False,
|
||||||
|
force_writable: bool = False,
|
||||||
|
**extra: t.Any,
|
||||||
|
) -> None:
|
||||||
|
self._stream = stream = t.cast(
|
||||||
|
t.BinaryIO, _FixupStream(stream, force_readable, force_writable)
|
||||||
|
)
|
||||||
|
super().__init__(stream, encoding, errors, **extra)
|
||||||
|
|
||||||
|
def __del__(self) -> None:
|
||||||
|
try:
|
||||||
|
self.detach()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def isatty(self) -> bool:
|
||||||
|
# https://bitbucket.org/pypy/pypy/issue/1803
|
||||||
|
return self._stream.isatty()
|
||||||
|
|
||||||
|
|
||||||
|
class _FixupStream:
|
||||||
|
"""The new io interface needs more from streams than streams
|
||||||
|
traditionally implement. As such, this fix-up code is necessary in
|
||||||
|
some circumstances.
|
||||||
|
|
||||||
|
The forcing of readable and writable flags are there because some tools
|
||||||
|
put badly patched objects on sys (one such offender are certain version
|
||||||
|
of jupyter notebook).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
stream: t.BinaryIO,
|
||||||
|
force_readable: bool = False,
|
||||||
|
force_writable: bool = False,
|
||||||
|
):
|
||||||
|
self._stream = stream
|
||||||
|
self._force_readable = force_readable
|
||||||
|
self._force_writable = force_writable
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> t.Any:
|
||||||
|
return getattr(self._stream, name)
|
||||||
|
|
||||||
|
def read1(self, size: int) -> bytes:
|
||||||
|
f = getattr(self._stream, "read1", None)
|
||||||
|
|
||||||
|
if f is not None:
|
||||||
|
return t.cast(bytes, f(size))
|
||||||
|
|
||||||
|
return self._stream.read(size)
|
||||||
|
|
||||||
|
def readable(self) -> bool:
|
||||||
|
if self._force_readable:
|
||||||
|
return True
|
||||||
|
x = getattr(self._stream, "readable", None)
|
||||||
|
if x is not None:
|
||||||
|
return t.cast(bool, x())
|
||||||
|
try:
|
||||||
|
self._stream.read(0)
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def writable(self) -> bool:
|
||||||
|
if self._force_writable:
|
||||||
|
return True
|
||||||
|
x = getattr(self._stream, "writable", None)
|
||||||
|
if x is not None:
|
||||||
|
return t.cast(bool, x())
|
||||||
|
try:
|
||||||
|
self._stream.write("") # type: ignore
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
self._stream.write(b"")
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def seekable(self) -> bool:
|
||||||
|
x = getattr(self._stream, "seekable", None)
|
||||||
|
if x is not None:
|
||||||
|
return t.cast(bool, x())
|
||||||
|
try:
|
||||||
|
self._stream.seek(self._stream.tell())
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||||
|
try:
|
||||||
|
return isinstance(stream.read(0), bytes)
|
||||||
|
except Exception:
|
||||||
|
return default
|
||||||
|
# This happens in some cases where the stream was already
|
||||||
|
# closed. In this case, we assume the default.
|
||||||
|
|
||||||
|
|
||||||
|
def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool:
|
||||||
|
try:
|
||||||
|
stream.write(b"")
|
||||||
|
except Exception:
|
||||||
|
try:
|
||||||
|
stream.write("")
|
||||||
|
return False
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return default
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
|
||||||
|
# We need to figure out if the given stream is already binary.
|
||||||
|
# This can happen because the official docs recommend detaching
|
||||||
|
# the streams to get binary streams. Some code might do this, so
|
||||||
|
# we need to deal with this case explicitly.
|
||||||
|
if _is_binary_reader(stream, False):
|
||||||
|
return t.cast(t.BinaryIO, stream)
|
||||||
|
|
||||||
|
buf = getattr(stream, "buffer", None)
|
||||||
|
|
||||||
|
# Same situation here; this time we assume that the buffer is
|
||||||
|
# actually binary in case it's closed.
|
||||||
|
if buf is not None and _is_binary_reader(buf, True):
|
||||||
|
return t.cast(t.BinaryIO, buf)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]:
|
||||||
|
# We need to figure out if the given stream is already binary.
|
||||||
|
# This can happen because the official docs recommend detaching
|
||||||
|
# the streams to get binary streams. Some code might do this, so
|
||||||
|
# we need to deal with this case explicitly.
|
||||||
|
if _is_binary_writer(stream, False):
|
||||||
|
return t.cast(t.BinaryIO, stream)
|
||||||
|
|
||||||
|
buf = getattr(stream, "buffer", None)
|
||||||
|
|
||||||
|
# Same situation here; this time we assume that the buffer is
|
||||||
|
# actually binary in case it's closed.
|
||||||
|
if buf is not None and _is_binary_writer(buf, True):
|
||||||
|
return t.cast(t.BinaryIO, buf)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _stream_is_misconfigured(stream: t.TextIO) -> bool:
|
||||||
|
"""A stream is misconfigured if its encoding is ASCII."""
|
||||||
|
# If the stream does not have an encoding set, we assume it's set
|
||||||
|
# to ASCII. This appears to happen in certain unittest
|
||||||
|
# environments. It's not quite clear what the correct behavior is
|
||||||
|
# but this at least will force Click to recover somehow.
|
||||||
|
return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii")
|
||||||
|
|
||||||
|
|
||||||
|
def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool:
|
||||||
|
"""A stream attribute is compatible if it is equal to the
|
||||||
|
desired value or the desired value is unset and the attribute
|
||||||
|
has a value.
|
||||||
|
"""
|
||||||
|
stream_value = getattr(stream, attr, None)
|
||||||
|
return stream_value == value or (value is None and stream_value is not None)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_compatible_text_stream(
|
||||||
|
stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
|
||||||
|
) -> bool:
|
||||||
|
"""Check if a stream's encoding and errors attributes are
|
||||||
|
compatible with the desired values.
|
||||||
|
"""
|
||||||
|
return _is_compat_stream_attr(
|
||||||
|
stream, "encoding", encoding
|
||||||
|
) and _is_compat_stream_attr(stream, "errors", errors)
|
||||||
|
|
||||||
|
|
||||||
|
def _force_correct_text_stream(
|
||||||
|
text_stream: t.IO[t.Any],
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
is_binary: t.Callable[[t.IO[t.Any], bool], bool],
|
||||||
|
find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]],
|
||||||
|
force_readable: bool = False,
|
||||||
|
force_writable: bool = False,
|
||||||
|
) -> t.TextIO:
|
||||||
|
if is_binary(text_stream, False):
|
||||||
|
binary_reader = t.cast(t.BinaryIO, text_stream)
|
||||||
|
else:
|
||||||
|
text_stream = t.cast(t.TextIO, text_stream)
|
||||||
|
# If the stream looks compatible, and won't default to a
|
||||||
|
# misconfigured ascii encoding, return it as-is.
|
||||||
|
if _is_compatible_text_stream(text_stream, encoding, errors) and not (
|
||||||
|
encoding is None and _stream_is_misconfigured(text_stream)
|
||||||
|
):
|
||||||
|
return text_stream
|
||||||
|
|
||||||
|
# Otherwise, get the underlying binary reader.
|
||||||
|
possible_binary_reader = find_binary(text_stream)
|
||||||
|
|
||||||
|
# If that's not possible, silently use the original reader
|
||||||
|
# and get mojibake instead of exceptions.
|
||||||
|
if possible_binary_reader is None:
|
||||||
|
return text_stream
|
||||||
|
|
||||||
|
binary_reader = possible_binary_reader
|
||||||
|
|
||||||
|
# Default errors to replace instead of strict in order to get
|
||||||
|
# something that works.
|
||||||
|
if errors is None:
|
||||||
|
errors = "replace"
|
||||||
|
|
||||||
|
# Wrap the binary stream in a text stream with the correct
|
||||||
|
# encoding parameters.
|
||||||
|
return _make_text_stream(
|
||||||
|
binary_reader,
|
||||||
|
encoding,
|
||||||
|
errors,
|
||||||
|
force_readable=force_readable,
|
||||||
|
force_writable=force_writable,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _force_correct_text_reader(
|
||||||
|
text_reader: t.IO[t.Any],
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
force_readable: bool = False,
|
||||||
|
) -> t.TextIO:
|
||||||
|
return _force_correct_text_stream(
|
||||||
|
text_reader,
|
||||||
|
encoding,
|
||||||
|
errors,
|
||||||
|
_is_binary_reader,
|
||||||
|
_find_binary_reader,
|
||||||
|
force_readable=force_readable,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _force_correct_text_writer(
|
||||||
|
text_writer: t.IO[t.Any],
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
force_writable: bool = False,
|
||||||
|
) -> t.TextIO:
|
||||||
|
return _force_correct_text_stream(
|
||||||
|
text_writer,
|
||||||
|
encoding,
|
||||||
|
errors,
|
||||||
|
_is_binary_writer,
|
||||||
|
_find_binary_writer,
|
||||||
|
force_writable=force_writable,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_binary_stdin() -> t.BinaryIO:
|
||||||
|
reader = _find_binary_reader(sys.stdin)
|
||||||
|
if reader is None:
|
||||||
|
raise RuntimeError("Was not able to determine binary stream for sys.stdin.")
|
||||||
|
return reader
|
||||||
|
|
||||||
|
|
||||||
|
def get_binary_stdout() -> t.BinaryIO:
|
||||||
|
writer = _find_binary_writer(sys.stdout)
|
||||||
|
if writer is None:
|
||||||
|
raise RuntimeError("Was not able to determine binary stream for sys.stdout.")
|
||||||
|
return writer
|
||||||
|
|
||||||
|
|
||||||
|
def get_binary_stderr() -> t.BinaryIO:
|
||||||
|
writer = _find_binary_writer(sys.stderr)
|
||||||
|
if writer is None:
|
||||||
|
raise RuntimeError("Was not able to determine binary stream for sys.stderr.")
|
||||||
|
return writer
|
||||||
|
|
||||||
|
|
||||||
|
def get_text_stdin(
|
||||||
|
encoding: t.Optional[str] = None, errors: t.Optional[str] = None
|
||||||
|
) -> t.TextIO:
|
||||||
|
rv = _get_windows_console_stream(sys.stdin, encoding, errors)
|
||||||
|
if rv is not None:
|
||||||
|
return rv
|
||||||
|
return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_text_stdout(
|
||||||
|
encoding: t.Optional[str] = None, errors: t.Optional[str] = None
|
||||||
|
) -> t.TextIO:
|
||||||
|
rv = _get_windows_console_stream(sys.stdout, encoding, errors)
|
||||||
|
if rv is not None:
|
||||||
|
return rv
|
||||||
|
return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True)
|
||||||
|
|
||||||
|
|
||||||
|
def get_text_stderr(
|
||||||
|
encoding: t.Optional[str] = None, errors: t.Optional[str] = None
|
||||||
|
) -> t.TextIO:
|
||||||
|
rv = _get_windows_console_stream(sys.stderr, encoding, errors)
|
||||||
|
if rv is not None:
|
||||||
|
return rv
|
||||||
|
return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _wrap_io_open(
|
||||||
|
file: t.Union[str, "os.PathLike[str]", int],
|
||||||
|
mode: str,
|
||||||
|
encoding: t.Optional[str],
|
||||||
|
errors: t.Optional[str],
|
||||||
|
) -> t.IO[t.Any]:
|
||||||
|
"""Handles not passing ``encoding`` and ``errors`` in binary mode."""
|
||||||
|
if "b" in mode:
|
||||||
|
return open(file, mode)
|
||||||
|
|
||||||
|
return open(file, mode, encoding=encoding, errors=errors)
|
||||||
|
|
||||||
|
|
||||||
|
def open_stream(
|
||||||
|
filename: "t.Union[str, os.PathLike[str]]",
|
||||||
|
mode: str = "r",
|
||||||
|
encoding: t.Optional[str] = None,
|
||||||
|
errors: t.Optional[str] = "strict",
|
||||||
|
atomic: bool = False,
|
||||||
|
) -> t.Tuple[t.IO[t.Any], bool]:
|
||||||
|
binary = "b" in mode
|
||||||
|
filename = os.fspath(filename)
|
||||||
|
|
||||||
|
# Standard streams first. These are simple because they ignore the
|
||||||
|
# atomic flag. Use fsdecode to handle Path("-").
|
||||||
|
if os.fsdecode(filename) == "-":
|
||||||
|
if any(m in mode for m in ["w", "a", "x"]):
|
||||||
|
if binary:
|
||||||
|
return get_binary_stdout(), False
|
||||||
|
return get_text_stdout(encoding=encoding, errors=errors), False
|
||||||
|
if binary:
|
||||||
|
return get_binary_stdin(), False
|
||||||
|
return get_text_stdin(encoding=encoding, errors=errors), False
|
||||||
|
|
||||||
|
# Non-atomic writes directly go out through the regular open functions.
|
||||||
|
if not atomic:
|
||||||
|
return _wrap_io_open(filename, mode, encoding, errors), True
|
||||||
|
|
||||||
|
# Some usability stuff for atomic writes
|
||||||
|
if "a" in mode:
|
||||||
|
raise ValueError(
|
||||||
|
"Appending to an existing file is not supported, because that"
|
||||||
|
" would involve an expensive `copy`-operation to a temporary"
|
||||||
|
" file. Open the file in normal `w`-mode and copy explicitly"
|
||||||
|
" if that's what you're after."
|
||||||
|
)
|
||||||
|
if "x" in mode:
|
||||||
|
raise ValueError("Use the `overwrite`-parameter instead.")
|
||||||
|
if "w" not in mode:
|
||||||
|
raise ValueError("Atomic writes only make sense with `w`-mode.")
|
||||||
|
|
||||||
|
# Atomic writes are more complicated. They work by opening a file
|
||||||
|
# as a proxy in the same folder and then using the fdopen
|
||||||
|
# functionality to wrap it in a Python file. Then we wrap it in an
|
||||||
|
# atomic file that moves the file over on close.
|
||||||
|
import errno
|
||||||
|
import random
|
||||||
|
|
||||||
|
try:
|
||||||
|
perm: t.Optional[int] = os.stat(filename).st_mode
|
||||||
|
except OSError:
|
||||||
|
perm = None
|
||||||
|
|
||||||
|
flags = os.O_RDWR | os.O_CREAT | os.O_EXCL
|
||||||
|
|
||||||
|
if binary:
|
||||||
|
flags |= getattr(os, "O_BINARY", 0)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
tmp_filename = os.path.join(
|
||||||
|
os.path.dirname(filename),
|
||||||
|
f".__atomic-write{random.randrange(1 << 32):08x}",
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm)
|
||||||
|
break
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.EEXIST or (
|
||||||
|
os.name == "nt"
|
||||||
|
and e.errno == errno.EACCES
|
||||||
|
and os.path.isdir(e.filename)
|
||||||
|
and os.access(e.filename, os.W_OK)
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
raise
|
||||||
|
|
||||||
|
if perm is not None:
|
||||||
|
os.chmod(tmp_filename, perm) # in case perm includes bits in umask
|
||||||
|
|
||||||
|
f = _wrap_io_open(fd, mode, encoding, errors)
|
||||||
|
af = _AtomicFile(f, tmp_filename, os.path.realpath(filename))
|
||||||
|
return t.cast(t.IO[t.Any], af), True
|
||||||
|
|
||||||
|
|
||||||
|
class _AtomicFile:
|
||||||
|
def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None:
|
||||||
|
self._f = f
|
||||||
|
self._tmp_filename = tmp_filename
|
||||||
|
self._real_filename = real_filename
|
||||||
|
self.closed = False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self) -> str:
|
||||||
|
return self._real_filename
|
||||||
|
|
||||||
|
def close(self, delete: bool = False) -> None:
|
||||||
|
if self.closed:
|
||||||
|
return
|
||||||
|
self._f.close()
|
||||||
|
os.replace(self._tmp_filename, self._real_filename)
|
||||||
|
self.closed = True
|
||||||
|
|
||||||
|
def __getattr__(self, name: str) -> t.Any:
|
||||||
|
return getattr(self._f, name)
|
||||||
|
|
||||||
|
def __enter__(self) -> "_AtomicFile":
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None:
|
||||||
|
self.close(delete=exc_type is not None)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
return repr(self._f)
|
||||||
|
|
||||||
|
|
||||||
|
def strip_ansi(value: str) -> str:
|
||||||
|
return _ansi_re.sub("", value)
|
||||||
|
|
||||||
|
|
||||||
|
def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool:
|
||||||
|
while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)):
|
||||||
|
stream = stream._stream
|
||||||
|
|
||||||
|
return stream.__class__.__module__.startswith("ipykernel.")
|
||||||
|
|
||||||
|
|
||||||
|
def should_strip_ansi(
|
||||||
|
stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None
|
||||||
|
) -> bool:
|
||||||
|
if color is None:
|
||||||
|
if stream is None:
|
||||||
|
stream = sys.stdin
|
||||||
|
return not isatty(stream) and not _is_jupyter_kernel_output(stream)
|
||||||
|
return not color
|
||||||
|
|
||||||
|
|
||||||
|
# On Windows, wrap the output streams with colorama to support ANSI
|
||||||
|
# color codes.
|
||||||
|
# NOTE: double check is needed so mypy does not analyze this on Linux
|
||||||
|
if sys.platform.startswith("win") and WIN:
|
||||||
|
from ._winconsole import _get_windows_console_stream
|
||||||
|
|
||||||
|
def _get_argv_encoding() -> str:
|
||||||
|
import locale
|
||||||
|
|
||||||
|
return locale.getpreferredencoding()
|
||||||
|
|
||||||
|
_ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||||
|
|
||||||
|
def auto_wrap_for_ansi(
|
||||||
|
stream: t.TextIO, color: t.Optional[bool] = None
|
||||||
|
) -> t.TextIO:
|
||||||
|
"""Support ANSI color and style codes on Windows by wrapping a
|
||||||
|
stream with colorama.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
cached = _ansi_stream_wrappers.get(stream)
|
||||||
|
except Exception:
|
||||||
|
cached = None
|
||||||
|
|
||||||
|
if cached is not None:
|
||||||
|
return cached
|
||||||
|
|
||||||
|
import colorama
|
||||||
|
|
||||||
|
strip = should_strip_ansi(stream, color)
|
||||||
|
ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip)
|
||||||
|
rv = t.cast(t.TextIO, ansi_wrapper.stream)
|
||||||
|
_write = rv.write
|
||||||
|
|
||||||
|
def _safe_write(s):
|
||||||
|
try:
|
||||||
|
return _write(s)
|
||||||
|
except BaseException:
|
||||||
|
ansi_wrapper.reset_all()
|
||||||
|
raise
|
||||||
|
|
||||||
|
rv.write = _safe_write
|
||||||
|
|
||||||
|
try:
|
||||||
|
_ansi_stream_wrappers[stream] = rv
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return rv
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
def _get_argv_encoding() -> str:
|
||||||
|
return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding()
|
||||||
|
|
||||||
|
def _get_windows_console_stream(
|
||||||
|
f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str]
|
||||||
|
) -> t.Optional[t.TextIO]:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def term_len(x: str) -> int:
|
||||||
|
return len(strip_ansi(x))
|
||||||
|
|
||||||
|
|
||||||
|
def isatty(stream: t.IO[t.Any]) -> bool:
|
||||||
|
try:
|
||||||
|
return stream.isatty()
|
||||||
|
except Exception:
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _make_cached_stream_func(
|
||||||
|
src_func: t.Callable[[], t.Optional[t.TextIO]],
|
||||||
|
wrapper_func: t.Callable[[], t.TextIO],
|
||||||
|
) -> t.Callable[[], t.Optional[t.TextIO]]:
|
||||||
|
cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary()
|
||||||
|
|
||||||
|
def func() -> t.Optional[t.TextIO]:
|
||||||
|
stream = src_func()
|
||||||
|
|
||||||
|
if stream is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
rv = cache.get(stream)
|
||||||
|
except Exception:
|
||||||
|
rv = None
|
||||||
|
if rv is not None:
|
||||||
|
return rv
|
||||||
|
rv = wrapper_func()
|
||||||
|
try:
|
||||||
|
cache[stream] = rv
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return rv
|
||||||
|
|
||||||
|
return func
|
||||||
|
|
||||||
|
|
||||||
|
_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin)
|
||||||
|
_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout)
|
||||||
|
_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr)
|
||||||
|
|
||||||
|
|
||||||
|
binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = {
|
||||||
|
"stdin": get_binary_stdin,
|
||||||
|
"stdout": get_binary_stdout,
|
||||||
|
"stderr": get_binary_stderr,
|
||||||
|
}
|
||||||
|
|
||||||
|
text_streams: t.Mapping[
|
||||||
|
str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO]
|
||||||
|
] = {
|
||||||
|
"stdin": get_text_stdin,
|
||||||
|
"stdout": get_text_stdout,
|
||||||
|
"stderr": get_text_stderr,
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user