Feat: Remove venv
@@ -1,247 +0,0 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.

.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.

.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.

.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').

.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.

.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.

.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.

.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.

.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:

PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser

For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170

#>
Param(
    [Parameter(Mandatory = $false)]
    [String]
    $VenvDir,
    [Parameter(Mandatory = $false)]
    [String]
    $Prompt
)

<# Function declarations --------------------------------------------------- #>

<#
.Synopsis
Remove all shell session elements added by the Activate script, including the
addition of the virtual environment's Python executable from the beginning of
the PATH variable.

.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.

#>
function global:deactivate ([switch]$NonDestructive) {
    # Revert to original values

    # The prior prompt:
    if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
        Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
        Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
    }

    # The prior PYTHONHOME:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
        Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
    }

    # The prior PATH:
    if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
        Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
        Remove-Item -Path Env:_OLD_VIRTUAL_PATH
    }

    # Just remove the VIRTUAL_ENV altogether:
    if (Test-Path -Path Env:VIRTUAL_ENV) {
        Remove-Item -Path env:VIRTUAL_ENV
    }

    # Just remove VIRTUAL_ENV_PROMPT altogether.
    if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
        Remove-Item -Path env:VIRTUAL_ENV_PROMPT
    }

    # Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
    if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
        Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
    }

    # Leave deactivate function in the global namespace if requested:
    if (-not $NonDestructive) {
        Remove-Item -Path function:deactivate
    }
}

<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.

For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.

If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
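
For illustration only (the exact keys and values vary by environment), a
typical pyvenv.cfg contains `key = value` lines such as:

    home = /usr/bin
    include-system-site-packages = false
    version = 3.x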

.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
    [String]
    $ConfigDir
) {
    Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"

    # Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
    $pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue

    # An empty map will be returned if no config file is found.
    $pyvenvConfig = @{ }

    if ($pyvenvConfigPath) {

        Write-Verbose "File exists, parse `key = value` lines"
        $pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath

        $pyvenvConfigContent | ForEach-Object {
            $keyval = $PSItem -split "\s*=\s*", 2
            if ($keyval[0] -and $keyval[1]) {
                $val = $keyval[1]

                # Remove extraneous quotations around a string value.
                if ("'""".Contains($val.Substring(0, 1))) {
                    $val = $val.Substring(1, $val.Length - 2)
                }

                $pyvenvConfig[$keyval[0]] = $val
                Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
            }
        }
    }
    return $pyvenvConfig
}


<# Begin Activate script --------------------------------------------------- #>

# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath

Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"

# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
    Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
    Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
    $VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
    Write-Verbose "VenvDir=$VenvDir"
}

# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir

# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
    Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
    Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
    if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
        Write-Verbose "  Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
        $Prompt = $pyvenvCfg['prompt'];
    }
    else {
        Write-Verbose "  Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
        Write-Verbose "  Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
        $Prompt = Split-Path -Path $venvDir -Leaf
    }
}

Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"

# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive

# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir

if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {

    Write-Verbose "Setting prompt to '$Prompt'"

    # Set the prompt to include the env name
    # Make sure _OLD_VIRTUAL_PROMPT is global
    function global:_OLD_VIRTUAL_PROMPT { "" }
    Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
    New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt

    function global:prompt {
        Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
        _OLD_VIRTUAL_PROMPT
    }
    $env:VIRTUAL_ENV_PROMPT = $Prompt
}

# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
    Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
    Remove-Item -Path Env:PYTHONHOME
}

# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"
@@ -1,69 +0,0 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
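# For example, from the directory that contains the venv (illustrative path,
# adjust to your environment):
#   source venv/bin/activate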

deactivate () {
    # reset old environment variables
    if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
        PATH="${_OLD_VIRTUAL_PATH:-}"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
        PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
        hash -r 2> /dev/null
    fi

    if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
        PS1="${_OLD_VIRTUAL_PS1:-}"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1:-}" = "nondestructive" ] ; then
    # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/home/mongar/Escritorio/pruebas_oc/venv"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1:-}"
    PS1="(venv) ${PS1:-}"
    export PS1
    VIRTUAL_ENV_PROMPT="(venv) "
    export VIRTUAL_ENV_PROMPT
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
    hash -r 2> /dev/null
fi
@@ -1,154 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# PYTHON_ARGCOMPLETE_OK

# Copyright 2012-2023, Andrey Kislyuk and argcomplete contributors.
# Licensed under the Apache License. See https://github.com/kislyuk/argcomplete for more info.

"""
Activate the generic bash-completion script or zsh completion autoload function for the argcomplete module.
"""

import argparse
import os
import shutil
import site
import subprocess
import sys

import argcomplete

zsh_shellcode = """
# Begin added by argcomplete
fpath=( {zsh_fpath} "${{fpath[@]}}" )
# End added by argcomplete
"""

bash_shellcode = """
# Begin added by argcomplete
source "{activator}"
# End added by argcomplete
"""


def get_local_dir():
    try:
        return subprocess.check_output(["brew", "--prefix"]).decode().strip()
    except (FileNotFoundError, subprocess.CalledProcessError):
        return "/usr/local"


def get_zsh_system_dir():
    return f"{get_local_dir()}/share/zsh/site-functions"


def get_bash_system_dir():
    if "BASH_COMPLETION_COMPAT_DIR" in os.environ:
        return os.environ["BASH_COMPLETION_COMPAT_DIR"]
    elif sys.platform == "darwin":
        return f"{get_local_dir()}/etc/bash_completion.d"  # created by homebrew
    else:
        return "/etc/bash_completion.d"  # created by bash-completion


def get_activator_dir():
    return os.path.join(os.path.abspath(os.path.dirname(argcomplete.__file__)), "bash_completion.d")


def get_activator_path():
    return os.path.join(get_activator_dir(), "_python-argcomplete")


def install_to_destination(dest):
    activator = get_activator_path()
    if dest == "-":
        with open(activator) as fh:
            sys.stdout.write(fh.read())
        return
    destdir = os.path.dirname(dest)
    if not os.path.exists(destdir):
        try:
            os.makedirs(destdir, exist_ok=True)
        except Exception as e:
            parser.error(f"path {destdir} does not exist and could not be created: {e}")
    try:
        print(f"Installing {activator} to {dest}...", file=sys.stderr)
        shutil.copy(activator, dest)
        print("Installed.", file=sys.stderr)
    except Exception as e:
        parser.error(
            f"while installing to {dest}: {e}. Please run this command using sudo, or see --help for more options."
        )


def get_consent():
    if args.yes is True:
        return True
    while True:
        res = input("OK to proceed? [y/n] ")
        if res.lower() not in {"y", "n", "yes", "no"}:
            print('Please answer "yes" or "no".', file=sys.stderr)
        elif res.lower() in {"y", "yes"}:
            return True
        else:
            return False


def append_to_config_file(path, shellcode):
    if os.path.exists(path):
        with open(path, 'r') as fh:
            if shellcode in fh.read():
                print(f"The code already exists in the file {path}.", file=sys.stderr)
                return
    print(f"argcomplete needs to append to the file {path}. The following code will be appended:", file=sys.stderr)
    for line in shellcode.splitlines():
        print(">", line, file=sys.stderr)
    if not get_consent():
        print("Not added.", file=sys.stderr)
        return
    print(f"Adding shellcode to {path}...", file=sys.stderr)
    with open(path, "a") as fh:
        fh.write(shellcode)
    print("Added.", file=sys.stderr)


def link_user_rcfiles():
    # TODO: warn if running as superuser
    zsh_rcfile = os.path.join(os.path.expanduser(os.environ.get("ZDOTDIR", "~")), ".zshenv")
    append_to_config_file(zsh_rcfile, zsh_shellcode.format(zsh_fpath=get_activator_dir()))

    bash_completion_user_file = os.path.expanduser("~/.bash_completion")
    append_to_config_file(bash_completion_user_file, bash_shellcode.format(activator=get_activator_path()))


parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-y", "--yes", help="automatically answer yes for all questions", action="store_true")
parser.add_argument("--dest", help='Specify the shell completion modules directory to install into, or "-" for stdout')
parser.add_argument("--user", help="Install into user directory", action="store_true")
argcomplete.autocomplete(parser)
args = parser.parse_args()
destinations = []

if args.dest:
    if args.dest != "-" and not os.path.exists(args.dest):
        parser.error(f"directory {args.dest} was specified via --dest, but it does not exist")
    destinations.append(args.dest)
elif site.ENABLE_USER_SITE and site.USER_SITE in argcomplete.__file__:
    print(
        "Argcomplete was installed in the user site local directory. Defaulting to user installation.", file=sys.stderr
    )
    link_user_rcfiles()
elif sys.prefix != sys.base_prefix:
    print("Argcomplete was installed in a virtual environment. Defaulting to user installation.", file=sys.stderr)
    link_user_rcfiles()
elif args.user:
    link_user_rcfiles()
else:
    print("Defaulting to system-wide installation.", file=sys.stderr)
    destinations.append(f"{get_zsh_system_dir()}/_python-argcomplete")
    destinations.append(f"{get_bash_system_dir()}/python-argcomplete")

for destination in destinations:
    install_to_destination(destination)

if args.dest is None:
    print("Please restart your shell or source the installed file to activate it.", file=sys.stderr)
@@ -1,26 +0,0 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/mongar/Escritorio/pruebas_oc/venv"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"


set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    set prompt = "(venv) $prompt"
    setenv VIRTUAL_ENV_PROMPT "(venv) "
endif

alias pydoc python -m pydoc

rehash
@@ -1,69 +0,0 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/); you cannot run it directly.

function deactivate -d "Exit virtual environment and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        set -e _OLD_FISH_PROMPT_OVERRIDE
        # prevents error when using nested fish instances (Issue #93858)
        if functions -q _old_fish_prompt
            functions -e fish_prompt
            functions -c _old_fish_prompt fish_prompt
            functions -e _old_fish_prompt
        end
    end

    set -e VIRTUAL_ENV
    set -e VIRTUAL_ENV_PROMPT
    if test "$argv[1]" != "nondestructive"
        # Self-destruct!
        functions -e deactivate
    end
end

# Unset irrelevant variables.
deactivate nondestructive

set -gx VIRTUAL_ENV "/home/mongar/Escritorio/pruebas_oc/venv"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# Unset PYTHONHOME if set.
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # Save the current fish_prompt function as the function _old_fish_prompt.
    functions -c fish_prompt _old_fish_prompt

    # With the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Save the return status of the last command.
        set -l old_status $status

        # Output the venv prompt; color taken from the blue of the Python logo.
        printf "%s%s%s" (set_color 4B8BBE) "(venv) " (set_color normal)

        # Restore the return status of the previous command.
        echo "exit $old_status" | .
        # Output the original/"old" prompt.
        _old_fish_prompt
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
    set -gx VIRTUAL_ENV_PROMPT "(venv) "
end
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from libfuturize.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from libpasteurize.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,266 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# prichunkpng
# Chunk editing tool.

"""
Make a new PNG by adding, deleting, or replacing particular chunks.
"""

import argparse
import collections

# https://docs.python.org/2.7/library/io.html
import io
import re
import string
import struct
import sys
import zlib

# Local module.
import png


Chunk = collections.namedtuple("Chunk", "type content")


class ArgumentError(Exception):
    """A user problem with the command arguments."""


def process(out, args):
    """Process the PNG file args.input to the output, chunk by chunk.
    Chunks can be inserted, removed, replaced, or sometimes edited.
    Chunks are specified by their 4 byte Chunk Type;
    see https://www.w3.org/TR/2003/REC-PNG-20031110/#5Chunk-layout .
    The chunks in args.delete will be removed from the stream.
    The chunks in args.chunk will be inserted into the stream
    with their contents taken from the named files.

    Other options on the args object will create particular
    ancillary chunks.

    .gamma -> gAMA chunk
    .sigbit -> sBIT chunk

    Chunk types need not be official PNG chunks at all.
    Non-standard chunks can be created.
    """

    # Convert options to chunks in the args.chunk list
    if args.gamma:
        v = int(round(1e5 * args.gamma))
        bs = io.BytesIO(struct.pack(">I", v))
        args.chunk.insert(0, Chunk(b"gAMA", bs))
    if args.sigbit:
        v = struct.pack("%dB" % len(args.sigbit), *args.sigbit)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"sBIT", bs))
    if args.iccprofile:
        # http://www.w3.org/TR/PNG/#11iCCP
        v = b"a color profile\x00\x00" + zlib.compress(args.iccprofile.read())
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"iCCP", bs))
    if args.transparent:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11tRNS
        v = struct.pack(">%dH" % len(args.transparent), *args.transparent)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"tRNS", bs))
    if args.background:
        # https://www.w3.org/TR/2003/REC-PNG-20031110/#11bKGD
        v = struct.pack(">%dH" % len(args.background), *args.background)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"bKGD", bs))
    if args.physical:
        # https://www.w3.org/TR/PNG/#11pHYs
        numbers = re.findall(r"(\d+\.?\d*)", args.physical)
        if len(numbers) not in {1, 2}:
            raise ArgumentError("One or two numbers are required for --physical")
        xppu = float(numbers[0])
        if len(numbers) == 1:
            yppu = xppu
        else:
            yppu = float(numbers[1])

        unit_spec = 0
        if args.physical.endswith("dpi"):
            # Convert from DPI to Pixels Per Metre
            # 1 inch is 0.0254 metres
            l = 0.0254
            xppu /= l
            yppu /= l
            unit_spec = 1
        elif args.physical.endswith("ppm"):
            unit_spec = 1

        v = struct.pack("!LLB", round(xppu), round(yppu), unit_spec)
        bs = io.BytesIO(v)
        args.chunk.insert(0, Chunk(b"pHYs", bs))

    # Create:
    # - a set of chunks to delete
    # - a dict of chunks to replace
    # - a list of chunk to add

    delete = set(args.delete)
    # The set of chunks to replace are those where the specification says
    # that there should be at most one of them.
    replacing = set([b"gAMA", b"pHYs", b"sBIT", b"PLTE", b"tRNS", b"sPLT", b"IHDR"])
    replace = dict()
    add = []

    for chunk in args.chunk:
        if chunk.type in replacing:
            replace[chunk.type] = chunk
        else:
            add.append(chunk)

    input = png.Reader(file=args.input)

    return png.write_chunks(out, edit_chunks(input.chunks(), delete, replace, add))


def edit_chunks(chunks, delete, replace, add):
    """
    Iterate over chunks, yielding edited chunks.
    Subtle: the new chunks have to have their contents .read().
    """
    for type, v in chunks:
        if type in delete:
            continue
        if type in replace:
            yield type, replace[type].content.read()
            del replace[type]
            continue

        if b"IDAT" <= type <= b"IDAT" and replace:
            # If there are any chunks on the replace list by
            # the time we reach IDAT, add them all now.
            # put them all on the add list.
            for chunk in replace.values():
                yield chunk.type, chunk.content.read()
            replace = dict()

        if b"IDAT" <= type <= b"IDAT" and add:
            # We reached IDAT; add all remaining chunks now.
            for chunk in add:
                yield chunk.type, chunk.content.read()
            add = []

        yield type, v


def chunk_name(s):
    """
    Type check a chunk name option value.
    """

    # See https://www.w3.org/TR/2003/REC-PNG-20031110/#table51
    valid = len(s) == 4 and set(s) <= set(string.ascii_letters)
    if not valid:
        raise ValueError("Chunk name must be 4 ASCII letters")
    return s.encode("ascii")


def comma_list(s):
    """
    Convert s, a comma separated list of whole numbers,
    into a sequence of int.
    """

    return tuple(int(v) for v in s.split(","))


def hex_color(s):
    """
    Type check and convert a hex color.
    """

    if s.startswith("#"):
        s = s[1:]
    valid = len(s) in [1, 2, 3, 4, 6, 12] and set(s) <= set(string.hexdigits)
    if not valid:
        raise ValueError("colour must be 1,2,3,4,6, or 12 hex-digits")

    # For the 4-bit RGB, expand to 8-bit, by repeating digits.
    if len(s) == 3:
        s = "".join(c + c for c in s)

    if len(s) in [1, 2, 4]:
        # Single grey value.
        return (int(s, 16),)

    if len(s) in [6, 12]:
        w = len(s) // 3
        return tuple(int(s[i : i + w], 16) for i in range(0, len(s), w))


def main(argv=None):
    if argv is None:
        argv = sys.argv

    argv = argv[1:]

    parser = argparse.ArgumentParser()
    parser.add_argument("--gamma", type=float, help="Gamma value for gAMA chunk")
    parser.add_argument(
        "--physical",
        type=str,
        metavar="x[,y][dpi|ppm]",
        help="specify intended pixel size or aspect ratio",
    )
    parser.add_argument(
        "--sigbit",
        type=comma_list,
        metavar="D[,D[,D[,D]]]",
        help="Number of significant bits in each channel",
    )
    parser.add_argument(
        "--iccprofile",
        metavar="file.iccp",
        type=argparse.FileType("rb"),
        help="add an ICC Profile from a file",
    )
    parser.add_argument(
        "--transparent",
        type=hex_color,
        metavar="#RRGGBB",
        help="Specify the colour that is transparent (tRNS chunk)",
    )
    parser.add_argument(
        "--background",
        type=hex_color,
        metavar="#RRGGBB",
        help="background colour for bKGD chunk",
    )
    parser.add_argument(
        "--delete",
        action="append",
        default=[],
        type=chunk_name,
        help="delete the chunk",
    )
    parser.add_argument(
        "--chunk",
        action="append",
        nargs=2,
        default=[],
        type=str,
        help="insert chunk, taking contents from file",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )

    args = parser.parse_args(argv)

    # Reprocess the chunk arguments, converting each pair into a Chunk.
    args.chunk = [
        Chunk(chunk_name(type), open(path, "rb")) for type, path in args.chunk
    ]

    return process(png.binary_stdout(), args)


if __name__ == "__main__":
    main()
@@ -1,81 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys

import png

Description = """Join PNG images in a column top-to-bottom."""


class FormatError(Exception):
    """
    Some problem with the image format.
    """


def join_col(out, l):
    """
    Join the list of images.
    All input images must be same width and
    have the same number of channels.
    They are joined top-to-bottom.
    `out` is the (open file) destination for the output image.
    `l` should be a list of open files (the input image files).
    """

    image = 0
    stream = 0

    # When the first image is read, this will be the reference width,
    # which must be the same for all images.
    width = None
    # Total height (accumulated as images are read).
    height = 0
    # Accumulated rows.
    rows = []

    for f in l:
        stream += 1
        while True:
            im = png.Reader(file=f)
            try:
                im.preamble()
            except EOFError:
                break
            image += 1

            if not width:
                width = im.width
            elif width != im.width:
                raise FormatError('Image %d in stream %d has width %d; does not match %d.' %
                    (image, stream, im.width, width))

            height += im.height
            # Various bugs here because different numbers of channels and depths go wrong.
            w, h, p, info = im.asDirect()
            rows.extend(p)

    # Alarmingly re-use the last info object.
    tinfo = dict(info)
    del tinfo['size']
    w = png.Writer(width, height, **tinfo)

    w.write(out, rows)


def main(argv):
    import argparse

    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )

    args = parser.parse_args()

    return join_col(png.binary_stdout(), args.input)

if __name__ == '__main__':
    main(sys.argv)
@@ -1,254 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# pipdither
# Error Diffusing image dithering.
# Now with serpentine scanning.

# See http://www.efg2.com/Lab/Library/ImageProcessing/DHALF.TXT

# http://www.python.org/doc/2.4.4/lib/module-bisect.html
from bisect import bisect_left


import png


def dither(
    out,
    input,
    bitdepth=1,
    linear=False,
    defaultgamma=1.0,
    targetgamma=None,
    cutoff=0.5,  # see :cutoff:default
):
    """Dither the input PNG `inp` into an image with a smaller bit depth
    and write the result image onto `out`. `bitdepth` specifies the bit
    depth of the new image.

    Normally the source image gamma is honoured (the image is
    converted into a linear light space before being dithered), but
    if the `linear` argument is true then the image is treated as
    being linear already: no gamma conversion is done (this is
    quicker, and if you don't care much about accuracy, it won't
    matter much).

    Images with no gamma indication (no ``gAMA`` chunk) are normally
    treated as linear (gamma = 1.0), but often it can be better
    to assume a different gamma value: For example continuous tone
    photographs intended for presentation on the web often carry
    an implicit assumption of being encoded with a gamma of about
    0.45 (because that's what you get if you just "blat the pixels"
    onto a PC framebuffer), so ``defaultgamma=0.45`` might be a
    good idea. `defaultgamma` does not override a gamma value
    specified in the file itself: It is only used when the file
    does not specify a gamma.

    If you (pointlessly) specify both `linear` and `defaultgamma`,
    `linear` wins.

    The gamma of the output image is, by default, the same as the input
    image. The `targetgamma` argument can be used to specify a
    different gamma for the output image. This effectively recodes the
    image to a different gamma, dithering as we go. The gamma specified
    is the exponent used to encode the output file (and appears in the
    output PNG's ``gAMA`` chunk); it is usually less than 1.

    """

    # Encoding is what happened when the PNG was made (and also what
    # happens when we output the PNG). Decoding is what we do to the
    # source PNG in order to process it.

    # The dithering algorithm is not completely general; it
    # can only do bit depth reduction, not arbitrary palette changes.
    import operator

    maxval = 2 ** bitdepth - 1
    r = png.Reader(file=input)

    _, _, pixels, info = r.asDirect()
    planes = info["planes"]
    # :todo: make an Exception
    assert planes == 1
    width = info["size"][0]
    sourcemaxval = 2 ** info["bitdepth"] - 1

    if linear:
        gamma = 1
    else:
        gamma = info.get("gamma") or defaultgamma

    # Calculate an effective gamma for input and output;
    # then build tables using those.

    # `gamma` (whether it was obtained from the input file or an
    # assumed value) is the encoding gamma.
    # We need the decoding gamma, which is the reciprocal.
    decode = 1.0 / gamma

    # `targetdecode` is the assumed gamma that is going to be used
    # to decode the target PNG.
    # Note that even though we will _encode_ the target PNG we
    # still need the decoding gamma, because
    # the table we use maps from PNG pixel value to linear light level.
    if targetgamma is None:
        targetdecode = decode
    else:
        targetdecode = 1.0 / targetgamma

    incode = build_decode_table(sourcemaxval, decode)

    # For encoding, we still build a decode table, because we
    # use it inverted (searching with bisect).
    outcode = build_decode_table(maxval, targetdecode)

    # The table used for choosing output codes. These values represent
    # the cutoff points between two adjacent output codes.
    # The cutoff parameter can be varied between 0 and 1 to
    # preferentially choose lighter (when cutoff > 0.5) or
    # darker (when cutoff < 0.5) values.
    # :cutoff:default: The default for this used to be 0.75, but
    # testing by drj on 2021-07-30 showed that this produces
    # banding when dithering left-to-right gradients;
    # test with:
    #   priforgepng grl | priditherpng | kitty icat
    choosecode = list(zip(outcode[1:], outcode))
    p = cutoff
    choosecode = [x[0] * p + x[1] * (1.0 - p) for x in choosecode]

    rows = repeat_header(pixels)
    dithered_rows = run_dither(incode, choosecode, outcode, width, rows)
    dithered_rows = remove_header(dithered_rows)

    info["bitdepth"] = bitdepth
    info["gamma"] = 1.0 / targetdecode
    w = png.Writer(**info)
    w.write(out, dithered_rows)


def build_decode_table(maxval, gamma):
    """Build a lookup table for decoding;
    table converts from pixel values to linear space.
    """

    assert maxval == int(maxval)
    assert maxval > 0

    f = 1.0 / maxval
    table = [f * v for v in range(maxval + 1)]
    if gamma != 1.0:
        table = [v ** gamma for v in table]
    return table


def run_dither(incode, choosecode, outcode, width, rows):
    """
    Run a serpentine dither.
    Using the incode and choosecode tables.
    """

    # Errors diffused downwards (into next row)
    ed = [0.0] * width
    flipped = False
    for row in rows:
        # Convert to linear...
        row = [incode[v] for v in row]
        # Add errors...
        row = [e + v for e, v in zip(ed, row)]

        if flipped:
            row = row[::-1]
        targetrow = [0] * width

        for i, v in enumerate(row):
            # `it` will be the index of the chosen target colour;
            it = bisect_left(choosecode, v)
            targetrow[i] = it
            t = outcode[it]
            # err is the error that needs distributing.
            err = v - t

            # Sierra "Filter Lite" distributes          * 2
            # as per this diagram.                    1 1
            ef = err * 0.5
            # :todo: consider making rows one wider at each end and
            # removing "if"s
            if i + 1 < width:
                row[i + 1] += ef
            ef *= 0.5
            ed[i] = ef
            if i:
                ed[i - 1] += ef

        if flipped:
            ed = ed[::-1]
            targetrow = targetrow[::-1]
        yield targetrow
        flipped = not flipped


WARMUP_ROWS = 32


def repeat_header(rows):
    """Repeat the first row, to "warm up" the error register."""
    for row in rows:
        yield row
        for _ in range(WARMUP_ROWS):
            yield row
        break
    yield from rows


def remove_header(rows):
    """Remove the same number of rows that repeat_header added."""

    for _ in range(WARMUP_ROWS):
        next(rows)
    yield from rows


def main(argv=None):
    import sys

    # https://docs.python.org/3.5/library/argparse.html
    import argparse

    parser = argparse.ArgumentParser()

    if argv is None:
        argv = sys.argv

    progname, *args = argv

    parser.add_argument("--bitdepth", type=int, default=1, help="bitdepth of output")
    parser.add_argument(
        "--cutoff",
        type=float,
        default=0.5,
        help="cutoff to select adjacent output values",
    )
    parser.add_argument(
        "--defaultgamma",
        type=float,
        default=1.0,
        help="gamma value to use when no gamma in input",
    )
    parser.add_argument("--linear", action="store_true", help="force linear input")
    parser.add_argument(
        "--targetgamma",
        type=float,
        help="gamma to use in output (target), defaults to input gamma",
    )
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )

    ns = parser.parse_args(args)

    return dither(png.binary_stdout(), **vars(ns))


if __name__ == "__main__":
    main()
@@ -1,275 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# priforgepng

"""Forge PNG image from raw computation."""

from array import array
from fractions import Fraction

import argparse
import re
import sys

import png


def gen_glr(x):
    """Gradient Left to Right"""
    return x


def gen_grl(x):
    """Gradient Right to Left"""
    return 1 - x


def gen_gtb(x, y):
    """Gradient Top to Bottom"""
    return y


def gen_gbt(x, y):
    """Gradient Bottom to Top"""
    return 1.0 - y


def gen_rtl(x, y):
    """Radial gradient, centred at Top-Left"""
    return max(1 - (float(x) ** 2 + float(y) ** 2) ** 0.5, 0.0)


def gen_rctr(x, y):
    """Radial gradient, centred at Centre"""
    return gen_rtl(float(x) - 0.5, float(y) - 0.5)


def gen_rtr(x, y):
    """Radial gradient, centred at Top-Right"""
    return gen_rtl(1.0 - float(x), y)


def gen_rbl(x, y):
    """Radial gradient, centred at Bottom-Left"""
    return gen_rtl(x, 1.0 - float(y))


def gen_rbr(x, y):
    """Radial gradient, centred at Bottom-Right"""
    return gen_rtl(1.0 - float(x), 1.0 - float(y))


def stripe(x, n):
    return int(x * n) & 1


def gen_vs2(x):
    """2 Vertical Stripes"""
    return stripe(x, 2)


def gen_vs4(x):
    """4 Vertical Stripes"""
    return stripe(x, 4)


def gen_vs10(x):
    """10 Vertical Stripes"""
    return stripe(x, 10)


def gen_hs2(x, y):
    """2 Horizontal Stripes"""
    return stripe(float(y), 2)


def gen_hs4(x, y):
    """4 Horizontal Stripes"""
    return stripe(float(y), 4)


def gen_hs10(x, y):
    """10 Horizontal Stripes"""
    return stripe(float(y), 10)


def gen_slr(x, y):
    """10 diagonal stripes, rising from Left to Right"""
    return stripe(x + y, 10)


def gen_srl(x, y):
    """10 diagonal stripes, rising from Right to Left"""
    return stripe(1 + x - y, 10)


def checker(x, y, n):
    return stripe(x, n) ^ stripe(y, n)


def gen_ck8(x, y):
    """8 by 8 checkerboard"""
    return checker(x, y, 8)


def gen_ck15(x, y):
    """15 by 15 checkerboard"""
    return checker(x, y, 15)


def gen_zero(x):
    """All zero (black)"""
    return 0


def gen_one(x):
    """All one (white)"""
    return 1


def yield_fun_rows(size, bitdepth, pattern):
    """
    Create a single channel (monochrome) test pattern.
    Yield each row in turn.
    """

    width, height = size

    maxval = 2 ** bitdepth - 1
    if maxval > 255:
        typecode = "H"
    else:
        typecode = "B"
    pfun = pattern_function(pattern)

    # The coordinates are an integer + 0.5,
    # effectively sampling each pixel at its centre.
    # This is morally better, and produces all 256 sample values
    # in a 256-pixel wide gradient.

    # We make a list of x coordinates here and re-use it,
    # because Fraction instances are slow to allocate.
    xs = [Fraction(x, 2 * width) for x in range(1, 2 * width, 2)]

    # The general case is a function in x and y,
    # but if the function only takes an x argument,
    # it's handled in a special case that is a lot faster.
    if n_args(pfun) == 2:
        for y in range(height):
            a = array(typecode)
            fy = Fraction(Fraction(y + 0.5), height)
            for fx in xs:
                a.append(int(round(maxval * pfun(fx, fy))))
            yield a
        return

    # For functions in x only, it's a _lot_ faster
    # to generate a single row and repeatedly yield it
    a = array(typecode)
    for fx in xs:
        a.append(int(round(maxval * pfun(x=fx))))
    for y in range(height):
        yield a
    return


def generate(args):
    """
    Create a PNG test image and write the file to stdout.

    `args` should be an argparse Namespace instance or similar.
    """

    size = args.size
    bitdepth = args.depth

    out = png.binary_stdout()

    for pattern in args.pattern:
        rows = yield_fun_rows(size, bitdepth, pattern)
        writer = png.Writer(
            size[0], size[1], bitdepth=bitdepth, greyscale=True, alpha=False
        )
        writer.write(out, rows)


def n_args(fun):
    """Number of arguments in fun's argument list."""
    return fun.__code__.co_argcount


def pattern_function(pattern):
    """From `pattern`, a string,
    return the function for that pattern.
    """

    lpat = pattern.lower()
    for name, fun in globals().items():
        parts = name.split("_")
        if parts[0] != "gen":
            continue
        if parts[1] == lpat:
            return fun


def patterns():
    """
    List the patterns.
    """

    for name, fun in globals().items():
        parts = name.split("_")
        if parts[0] == "gen":
            yield parts[1], fun.__doc__


def dimensions(s):
    """
    Typecheck the --size option, which should be
    one or two comma separated numbers.
    Example: "64,40".
    """

    tupl = re.findall(r"\d+", s)
    if len(tupl) not in (1, 2):
        raise ValueError("%r should be width or width,height" % s)
    if len(tupl) == 1:
        tupl *= 2
    assert len(tupl) == 2
    return list(map(int, tupl))


def main(argv=None):
    if argv is None:
        argv = sys.argv
    parser = argparse.ArgumentParser(description="Forge greyscale PNG patterns")

    parser.add_argument(
        "-l", "--list", action="store_true", help="print list of patterns and exit"
    )
    parser.add_argument(
        "-d", "--depth", default=8, type=int, metavar="N", help="N bits per pixel"
    )
    parser.add_argument(
        "-s",
        "--size",
        default=[256, 256],
        type=dimensions,
        metavar="w[,h]",
        help="width and height of the image in pixels",
    )
    parser.add_argument("pattern", nargs="*", help="name of pattern")

    args = parser.parse_args(argv[1:])

    if args.list:
        for name, doc in sorted(patterns()):
            print(name, doc, sep="\t")
        return

    if not args.pattern:
        parser.error("--list or pattern is required")
    return generate(args)


if __name__ == "__main__":
    main()
@@ -1,72 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# prigreypng

# Convert image to grey (L, or LA), but only if that involves no colour change.

import argparse
import array


import png


def as_grey(out, inp):
    """
    Convert image to greyscale, but only when no colour change.
    This works by using the input G channel (green) as
    the output L channel (luminance) and
    checking that every pixel is grey as we go.
    A non-grey pixel will raise an error.
    """

    r = png.Reader(file=inp)
    _, _, rows, info = r.asDirect()
    if info["greyscale"]:
        w = png.Writer(**info)
        return w.write(out, rows)

    planes = info["planes"]
    targetplanes = planes - 2
    alpha = info["alpha"]
    width, height = info["size"]
    typecode = "BH"[info["bitdepth"] > 8]

    # Values per target row
    vpr = width * targetplanes

    def iterasgrey():
        for i, row in enumerate(rows):
            row = array.array(typecode, row)
            targetrow = array.array(typecode, [0] * vpr)
            # Copy G (and possibly A) channel.
            green = row[0::planes]
            if alpha:
                targetrow[0::2] = green
                targetrow[1::2] = row[3::4]
            else:
                targetrow = green
            # Check R and B channel match.
            if green != row[0::planes] or green != row[2::planes]:
                raise ValueError("Row %i contains non-grey pixel." % i)
            yield targetrow

    info["greyscale"] = True
    del info["planes"]
    w = png.Writer(**info)
    return w.write(out, iterasgrey())


def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args()
    return as_grey(png.binary_stdout(), args.input)


if __name__ == "__main__":
    import sys

    sys.exit(main())
@@ -1,111 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# pripalpng


"""Convert to Palette PNG (without changing colours)"""

import argparse
import collections

# https://docs.python.org/2.7/library/io.html
import io
import string
import zlib

# Local module.
import png


def make_inverse_palette(rows, channels):
    """
    The inverse palette maps from tuple to palette index.
    """

    palette = {}

    for row in rows:
        for pixel in png.group(row, channels):
            if pixel in palette:
                continue
            palette[pixel] = len(palette)
    return palette


def palette_convert(out, inp, palette_file):
    """
    Convert PNG image in `inp` to use a palette, colour type 3,
    and write converted image to `out`.

    `palette_file` is a file descriptor for the palette to use.

    If `palette_file` is None, then `inp` is used as the palette.
    """

    if palette_file is None:
        inp, palette_file = palette_file, inp

    reader = png.Reader(file=palette_file)
    w, h, rows, info = asRGBorA8(reader)
    channels = info["planes"]
    if not inp:
        rows = list(rows)

    palette_map = make_inverse_palette(rows, channels)

    if inp:
        reader = png.Reader(file=inp)
        w, h, rows, info = asRGBorA8(reader)
        channels = info["planes"]

    # Default for colours not in palette is to use last entry.
    last = len(palette_map) - 1

    def map_pixel(p):
        return palette_map.get(p, last)

    def convert_rows():
        for row in rows:
            yield [map_pixel(p) for p in png.group(row, channels)]

    # Make a palette by sorting the pixels according to their index.
    palette = sorted(palette_map.keys(), key=palette_map.get)
    pal_info = dict(size=info["size"], palette=palette)

    w = png.Writer(**pal_info)
    w.write(out, convert_rows())


def asRGBorA8(reader):
    """
    Return (width, height, rows, info) converting to RGB,
    or RGBA if original has an alpha channel.
    """
    _, _, _, info = reader.read()
    if info["alpha"]:
        return reader.asRGBA8()
    else:
        return reader.asRGB8()


def main(argv=None):
    import sys
    import re

    if argv is None:
        argv = sys.argv

    argv = argv[1:]

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("--palette", type=png.cli_open)
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )

    args = parser.parse_args(argv)

    palette_convert(png.binary_stdout(), args.input, args.palette)


if __name__ == "__main__":
    main()
@@ -1,355 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# pripamtopng
#
# Python Raster Image PAM to PNG

import array
import struct
import sys

import png

Description = """Convert NetPBM PAM/PNM format files to PNG."""


def read_pam_header(infile):
    """
    Read (the rest of a) PAM header.
    `infile` should be positioned immediately after the initial 'P7' line
    (at the beginning of the second line).
    Returns are as for `read_pnm_header`.
    """

    # Unlike PBM, PGM, and PPM, we can read the header a line at a time.
    header = dict()
    while True:
        line = infile.readline().strip()
        if line == b"ENDHDR":
            break
        if not line:
            raise EOFError("PAM ended prematurely")
        if line[0] == b"#":
            continue
        line = line.split(None, 1)
        key = line[0]
        if key not in header:
            header[key] = line[1]
        else:
            header[key] += b" " + line[1]

    required = [b"WIDTH", b"HEIGHT", b"DEPTH", b"MAXVAL"]
    required_str = b", ".join(required).decode("ascii")
    result = []
    for token in required:
        if token not in header:
            raise png.Error("PAM file must specify " + required_str)
        try:
            x = int(header[token])
        except ValueError:
            raise png.Error(required_str + " must all be valid integers")
        if x <= 0:
            raise png.Error(required_str + " must all be positive integers")
        result.append(x)

    return (b"P7",) + tuple(result)


def read_pnm_header(infile):
    """
    Read a PNM header, returning (format,width,height,depth,maxval).
    Also reads a PAM header (by using a helper function).
    `width` and `height` are in pixels.
    `depth` is the number of channels in the image;
    for PBM and PGM it is synthesized as 1, for PPM as 3;
    for PAM images it is read from the header.
    `maxval` is synthesized (as 1) for PBM images.
    """

    # Generally, see http://netpbm.sourceforge.net/doc/ppm.html
    # and http://netpbm.sourceforge.net/doc/pam.html

    # Technically 'P7' must be followed by a newline,
    # so by using rstrip() we are being liberal in what we accept.
    # I think this is acceptable.
    magic = infile.read(3).rstrip()
    if magic == b"P7":
        # PAM header parsing is completely different.
        return read_pam_header(infile)

    # Expected number of tokens in header (3 for P4, 4 for P6)
    expected = 4
    pbm = (b"P1", b"P4")
    if magic in pbm:
        expected = 3
    header = [magic]

    # We must read the rest of the header byte by byte because
    # the final whitespace character may not be a newline.
    # Of course all PNM files in the wild use a newline at this point,
    # but we are strong and so we avoid
    # the temptation to use readline.
    bs = bytearray()
    backs = bytearray()

    def next():
        if backs:
            c = bytes(backs[0:1])
            del backs[0]
        else:
            c = infile.read(1)
            if not c:
                raise png.Error("premature EOF reading PNM header")
        bs.extend(c)
        return c

    def backup():
        """Push last byte of token onto front of backs."""
        backs.insert(0, bs[-1])
        del bs[-1]

    def ignore():
        del bs[:]

    def tokens():
        ls = lexInit
        while True:
            token, ls = ls()
            if token:
                yield token

    def lexInit():
        c = next()
        # Skip comments
        if b"#" <= c <= b"#":
            while c not in b"\n\r":
                c = next()
            ignore()
            return None, lexInit
        # Skip whitespace (that precedes a token)
        if c.isspace():
            ignore()
            return None, lexInit
        if not c.isdigit():
            raise png.Error("unexpected byte %r found in header" % c)
        return None, lexNumber

    def lexNumber():
        # According to the specification it is legal to have comments
        # that appear in the middle of a token.
        # I've never seen it; and,
        # it's a bit awkward to code good lexers in Python (no goto).
        # So we break on such cases.
        c = next()
        while c.isdigit():
            c = next()
        backup()
        token = bs[:]
        ignore()
        return token, lexInit

    for token in tokens():
        # All "tokens" are decimal integers, so convert them here.
        header.append(int(token))
        if len(header) == expected:
            break

    final = next()
    if not final.isspace():
        raise png.Error("expected header to end with whitespace, not %r" % final)

    if magic in pbm:
        # synthesize a MAXVAL
        header.append(1)
    depth = (1, 3)[magic == b"P6"]
    return header[0], header[1], header[2], depth, header[3]


def convert_pnm_plain(w, infile, outfile):
    """
    Convert a plain PNM file containing raw pixel data into
    a PNG file with the parameters set in the writer object.
    Works for plain PGM formats.
    """

    # See convert_pnm_binary for the corresponding function for
    # binary PNM formats.

    rows = scan_rows_from_file_plain(infile, w.width, w.height, w.planes)
    w.write(outfile, rows)


def scan_rows_from_file_plain(infile, width, height, planes):
    """
    Generate a sequence of rows from the input file `infile`.
    The input file should be in a "Netpbm-like" plain format.
    The input file should be positioned at the beginning of the
    first value (that is, immediately after the header).
    The number of pixels to read is taken from
    the image dimensions (`width`, `height`, `planes`).

    Each row is yielded as a single sequence of values.
    """

    # Values per row
    vpr = width * planes

    values = []
    rows_output = 0

    # The core problem is that input lines (text lines) may not
    # correspond with pixel rows. We use two nested loops.
    # The outer loop reads the input one text line at a time;
    # this will contain a whole number of values, which are
    # added to the `values` list.
    # The inner loop strips the first `vpr` values from the
    # list, until there aren't enough.
    # Note we can't tell how many iterations the inner loop will
    # run for, it could be 0 (if not enough values were read to
    # make a whole pixel row) or many (if the entire image were
    # on one input line), or somewhere in between.
    # In PNM there is in general no requirement to have
    # correspondence between text lines and pixel rows.

    for inp in infile:
        values.extend(map(int, inp.split()))
        while len(values) >= vpr:
            yield values[:vpr]
            del values[:vpr]
            rows_output += 1
            if rows_output >= height:
                # Diagnostic here if there are spare values?
                return
    # Diagnostic here for early EOF?


def convert_pnm_binary(w, infile, outfile):
    """
    Convert a PNM file containing raw pixel data into
    a PNG file with the parameters set in the writer object.
    Works for (binary) PGM, PPM, and PAM formats.
    """

    rows = scan_rows_from_file(infile, w.width, w.height, w.planes, w.bitdepth)
    w.write(outfile, rows)


def scan_rows_from_file(infile, width, height, planes, bitdepth):
    """
    Generate a sequence of rows from the input file `infile`.
    The input file should be in a "Netpbm-like" binary format.
    The input file should be positioned at the beginning of the first pixel.
    The number of pixels to read is taken from
    the image dimensions (`width`, `height`, `planes`);
    the number of bytes per value is implied by `bitdepth`.
    Each row is yielded as a single sequence of values.
    """

    # Values per row
    vpr = width * planes
    # Bytes per row
    bpr = vpr
    if bitdepth > 8:
        assert bitdepth == 16
        bpr *= 2
        fmt = ">%dH" % vpr

        def line():
            return array.array("H", struct.unpack(fmt, infile.read(bpr)))

    else:

        def line():
            return array.array("B", infile.read(bpr))

    for y in range(height):
        yield line()


def parse_args(args):
    """
    Create a parser and parse the command line arguments.
    """
    from argparse import ArgumentParser

    parser = ArgumentParser(description=Description)
    version = "%(prog)s " + png.__version__
    parser.add_argument("--version", action="version", version=version)
    parser.add_argument(
        "-c",
        "--compression",
        type=int,
        metavar="level",
        help="zlib compression level (0-9)",
    )
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="PAM/PNM",
        help="input PAM/PNM file to convert",
    )
    args = parser.parse_args(args)
    return args


def main(argv=None):
    if argv is None:
        argv = sys.argv

    args = parse_args(argv[1:])

    # Prepare input and output files
    infile = args.input

    # Call after parsing, so that --version and --help work.
    outfile = png.binary_stdout()

    # Encode PNM to PNG
    format, width, height, depth, maxval = read_pnm_header(infile)

    ok_formats = (b"P2", b"P5", b"P6", b"P7")
    if format not in ok_formats:
        raise NotImplementedError("file format %s not supported" % format)

    # The NetPBM depth (number of channels) completely
    # determines the PNG format.
    # Observe:
    # - L, LA, RGB, RGBA are the 4 modes supported by PNG;
    # - they correspond to 1, 2, 3, 4 channels respectively.
    # We use the number of channels in the source image to
    # determine which one we have.
    # We ignore the NetPBM image type and the PAM TUPLTYPE.
    greyscale = depth <= 2
    pamalpha = depth in (2, 4)
    supported = [2 ** x - 1 for x in range(1, 17)]
    try:
        mi = supported.index(maxval)
    except ValueError:
        raise NotImplementedError(
            "input maxval (%s) not in supported list %s" % (maxval, str(supported))
        )
    bitdepth = mi + 1
    writer = png.Writer(
        width,
        height,
        greyscale=greyscale,
        bitdepth=bitdepth,
        alpha=pamalpha,
        compression=args.compression,
    )

    plain = format in (b"P1", b"P2", b"P3")
    if plain:
        convert_pnm_plain(writer, infile, outfile)
    else:
        convert_pnm_binary(writer, infile, outfile)


if __name__ == "__main__":
    try:
        sys.exit(main())
    except png.Error as e:
        print(e, file=sys.stderr)
        sys.exit(99)
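For reference, a small sketch of the tuple shape `read_pnm_header` above produces for a binary PGM header; the sample bytes are illustrative only and do not come from any file in this commit:

import io

# A tiny binary PGM header: magic "P5", 4x3 pixels, maxval 255.
sample = io.BytesIO(b"P5 4 3 255\n")

# read_pnm_header(sample) would return (b"P5", 4, 3, 1, 255):
# format, width, height, depth (synthesized as 1 for PGM), maxval.
magic, width, height, maxval = sample.getvalue().split()[:4]
assert (magic, int(width), int(height), int(maxval)) == (b"P5", 4, 3, 255)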
@@ -1,540 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# Imported from //depot/prj/plan9topam/master/code/plan9topam.py#4 on
# 2009-06-15.

"""Command line tool to convert from Plan 9 image format to PNG format.

Plan 9 image format description:
https://plan9.io/magic/man2html/6/image

Where possible this tool will use unbuffered read() calls,
so that when finished the file offset is exactly at the end of
the image data.
This is useful for Plan9 subfont files which place font metric
data immediately after the image.
"""

# Test materials

# asset/left.bit is a Plan 9 image file, a leftwards facing Glenda.
# Other materials have to be scrounged from the internet.
# https://plan9.io/sources/plan9/sys/games/lib/sokoban/images/cargo.bit

import array
import collections
import io

# http://www.python.org/doc/2.3.5/lib/module-itertools.html
import itertools
import os

# http://www.python.org/doc/2.3.5/lib/module-re.html
import re
import struct

# http://www.python.org/doc/2.3.5/lib/module-sys.html
import sys

# https://docs.python.org/3/library/tarfile.html
import tarfile


# https://pypi.org/project/pypng/
import png

# internal
import prix


class Error(Exception):
    """Some sort of Plan 9 image error."""


def block(s, n):
    return zip(*[iter(s)] * n)


def plan9_as_image(inp):
    """Represent a Plan 9 image file as a png.Image instance, so
    that it can be written as a PNG file.
    Works with compressed input files and may work with uncompressed files.
    """

    # Use inp.raw if available.
    # This avoids buffering and means that when the image is processed,
    # the resulting input stream is cued up exactly at the end
    # of the image.
    inp = getattr(inp, "raw", inp)

    info, blocks = plan9_open_image(inp)

    rows, infodict = plan9_image_rows(blocks, info)

    return png.Image(rows, infodict)


def plan9_open_image(inp):
    """Open a Plan9 image file (`inp` should be an already open
    file object), and return (`info`, `blocks`) pair.
    `info` should be a Plan9 5-tuple;
    `blocks` is the input, and it should yield (`row`, `data`)
    pairs (see :meth:`pixmeta`).
    """

    r = inp.read(11)
    if r == b"compressed\n":
        info, blocks = decompress(inp)
    else:
        # Since Python 3, there is a good chance that this path
        # doesn't work.
        info, blocks = glue(inp, r)

    return info, blocks


def glue(f, r):
    """Return (info, stream) pair, given `r` the initial portion of
    the metadata that has already been read from the stream `f`.
    """

    r = r + f.read(60 - len(r))
    return (meta(r), f)


def meta(r):
    """Convert 60 byte bytestring `r`, the metadata from an image file.
    Returns a 5-tuple (*chan*,*minx*,*miny*,*limx*,*limy*).
    5-tuples may settle into lists in transit.

    As per https://plan9.io/magic/man2html/6/image the metadata
    comprises 5 words separated by blanks.
    As it happens each word starts at an index that is a multiple of 12,
    but this routine does not care about that.
    """

    r = r.split()
    # :todo: raise FormatError
    if 5 != len(r):
        raise Error("Expected 5 space-separated words in metadata")
    r = [r[0]] + [int(x) for x in r[1:]]
    return r


def bitdepthof(chan):
    """Return the bitdepth for a Plan9 pixel format string."""

    maxd = 0
    for c in re.findall(rb"[a-z]\d*", chan):
        if c[0] != "x":
            maxd = max(maxd, int(c[1:]))
    return maxd


def maxvalof(chan):
    """Return the netpbm MAXVAL for a Plan9 pixel format string."""

    bitdepth = bitdepthof(chan)
    return (2 ** bitdepth) - 1


def plan9_image_rows(blocks, metadata):
    """
    Convert (uncompressed) Plan 9 image file to pair of (*rows*, *info*).
    This is intended to be used by PyPNG format.
    *info* is the image info (metadata) returned in a dictionary,
    *rows* is an iterator that yields each row in
    boxed row flat pixel format.

    `blocks`, should be an iterator of (`row`, `data`) pairs.
    """

    chan, minx, miny, limx, limy = metadata
    rows = limy - miny
    width = limx - minx
    nchans = len(re.findall(b"[a-wyz]", chan))
    alpha = b"a" in chan
    # Iverson's convention for the win!
    ncolour = nchans - alpha
    greyscale = ncolour == 1
    bitdepth = bitdepthof(chan)
    maxval = maxvalof(chan)

    # PNG style info dict.
    meta = dict(
        size=(width, rows),
        bitdepth=bitdepth,
        greyscale=greyscale,
        alpha=alpha,
        planes=nchans,
    )

    arraycode = "BH"[bitdepth > 8]

    return (
        map(
            lambda x: array.array(arraycode, itertools.chain(*x)),
            block(unpack(blocks, rows, width, chan, maxval), width),
        ),
        meta,
    )


def unpack(f, rows, width, chan, maxval):
    """Unpack `f` into pixels.
    `chan` describes the pixel format using
    the Plan9 syntax ("k8", "r8g8b8", and so on).
    Assumes the pixel format has a total channel bit depth
    that is either a multiple or a divisor of 8
    (the Plan9 image specification requires this).
    `f` should be an iterator that returns blocks of input such that
    each block contains a whole number of pixels.
    The return value is an iterator that yields each pixel as an n-tuple.
    """

    def mask(w):
        """An integer, to be used as a mask, with bottom `w` bits set to 1."""

        return (1 << w) - 1

    def deblock(f, depth, width):
        """A "packer" used to convert multiple bytes into single pixels.
        `depth` is the pixel depth in bits (>= 8), `width` is the row width in
        pixels.
        """

        w = depth // 8
        i = 0
        for block in f:
            for i in range(len(block) // w):
                p = block[w * i : w * (i + 1)]
                i += w
                # Convert little-endian p to integer x
                x = 0
                s = 1  # scale
                for j in p:
                    x += s * j
                    s <<= 8
                yield x

    def bitfunge(f, depth, width):
        """A "packer" used to convert single bytes into multiple pixels.
        Depth is the pixel depth (< 8), width is the row width in pixels.
        """

        assert 8 / depth == 8 // depth

        for block in f:
            col = 0
            for x in block:
                for j in range(8 // depth):
                    yield x >> (8 - depth)
                    col += 1
                    if col == width:
                        # A row-end forces a new byte even if
                        # we haven't consumed all of the current byte.
                        # Effectively rows are bit-padded to make
                        # a whole number of bytes.
                        col = 0
                        break
                    x <<= depth

    # number of bits in each channel
    bits = [int(d) for d in re.findall(rb"\d+", chan)]
    # colr of each channel
    # (r, g, b, k for actual colours, and
    # a, m, x for alpha, map-index, and unused)
    colr = re.findall(b"[a-z]", chan)

    depth = sum(bits)

    # Select a "packer" that either:
    # - gathers multiple bytes into a single pixel (for depth >= 8); or,
    # - splits bytes into several pixels (for depth < 8).
    if depth >= 8:
        assert depth % 8 == 0
        packer = deblock
    else:
        assert 8 % depth == 0
        packer = bitfunge

    for x in packer(f, depth, width):
        # x is the pixel as an unsigned integer
        o = []
        # This is a bit yucky.
        # Extract each channel from the _most_ significant part of x.
        for b, col in zip(bits, colr):
            v = (x >> (depth - b)) & mask(b)
            x <<= b
            if col != "x":
                # scale to maxval
                v = v * float(maxval) / mask(b)
                v = int(v + 0.5)
                o.append(v)
        yield o


def decompress(f):
    """Decompress a Plan 9 image file.
    The input `f` should be a binary file object that
    is already cued past the initial 'compressed\n' string.
    The return result is (`info`, `blocks`);
    `info` is a 5-tuple of the Plan 9 image metadata;
    `blocks` is an iterator that yields a (row, data) pair
    for each block of data.
    """

    r = meta(f.read(60))
    return r, decomprest(f, r[4])


def decomprest(f, rows):
    """Iterator that decompresses the rest of a file once the metadata
    have been consumed."""

    row = 0
    while row < rows:
        row, o = deblock(f)
        yield o


def deblock(f):
    """Decompress a single block from a compressed Plan 9 image file.
    Each block starts with 2 decimal strings of 12 bytes each.
    Yields a sequence of (row, data) pairs where
    `row` is the total number of rows processed
    (according to the file format) and
    `data` is the decompressed data for this block.
    """

    row = int(f.read(12))
    size = int(f.read(12))
    if not (0 <= size <= 6000):
        raise Error("block has invalid size; not a Plan 9 image file?")

    # Since each block is at most 6000 bytes we may as well read it all in
    # one go.
    d = f.read(size)
    i = 0
    o = []

    while i < size:
        x = d[i]
        i += 1
        if x & 0x80:
            x = (x & 0x7F) + 1
            lit = d[i : i + x]
            i += x
            o.extend(lit)
            continue
        # x's high-order bit is 0
        length = (x >> 2) + 3
        # Offset is made from bottom 2 bits of x and 8 bits of next byte.
        #     MSByte                LSByte
        # +---------------------+-------------------------+
        # | - - - - - - | x1 x0 | d7 d6 d5 d4 d3 d2 d1 d0 |
        # +-----------------------------------------------+
        # Had to discover by inspection which way round the bits go,
        # because https://plan9.io/magic/man2html/6/image doesn't say.
        # that x's 2 bits are most significant.
        offset = (x & 3) << 8
        offset |= d[i]
        i += 1
        # Note: complement operator neatly maps (0 to 1023) to (-1 to
        # -1024). Adding len(o) gives a (non-negative) offset into o from
        # which to start indexing.
        offset = ~offset + len(o)
        if offset < 0:
            raise Error(
                "byte offset indexes off the begininning of "
                "the output buffer; not a Plan 9 image file?"
            )
        for j in range(length):
            o.append(o[offset + j])
    return row, bytes(o)


FontChar = collections.namedtuple("FontChar", "x top bottom left width")


def font_copy(inp, image, out, control):
    """
    Convert a Plan 9 font (`inp`, `image`) to a series of PNG images,
    and write them out as a tar file to the file object `out`.
    Write a text control file out to the file object `control`.

    Each valid glyph in the font becomes a single PNG image;
    the output is a tar file of all the images.

    A Plan 9 font consists of a Plan 9 image immediately
    followed by font data.
    The image for the font should be the `image` argument,
    the file containing the rest of the font data should be the
    file object `inp` which should be cued up to the start of
    the font data that immediately follows the image.

    https://plan9.io/magic/man2html/6/font
    """

    # The format is a little unusual, and isn't completely
    # clearly documented.
    # Each 6-byte structure (see FontChar above) defines
    # a rectangular region of the image that is used for each
    # glyph.
    # The source image region that is used may be strictly
    # smaller than the rectangle for the target glyph.
    # This seems like a micro-optimisation.
    # For each glyph,
    # rows above `top` and below `bottom` will not be copied
    # from the source (they can be assumed to be blank).
    # No space is saved in the source image, since the rows must
    # be present.
    # `x` is always non-decreasing, so the glyphs appear strictly
    # left-to-image in the source image.
    # The x of the next glyph is used to
    # infer the width of the source rectangle.
    # `top` and `bottom` give the y-coordinate of the top- and
    # bottom- sides of the rectangle in both source and targets.
    # `left` is the x-coordinate of the left-side of the
    # rectangle in the target glyph. (equivalently, the amount
    # of padding that should be added on the left).
    # `width` is the advance-width of the glyph; by convention
    # it is 0 for an undefined glyph.

    name = getattr(inp, "name", "*subfont*name*not*supplied*")

    header = inp.read(36)
    n, height, ascent = [int(x) for x in header.split()]
    print("baseline", name, ascent, file=control, sep=",")

    chs = []
    for i in range(n + 1):
        bs = inp.read(6)
        ch = FontChar(*struct.unpack("<HBBBB", bs))
        chs.append(ch)

    tar = tarfile.open(mode="w|", fileobj=out)

    # Start at 0, increment for every image output
    # (recall that not every input glyph has an output image)
    output_index = 0
    for i in range(n):
        ch = chs[i]
        if ch.width == 0:
            continue

        print("png", "index", output_index, "glyph", name, i, file=control, sep=",")

        info = dict(image.info, size=(ch.width, height))
        target = new_image(info)

        source_width = chs[i + 1].x - ch.x
        rect = ((ch.left, ch.top), (ch.left + source_width, ch.bottom))
        image_draw(target, rect, image, (ch.x, ch.top))

        # :todo: add source, glyph, and baseline data here (as a
        # private tag?)
        o = io.BytesIO()
        target.write(o)
        binary_size = o.tell()
        o.seek(0)

        tarinfo = tar.gettarinfo(arcname="%s/glyph%d.png" % (name, i), fileobj=inp)
        tarinfo.size = binary_size
        tar.addfile(tarinfo, fileobj=o)

        output_index += 1

    tar.close()


def new_image(info):
    """Return a fresh png.Image instance."""

    width, height = info["size"]
    vpr = width * info["planes"]
    row = lambda: [0] * vpr
    rows = [row() for _ in range(height)]
    return png.Image(rows, info)


def image_draw(target, rect, source, point):
    """The point `point` in the source image is aligned with the
    top-left of rect in the target image, and then the rectangle
    in target is replaced with the pixels from `source`.

    This routine assumes that both source and target can have
    their rows objects indexed (not streamed).
    """

    # :todo: there is no attempt to do clipping or channel or
    # colour conversion. But maybe later?

    if target.info["planes"] != source.info["planes"]:
        raise NotImplementedError(
            "source and target must have the same number of planes"
        )

    if target.info["bitdepth"] != source.info["bitdepth"]:
        raise NotImplementedError("source and target must have the same bitdepth")

    tl, br = rect
    left, top = tl
    right, bottom = br
    height = bottom - top

    planes = source.info["planes"]

    vpr = (right - left) * planes
    source_left, source_top = point

    source_l = source_left * planes
    source_r = source_l + vpr

    target_l = left * planes
    target_r = target_l + vpr

    for y in range(height):
        row = source.rows[y + source_top]
        row = row[source_l:source_r]
        target.rows[top + y][target_l:target_r] = row


def main(argv=None):
    import argparse

    parser = argparse.ArgumentParser(description="Convert Plan9 image to PNG")
    parser.add_argument(
        "input",
        nargs="?",
        default="-",
        type=png.cli_open,
        metavar="image",
        help="image file in Plan 9 format",
    )
    parser.add_argument(
        "--control",
        default=os.path.devnull,
        type=argparse.FileType("w"),
        metavar="ControlCSV",
        help="(when using --font) write a control CSV file to named file",
    )
    parser.add_argument(
        "--font",
        action="store_true",
        help="process as Plan 9 subfont: output a tar file of PNGs",
    )

    args = parser.parse_args()

    image = plan9_as_image(args.input)
    image.stream()

    if not args.font:
        image.write(png.binary_stdout())
    else:
        font_copy(args.input, image, png.binary_stdout(), args.control)


if __name__ == "__main__":
    sys.exit(main())
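A small sketch of how the channel-string helpers above read a Plan 9 pixel format such as "r8g8b8"; this is a simplified restatement (the 'x' padding-channel filter is omitted) and is not part of the removed file:

import re

def channel_bitdepth(chan):
    # Largest per-channel depth found in the format string, as in bitdepthof() above.
    return max(int(c[1:]) for c in re.findall(rb"[a-z]\d*", chan))

assert channel_bitdepth(b"r8g8b8") == 8            # 24-bit RGB: 8 bits per channel
assert (2 ** channel_bitdepth(b"k8")) - 1 == 255   # corresponding netpbm MAXVAL, as in maxvalof()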
@@ -1,33 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# pripnglsch
# PNG List Chunks

import png


def list_chunks(out, inp):
    r = png.Reader(file=inp)
    for t, v in r.chunks():
        add = ""
        if len(v) <= 28:
            add = " " + v.hex()
        else:
            add = " " + v[:26].hex() + "..."
        t = t.decode("ascii")
        print("%s %10d%s" % (t, len(v), add), file=out)


def main(argv=None):
    import argparse
    import sys

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )
    args = parser.parse_args()
    return list_chunks(sys.stdout, args.input)


if __name__ == "__main__":
    main()
@@ -1,101 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

import struct

import png


def write_pnm(file, plain, rows, meta):
    """
    Write a Netpbm PNM (or PAM) file.
    *file* output file object;
    *plain* (a bool) true if writing plain format (not possible for PAM);
    *rows* an iterator for the rows;
    *meta* the info dictionary.
    """

    meta = dict(meta)
    meta["maxval"] = 2 ** meta["bitdepth"] - 1
    meta["width"], meta["height"] = meta["size"]

    # Number of planes determines both image formats:
    # 1 : L to PGM
    # 2 : LA to PAM
    # 3 : RGB to PPM
    # 4 : RGBA to PAM
    planes = meta["planes"]

    # Assume inputs are from a PNG file.
    assert planes in (1, 2, 3, 4)
    if planes in (1, 3):
        if 1 == planes:
            # PGM
            # Even if maxval is 1 we use PGM instead of PBM,
            # to avoid converting data.
            magic = "P5"
            if plain:
                magic = "P2"
        else:
            # PPM
            magic = "P6"
            if plain:
                magic = "P3"
        header = "{magic} {width:d} {height:d} {maxval:d}\n".format(magic=magic, **meta)
    if planes in (2, 4):
        # PAM
        # See http://netpbm.sourceforge.net/doc/pam.html
        if plain:
            raise Exception("PAM (%d-plane) does not support plain format" % planes)
        if 2 == planes:
            tupltype = "GRAYSCALE_ALPHA"
        else:
            tupltype = "RGB_ALPHA"
        header = (
            "P7\nWIDTH {width:d}\nHEIGHT {height:d}\n"
            "DEPTH {planes:d}\nMAXVAL {maxval:d}\n"
            "TUPLTYPE {tupltype}\nENDHDR\n".format(tupltype=tupltype, **meta)
        )
    file.write(header.encode("ascii"))

    # Values per row
    vpr = planes * meta["width"]

    if plain:
        for row in rows:
            row_b = b" ".join([b"%d" % v for v in row])
            file.write(row_b)
            file.write(b"\n")
    else:
        # format for struct.pack
        fmt = ">%d" % vpr
        if meta["maxval"] > 0xFF:
            fmt = fmt + "H"
        else:
            fmt = fmt + "B"
        for row in rows:
            file.write(struct.pack(fmt, *row))

    file.flush()


def main(argv=None):
    import argparse

    parser = argparse.ArgumentParser(description="Convert PNG to PAM")
    parser.add_argument("--plain", action="store_true")
    parser.add_argument(
        "input", nargs="?", default="-", type=png.cli_open, metavar="PNG"
    )

    args = parser.parse_args()

    # Encode PNG to PNM (or PAM)
    image = png.Reader(file=args.input)
    _, _, rows, info = image.asDirect()
    write_pnm(png.binary_stdout(), args.plain, rows, info)


if __name__ == "__main__":
    import sys

    sys.exit(main())
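A minimal sketch of the PNM header string the plane-count logic above builds for a 3-plane (RGB) image; the 2x2 size and maxval 255 are illustrative values only:

# 3 planes -> PPM, magic "P6"; 1 plane would give PGM ("P5"),
# and 2 or 4 planes switch to the multi-line PAM ("P7") header instead.
meta = {"magic": "P6", "width": 2, "height": 2, "maxval": 255}
header = "{magic} {width:d} {height:d} {maxval:d}\n".format(**meta)
assert header == "P6 2 2 255\n"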
@@ -1,71 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# http://www.python.org/doc/2.4.4/lib/module-itertools.html
import itertools
import sys

import png

Description = """Join PNG images in a row left-to-right."""


class FormatError(Exception):
    """
    Some problem with the image format.
    """


def join_row(out, l):
    """
    Concatenate the list of images.
    All input images must be same height and
    have the same number of channels.
    They are concatenated left-to-right.
    `out` is the (open file) destination for the output image.
    `l` should be a list of open files (the input image files).
    """

    l = [png.Reader(file=f) for f in l]

    # Ewgh, side effects.
    for r in l:
        r.preamble()

    # The reference height; from the first image.
    height = l[0].height
    # The total target width
    width = 0
    for i,r in enumerate(l):
        if r.height != height:
            raise FormatError('Image %d, height %d, does not match %d.' %
              (i, r.height, height))
        width += r.width

    # Various bugs here because different numbers of channels and depths go wrong.
    pixel, info = zip(*[r.asDirect()[2:4] for r in l])
    tinfo = dict(info[0])
    del tinfo['size']
    w = png.Writer(width, height, **tinfo)

    def iter_all_rows():
        for row in zip(*pixel):
            # `row` is a sequence that has one row from each input image.
            # list() is required here to hasten the lazy row building;
            # not sure if that's a bug in PyPNG or not.
            yield list(itertools.chain(*row))
    w.write(out, iter_all_rows())

def main(argv):
    import argparse

    parser = argparse.ArgumentParser(description=Description)
    parser.add_argument(
        "input", nargs="*", default="-", type=png.cli_open, metavar="PNG"
    )

    args = parser.parse_args()

    return join_row(png.binary_stdout(), args.input)

if __name__ == '__main__':
    main(sys.argv)
@@ -1,215 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# priweavepng
# Weave selected channels from input PNG files into
# a multi-channel output PNG.

import collections
import re

from array import array

import png

"""
priweavepng file1.png [file2.png ...]

The `priweavepng` tool combines channels from the input images and
weaves a selection of those channels into an output image.

Conceptually an intermediate image is formed consisting of
all channels of all input images in the order given on the command line
and in the order of each channel in its image.
Then from 1 to 4 channels are selected and
an image is output with those channels.
The limit on the number of selected channels is
imposed by the PNG image format.

The `-c n` option selects channel `n`.
Further channels can be selected either by repeating the `-c` option,
or using a comma separated list.
For example `-c 3,2,1` will select channels 3, 2, and 1 in that order;
if the input is an RGB PNG, this will swop the Red and Blue channels.
The order is significant, the order in which the options are given is
the order of the output channels.
It is permissible, and sometimes useful
(for example, grey to colour expansion, see below),
to repeat the same channel.

If no `-c` option is used the default is
to select all of the input channels, up to the first 4.

`priweavepng` does not care about the meaning of the channels
and treats them as a matrix of values.

The numer of output channels determines the colour mode of the PNG file:
L (1-channel, Grey), LA (2-channel, Grey+Alpha),
RGB (3-channel, Red+Green+Blue), RGBA (4-channel, Red+Green+Blue+Alpha).

The `priweavepng` tool can be used for a variety of
channel building, swopping, and extraction effects:

Combine 3 grayscale images into RGB colour:
    priweavepng grey1.png grey2.png grey3.png

Swop Red and Blue channels in colour image:
    priweavepng -c 3 -c 2 -c 1 rgb.png

Extract Green channel as a greyscale image:
    priweavepng -c 2 rgb.png

Convert a greyscale image to a colour image (all grey):
    priweavepng -c 1 -c 1 -c 1 grey.png

Add alpha mask from a separate (greyscale) image:
    priweavepng rgb.png grey.png

Extract alpha mask into a separate (greyscale) image:
    priweavepng -c 4 rgba.png

Steal alpha mask from second file and add to first.
Note that the intermediate image in this example has 7 channels:
    priweavepng -c 1 -c 2 -c 3 -c 7 rgb.png rgba.png

Take Green channel from 3 successive colour images to make a new RGB image:
    priweavepng -c 2 -c 5 -c 8 rgb1.png rgb2.png rgb3.png

"""

Image = collections.namedtuple("Image", "rows info")

# For each channel in the intermediate raster,
# model:
# - image: the input image (0-based);
# - i: the channel index within that image (0-based);
# - bitdepth: the bitdepth of this channel.
Channel = collections.namedtuple("Channel", "image i bitdepth")


class Error(Exception):
    pass


def weave(out, args):
    """Stack the input PNG files and extract channels
    into a single output PNG.
    """

    paths = args.input

    if len(paths) < 1:
        raise Error("Required input is missing.")

    # List of Image instances
    images = []
    # Channel map. Maps from channel number (starting from 1)
    # to an (image_index, channel_index) pair.
    channel_map = dict()
    channel = 1

    for image_index, path in enumerate(paths):
        inp = png.cli_open(path)
        rows, info = png.Reader(file=inp).asDirect()[2:]
        rows = list(rows)
        image = Image(rows, info)
        images.append(image)
        # A later version of PyPNG may intelligently support
        # PNG files with heterogenous bitdepths.
        # For now, assumes bitdepth of all channels in image
        # is the same.
        channel_bitdepth = (image.info["bitdepth"],) * image.info["planes"]
        for i in range(image.info["planes"]):
            channel_map[channel + i] = Channel(image_index, i, channel_bitdepth[i])
        channel += image.info["planes"]

    assert channel - 1 == sum(image.info["planes"] for image in images)

    # If no channels, select up to first 4 as default.
    if not args.channel:
        args.channel = range(1, channel)[:4]

    out_channels = len(args.channel)
    if not (0 < out_channels <= 4):
        raise Error("Too many channels selected (must be 1 to 4)")
    alpha = out_channels in (2, 4)
    greyscale = out_channels in (1, 2)

    bitdepth = tuple(image.info["bitdepth"] for image in images)
    arraytype = "BH"[max(bitdepth) > 8]

    size = [image.info["size"] for image in images]
    # Currently, fail unless all images same size.
    if len(set(size)) > 1:
        raise NotImplementedError("Cannot cope when sizes differ - sorry!")
    size = size[0]

    # Values per row, of output image
    vpr = out_channels * size[0]

    def weave_row_iter():
        """
        Yield each woven row in turn.
        """
        # The zip call creates an iterator that yields
        # a tuple with each element containing the next row
        # for each of the input images.
        for row_tuple in zip(*(image.rows for image in images)):
            # output row
            row = array(arraytype, [0] * vpr)
            # for each output channel select correct input channel
            for out_channel_i, selection in enumerate(args.channel):
                channel = channel_map[selection]
                # incoming row (make it an array)
                irow = array(arraytype, row_tuple[channel.image])
                n = images[channel.image].info["planes"]
                row[out_channel_i::out_channels] = irow[channel.i :: n]
            yield row

    w = png.Writer(
        size[0],
        size[1],
        greyscale=greyscale,
        alpha=alpha,
        bitdepth=bitdepth,
        interlace=args.interlace,
    )
    w.write(out, weave_row_iter())


def comma_list(s):
    """
    Type and return a list of integers.
    """

    return [int(c) for c in re.findall(r"\d+", s)]


def main(argv=None):
    import argparse
    import itertools
    import sys

    if argv is None:
        argv = sys.argv
    argv = argv[1:]

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-c",
        "--channel",
        action="append",
        type=comma_list,
        help="list of channels to extract",
    )
    parser.add_argument("--interlace", action="store_true", help="write interlaced PNG")
    parser.add_argument("input", nargs="+")
    args = parser.parse_args(argv)

    if args.channel:
        args.channel = list(itertools.chain(*args.channel))

    return weave(png.binary_stdout(), args)


if __name__ == "__main__":
    main()
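A small sketch of how the `-c` values above are parsed; `comma_list` is restated here so the example runs standalone, and the flattening mirrors what `main()` does with repeated options:

import itertools
import re

def comma_list(s):
    # Same digit-extraction logic as priweavepng above.
    return [int(c) for c in re.findall(r"\d+", s)]

# "-c 3,2,1" and "-c 3 -c 2 -c 1" both end up selecting channels [3, 2, 1].
assert comma_list("3,2,1") == [3, 2, 1]
assert list(itertools.chain(*[comma_list("3"), comma_list("2"), comma_list("1")])) == [3, 2, 1]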
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from barcode.pybarcode import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from serial.tools.miniterm import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from serial.tools.list_ports import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1 +0,0 @@
python3
@@ -1,63 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3

# Copyright 2012-2023, Andrey Kislyuk and argcomplete contributors.
# Licensed under the Apache License. See https://github.com/kislyuk/argcomplete for more info.

"""
This script is part of the Python argcomplete package (https://github.com/kislyuk/argcomplete).
It is used to check if an EASY-INSTALL-SCRIPT wrapper redirects to a script that contains the string
"PYTHON_ARGCOMPLETE_OK". If you have enabled global completion in argcomplete, the completion hook will run it every
time you press <TAB> in your shell.

Usage:
    python-argcomplete-check-easy-install-script <input executable file>
"""

import sys

if len(sys.argv) != 2:
    sys.exit(__doc__)

sys.tracebacklimit = 0

with open(sys.argv[1]) as fh:
    line1, head = fh.read(1024).split("\n", 1)[:2]
    if line1.startswith("#") and ("py" in line1 or "Py" in line1):
        import re

        lines = head.split("\n", 12)
        for line in lines:
            if line.startswith("# EASY-INSTALL-SCRIPT"):
                import pkg_resources

                dist, script = re.match("# EASY-INSTALL-SCRIPT: '(.+)','(.+)'", line).groups()
                if "PYTHON_ARGCOMPLETE_OK" in pkg_resources.get_distribution(dist).get_metadata("scripts/" + script):
                    exit(0)
            elif line.startswith("# EASY-INSTALL-ENTRY-SCRIPT"):
                dist, group, name = re.match("# EASY-INSTALL-ENTRY-SCRIPT: '(.+)','(.+)','(.+)'", line).groups()
                import pkgutil

                import pkg_resources

                module_name = pkg_resources.get_distribution(dist).get_entry_info(group, name).module_name
                with open(pkgutil.get_loader(module_name).get_filename()) as mod_fh:
                    if "PYTHON_ARGCOMPLETE_OK" in mod_fh.read(1024):
                        exit(0)
            elif line.startswith("# EASY-INSTALL-DEV-SCRIPT"):
                for line2 in lines:
                    if line2.startswith("__file__"):
                        filename = re.match("__file__ = '(.+)'", line2).group(1)
                        with open(filename) as mod_fh:
                            if "PYTHON_ARGCOMPLETE_OK" in mod_fh.read(1024):
                                exit(0)
            elif line.startswith("# PBR Generated"):
                module = re.search("from (.*) import", head).groups()[0]
                import pkgutil

                import pkg_resources

                with open(pkgutil.get_loader(module).get_filename()) as mod_fh:
                    if "PYTHON_ARGCOMPLETE_OK" in mod_fh.read(1024):
                        exit(0)

exit(1)
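A minimal sketch of the wrapper-header pattern the check script above looks for; the 'pip' values are illustrative only and are not taken from any file in this commit:

import re

line = "# EASY-INSTALL-ENTRY-SCRIPT: 'pip','console_scripts','pip'"
dist, group, name = re.match("# EASY-INSTALL-ENTRY-SCRIPT: '(.+)','(.+)','(.+)'", line).groups()
assert (dist, group, name) == ("pip", "console_scripts", "pip")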
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from barcode.pybarcode import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from escpos.cli import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1 +0,0 @@
/usr/bin/python3
@@ -1 +0,0 @@
python3
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from qrcode.console_scripts import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -1,71 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# PYTHON_ARGCOMPLETE_OK

# Copyright 2012-2023, Andrey Kislyuk and argcomplete contributors.
# Licensed under the Apache License. See https://github.com/kislyuk/argcomplete for more info.

"""
Register a Python executable for use with the argcomplete module.

To perform the registration, source the output of this script in your bash shell
(quote the output to avoid interpolation).

Example:

    $ eval "$(register-python-argcomplete my-favorite-script.py)"

For Tcsh

    $ eval `register-python-argcomplete --shell tcsh my-favorite-script.py`

For Fish

    $ register-python-argcomplete --shell fish my-favourite-script.py > ~/.config/fish/my-favourite-script.py.fish
"""

import argparse
import sys

import argcomplete

parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)

parser.add_argument(
    "--no-defaults",
    dest="use_defaults",
    action="store_false",
    default=True,
    help="when no matches are generated, do not fallback to readline's default completion (affects bash only)",
)
parser.add_argument(
    "--complete-arguments",
    nargs=argparse.REMAINDER,
    help="arguments to call complete with; use of this option discards default options (affects bash only)",
)
parser.add_argument(
    "-s",
    "--shell",
    choices=("bash", "zsh", "tcsh", "fish", "powershell"),
    default="bash",
    help="output code for the specified shell",
)
parser.add_argument(
    "-e", "--external-argcomplete-script", help="external argcomplete script for auto completion of the executable"
)

parser.add_argument("executable", nargs="+", help="executable to completed (when invoked by exactly this name)")

argcomplete.autocomplete(parser)

if len(sys.argv) == 1:
    parser.print_help()
    sys.exit(1)

args = parser.parse_args()


sys.stdout.write(
    argcomplete.shellcode(
        args.executable, args.use_defaults, args.shell, args.complete_arguments, args.external_argcomplete_script
    )
)
@@ -1,8 +0,0 @@
#!/home/mongar/Escritorio/pruebas_oc/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from tabulate import _main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(_main())
@@ -1 +0,0 @@
pip
@@ -1,44 +0,0 @@
Zope Public License (ZPL) Version 2.1

A copyright notice accompanies this license document that identifies the
copyright holders.

This license has been certified as open source. It has also been designated as
GPL compatible by the Free Software Foundation (FSF).

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions in source code must retain the accompanying copyright
notice, this list of conditions, and the following disclaimer.

2. Redistributions in binary form must reproduce the accompanying copyright
notice, this list of conditions, and the following disclaimer in the
documentation and/or other materials provided with the distribution.

3. Names of the copyright holders must not be used to endorse or promote
products derived from this software without prior written permission from the
copyright holders.

4. The right to distribute this software or to use it for any purpose does not
give you the right to use Servicemarks (sm) or Trademarks (tm) of the
copyright
holders. Use of them is covered by separate agreement with the copyright
holders.

5. If any files are modified, you must cause the modified files to carry
prominent notices stating that you changed the files and the date of any
change.

Disclaimer

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
File diff suppressed because it is too large
@@ -1,22 +0,0 @@
DateTime-5.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
DateTime-5.4.dist-info/LICENSE.txt,sha256=PmcdsR32h1FswdtbPWXkqjg-rKPCDOo_r1Og9zNdCjw,2070
DateTime-5.4.dist-info/METADATA,sha256=aa2Ts6CsOlO4gtI6h7mS3CKb_ViWN_f5OcPDRCnvQOs,33527
DateTime-5.4.dist-info/RECORD,,
DateTime-5.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
DateTime-5.4.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
DateTime-5.4.dist-info/top_level.txt,sha256=iVdUvuV_RIkkMzsnPGNfwojRWvuonInryaK3hA5Hh0o,9
DateTime/DateTime.py,sha256=dtd-xuhJPPYbtg4Z-vRKdMNO81I-Zu2baNZ6gVzC1WY,71351
DateTime/DateTime.txt,sha256=KZFzxoQItLsar1ZDd2vZN74Y6L4a04H8jXMwqc8KjmY,22487
DateTime/__init__.py,sha256=trlFzEmNkmUpxZT7krPSVDayDK1bRxToccg3CcCF8wg,714
DateTime/__pycache__/DateTime.cpython-311.pyc,,
DateTime/__pycache__/__init__.cpython-311.pyc,,
DateTime/__pycache__/interfaces.cpython-311.pyc,,
DateTime/__pycache__/pytz_support.cpython-311.pyc,,
DateTime/interfaces.py,sha256=n47sexf1eQ6YMdYB_60PgHtSzYIj4FND-RmHFiNpm1E,12187
DateTime/pytz.txt,sha256=9Phns9ESXs9MaOKxXztX6sJ09QczGxsbYoSRSllKUfk,5619
DateTime/pytz_support.py,sha256=inR1SO0X17fp9C2GsRw99S_MhxKiEt5dOV3-TGsBxDI,11853
DateTime/tests/__init__.py,sha256=H7Ixo1xp-8BlJ65u14hk5i_TKEmETyi2FmLMD6H-mpo,683
DateTime/tests/__pycache__/__init__.cpython-311.pyc,,
DateTime/tests/__pycache__/test_datetime.cpython-311.pyc,,
DateTime/tests/julian_testdata.txt,sha256=qxvLvabVB9ayhh5UHBvPhuqW5mRL_lizzbUh6lc3d4I,1397
DateTime/tests/test_datetime.py,sha256=J0bzZHJECSmYwHbXM7IhN7AIJLAvZVPhTyTbSfx0xQs,29598
@@ -1,5 +0,0 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any

@@ -1 +0,0 @@
DateTime
File diff suppressed because it is too large
@@ -1,785 +0,0 @@
|
|||||||
The DateTime package
|
|
||||||
====================
|
|
||||||
|
|
||||||
Encapsulation of date/time values.
|
|
||||||
|
|
||||||
|
|
||||||
Function Timezones()
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
Returns the list of recognized timezone names:
|
|
||||||
|
|
||||||
>>> from DateTime import Timezones
|
|
||||||
>>> zones = set(Timezones())
|
|
||||||
|
|
||||||
Almost all of the standard pytz timezones are included, with the exception
|
|
||||||
of some commonly-used but ambiguous abbreviations, where historical Zope
|
|
||||||
usage conflicts with the name used by pytz:
|
|
||||||
|
|
||||||
>>> import pytz
|
|
||||||
>>> [x for x in pytz.all_timezones if x not in zones]
|
|
||||||
['CET', 'EET', 'EST', 'MET', 'MST', 'WET']
|
|
||||||
|
|
||||||
Class DateTime
|
|
||||||
--------------
|
|
||||||
|
|
||||||
DateTime objects represent instants in time and provide interfaces for
|
|
||||||
controlling its representation without affecting the absolute value of
|
|
||||||
the object.
|
|
||||||
|
|
||||||
DateTime objects may be created from a wide variety of string or
|
|
||||||
numeric data, or may be computed from other DateTime objects.
|
|
||||||
DateTimes support the ability to convert their representations to many
|
|
||||||
major timezones, as well as the ability to create a DateTime object
|
|
||||||
in the context of a given timezone.
|
|
||||||
|
|
||||||
DateTime objects provide partial numerical behavior:
|
|
||||||
|
|
||||||
* Two date-time objects can be subtracted to obtain a time, in days
|
|
||||||
between the two.
|
|
||||||
|
|
||||||
* A date-time object and a positive or negative number may be added to
|
|
||||||
obtain a new date-time object that is the given number of days later
|
|
||||||
than the input date-time object.
|
|
||||||
|
|
||||||
* A positive or negative number and a date-time object may be added to
|
|
||||||
obtain a new date-time object that is the given number of days later
|
|
||||||
than the input date-time object.
|
|
||||||
|
|
||||||
* A positive or negative number may be subtracted from a date-time
|
|
||||||
object to obtain a new date-time object that is the given number of
|
|
||||||
days earlier than the input date-time object.
|
|
||||||
|
|
||||||
DateTime objects may be converted to integer, long, or float numbers
|
|
||||||
of days since January 1, 1901, using the standard int, long, and float
|
|
||||||
functions (Compatibility Note: int, long and float return the number
|
|
||||||
of days since 1901 in GMT rather than local machine timezone).
|
|
||||||
DateTime objects also provide access to their value in a float format
|
|
||||||
usable with the Python time module, provided that the value of the
|
|
||||||
object falls in the range of the epoch-based time module.
|
|
||||||
|
|
||||||
A DateTime object should be considered immutable; all conversion and numeric
|
|
||||||
operations return a new DateTime object rather than modify the current object.
|
|
||||||
|
|
||||||
A DateTime object always maintains its value as an absolute UTC time,
|
|
||||||
and is represented in the context of some timezone based on the
|
|
||||||
arguments used to create the object. A DateTime object's methods
|
|
||||||
return values based on the timezone context.
|
|
||||||
|
|
||||||
Note that in all cases the local machine timezone is used for
|
|
||||||
representation if no timezone is specified.
|
|
||||||
|
|
||||||
Constructor for DateTime
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
DateTime() returns a new date-time object. DateTimes may be created
|
|
||||||
with from zero to seven arguments:
|
|
||||||
|
|
||||||
* If the function is called with no arguments, then the current date/
|
|
||||||
time is returned, represented in the timezone of the local machine.
|
|
||||||
|
|
||||||
* If the function is invoked with a single string argument which is a
|
|
||||||
recognized timezone name, an object representing the current time is
|
|
||||||
returned, represented in the specified timezone.
|
|
||||||
|
|
||||||
* If the function is invoked with a single string argument
|
|
||||||
representing a valid date/time, an object representing that date/
|
|
||||||
time will be returned.
|
|
||||||
|
|
||||||
As a general rule, any date-time representation that is recognized
|
|
||||||
and unambiguous to a resident of North America is acceptable. (The
|
|
||||||
reason for this qualification is that in North America, a date like:
|
|
||||||
2/1/1994 is interpreted as February 1, 1994, while in some parts of
|
|
||||||
the world, it is interpreted as January 2, 1994.) A date/ time
|
|
||||||
string consists of two components, a date component and an optional
|
|
||||||
time component, separated by one or more spaces. If the time
|
|
||||||
component is omitted, 12:00am is assumed.
|
|
||||||
|
|
||||||
Any recognized timezone name specified as the final element of the
|
|
||||||
date/time string will be used for computing the date/time value.
|
|
||||||
(If you create a DateTime with the string,
|
|
||||||
"Mar 9, 1997 1:45pm US/Pacific", the value will essentially be the
|
|
||||||
same as if you had captured time.time() at the specified date and
|
|
||||||
time on a machine in that timezone). If no timezone is passed, then
|
|
||||||
the timezone configured on the local machine will be used, **except**
|
|
||||||
that if the date format matches ISO 8601 ('YYYY-MM-DD'), the instance
|
|
||||||
will use UTC / GMT+0 as the timezone.
|
|
||||||
|
|
||||||
o Returns current date/time, represented in US/Eastern:
|
|
||||||
|
|
||||||
>>> from DateTime import DateTime
|
|
||||||
>>> e = DateTime('US/Eastern')
|
|
||||||
>>> e.timezone()
|
|
||||||
'US/Eastern'
|
|
||||||
|
|
||||||
o Returns specified time, represented in local machine zone:
|
|
||||||
|
|
||||||
>>> x = DateTime('1997/3/9 1:45pm')
|
|
||||||
>>> x.parts() # doctest: +ELLIPSIS
|
|
||||||
(1997, 3, 9, 13, 45, ...)
|
|
||||||
|
|
||||||
o Specified time in local machine zone, verbose format:
|
|
||||||
|
|
||||||
>>> y = DateTime('Mar 9, 1997 13:45:00')
|
|
||||||
>>> y.parts() # doctest: +ELLIPSIS
|
|
||||||
(1997, 3, 9, 13, 45, ...)
|
|
||||||
>>> y == x
|
|
||||||
True
|
|
||||||
|
|
||||||
o Specified time in UTC via ISO 8601 rule:
|
|
||||||
|
|
||||||
>>> z = DateTime('2014-03-24')
|
|
||||||
>>> z.parts() # doctest: +ELLIPSIS
|
|
||||||
(2014, 3, 24, 0, 0, ...)
|
|
||||||
>>> z.timezone()
|
|
||||||
'GMT+0'
|
|
||||||
|
|
||||||
The date component consists of year, month, and day values. The
|
|
||||||
year value must be a one-, two-, or four-digit integer. If a one-
|
|
||||||
or two-digit year is used, the year is assumed to be in the
|
|
||||||
twentieth century. The month may an integer, from 1 to 12, a month
|
|
||||||
name, or a month abbreviation, where a period may optionally follow
|
|
||||||
the abbreviation. The day must be an integer from 1 to the number of
|
|
||||||
days in the month. The year, month, and day values may be separated
|
|
||||||
by periods, hyphens, forward slashes, or spaces. Extra spaces are
|
|
||||||
permitted around the delimiters. Year, month, and day values may be
|
|
||||||
given in any order as long as it is possible to distinguish the
|
|
||||||
components. If all three components are numbers that are less than
|
|
||||||
13, then a month-day-year ordering is assumed.
|
|
||||||
|
|
||||||
The time component consists of hour, minute, and second values
|
|
||||||
separated by colons. The hour value must be an integer between 0
|
|
||||||
and 23 inclusively. The minute value must be an integer between 0
|
|
||||||
and 59 inclusively. The second value may be an integer value
|
|
||||||
between 0 and 59.999 inclusively. The second value or both the
|
|
||||||
minute and second values may be omitted. The time may be followed
|
|
||||||
by am or pm in upper or lower case, in which case a 12-hour clock is
|
|
||||||
assumed.
|
|
||||||
|
|
||||||
* If the DateTime function is invoked with a single numeric argument,
|
|
||||||
the number is assumed to be either a floating point value such as
|
|
||||||
that returned by time.time(), or a number of days after January 1,
|
|
||||||
1901 00:00:00 UTC.
|
|
||||||
|
|
||||||
A DateTime object is returned that represents either the GMT value
|
|
||||||
of the time.time() float represented in the local machine's
|
|
||||||
timezone, or that number of days after January 1, 1901. Note that
|
|
||||||
the number of days after 1901 need to be expressed from the
|
|
||||||
viewpoint of the local machine's timezone. A negative argument will
|
|
||||||
yield a date-time value before 1901.
|
|
||||||
|
|
||||||
* If the function is invoked with two numeric arguments, then the
|
|
||||||
first is taken to be an integer year and the second argument is
|
|
||||||
taken to be an offset in days from the beginning of the year, in the
|
|
||||||
context of the local machine timezone. The date-time value returned
|
|
||||||
is the given offset number of days from the beginning of the given
|
|
||||||
year, represented in the timezone of the local machine. The offset
|
|
||||||
may be positive or negative. Two-digit years are assumed to be in
|
|
||||||
the twentieth century.
|
|
||||||
|
|
||||||
* If the function is invoked with two arguments, the first a float
|
|
||||||
representing a number of seconds past the epoch in GMT (such as
|
|
||||||
those returned by time.time()) and the second a string naming a
|
|
||||||
recognized timezone, a DateTime with a value of that GMT time will
|
|
||||||
be returned, represented in the given timezone.
|
|
||||||
|
|
||||||
>>> import time
|
|
||||||
>>> t = time.time()
|
|
||||||
|
|
||||||
Time t represented as US/Eastern:
|
|
||||||
|
|
||||||
>>> now_east = DateTime(t, 'US/Eastern')
|
|
||||||
|
|
||||||
Time t represented as US/Pacific:
|
|
||||||
|
|
||||||
>>> now_west = DateTime(t, 'US/Pacific')
|
|
||||||
|
|
||||||
Only their representations are different:
|
|
||||||
|
|
||||||
>>> now_east.equalTo(now_west)
|
|
||||||
True
|
|
||||||
|
|
||||||
* If the function is invoked with three or more numeric arguments,
|
|
||||||
then the first is taken to be an integer year, the second is taken
|
|
||||||
to be an integer month, and the third is taken to be an integer day.
|
|
||||||
If the combination of values is not valid, then a DateTimeError is
|
|
||||||
raised. One- or two-digit years up to 69 are assumed to be in the
|
|
||||||
21st century, whereas values 70-99 are assumed to be 20th century.
|
|
||||||
The fourth, fifth, and sixth arguments are floating point, positive
|
|
||||||
or negative offsets in units of hours, minutes, and days, and
|
|
||||||
default to zero if not given. An optional string may be given as
|
|
||||||
the final argument to indicate timezone (the effect of this is as if
|
|
||||||
you had taken the value of time.time() at that time on a machine in
|
|
||||||
the specified timezone).
|
|
||||||
|
|
||||||
If a string argument passed to the DateTime constructor cannot be
|
|
||||||
parsed, it will raise SyntaxError. Invalid date, time, or
|
|
||||||
timezone components will raise a DateTimeError.
|
|
||||||
|
|
||||||
The module function Timezones() will return a list of the timezones
|
|
||||||
recognized by the DateTime module. Recognition of timezone names is
|
|
||||||
case-insensitive.
|
|
||||||
|
|
||||||
Instance Methods for DateTime (IDateTime interface)
|
|
||||||
---------------------------------------------------
|
|
||||||
|
|
||||||
Conversion and comparison methods
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
* ``timeTime()`` returns the date/time as a floating-point number in
|
|
||||||
UTC, in the format used by the Python time module. Note that it is
|
|
||||||
possible to create date /time values with DateTime that have no
|
|
||||||
meaningful value to the time module, and in such cases a
|
|
||||||
DateTimeError is raised. A DateTime object's value must generally
|
|
||||||
be between Jan 1, 1970 (or your local machine epoch) and Jan 2038 to
|
|
||||||
produce a valid time.time() style value.
|
|
||||||
|
|
||||||
>>> dt = DateTime('Mar 9, 1997 13:45:00 US/Eastern')
|
|
||||||
>>> dt.timeTime()
|
|
||||||
857933100.0
|
|
||||||
|
|
||||||
>>> DateTime('2040/01/01 UTC').timeTime()
|
|
||||||
2208988800.0
|
|
||||||
|
|
||||||
>>> DateTime('1900/01/01 UTC').timeTime()
|
|
||||||
-2208988800.0
|
|
||||||
|
|
||||||
* ``toZone(z)`` returns a DateTime with the value as the current
|
|
||||||
object, represented in the indicated timezone:
|
|
||||||
|
|
||||||
>>> dt.toZone('UTC')
|
|
||||||
DateTime('1997/03/09 18:45:00 UTC')
|
|
||||||
|
|
||||||
>>> dt.toZone('UTC').equalTo(dt)
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isFuture()`` returns true if this object represents a date/time
|
|
||||||
later than the time of the call:
|
|
||||||
|
|
||||||
>>> dt.isFuture()
|
|
||||||
False
|
|
||||||
>>> DateTime('Jan 1 3000').isFuture() # not time-machine safe!
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isPast()`` returns true if this object represents a date/time
|
|
||||||
earlier than the time of the call:
|
|
||||||
|
|
||||||
>>> dt.isPast()
|
|
||||||
True
|
|
||||||
>>> DateTime('Jan 1 3000').isPast() # not time-machine safe!
|
|
||||||
False
|
|
||||||
|
|
||||||
* ``isCurrentYear()`` returns true if this object represents a
|
|
||||||
date/time that falls within the current year, in the context of this
|
|
||||||
object's timezone representation:
|
|
||||||
|
|
||||||
>>> dt.isCurrentYear()
|
|
||||||
False
|
|
||||||
>>> DateTime().isCurrentYear()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isCurrentMonth()`` returns true if this object represents a
|
|
||||||
date/time that falls within the current month, in the context of
|
|
||||||
this object's timezone representation:
|
|
||||||
|
|
||||||
>>> dt.isCurrentMonth()
|
|
||||||
False
|
|
||||||
>>> DateTime().isCurrentMonth()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isCurrentDay()`` returns true if this object represents a
|
|
||||||
date/time that falls within the current day, in the context of this
|
|
||||||
object's timezone representation:
|
|
||||||
|
|
||||||
>>> dt.isCurrentDay()
|
|
||||||
False
|
|
||||||
>>> DateTime().isCurrentDay()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isCurrentHour()`` returns true if this object represents a
|
|
||||||
date/time that falls within the current hour, in the context of this
|
|
||||||
object's timezone representation:
|
|
||||||
|
|
||||||
>>> dt.isCurrentHour()
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> DateTime().isCurrentHour()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isCurrentMinute()`` returns true if this object represents a
|
|
||||||
date/time that falls within the current minute, in the context of
|
|
||||||
this object's timezone representation:
|
|
||||||
|
|
||||||
>>> dt.isCurrentMinute()
|
|
||||||
False
|
|
||||||
>>> DateTime().isCurrentMinute()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``isLeapYear()`` returns true if the current year (in the context of
|
|
||||||
the object's timezone) is a leap year:
|
|
||||||
|
|
||||||
>>> dt.isLeapYear()
|
|
||||||
False
|
|
||||||
>>> DateTime('Mar 8 2004').isLeapYear()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``earliestTime()`` returns a new DateTime object that represents the
|
|
||||||
earliest possible time (in whole seconds) that still falls within
|
|
||||||
the current object's day, in the object's timezone context:
|
|
||||||
|
|
||||||
>>> dt.earliestTime()
|
|
||||||
DateTime('1997/03/09 00:00:00 US/Eastern')
|
|
||||||
|
|
||||||
* ``latestTime()`` return a new DateTime object that represents the
|
|
||||||
latest possible time (in whole seconds) that still falls within the
|
|
||||||
current object's day, in the object's timezone context
|
|
||||||
|
|
||||||
>>> dt.latestTime()
|
|
||||||
DateTime('1997/03/09 23:59:59 US/Eastern')
|
|
||||||
|
|
||||||
Component access
|
|
||||||
~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
* ``parts()`` returns a tuple containing the calendar year, month,
|
|
||||||
day, hour, minute second and timezone of the object
|
|
||||||
|
|
||||||
>>> dt.parts() # doctest: +ELLIPSIS
|
|
||||||
(1997, 3, 9, 13, 45, ... 'US/Eastern')
|
|
||||||
|
|
||||||
* ``timezone()`` returns the timezone in which the object is represented:
|
|
||||||
|
|
||||||
>>> dt.timezone() in Timezones()
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``tzoffset()`` returns the timezone offset for the objects timezone:
|
|
||||||
|
|
||||||
>>> dt.tzoffset()
|
|
||||||
-18000
|
|
||||||
|
|
||||||
* ``year()`` returns the calendar year of the object:
|
|
||||||
|
|
||||||
>>> dt.year()
|
|
||||||
1997
|
|
||||||
|
|
||||||
* ``month()`` returns the month of the object as an integer:
|
|
||||||
|
|
||||||
>>> dt.month()
|
|
||||||
3
|
|
||||||
|
|
||||||
* ``Month()`` returns the full month name:
|
|
||||||
|
|
||||||
>>> dt.Month()
|
|
||||||
'March'
|
|
||||||
|
|
||||||
* ``aMonth()`` returns the abbreviated month name:
|
|
||||||
|
|
||||||
>>> dt.aMonth()
|
|
||||||
'Mar'
|
|
||||||
|
|
||||||
* ``pMonth()`` returns the abbreviated (with period) month name:
|
|
||||||
|
|
||||||
>>> dt.pMonth()
|
|
||||||
'Mar.'
|
|
||||||
|
|
||||||
* ``day()`` returns the integer day:
|
|
||||||
|
|
||||||
>>> dt.day()
|
|
||||||
9
|
|
||||||
|
|
||||||
* ``Day()`` returns the full name of the day of the week:
|
|
||||||
|
|
||||||
>>> dt.Day()
|
|
||||||
'Sunday'
|
|
||||||
|
|
||||||
* ``dayOfYear()`` returns the day of the year, in context of the
|
|
||||||
timezone representation of the object:
|
|
||||||
|
|
||||||
>>> dt.dayOfYear()
|
|
||||||
68
|
|
||||||
|
|
||||||
* ``aDay()`` returns the abbreviated name of the day of the week:
|
|
||||||
|
|
||||||
>>> dt.aDay()
|
|
||||||
'Sun'
|
|
||||||
|
|
||||||
* ``pDay()`` returns the abbreviated (with period) name of the day of
|
|
||||||
the week:
|
|
||||||
|
|
||||||
>>> dt.pDay()
|
|
||||||
'Sun.'
|
|
||||||
|
|
||||||
* ``dow()`` returns the integer day of the week, where Sunday is 0:
|
|
||||||
|
|
||||||
>>> dt.dow()
|
|
||||||
0
|
|
||||||
|
|
||||||
* ``dow_1()`` returns the integer day of the week, where sunday is 1:
|
|
||||||
|
|
||||||
>>> dt.dow_1()
|
|
||||||
1
|
|
||||||
|
|
||||||
* ``h_12()`` returns the 12-hour clock representation of the hour:
|
|
||||||
|
|
||||||
>>> dt.h_12()
|
|
||||||
1
|
|
||||||
|
|
||||||
* ``h_24()`` returns the 24-hour clock representation of the hour:
|
|
||||||
|
|
||||||
>>> dt.h_24()
|
|
||||||
13
|
|
||||||
|
|
||||||
* ``ampm()`` returns the appropriate time modifier (am or pm):
|
|
||||||
|
|
||||||
>>> dt.ampm()
|
|
||||||
'pm'
|
|
||||||
|
|
||||||
* ``hour()`` returns the 24-hour clock representation of the hour:
|
|
||||||
|
|
||||||
>>> dt.hour()
|
|
||||||
13
|
|
||||||
|
|
||||||
* ``minute()`` returns the minute:
|
|
||||||
|
|
||||||
>>> dt.minute()
|
|
||||||
45
|
|
||||||
|
|
||||||
* ``second()`` returns the second:
|
|
||||||
|
|
||||||
>>> dt.second() == 0
|
|
||||||
True
|
|
||||||
|
|
||||||
* ``millis()`` returns the milliseconds since the epoch in GMT.
|
|
||||||
|
|
||||||
>>> dt.millis() == 857933100000
|
|
||||||
True
|
|
||||||
|
|
||||||
strftime()
|
|
||||||
~~~~~~~~~~
|
|
||||||
|
|
||||||
See ``tests/test_datetime.py``.
|
|
||||||
|
|
||||||
General formats from previous DateTime
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
* ``Date()`` return the date string for the object:
|
|
||||||
|
|
||||||
>>> dt.Date()
|
|
||||||
'1997/03/09'
|
|
||||||
|
|
||||||
* ``Time()`` returns the time string for an object to the nearest
|
|
||||||
second:
|
|
||||||
|
|
||||||
>>> dt.Time()
|
|
||||||
'13:45:00'
|
|
||||||
|
|
||||||
* ``TimeMinutes()`` returns the time string for an object not showing
|
|
||||||
seconds:
|
|
||||||
|
|
||||||
>>> dt.TimeMinutes()
|
|
||||||
'13:45'
|
|
||||||
|
|
||||||
* ``AMPM()`` returns the time string for an object to the nearest second:
|
|
||||||
|
|
||||||
>>> dt.AMPM()
|
|
||||||
'01:45:00 pm'
|
|
||||||
|
|
||||||
* ``AMPMMinutes()`` returns the time string for an object not showing
|
|
||||||
seconds:
|
|
||||||
|
|
||||||
>>> dt.AMPMMinutes()
|
|
||||||
'01:45 pm'
|
|
||||||
|
|
||||||
* ``PreciseTime()`` returns the time string for the object:
|
|
||||||
|
|
||||||
>>> dt.PreciseTime()
|
|
||||||
'13:45:00.000'
|
|
||||||
|
|
||||||
* ``PreciseAMPM()`` returns the time string for the object:
|
|
||||||
|
|
||||||
>>> dt.PreciseAMPM()
|
|
||||||
'01:45:00.000 pm'
|
|
||||||
|
|
||||||
* ``yy()`` returns the calendar year as a 2 digit string
|
|
||||||
|
|
||||||
>>> dt.yy()
|
|
||||||
'97'
|
|
||||||
|
|
||||||
* ``mm()`` returns the month as a 2 digit string
|
|
||||||
|
|
||||||
>>> dt.mm()
|
|
||||||
'03'
|
|
||||||
|
|
||||||
* ``dd()`` returns the day as a 2 digit string:
|
|
||||||
|
|
||||||
>>> dt.dd()
|
|
||||||
'09'
|
|
||||||
|
|
||||||
* ``rfc822()`` returns the date in RFC 822 format:
|
|
||||||
|
|
||||||
>>> dt.rfc822()
|
|
||||||
'Sun, 09 Mar 1997 13:45:00 -0500'
|
|
||||||
|
|
||||||
New formats
|
|
||||||
~~~~~~~~~~~
|
|
||||||
|
|
||||||
* ``fCommon()`` returns a string representing the object's value in
|
|
||||||
the format: March 9, 1997 1:45 pm:
|
|
||||||
|
|
||||||
>>> dt.fCommon()
|
|
||||||
'March 9, 1997 1:45 pm'
|
|
||||||
|
|
||||||
* ``fCommonZ()`` returns a string representing the object's value in
|
|
||||||
the format: March 9, 1997 1:45 pm US/Eastern:
|
|
||||||
|
|
||||||
>>> dt.fCommonZ()
|
|
||||||
'March 9, 1997 1:45 pm US/Eastern'
|
|
||||||
|
|
||||||
* ``aCommon()`` returns a string representing the object's value in
|
|
||||||
the format: Mar 9, 1997 1:45 pm:
|
|
||||||
|
|
||||||
>>> dt.aCommon()
|
|
||||||
'Mar 9, 1997 1:45 pm'
|
|
||||||
|
|
||||||
* ``aCommonZ()`` return a string representing the object's value in
|
|
||||||
the format: Mar 9, 1997 1:45 pm US/Eastern:
|
|
||||||
|
|
||||||
>>> dt.aCommonZ()
|
|
||||||
'Mar 9, 1997 1:45 pm US/Eastern'
|
|
||||||
|
|
||||||
* ``pCommon()`` returns a string representing the object's value in
|
|
||||||
the format Mar. 9, 1997 1:45 pm:
|
|
||||||
|
|
||||||
>>> dt.pCommon()
|
|
||||||
'Mar. 9, 1997 1:45 pm'
|
|
||||||
|
|
||||||
* ``pCommonZ()`` returns a string representing the object's value in
|
|
||||||
the format: Mar. 9, 1997 1:45 pm US/Eastern:
|
|
||||||
|
|
||||||
>>> dt.pCommonZ()
|
|
||||||
'Mar. 9, 1997 1:45 pm US/Eastern'
|
|
||||||
|
|
||||||
* ``ISO()`` returns a string with the date/time in ISO format. Note:
|
|
||||||
this is not ISO 8601-format! See the ISO8601 and HTML4 methods below
|
|
||||||
for ISO 8601-compliant output. Dates are output as: YYYY-MM-DD HH:MM:SS
|
|
||||||
|
|
||||||
>>> dt.ISO()
|
|
||||||
'1997-03-09 13:45:00'
|
|
||||||
|
|
||||||
* ``ISO8601()`` returns the object in ISO 8601-compatible format
|
|
||||||
containing the date, time with seconds-precision and the time zone
|
|
||||||
identifier - see http://www.w3.org/TR/NOTE-datetime. Dates are
|
|
||||||
output as: YYYY-MM-DDTHH:MM:SSTZD (T is a literal character, TZD is
|
|
||||||
Time Zone Designator, format +HH:MM or -HH:MM).
|
|
||||||
|
|
||||||
The ``HTML4()`` method below offers the same formatting, but
|
|
||||||
converts to UTC before returning the value and sets the TZD"Z"
|
|
||||||
|
|
||||||
>>> dt.ISO8601()
|
|
||||||
'1997-03-09T13:45:00-05:00'
|
|
||||||
|
|
||||||
|
|
||||||
* ``HTML4()`` returns the object in the format used in the HTML4.0
|
|
||||||
specification, one of the standard forms in ISO8601. See
|
|
||||||
http://www.w3.org/TR/NOTE-datetime. Dates are output as:
|
|
||||||
YYYY-MM-DDTHH:MM:SSZ (T, Z are literal characters, the time is in
|
|
||||||
UTC.):
|
|
||||||
|
|
||||||
>>> dt.HTML4()
|
|
||||||
'1997-03-09T18:45:00Z'
|
|
||||||
|
|
||||||
* ``JulianDay()`` returns the Julian day according to
|
|
||||||
http://www.tondering.dk/claus/cal/node3.html#sec-calcjd
|
|
||||||
|
|
||||||
>>> dt.JulianDay()
|
|
||||||
2450517
|
|
||||||
|
|
||||||
* ``week()`` returns the week number according to ISO
|
|
||||||
see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000
|
|
||||||
|
|
||||||
>>> dt.week()
|
|
||||||
10
|
|
||||||
|
|
||||||
Deprecated API
|
|
||||||
~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
* DayOfWeek(): see Day()
|
|
||||||
|
|
||||||
* Day_(): see pDay()
|
|
||||||
|
|
||||||
* Mon(): see aMonth()
|
|
||||||
|
|
||||||
* Mon_(): see pMonth
|
|
||||||
|
|
||||||
General Services Provided by DateTime
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
DateTimes can be repr()'ed; the result will be a string indicating how
|
|
||||||
to make a DateTime object like this:
|
|
||||||
|
|
||||||
>>> repr(dt)
|
|
||||||
"DateTime('1997/03/09 13:45:00 US/Eastern')"
|
|
||||||
|
|
||||||
When we convert them into a string, we get a nicer string that could
|
|
||||||
actually be shown to a user:
|
|
||||||
|
|
||||||
>>> str(dt)
|
|
||||||
'1997/03/09 13:45:00 US/Eastern'
|
|
||||||
|
|
||||||
The hash value of a DateTime is based on the date and time and is
|
|
||||||
equal for different representations of the DateTime:
|
|
||||||
|
|
||||||
>>> hash(dt)
|
|
||||||
3618678
|
|
||||||
>>> hash(dt.toZone('UTC'))
|
|
||||||
3618678
|
|
||||||
|
|
||||||
DateTime objects can be compared to other DateTime objects OR floating
|
|
||||||
point numbers such as the ones which are returned by the Python time
|
|
||||||
module by using the equalTo method. Using this API, True is returned if the
|
|
||||||
object represents a date/time equal to the specified DateTime or time module
|
|
||||||
style time:
|
|
||||||
|
|
||||||
>>> dt.equalTo(dt)
|
|
||||||
True
|
|
||||||
>>> dt.equalTo(dt.toZone('UTC'))
|
|
||||||
True
|
|
||||||
>>> dt.equalTo(dt.timeTime())
|
|
||||||
True
|
|
||||||
>>> dt.equalTo(DateTime())
|
|
||||||
False
|
|
||||||
|
|
||||||
Same goes for inequalities:
|
|
||||||
|
|
||||||
>>> dt.notEqualTo(dt)
|
|
||||||
False
|
|
||||||
>>> dt.notEqualTo(dt.toZone('UTC'))
|
|
||||||
False
|
|
||||||
>>> dt.notEqualTo(dt.timeTime())
|
|
||||||
False
|
|
||||||
>>> dt.notEqualTo(DateTime())
|
|
||||||
True
|
|
||||||
|
|
||||||
Normal equality operations only work with DateTime objects and take the
|
|
||||||
timezone setting into account:
|
|
||||||
|
|
||||||
>>> dt == dt
|
|
||||||
True
|
|
||||||
>>> dt == dt.toZone('UTC')
|
|
||||||
False
|
|
||||||
>>> dt == DateTime()
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> dt != dt
|
|
||||||
False
|
|
||||||
>>> dt != dt.toZone('UTC')
|
|
||||||
True
|
|
||||||
>>> dt != DateTime()
|
|
||||||
True
|
|
||||||
|
|
||||||
But the other comparison operations compare the referenced moment in time and
|
|
||||||
not the representation itself:
|
|
||||||
|
|
||||||
>>> dt > dt
|
|
||||||
False
|
|
||||||
>>> DateTime() > dt
|
|
||||||
True
|
|
||||||
>>> dt > DateTime().timeTime()
|
|
||||||
False
|
|
||||||
>>> DateTime().timeTime() > dt
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> dt.greaterThan(dt)
|
|
||||||
False
|
|
||||||
>>> DateTime().greaterThan(dt)
|
|
||||||
True
|
|
||||||
>>> dt.greaterThan(DateTime().timeTime())
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> dt >= dt
|
|
||||||
True
|
|
||||||
>>> DateTime() >= dt
|
|
||||||
True
|
|
||||||
>>> dt >= DateTime().timeTime()
|
|
||||||
False
|
|
||||||
>>> DateTime().timeTime() >= dt
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> dt.greaterThanEqualTo(dt)
|
|
||||||
True
|
|
||||||
>>> DateTime().greaterThanEqualTo(dt)
|
|
||||||
True
|
|
||||||
>>> dt.greaterThanEqualTo(DateTime().timeTime())
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> dt < dt
|
|
||||||
False
|
|
||||||
>>> DateTime() < dt
|
|
||||||
False
|
|
||||||
>>> dt < DateTime().timeTime()
|
|
||||||
True
|
|
||||||
>>> DateTime().timeTime() < dt
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> dt.lessThan(dt)
|
|
||||||
False
|
|
||||||
>>> DateTime().lessThan(dt)
|
|
||||||
False
|
|
||||||
>>> dt.lessThan(DateTime().timeTime())
|
|
||||||
True
|
|
||||||
|
|
||||||
>>> dt <= dt
|
|
||||||
True
|
|
||||||
>>> DateTime() <= dt
|
|
||||||
False
|
|
||||||
>>> dt <= DateTime().timeTime()
|
|
||||||
True
|
|
||||||
>>> DateTime().timeTime() <= dt
|
|
||||||
False
|
|
||||||
|
|
||||||
>>> dt.lessThanEqualTo(dt)
|
|
||||||
True
|
|
||||||
>>> DateTime().lessThanEqualTo(dt)
|
|
||||||
False
|
|
||||||
>>> dt.lessThanEqualTo(DateTime().timeTime())
|
|
||||||
True
|
|
||||||
|
|
||||||
Numeric Services Provided by DateTime
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
A DateTime may be added to a number and a number may be added to a
|
|
||||||
DateTime:
|
|
||||||
|
|
||||||
>>> dt + 5
|
|
||||||
DateTime('1997/03/14 13:45:00 US/Eastern')
|
|
||||||
>>> 5 + dt
|
|
||||||
DateTime('1997/03/14 13:45:00 US/Eastern')
|
|
||||||
|
|
||||||
Two DateTimes cannot be added:
|
|
||||||
|
|
||||||
>>> from DateTime.interfaces import DateTimeError
|
|
||||||
>>> try:
|
|
||||||
... dt + dt
|
|
||||||
... print('fail')
|
|
||||||
... except DateTimeError:
|
|
||||||
... print('ok')
|
|
||||||
ok
|
|
||||||
|
|
||||||
Either a DateTime or a number may be subtracted from a DateTime,
|
|
||||||
however, a DateTime may not be subtracted from a number:
|
|
||||||
|
|
||||||
>>> DateTime('1997/03/10 13:45 US/Eastern') - dt
|
|
||||||
1.0
|
|
||||||
>>> dt - 1
|
|
||||||
DateTime('1997/03/08 13:45:00 US/Eastern')
|
|
||||||
>>> 1 - dt
|
|
||||||
Traceback (most recent call last):
|
|
||||||
...
|
|
||||||
TypeError: unsupported operand type(s) for -: 'int' and 'DateTime'
|
|
||||||
|
|
||||||
DateTimes can also be converted to integers (number of seconds since
|
|
||||||
the epoch) and floats:
|
|
||||||
|
|
||||||
>>> int(dt)
|
|
||||||
857933100
|
|
||||||
>>> float(dt)
|
|
||||||
857933100.0
|
|
||||||
@@ -1,18 +0,0 @@
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
##############################################################################

from .DateTime import DateTime
from .DateTime import Timezones


__all__ = ('DateTime', 'Timezones')
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,375 +0,0 @@
|
|||||||
##############################################################################
|
|
||||||
#
|
|
||||||
# Copyright (c) 2005 Zope Foundation and Contributors.
|
|
||||||
#
|
|
||||||
# This software is subject to the provisions of the Zope Public License,
|
|
||||||
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
|
|
||||||
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
|
|
||||||
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
|
|
||||||
# FOR A PARTICULAR PURPOSE
|
|
||||||
#
|
|
||||||
##############################################################################
|
|
||||||
from zope.interface import Interface
|
|
||||||
|
|
||||||
|
|
||||||
class DateTimeError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class SyntaxError(DateTimeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DateError(DateTimeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class TimeError(DateTimeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class IDateTime(Interface):
|
|
||||||
# Conversion and comparison methods
|
|
||||||
|
|
||||||
def localZone(ltm=None):
|
|
||||||
"""Returns the time zone on the given date. The time zone
|
|
||||||
can change according to daylight savings."""
|
|
||||||
|
|
||||||
def timeTime():
|
|
||||||
"""Return the date/time as a floating-point number in UTC, in
|
|
||||||
the format used by the Python time module. Note that it is
|
|
||||||
possible to create date/time values with DateTime that have no
|
|
||||||
meaningful value to the time module."""
|
|
||||||
|
|
||||||
def toZone(z):
|
|
||||||
"""Return a DateTime with the value as the current object,
|
|
||||||
represented in the indicated timezone."""
|
|
||||||
|
|
||||||
def isFuture():
|
|
||||||
"""Return true if this object represents a date/time later
|
|
||||||
than the time of the call"""
|
|
||||||
|
|
||||||
def isPast():
|
|
||||||
"""Return true if this object represents a date/time earlier
|
|
||||||
than the time of the call"""
|
|
||||||
|
|
||||||
def isCurrentYear():
|
|
||||||
"""Return true if this object represents a date/time that
|
|
||||||
falls within the current year, in the context of this
|
|
||||||
object's timezone representation"""
|
|
||||||
|
|
||||||
def isCurrentMonth():
|
|
||||||
"""Return true if this object represents a date/time that
|
|
||||||
falls within the current month, in the context of this
|
|
||||||
object's timezone representation"""
|
|
||||||
|
|
||||||
def isCurrentDay():
|
|
||||||
"""Return true if this object represents a date/time that
|
|
||||||
falls within the current day, in the context of this object's
|
|
||||||
timezone representation"""
|
|
||||||
|
|
||||||
def isCurrentHour():
|
|
||||||
"""Return true if this object represents a date/time that
|
|
||||||
falls within the current hour, in the context of this object's
|
|
||||||
timezone representation"""
|
|
||||||
|
|
||||||
def isCurrentMinute():
|
|
||||||
"""Return true if this object represents a date/time that
|
|
||||||
falls within the current minute, in the context of this
|
|
||||||
object's timezone representation"""
|
|
||||||
|
|
||||||
def isLeapYear():
|
|
||||||
"""Return true if the current year (in the context of the
|
|
||||||
object's timezone) is a leap year"""
|
|
||||||
|
|
||||||
def earliestTime():
|
|
||||||
"""Return a new DateTime object that represents the earliest
|
|
||||||
possible time (in whole seconds) that still falls within the
|
|
||||||
current object's day, in the object's timezone context"""
|
|
||||||
|
|
||||||
def latestTime():
|
|
||||||
"""Return a new DateTime object that represents the latest
|
|
||||||
possible time (in whole seconds) that still falls within the
|
|
||||||
current object's day, in the object's timezone context"""
|
|
||||||
|
|
||||||
def greaterThan(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time greater than the specified DateTime or time module
|
|
||||||
style time. Revised to give more correct results through
|
|
||||||
comparison of long integer milliseconds."""
|
|
||||||
|
|
||||||
__gt__ = greaterThan
|
|
||||||
|
|
||||||
def greaterThanEqualTo(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time greater than or equal to the specified DateTime or
|
|
||||||
time module style time. Revised to give more correct results
|
|
||||||
through comparison of long integer milliseconds."""
|
|
||||||
|
|
||||||
__ge__ = greaterThanEqualTo
|
|
||||||
|
|
||||||
def equalTo(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time equal to the specified DateTime or time module style
|
|
||||||
time. Revised to give more correct results through comparison
|
|
||||||
of long integer milliseconds."""
|
|
||||||
|
|
||||||
__eq__ = equalTo
|
|
||||||
|
|
||||||
def notEqualTo(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time not equal to the specified DateTime or time module
|
|
||||||
style time. Revised to give more correct results through
|
|
||||||
comparison of long integer milliseconds."""
|
|
||||||
|
|
||||||
__ne__ = notEqualTo
|
|
||||||
|
|
||||||
def lessThan(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time less than the specified DateTime or time module
|
|
||||||
style time. Revised to give more correct results through
|
|
||||||
comparison of long integer milliseconds."""
|
|
||||||
|
|
||||||
__lt__ = lessThan
|
|
||||||
|
|
||||||
def lessThanEqualTo(t):
|
|
||||||
"""Compare this DateTime object to another DateTime object OR
|
|
||||||
a floating point number such as that which is returned by the
|
|
||||||
Python time module. Returns true if the object represents a
|
|
||||||
date/time less than or equal to the specified DateTime or time
|
|
||||||
module style time. Revised to give more correct results
|
|
||||||
through comparison of long integer milliseconds."""
|
|
||||||
|
|
||||||
__le__ = lessThanEqualTo
|
|
||||||
|
|
||||||
# Component access
|
|
||||||
|
|
||||||
def parts():
|
|
||||||
"""Return a tuple containing the calendar year, month, day,
|
|
||||||
hour, minute second and timezone of the object"""
|
|
||||||
|
|
||||||
def timezone():
|
|
||||||
"""Return the timezone in which the object is represented."""
|
|
||||||
|
|
||||||
def tzoffset():
|
|
||||||
"""Return the timezone offset for the objects timezone."""
|
|
||||||
|
|
||||||
def year():
|
|
||||||
"""Return the calendar year of the object"""
|
|
||||||
|
|
||||||
def month():
|
|
||||||
"""Return the month of the object as an integer"""
|
|
||||||
|
|
||||||
def Month():
|
|
||||||
"""Return the full month name"""
|
|
||||||
|
|
||||||
def aMonth():
|
|
||||||
"""Return the abbreviated month name."""
|
|
||||||
|
|
||||||
def Mon():
|
|
||||||
"""Compatibility: see aMonth"""
|
|
||||||
|
|
||||||
def pMonth():
|
|
||||||
"""Return the abbreviated (with period) month name."""
|
|
||||||
|
|
||||||
def Mon_():
|
|
||||||
"""Compatibility: see pMonth"""
|
|
||||||
|
|
||||||
def day():
|
|
||||||
"""Return the integer day"""
|
|
||||||
|
|
||||||
def Day():
|
|
||||||
"""Return the full name of the day of the week"""
|
|
||||||
|
|
||||||
def DayOfWeek():
|
|
||||||
"""Compatibility: see Day"""
|
|
||||||
|
|
||||||
def dayOfYear():
|
|
||||||
"""Return the day of the year, in context of the timezone
|
|
||||||
representation of the object"""
|
|
||||||
|
|
||||||
def aDay():
|
|
||||||
"""Return the abbreviated name of the day of the week"""
|
|
||||||
|
|
||||||
def pDay():
|
|
||||||
"""Return the abbreviated (with period) name of the day of the
|
|
||||||
week"""
|
|
||||||
|
|
||||||
def Day_():
|
|
||||||
"""Compatibility: see pDay"""
|
|
||||||
|
|
||||||
def dow():
|
|
||||||
"""Return the integer day of the week, where sunday is 0"""
|
|
||||||
|
|
||||||
def dow_1():
|
|
||||||
"""Return the integer day of the week, where sunday is 1"""
|
|
||||||
|
|
||||||
def h_12():
|
|
||||||
"""Return the 12-hour clock representation of the hour"""
|
|
||||||
|
|
||||||
def h_24():
|
|
||||||
"""Return the 24-hour clock representation of the hour"""
|
|
||||||
|
|
||||||
def ampm():
|
|
||||||
"""Return the appropriate time modifier (am or pm)"""
|
|
||||||
|
|
||||||
def hour():
|
|
||||||
"""Return the 24-hour clock representation of the hour"""
|
|
||||||
|
|
||||||
def minute():
|
|
||||||
"""Return the minute"""
|
|
||||||
|
|
||||||
def second():
|
|
||||||
"""Return the second"""
|
|
||||||
|
|
||||||
def millis():
|
|
||||||
"""Return the millisecond since the epoch in GMT."""
|
|
||||||
|
|
||||||
def strftime(format):
|
|
||||||
"""Format the date/time using the *current timezone representation*."""
|
|
||||||
|
|
||||||
# General formats from previous DateTime
|
|
||||||
|
|
||||||
def Date():
|
|
||||||
"""Return the date string for the object."""
|
|
||||||
|
|
||||||
def Time():
|
|
||||||
"""Return the time string for an object to the nearest second."""
|
|
||||||
|
|
||||||
def TimeMinutes():
|
|
||||||
"""Return the time string for an object not showing seconds."""
|
|
||||||
|
|
||||||
def AMPM():
|
|
||||||
"""Return the time string for an object to the nearest second."""
|
|
||||||
|
|
||||||
def AMPMMinutes():
|
|
||||||
"""Return the time string for an object not showing seconds."""
|
|
||||||
|
|
||||||
def PreciseTime():
|
|
||||||
"""Return the time string for the object."""
|
|
||||||
|
|
||||||
def PreciseAMPM():
|
|
||||||
"""Return the time string for the object."""
|
|
||||||
|
|
||||||
def yy():
|
|
||||||
"""Return calendar year as a 2 digit string"""
|
|
||||||
|
|
||||||
def mm():
|
|
||||||
"""Return month as a 2 digit string"""
|
|
||||||
|
|
||||||
def dd():
|
|
||||||
"""Return day as a 2 digit string"""
|
|
||||||
|
|
||||||
def rfc822():
|
|
||||||
"""Return the date in RFC 822 format"""
|
|
||||||
|
|
||||||
# New formats
|
|
||||||
|
|
||||||
def fCommon():
|
|
||||||
"""Return a string representing the object's value in the
|
|
||||||
format: March 1, 1997 1:45 pm"""
|
|
||||||
|
|
||||||
def fCommonZ():
|
|
||||||
"""Return a string representing the object's value in the
|
|
||||||
format: March 1, 1997 1:45 pm US/Eastern"""
|
|
||||||
|
|
||||||
def aCommon():
|
|
||||||
"""Return a string representing the object's value in the
|
|
||||||
format: Mar 1, 1997 1:45 pm"""
|
|
||||||
|
|
||||||
def aCommonZ():
|
|
||||||
"""Return a string representing the object's value in the
|
|
||||||
format: Mar 1, 1997 1:45 pm US/Eastern"""
|
|
||||||
|
|
||||||
def pCommon():
|
|
||||||
"""Return a string representing the object's value in the
|
|
||||||
format: Mar. 1, 1997 1:45 pm"""
|
|
||||||
|
|
||||||
def pCommonZ():
|
|
||||||
"""Return a string representing the object's value
|
|
||||||
in the format: Mar. 1, 1997 1:45 pm US/Eastern"""
|
|
||||||
|
|
||||||
def ISO():
|
|
||||||
"""Return the object in ISO standard format. Note: this is
|
|
||||||
*not* ISO 8601-format! See the ISO8601 and HTML4 methods below
|
|
||||||
for ISO 8601-compliant output
|
|
||||||
|
|
||||||
Dates are output as: YYYY-MM-DD HH:MM:SS
|
|
||||||
"""
|
|
||||||
|
|
||||||
def ISO8601():
|
|
||||||
"""Return the object in ISO 8601-compatible format containing
|
|
||||||
the date, time with seconds-precision and the time zone
|
|
||||||
identifier - see http://www.w3.org/TR/NOTE-datetime
|
|
||||||
|
|
||||||
Dates are output as: YYYY-MM-DDTHH:MM:SSTZD
|
|
||||||
T is a literal character.
|
|
||||||
TZD is Time Zone Designator, format +HH:MM or -HH:MM
|
|
||||||
|
|
||||||
The HTML4 method below offers the same formatting, but
|
|
||||||
converts to UTC before returning the value and sets the TZD"Z"
|
|
||||||
"""
|
|
||||||
|
|
||||||
def HTML4():
|
|
||||||
"""Return the object in the format used in the HTML4.0
|
|
||||||
specification, one of the standard forms in ISO8601. See
|
|
||||||
http://www.w3.org/TR/NOTE-datetime
|
|
||||||
|
|
||||||
Dates are output as: YYYY-MM-DDTHH:MM:SSZ
|
|
||||||
T, Z are literal characters.
|
|
||||||
The time is in UTC.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def JulianDay():
|
|
||||||
"""Return the Julian day according to
|
|
||||||
https://www.tondering.dk/claus/cal/julperiod.php#formula
|
|
||||||
"""
|
|
||||||
|
|
||||||
def week():
|
|
||||||
"""Return the week number according to ISO.
|
|
||||||
|
|
||||||
See https://www.tondering.dk/claus/cal/week.php#weekno
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Python operator and conversion API
|
|
||||||
|
|
||||||
def __add__(other):
|
|
||||||
"""A DateTime may be added to a number and a number may be
|
|
||||||
added to a DateTime; two DateTimes cannot be added."""
|
|
||||||
|
|
||||||
__radd__ = __add__
|
|
||||||
|
|
||||||
def __sub__(other):
|
|
||||||
"""Either a DateTime or a number may be subtracted from a
|
|
||||||
DateTime, however, a DateTime may not be subtracted from a
|
|
||||||
number."""
|
|
||||||
|
|
||||||
def __repr__():
|
|
||||||
"""Convert a DateTime to a string that looks like a Python
|
|
||||||
expression."""
|
|
||||||
|
|
||||||
def __str__():
|
|
||||||
"""Convert a DateTime to a string."""
|
|
||||||
|
|
||||||
def __hash__():
|
|
||||||
"""Compute a hash value for a DateTime"""
|
|
||||||
|
|
||||||
def __int__():
|
|
||||||
"""Convert to an integer number of seconds since the epoch (gmt)"""
|
|
||||||
|
|
||||||
def __long__():
|
|
||||||
"""Convert to a long-int number of seconds since the epoch (gmt)"""
|
|
||||||
|
|
||||||
def __float__():
|
|
||||||
"""Convert to floating-point number of seconds since the epoch (gmt)"""
|
|
||||||
@@ -1,192 +0,0 @@
|
|||||||
Pytz Support
|
|
||||||
============
|
|
||||||
|
|
||||||
Allows the pytz package to be used for time zone information. The
|
|
||||||
advantage of using pytz is that it has a more complete and up to date
|
|
||||||
time zone and daylight savings time database.
|
|
||||||
|
|
||||||
Usage
|
|
||||||
-----
|
|
||||||
You don't have to do anything special to make it work.
|
|
||||||
|
|
||||||
>>> from DateTime import DateTime, Timezones
|
|
||||||
>>> d = DateTime('March 11, 2007 US/Eastern')
|
|
||||||
|
|
||||||
Daylight Savings
|
|
||||||
----------------
|
|
||||||
In 2007 daylight savings time in the US was changed. The Energy Policy
|
|
||||||
Act of 2005 mandates that DST will start on the second Sunday in March
|
|
||||||
and end on the first Sunday in November.
|
|
||||||
|
|
||||||
In 2007, the start and stop dates are March 11 and November 4,
|
|
||||||
respectively. These dates are different from previous DST start and
|
|
||||||
stop dates. In 2006, the dates were the first Sunday in April (April
|
|
||||||
2, 2006) and the last Sunday in October (October 29, 2006).
|
|
||||||
|
|
||||||
Let's make sure that DateTime can deal with this, since the primary
|
|
||||||
motivation to use pytz for time zone information is the fact that it
|
|
||||||
is kept up to date with daylight savings changes.
|
|
||||||
|
|
||||||
>>> DateTime('March 11, 2007 US/Eastern').tzoffset()
|
|
||||||
-18000
|
|
||||||
>>> DateTime('March 12, 2007 US/Eastern').tzoffset()
|
|
||||||
-14400
|
|
||||||
>>> DateTime('November 4, 2007 US/Eastern').tzoffset()
|
|
||||||
-14400
|
|
||||||
>>> DateTime('November 5, 2007 US/Eastern').tzoffset()
|
|
||||||
-18000
|
|
||||||
|
|
||||||
Let's compare this to 2006.
|
|
||||||
|
|
||||||
>>> DateTime('April 2, 2006 US/Eastern').tzoffset()
|
|
||||||
-18000
|
|
||||||
>>> DateTime('April 3, 2006 US/Eastern').tzoffset()
|
|
||||||
-14400
|
|
||||||
>>> DateTime('October 29, 2006 US/Eastern').tzoffset()
|
|
||||||
-14400
|
|
||||||
>>> DateTime('October 30, 2006 US/Eastern').tzoffset()
|
|
||||||
-18000
|
|
||||||
|
|
||||||
Time Zones
|
|
||||||
---------
|
|
||||||
DateTime can use pytz's large database of time zones. Here are some
|
|
||||||
examples:
|
|
||||||
|
|
||||||
>>> d = DateTime('Pacific/Kwajalein')
|
|
||||||
>>> d = DateTime('America/Shiprock')
|
|
||||||
>>> d = DateTime('Africa/Ouagadougou')
|
|
||||||
|
|
||||||
Of course pytz doesn't know about everything.
|
|
||||||
|
|
||||||
>>> from DateTime.interfaces import SyntaxError
|
|
||||||
>>> try:
|
|
||||||
... d = DateTime('July 21, 1969 Moon/Eastern')
|
|
||||||
... print('fail')
|
|
||||||
... except SyntaxError:
|
|
||||||
... print('ok')
|
|
||||||
ok
|
|
||||||
|
|
||||||
You can still use zone names that DateTime defines that aren't part of
|
|
||||||
the pytz database.
|
|
||||||
|
|
||||||
>>> d = DateTime('eet')
|
|
||||||
>>> d = DateTime('iceland')
|
|
||||||
|
|
||||||
These time zones use DateTimes database. So it's preferable to use the
|
|
||||||
official time zone name.
|
|
||||||
|
|
||||||
One trickiness is that DateTime supports some zone name
|
|
||||||
abbreviations. Some of these map to pytz names, so these abbreviations
|
|
||||||
will give you time zone date from pytz. Notable among abbreviations
|
|
||||||
that work this way are 'est', 'cst', 'mst', and 'pst'.
|
|
||||||

Let's verify that 'est' picks up the 2007 daylight savings time changes.

>>> DateTime('March 11, 2007 est').tzoffset()
-18000
>>> DateTime('March 12, 2007 est').tzoffset()
-14400
>>> DateTime('November 4, 2007 est').tzoffset()
-14400
>>> DateTime('November 5, 2007 est').tzoffset()
-18000

You can get a list of time zones supported by calling the Timezones() function.

>>> Timezones() #doctest: +ELLIPSIS
['Africa/Abidjan', 'Africa/Accra', 'Africa/Addis_Ababa', ...]

Note that you can mess with this list without hurting things.

>>> t = Timezones()
>>> t.remove('US/Eastern')
>>> d = DateTime('US/Eastern')

Internal Components
-------------------

The following are tests of internal components.

Cache
~~~~~

The DateTime class uses a new time zone cache.

>>> from DateTime.DateTime import _TZINFO
>>> _TZINFO #doctest: +ELLIPSIS
<DateTime.pytz_support.PytzCache ...>

The cache maps time zone names to time zone instances.

>>> cache = _TZINFO
>>> tz = cache['GMT+730']
>>> tz = cache['US/Mountain']

The cache also must provide a few attributes for use by the DateTime
class.

The _zlst attribute is a list of supported time zone names.

>>> cache._zlst #doctest: +ELLIPSIS
['Africa/Abidjan'... 'Africa/Accra'... 'IDLE'... 'NZST'... 'NZT'...]

The _zidx attribute is a list of lower-case and possibly abbreviated
time zone names that can be mapped to official zone names.

>>> 'australia/yancowinna' in cache._zidx
True
>>> 'europe/isle_of_man' in cache._zidx
True
>>> 'gmt+0500' in cache._zidx
True

Note that there are more items in _zidx than in _zlst since there are
multiple names for some time zones.

>>> len(cache._zidx) > len(cache._zlst)
True

Each entry in _zlst should also be present in _zidx in lower case form.

>>> for name in cache._zlst:
...     if not name.lower() in cache._zidx:
...         print("Error %s not in _zidx" % name.lower())

The _zmap attribute maps the names in _zidx to official names in _zlst.

>>> cache._zmap['africa/abidjan']
'Africa/Abidjan'
>>> cache._zmap['gmt+1']
'GMT+1'
>>> cache._zmap['gmt+0100']
'GMT+1'
>>> cache._zmap['utc']
'UTC'

Let's make sure that _zmap and _zidx agree.

>>> idx = set(cache._zidx)
>>> keys = set(cache._zmap.keys())
>>> idx == keys
True

Timezone objects
~~~~~~~~~~~~~~~~
The timezone instances have only one public method info(). It returns
a tuple of (offset, is_dst, name). The method takes a timestamp, which
is used to determine dst information.

>>> t1 = DateTime('November 4, 00:00 2007 US/Mountain').timeTime()
>>> t2 = DateTime('November 4, 02:00 2007 US/Mountain').timeTime()
>>> tz.info(t1)
(-21600, 1, 'MDT')
>>> tz.info(t2)
(-25200, 0, 'MST')

If you don't pass any arguments to info it provides daylight savings
time information as of today.

>>> tz.info() in ((-21600, 1, 'MDT'), (-25200, 0, 'MST'))
True
@@ -1,269 +0,0 @@
|
|||||||
##############################################################################
|
|
||||||
#
|
|
||||||
# Copyright (c) 2007 Zope Foundation and Contributors.
|
|
||||||
#
|
|
||||||
# This software is subject to the provisions of the Zope Public License,
|
|
||||||
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
|
|
||||||
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
|
|
||||||
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
|
|
||||||
# FOR A PARTICULAR PURPOSE
|
|
||||||
#
|
|
||||||
##############################################################################
|
|
||||||
|
|
||||||
from datetime import datetime
|
|
||||||
from datetime import timedelta
|
|
||||||
|
|
||||||
import pytz
|
|
||||||
import pytz.reference
|
|
||||||
from pytz.tzinfo import StaticTzInfo
|
|
||||||
from pytz.tzinfo import memorized_timedelta
|
|
||||||
|
|
||||||
from .interfaces import DateTimeError
|
|
||||||
|
|
||||||
|
|
||||||
EPOCH = datetime.fromtimestamp(0, tz=pytz.utc)
|
|
||||||
|
|
||||||
_numeric_timezone_data = {
|
|
||||||
'GMT': ('GMT', 0, 1, [], '', [(0, 0, 0)], 'GMT\000'),
|
|
||||||
'GMT+0': ('GMT+0', 0, 1, [], '', [(0, 0, 0)], 'GMT+0000\000'),
|
|
||||||
'GMT+1': ('GMT+1', 0, 1, [], '', [(3600, 0, 0)], 'GMT+0100\000'),
|
|
||||||
'GMT+2': ('GMT+2', 0, 1, [], '', [(7200, 0, 0)], 'GMT+0200\000'),
|
|
||||||
'GMT+3': ('GMT+3', 0, 1, [], '', [(10800, 0, 0)], 'GMT+0300\000'),
|
|
||||||
'GMT+4': ('GMT+4', 0, 1, [], '', [(14400, 0, 0)], 'GMT+0400\000'),
|
|
||||||
'GMT+5': ('GMT+5', 0, 1, [], '', [(18000, 0, 0)], 'GMT+0500\000'),
|
|
||||||
'GMT+6': ('GMT+6', 0, 1, [], '', [(21600, 0, 0)], 'GMT+0600\000'),
|
|
||||||
'GMT+7': ('GMT+7', 0, 1, [], '', [(25200, 0, 0)], 'GMT+0700\000'),
|
|
||||||
'GMT+8': ('GMT+8', 0, 1, [], '', [(28800, 0, 0)], 'GMT+0800\000'),
|
|
||||||
'GMT+9': ('GMT+9', 0, 1, [], '', [(32400, 0, 0)], 'GMT+0900\000'),
|
|
||||||
'GMT+10': ('GMT+10', 0, 1, [], '', [(36000, 0, 0)], 'GMT+1000\000'),
|
|
||||||
'GMT+11': ('GMT+11', 0, 1, [], '', [(39600, 0, 0)], 'GMT+1100\000'),
|
|
||||||
'GMT+12': ('GMT+12', 0, 1, [], '', [(43200, 0, 0)], 'GMT+1200\000'),
|
|
||||||
'GMT+13': ('GMT+13', 0, 1, [], '', [(46800, 0, 0)], 'GMT+1300\000'),
|
|
||||||
|
|
||||||
'GMT-1': ('GMT-1', 0, 1, [], '', [(-3600, 0, 0)], 'GMT-0100\000'),
|
|
||||||
'GMT-2': ('GMT-2', 0, 1, [], '', [(-7200, 0, 0)], 'GMT-0200\000'),
|
|
||||||
'GMT-3': ('GMT-3', 0, 1, [], '', [(-10800, 0, 0)], 'GMT-0300\000'),
|
|
||||||
'GMT-4': ('GMT-4', 0, 1, [], '', [(-14400, 0, 0)], 'GMT-0400\000'),
|
|
||||||
'GMT-5': ('GMT-5', 0, 1, [], '', [(-18000, 0, 0)], 'GMT-0500\000'),
|
|
||||||
'GMT-6': ('GMT-6', 0, 1, [], '', [(-21600, 0, 0)], 'GMT-0600\000'),
|
|
||||||
'GMT-7': ('GMT-7', 0, 1, [], '', [(-25200, 0, 0)], 'GMT-0700\000'),
|
|
||||||
'GMT-8': ('GMT-8', 0, 1, [], '', [(-28800, 0, 0)], 'GMT-0800\000'),
|
|
||||||
'GMT-9': ('GMT-9', 0, 1, [], '', [(-32400, 0, 0)], 'GMT-0900\000'),
|
|
||||||
'GMT-10': ('GMT-10', 0, 1, [], '', [(-36000, 0, 0)], 'GMT-1000\000'),
|
|
||||||
'GMT-11': ('GMT-11', 0, 1, [], '', [(-39600, 0, 0)], 'GMT-1100\000'),
|
|
||||||
'GMT-12': ('GMT-12', 0, 1, [], '', [(-43200, 0, 0)], 'GMT-1200\000'),
|
|
||||||
|
|
||||||
'GMT+0130': ('GMT+0130', 0, 1, [], '', [(5400, 0, 0)], 'GMT+0130\000'),
|
|
||||||
'GMT+0230': ('GMT+0230', 0, 1, [], '', [(9000, 0, 0)], 'GMT+0230\000'),
|
|
||||||
'GMT+0330': ('GMT+0330', 0, 1, [], '', [(12600, 0, 0)], 'GMT+0330\000'),
|
|
||||||
'GMT+0430': ('GMT+0430', 0, 1, [], '', [(16200, 0, 0)], 'GMT+0430\000'),
|
|
||||||
'GMT+0530': ('GMT+0530', 0, 1, [], '', [(19800, 0, 0)], 'GMT+0530\000'),
|
|
||||||
'GMT+0630': ('GMT+0630', 0, 1, [], '', [(23400, 0, 0)], 'GMT+0630\000'),
|
|
||||||
'GMT+0730': ('GMT+0730', 0, 1, [], '', [(27000, 0, 0)], 'GMT+0730\000'),
|
|
||||||
'GMT+0830': ('GMT+0830', 0, 1, [], '', [(30600, 0, 0)], 'GMT+0830\000'),
|
|
||||||
'GMT+0930': ('GMT+0930', 0, 1, [], '', [(34200, 0, 0)], 'GMT+0930\000'),
|
|
||||||
'GMT+1030': ('GMT+1030', 0, 1, [], '', [(37800, 0, 0)], 'GMT+1030\000'),
|
|
||||||
'GMT+1130': ('GMT+1130', 0, 1, [], '', [(41400, 0, 0)], 'GMT+1130\000'),
|
|
||||||
'GMT+1230': ('GMT+1230', 0, 1, [], '', [(45000, 0, 0)], 'GMT+1230\000'),
|
|
||||||
|
|
||||||
'GMT-0130': ('GMT-0130', 0, 1, [], '', [(-5400, 0, 0)], 'GMT-0130\000'),
|
|
||||||
'GMT-0230': ('GMT-0230', 0, 1, [], '', [(-9000, 0, 0)], 'GMT-0230\000'),
|
|
||||||
'GMT-0330': ('GMT-0330', 0, 1, [], '', [(-12600, 0, 0)], 'GMT-0330\000'),
|
|
||||||
'GMT-0430': ('GMT-0430', 0, 1, [], '', [(-16200, 0, 0)], 'GMT-0430\000'),
|
|
||||||
'GMT-0530': ('GMT-0530', 0, 1, [], '', [(-19800, 0, 0)], 'GMT-0530\000'),
|
|
||||||
'GMT-0630': ('GMT-0630', 0, 1, [], '', [(-23400, 0, 0)], 'GMT-0630\000'),
|
|
||||||
'GMT-0730': ('GMT-0730', 0, 1, [], '', [(-27000, 0, 0)], 'GMT-0730\000'),
|
|
||||||
'GMT-0830': ('GMT-0830', 0, 1, [], '', [(-30600, 0, 0)], 'GMT-0830\000'),
|
|
||||||
'GMT-0930': ('GMT-0930', 0, 1, [], '', [(-34200, 0, 0)], 'GMT-0930\000'),
|
|
||||||
'GMT-1030': ('GMT-1030', 0, 1, [], '', [(-37800, 0, 0)], 'GMT-1030\000'),
|
|
||||||
'GMT-1130': ('GMT-1130', 0, 1, [], '', [(-41400, 0, 0)], 'GMT-1130\000'),
|
|
||||||
'GMT-1230': ('GMT-1230', 0, 1, [], '', [(-45000, 0, 0)], 'GMT-1230\000'),
|
|
||||||
}
|
|
||||||
|
|
||||||
# These are the timezones not in pytz.common_timezones
|
|
||||||
_old_zlst = [
|
|
||||||
'AST', 'AT', 'BST', 'BT', 'CCT',
|
|
||||||
'CET', 'CST', 'Cuba', 'EADT', 'EAST',
|
|
||||||
'EEST', 'EET', 'EST', 'Egypt', 'FST',
|
|
||||||
'FWT', 'GB-Eire', 'GMT+0100', 'GMT+0130', 'GMT+0200',
|
|
||||||
'GMT+0230', 'GMT+0300', 'GMT+0330', 'GMT+0400', 'GMT+0430',
|
|
||||||
'GMT+0500', 'GMT+0530', 'GMT+0600', 'GMT+0630', 'GMT+0700',
|
|
||||||
'GMT+0730', 'GMT+0800', 'GMT+0830', 'GMT+0900', 'GMT+0930',
|
|
||||||
'GMT+1', 'GMT+1000', 'GMT+1030', 'GMT+1100', 'GMT+1130',
|
|
||||||
'GMT+1200', 'GMT+1230', 'GMT+1300', 'GMT-0100', 'GMT-0130',
|
|
||||||
'GMT-0200', 'GMT-0300', 'GMT-0400', 'GMT-0500', 'GMT-0600',
|
|
||||||
'GMT-0630', 'GMT-0700', 'GMT-0730', 'GMT-0800', 'GMT-0830',
|
|
||||||
'GMT-0900', 'GMT-0930', 'GMT-1000', 'GMT-1030', 'GMT-1100',
|
|
||||||
'GMT-1130', 'GMT-1200', 'GMT-1230', 'GST', 'Greenwich',
|
|
||||||
'Hongkong', 'IDLE', 'IDLW', 'Iceland', 'Iran',
|
|
||||||
'Israel', 'JST', 'Jamaica', 'Japan', 'MEST',
|
|
||||||
'MET', 'MEWT', 'MST', 'NT', 'NZDT',
|
|
||||||
'NZST', 'NZT', 'PST', 'Poland', 'SST',
|
|
||||||
'SWT', 'Singapore', 'Turkey', 'UCT', 'UT',
|
|
||||||
'Universal', 'WADT', 'WAST', 'WAT', 'WET',
|
|
||||||
'ZP4', 'ZP5', 'ZP6',
|
|
||||||
]
|
|
||||||
|
|
||||||
_old_zmap = {
|
|
||||||
'aest': 'GMT+10', 'aedt': 'GMT+11',
|
|
||||||
'aus eastern standard time': 'GMT+10',
|
|
||||||
'sydney standard time': 'GMT+10',
|
|
||||||
'tasmania standard time': 'GMT+10',
|
|
||||||
'e. australia standard time': 'GMT+10',
|
|
||||||
'aus central standard time': 'GMT+0930',
|
|
||||||
'cen. australia standard time': 'GMT+0930',
|
|
||||||
'w. australia standard time': 'GMT+8',
|
|
||||||
|
|
||||||
'central europe standard time': 'GMT+1',
|
|
||||||
'eastern standard time': 'US/Eastern',
|
|
||||||
'us eastern standard time': 'US/Eastern',
|
|
||||||
'central standard time': 'US/Central',
|
|
||||||
'mountain standard time': 'US/Mountain',
|
|
||||||
'pacific standard time': 'US/Pacific',
|
|
||||||
'mst': 'US/Mountain', 'pst': 'US/Pacific',
|
|
||||||
'cst': 'US/Central', 'est': 'US/Eastern',
|
|
||||||
|
|
||||||
'gmt+0000': 'GMT+0', 'gmt+0': 'GMT+0',
|
|
||||||
|
|
||||||
'gmt+0100': 'GMT+1', 'gmt+0200': 'GMT+2', 'gmt+0300': 'GMT+3',
|
|
||||||
'gmt+0400': 'GMT+4', 'gmt+0500': 'GMT+5', 'gmt+0600': 'GMT+6',
|
|
||||||
'gmt+0700': 'GMT+7', 'gmt+0800': 'GMT+8', 'gmt+0900': 'GMT+9',
|
|
||||||
'gmt+1000': 'GMT+10', 'gmt+1100': 'GMT+11', 'gmt+1200': 'GMT+12',
|
|
||||||
'gmt+1300': 'GMT+13',
|
|
||||||
'gmt-0100': 'GMT-1', 'gmt-0200': 'GMT-2', 'gmt-0300': 'GMT-3',
|
|
||||||
'gmt-0400': 'GMT-4', 'gmt-0500': 'GMT-5', 'gmt-0600': 'GMT-6',
|
|
||||||
'gmt-0700': 'GMT-7', 'gmt-0800': 'GMT-8', 'gmt-0900': 'GMT-9',
|
|
||||||
'gmt-1000': 'GMT-10', 'gmt-1100': 'GMT-11', 'gmt-1200': 'GMT-12',
|
|
||||||
|
|
||||||
'gmt+1': 'GMT+1', 'gmt+2': 'GMT+2', 'gmt+3': 'GMT+3',
|
|
||||||
'gmt+4': 'GMT+4', 'gmt+5': 'GMT+5', 'gmt+6': 'GMT+6',
|
|
||||||
'gmt+7': 'GMT+7', 'gmt+8': 'GMT+8', 'gmt+9': 'GMT+9',
|
|
||||||
'gmt+10': 'GMT+10', 'gmt+11': 'GMT+11', 'gmt+12': 'GMT+12',
|
|
||||||
'gmt+13': 'GMT+13',
|
|
||||||
'gmt-1': 'GMT-1', 'gmt-2': 'GMT-2', 'gmt-3': 'GMT-3',
|
|
||||||
'gmt-4': 'GMT-4', 'gmt-5': 'GMT-5', 'gmt-6': 'GMT-6',
|
|
||||||
'gmt-7': 'GMT-7', 'gmt-8': 'GMT-8', 'gmt-9': 'GMT-9',
|
|
||||||
'gmt-10': 'GMT-10', 'gmt-11': 'GMT-11', 'gmt-12': 'GMT-12',
|
|
||||||
|
|
||||||
'gmt+130': 'GMT+0130', 'gmt+0130': 'GMT+0130',
|
|
||||||
'gmt+230': 'GMT+0230', 'gmt+0230': 'GMT+0230',
|
|
||||||
'gmt+330': 'GMT+0330', 'gmt+0330': 'GMT+0330',
|
|
||||||
'gmt+430': 'GMT+0430', 'gmt+0430': 'GMT+0430',
|
|
||||||
'gmt+530': 'GMT+0530', 'gmt+0530': 'GMT+0530',
|
|
||||||
'gmt+630': 'GMT+0630', 'gmt+0630': 'GMT+0630',
|
|
||||||
'gmt+730': 'GMT+0730', 'gmt+0730': 'GMT+0730',
|
|
||||||
'gmt+830': 'GMT+0830', 'gmt+0830': 'GMT+0830',
|
|
||||||
'gmt+930': 'GMT+0930', 'gmt+0930': 'GMT+0930',
|
|
||||||
'gmt+1030': 'GMT+1030',
|
|
||||||
'gmt+1130': 'GMT+1130',
|
|
||||||
'gmt+1230': 'GMT+1230',
|
|
||||||
|
|
||||||
'gmt-130': 'GMT-0130', 'gmt-0130': 'GMT-0130',
|
|
||||||
'gmt-230': 'GMT-0230', 'gmt-0230': 'GMT-0230',
|
|
||||||
'gmt-330': 'GMT-0330', 'gmt-0330': 'GMT-0330',
|
|
||||||
'gmt-430': 'GMT-0430', 'gmt-0430': 'GMT-0430',
|
|
||||||
'gmt-530': 'GMT-0530', 'gmt-0530': 'GMT-0530',
|
|
||||||
'gmt-630': 'GMT-0630', 'gmt-0630': 'GMT-0630',
|
|
||||||
'gmt-730': 'GMT-0730', 'gmt-0730': 'GMT-0730',
|
|
||||||
'gmt-830': 'GMT-0830', 'gmt-0830': 'GMT-0830',
|
|
||||||
'gmt-930': 'GMT-0930', 'gmt-0930': 'GMT-0930',
|
|
||||||
'gmt-1030': 'GMT-1030',
|
|
||||||
'gmt-1130': 'GMT-1130',
|
|
||||||
'gmt-1230': 'GMT-1230',
|
|
||||||
|
|
||||||
'ut': 'Universal',
|
|
||||||
'bst': 'GMT+1', 'mest': 'GMT+2', 'sst': 'GMT+2',
|
|
||||||
'fst': 'GMT+2', 'wadt': 'GMT+8', 'eadt': 'GMT+11', 'nzdt': 'GMT+13',
|
|
||||||
'wet': 'GMT', 'wat': 'GMT+1', 'at': 'GMT-2', 'ast': 'GMT-4',
|
|
||||||
'nt': 'GMT-11', 'idlw': 'GMT-12', 'cet': 'GMT+1', 'cest': 'GMT+2',
|
|
||||||
'met': 'GMT+1',
|
|
||||||
'mewt': 'GMT+1', 'swt': 'GMT+1', 'fwt': 'GMT+1', 'eet': 'GMT+2',
|
|
||||||
'eest': 'GMT+3',
|
|
||||||
'bt': 'GMT+3', 'zp4': 'GMT+4', 'zp5': 'GMT+5', 'zp6': 'GMT+6',
|
|
||||||
'wast': 'GMT+7', 'cct': 'GMT+8', 'jst': 'GMT+9', 'east': 'GMT+10',
|
|
||||||
'gst': 'GMT+10', 'nzt': 'GMT+12', 'nzst': 'GMT+12', 'idle': 'GMT+12',
|
|
||||||
'ret': 'GMT+4', 'ist': 'GMT+0530', 'edt': 'GMT-4',
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Some timezone definitions in the bare "-0400"/"+0400" form were not working
# when upgrading, so register them explicitly against the GMT offset zones.
|
|
||||||
for hour in range(0, 13):
|
|
||||||
hour = hour
|
|
||||||
fhour = str(hour)
|
|
||||||
if len(fhour) == 1:
|
|
||||||
fhour = '0' + fhour
|
|
||||||
_old_zmap['-%s00' % fhour] = 'GMT-%i' % hour
|
|
||||||
_old_zmap['+%s00' % fhour] = 'GMT+%i' % hour
|
|
||||||
|
|
||||||
|
|
||||||
def _p(zone):
|
|
||||||
return _numeric_timezones[zone]
|
|
||||||
|
|
||||||
|
|
||||||
def _static_timezone_factory(data):
|
|
||||||
zone = data[0]
|
|
||||||
cls = type(zone, (StaticTzInfo,), dict(
|
|
||||||
__reduce__=lambda _: (_p, (zone, )),
|
|
||||||
zone=zone,
|
|
||||||
_utcoffset=memorized_timedelta(data[5][0][0]),
|
|
||||||
_tzname=data[6][:-1])) # strip the trailing null
|
|
||||||
return cls()
|
|
||||||
|
|
||||||
|
|
||||||
_numeric_timezones = {key: _static_timezone_factory(data)
|
|
||||||
for key, data in _numeric_timezone_data.items()}
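
# A minimal sketch (not part of the original module) of how these generated
# static zones behave, assuming pytz's StaticTzInfo API: utcoffset() ignores
# its argument and always returns the fixed offset, and tzname() returns the
# padded name stored above.
#
#     >>> _numeric_timezones['GMT+0530'].utcoffset(None)
#     datetime.timedelta(seconds=19800)
#     >>> _numeric_timezones['GMT-4'].tzname(None)
#     'GMT-0400'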
|
|
||||||
|
|
||||||
|
|
||||||
class Timezone:
|
|
||||||
"""
|
|
||||||
Timezone information returned by PytzCache.__getitem__
|
|
||||||
Adapts datetime.tzinfo object to DateTime._timezone interface
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, tzinfo):
|
|
||||||
self.tzinfo = tzinfo
|
|
||||||
|
|
||||||
def info(self, t=None):
|
|
||||||
if t is None:
|
|
||||||
dt = datetime.now(tz=pytz.utc)
|
|
||||||
else:
|
|
||||||
# can't use utcfromtimestamp past 2038
|
|
||||||
dt = EPOCH + timedelta(0, t)
|
|
||||||
|
|
||||||
# need to normalize tzinfo for the datetime to deal with
|
|
||||||
# daylight savings time.
|
|
||||||
normalized_dt = self.tzinfo.normalize(dt.astimezone(self.tzinfo))
|
|
||||||
normalized_tzinfo = normalized_dt.tzinfo
|
|
||||||
|
|
||||||
offset = normalized_tzinfo.utcoffset(normalized_dt)
|
|
||||||
secs = offset.days * 24 * 60 * 60 + offset.seconds
|
|
||||||
dst = normalized_tzinfo.dst(normalized_dt)
|
|
||||||
if dst == timedelta(0):
|
|
||||||
is_dst = 0
|
|
||||||
else:
|
|
||||||
is_dst = 1
|
|
||||||
return secs, is_dst, normalized_tzinfo.tzname(normalized_dt)
|
|
||||||
|
|
||||||
|
|
||||||
class PytzCache:
|
|
||||||
"""
|
|
||||||
Reimplementation of the DateTime._cache class that uses pytz for timezone info
|
|
||||||
"""
|
|
||||||
|
|
||||||
_zlst = pytz.common_timezones + _old_zlst # used by DateTime.TimeZones
|
|
||||||
_zmap = {name.lower(): name for name in pytz.all_timezones}
|
|
||||||
_zmap.update(_old_zmap) # These must take priority
|
|
||||||
_zidx = _zmap.keys()
|
|
||||||
|
|
||||||
def __getitem__(self, key):
|
|
||||||
name = self._zmap.get(key.lower(), key) # fallback to key
|
|
||||||
try:
|
|
||||||
return Timezone(pytz.timezone(name))
|
|
||||||
except pytz.UnknownTimeZoneError:
|
|
||||||
try:
|
|
||||||
return Timezone(_numeric_timezones[name])
|
|
||||||
except KeyError:
|
|
||||||
raise DateTimeError('Unrecognized timezone: %s' % key)
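
# Hedged usage sketch (not part of the original module): lookups accept pytz
# names, the legacy aliases in _old_zmap, and the numeric GMT zones defined
# above; anything else raises DateTimeError.
#
#     cache = PytzCache()
#     cache['US/Eastern'].info(0)   # -> (-18000, 0, 'EST') at the epoch
#     cache['gmt+0730']             # falls back to _numeric_timezones
#     cache['Moon/Eastern']         # raises DateTimeError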
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
##############################################################################
|
|
||||||
#
|
|
||||||
# Copyright (c) 2003 Zope Foundation and Contributors.
|
|
||||||
# All Rights Reserved.
|
|
||||||
#
|
|
||||||
# This software is subject to the provisions of the Zope Public License,
|
|
||||||
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
|
|
||||||
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
|
|
||||||
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
|
|
||||||
# FOR A PARTICULAR PURPOSE.
|
|
||||||
#
|
|
||||||
##############################################################################
|
|
||||||
|
|
||||||
# This file is needed to make this a package.
|
|
||||||
Binary file not shown.
Binary file not shown.
@@ -1,57 +0,0 @@
|
|||||||
1970-01-01 (1970, 1, 4)
|
|
||||||
1970-01-02 (1970, 1, 5)
|
|
||||||
1970-01-30 (1970, 5, 5)
|
|
||||||
1970-01-31 (1970, 5, 6)
|
|
||||||
1970-02-01 (1970, 5, 7)
|
|
||||||
1970-02-02 (1970, 6, 1)
|
|
||||||
1970-02-28 (1970, 9, 6)
|
|
||||||
1970-03-01 (1970, 9, 7)
|
|
||||||
1970-03-30 (1970, 14, 1)
|
|
||||||
1970-03-31 (1970, 14, 2)
|
|
||||||
1970-04-01 (1970, 14, 3)
|
|
||||||
1970-09-30 (1970, 40, 3)
|
|
||||||
1970-10-01 (1970, 40, 4)
|
|
||||||
1970-10-02 (1970, 40, 5)
|
|
||||||
1970-10-03 (1970, 40, 6)
|
|
||||||
1970-10-04 (1970, 40, 7)
|
|
||||||
1970-10-05 (1970, 41, 1)
|
|
||||||
1971-01-02 (1970, 53, 6)
|
|
||||||
1971-01-03 (1970, 53, 7)
|
|
||||||
1971-01-04 (1971, 1, 1)
|
|
||||||
1971-01-05 (1971, 1, 2)
|
|
||||||
1971-12-31 (1971, 52, 5)
|
|
||||||
1972-01-01 (1971, 52, 6)
|
|
||||||
1972-01-02 (1971, 52, 7)
|
|
||||||
1972-01-03 (1972, 1, 1)
|
|
||||||
1972-01-04 (1972, 1, 2)
|
|
||||||
1972-12-30 (1972, 52, 6)
|
|
||||||
1972-12-31 (1972, 52, 7)
|
|
||||||
1973-01-01 (1973, 1, 1)
|
|
||||||
1973-01-02 (1973, 1, 2)
|
|
||||||
1973-12-29 (1973, 52, 6)
|
|
||||||
1973-12-30 (1973, 52, 7)
|
|
||||||
1973-12-31 (1974, 1, 1)
|
|
||||||
1974-01-01 (1974, 1, 2)
|
|
||||||
1998-12-30 (1998, 53, 3)
|
|
||||||
1998-12-31 (1998, 53, 4)
|
|
||||||
1999-01-01 (1998, 53, 5)
|
|
||||||
1999-01-02 (1998, 53, 6)
|
|
||||||
1999-01-03 (1998, 53, 7)
|
|
||||||
1999-01-04 (1999, 1, 1)
|
|
||||||
1999-01-05 (1999, 1, 2)
|
|
||||||
1999-12-30 (1999, 52, 4)
|
|
||||||
1999-12-31 (1999, 52, 5)
|
|
||||||
2000-01-01 (1999, 52, 6)
|
|
||||||
2000-01-02 (1999, 52, 7)
|
|
||||||
2000-01-03 (2000, 1, 1)
|
|
||||||
2000-01-04 (2000, 1, 2)
|
|
||||||
2000-01-05 (2000, 1, 3)
|
|
||||||
2000-01-06 (2000, 1, 4)
|
|
||||||
2000-01-07 (2000, 1, 5)
|
|
||||||
2000-01-08 (2000, 1, 6)
|
|
||||||
2000-01-09 (2000, 1, 7)
|
|
||||||
2000-01-10 (2000, 2, 1)
|
|
||||||
2019-12-28 (2019, 52, 6)
|
|
||||||
2019-12-29 (2019, 52, 7)
|
|
||||||
2019-12-30 (2020, 1, 1)
|
|
||||||
2019-12-31 (2020, 1, 2)
|
|
||||||
@@ -1,746 +0,0 @@
|
|||||||
##############################################################################
|
|
||||||
#
|
|
||||||
# Copyright (c) 2003 Zope Foundation and Contributors.
|
|
||||||
# All Rights Reserved.
|
|
||||||
#
|
|
||||||
# This software is subject to the provisions of the Zope Public License,
|
|
||||||
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
|
|
||||||
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
|
|
||||||
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
|
|
||||||
# FOR A PARTICULAR PURPOSE.
|
|
||||||
#
|
|
||||||
##############################################################################
|
|
||||||
|
|
||||||
import math
|
|
||||||
import os
|
|
||||||
import pickle
|
|
||||||
import platform
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
import unittest
|
|
||||||
from datetime import date
|
|
||||||
from datetime import datetime
|
|
||||||
from datetime import timedelta
|
|
||||||
from datetime import tzinfo
|
|
||||||
|
|
||||||
import pytz
|
|
||||||
|
|
||||||
from DateTime import DateTime
|
|
||||||
from DateTime.DateTime import _findLocalTimeZoneName
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
__file__
|
|
||||||
except NameError: # pragma: no cover
|
|
||||||
f = sys.argv[0]
|
|
||||||
else:
|
|
||||||
f = __file__
|
|
||||||
|
|
||||||
IS_PYPY = getattr(platform, 'python_implementation', lambda: None)() == 'PyPy'
|
|
||||||
|
|
||||||
DATADIR = os.path.dirname(os.path.abspath(f))
|
|
||||||
del f
|
|
||||||
|
|
||||||
ZERO = timedelta(0)
|
|
||||||
|
|
||||||
|
|
||||||
class FixedOffset(tzinfo):
|
|
||||||
"""Fixed offset in minutes east from UTC."""
|
|
||||||
|
|
||||||
def __init__(self, offset, name):
|
|
||||||
self.__offset = timedelta(minutes=offset)
|
|
||||||
self.__name = name
|
|
||||||
|
|
||||||
def utcoffset(self, dt):
|
|
||||||
return self.__offset
|
|
||||||
|
|
||||||
def tzname(self, dt):
|
|
||||||
return self.__name
|
|
||||||
|
|
||||||
def dst(self, dt):
|
|
||||||
return ZERO
|
|
||||||
|
|
||||||
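
# Hedged illustration (not part of the original tests): FixedOffset plugs into
# the standard datetime machinery, e.g.
#
#     aware = datetime(2008, 8, 5, 12, 0, tzinfo=FixedOffset(60, 'GMT+1'))
#     aware.utcoffset()   # -> timedelta(minutes=60)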
|
|
||||||
class DateTimeTests(unittest.TestCase):
|
|
||||||
|
|
||||||
def _compare(self, dt1, dt2):
|
|
||||||
'''Compares the internal representation of dt1 with
|
|
||||||
the representation in dt2. Allows sub-millisecond variations.
|
|
||||||
Primarily for testing.'''
|
|
||||||
self.assertEqual(round(dt1._t, 3), round(dt2._t, 3))
|
|
||||||
self.assertEqual(round(dt1._d, 9), round(dt2._d, 9))
|
|
||||||
self.assertEqual(round(dt1.time, 9), round(dt2.time, 9))
|
|
||||||
self.assertEqual(dt1.millis(), dt2.millis())
|
|
||||||
self.assertEqual(dt1._micros, dt2._micros)
|
|
||||||
|
|
||||||
def testBug1203(self):
|
|
||||||
# 01:59:60 occurred in old DateTime
|
|
||||||
dt = DateTime(7200, 'GMT')
|
|
||||||
self.assertTrue(str(dt).find('60') < 0, dt)
|
|
||||||
|
|
||||||
def testDSTInEffect(self):
|
|
||||||
# Checks GMT offset for a DST date in the US/Eastern time zone
|
|
||||||
dt = DateTime(2000, 5, 9, 15, 0, 0, 'US/Eastern')
|
|
||||||
self.assertEqual(dt.toZone('GMT').hour(), 19,
|
|
||||||
(dt, dt.toZone('GMT')))
|
|
||||||
|
|
||||||
def testDSTNotInEffect(self):
|
|
||||||
# Checks GMT offset for a non-DST date in the US/Eastern time zone
|
|
||||||
dt = DateTime(2000, 11, 9, 15, 0, 0, 'US/Eastern')
|
|
||||||
self.assertEqual(dt.toZone('GMT').hour(), 20,
|
|
||||||
(dt, dt.toZone('GMT')))
|
|
||||||
|
|
||||||
def testAddPrecision(self):
|
|
||||||
# Precision of serial additions
|
|
||||||
dt = DateTime()
|
|
||||||
self.assertEqual(str(dt + 0.10 + 3.14 + 6.76 - 10), str(dt),
|
|
||||||
dt)
|
|
||||||
# checks problem reported in
|
|
||||||
# https://github.com/zopefoundation/DateTime/issues/41
|
|
||||||
dt = DateTime(2038, 10, 7, 8, 52, 44.959840, "UTC")
|
|
||||||
self.assertEqual(str(dt + 0.10 + 3.14 + 6.76 - 10), str(dt),
|
|
||||||
dt)
|
|
||||||
|
|
||||||
def testConsistentSecondMicroRounding(self):
|
|
||||||
dt = DateTime(2038, 10, 7, 8, 52, 44.9598398, "UTC")
|
|
||||||
self.assertEqual(int(dt.second() * 1000000),
|
|
||||||
dt.micros() % 60000000)
|
|
||||||
|
|
||||||
def testConstructor3(self):
|
|
||||||
# Constructor from date/time string
|
|
||||||
dt = DateTime()
|
|
||||||
dt1s = '%d/%d/%d %d:%d:%f %s' % (
|
|
||||||
dt.year(),
|
|
||||||
dt.month(),
|
|
||||||
dt.day(),
|
|
||||||
dt.hour(),
|
|
||||||
dt.minute(),
|
|
||||||
dt.second(),
|
|
||||||
dt.timezone())
|
|
||||||
dt1 = DateTime(dt1s)
|
|
||||||
# Compare representations as it's the
|
|
||||||
# only way to compare the dates to the same accuracy
|
|
||||||
self.assertEqual(repr(dt), repr(dt1))
|
|
||||||
|
|
||||||
def testConstructor4(self):
|
|
||||||
# Constructor from time float
|
|
||||||
dt = DateTime()
|
|
||||||
dt1 = DateTime(float(dt))
|
|
||||||
self._compare(dt, dt1)
|
|
||||||
|
|
||||||
def testConstructor5(self):
|
|
||||||
# Constructor from time float and timezone
|
|
||||||
dt = DateTime()
|
|
||||||
dt1 = DateTime(float(dt), dt.timezone())
|
|
||||||
self.assertEqual(str(dt), str(dt1), (dt, dt1))
|
|
||||||
dt1 = DateTime(float(dt), str(dt.timezone()))
|
|
||||||
self.assertEqual(str(dt), str(dt1), (dt, dt1))
|
|
||||||
|
|
||||||
def testConstructor6(self):
|
|
||||||
# Constructor from year and julian date
|
|
||||||
# This test must normalize the time zone, or it *will* break when
|
|
||||||
# DST changes!
|
|
||||||
dt1 = DateTime(2000, 5.500000578705)
|
|
||||||
dt = DateTime('2000/1/5 12:00:00.050 pm %s' % dt1.localZone())
|
|
||||||
self._compare(dt, dt1)
|
|
||||||
|
|
||||||
def testConstructor7(self):
|
|
||||||
# Constructor from parts
|
|
||||||
dt = DateTime()
|
|
||||||
dt1 = DateTime(
|
|
||||||
dt.year(),
|
|
||||||
dt.month(),
|
|
||||||
dt.day(),
|
|
||||||
dt.hour(),
|
|
||||||
dt.minute(),
|
|
||||||
dt.second(),
|
|
||||||
dt.timezone())
|
|
||||||
# Compare representations as it's the
|
|
||||||
# only way to compare the dates to the same accuracy
|
|
||||||
self.assertEqual(repr(dt), repr(dt1))
|
|
||||||
|
|
||||||
def testDayOfWeek(self):
|
|
||||||
# Compare to the datetime.date value to make it locale independent
|
|
||||||
expected = date(2000, 6, 16).strftime('%A')
|
|
||||||
# strftime() used to always be passed a day of week of 0
|
|
||||||
dt = DateTime('2000/6/16')
|
|
||||||
s = dt.strftime('%A')
|
|
||||||
self.assertEqual(s, expected, (dt, s))
|
|
||||||
|
|
||||||
def testOldDate(self):
|
|
||||||
# Fails when an 1800 date is displayed with negative signs
|
|
||||||
dt = DateTime('1830/5/6 12:31:46.213 pm')
|
|
||||||
dt1 = dt.toZone('GMT+6')
|
|
||||||
self.assertTrue(str(dt1).find('-') < 0, (dt, dt1))
|
|
||||||
|
|
||||||
def testSubtraction(self):
|
|
||||||
# Reconstruction of a DateTime from its parts, with subtraction
|
|
||||||
# this also tests the accuracy of addition and reconstruction
|
|
||||||
dt = DateTime()
|
|
||||||
dt1 = dt - 3.141592653
|
|
||||||
dt2 = DateTime(
|
|
||||||
dt.year(),
|
|
||||||
dt.month(),
|
|
||||||
dt.day(),
|
|
||||||
dt.hour(),
|
|
||||||
dt.minute(),
|
|
||||||
dt.second())
|
|
||||||
dt3 = dt2 - 3.141592653
|
|
||||||
self.assertEqual(dt1, dt3, (dt, dt1, dt2, dt3))
|
|
||||||
|
|
||||||
def testTZ1add(self):
|
|
||||||
# Time zone manipulation: add to a date
|
|
||||||
dt = DateTime('1997/3/8 1:45am GMT-4')
|
|
||||||
dt1 = DateTime('1997/3/9 1:45pm GMT+8')
|
|
||||||
self.assertTrue((dt + 1.0).equalTo(dt1))
|
|
||||||
|
|
||||||
def testTZ1sub(self):
|
|
||||||
# Time zone manipulation: subtract from a date
|
|
||||||
dt = DateTime('1997/3/8 1:45am GMT-4')
|
|
||||||
dt1 = DateTime('1997/3/9 1:45pm GMT+8')
|
|
||||||
self.assertTrue((dt1 - 1.0).equalTo(dt))
|
|
||||||
|
|
||||||
def testTZ1diff(self):
|
|
||||||
# Time zone manipulation: diff two dates
|
|
||||||
dt = DateTime('1997/3/8 1:45am GMT-4')
|
|
||||||
dt1 = DateTime('1997/3/9 1:45pm GMT+8')
|
|
||||||
self.assertEqual(dt1 - dt, 1.0, (dt, dt1))
|
|
||||||
|
|
||||||
def test_compare_methods(self):
|
|
||||||
# Compare two dates using several methods
|
|
||||||
dt = DateTime('1997/1/1')
|
|
||||||
dt1 = DateTime('1997/2/2')
|
|
||||||
self.assertTrue(dt1.greaterThan(dt))
|
|
||||||
self.assertTrue(dt1.greaterThanEqualTo(dt))
|
|
||||||
self.assertTrue(dt.lessThan(dt1))
|
|
||||||
self.assertTrue(dt.lessThanEqualTo(dt1))
|
|
||||||
self.assertTrue(dt.notEqualTo(dt1))
|
|
||||||
self.assertFalse(dt.equalTo(dt1))
|
|
||||||
# Compare a date to float
|
|
||||||
dt = DateTime(1.0)
|
|
||||||
self.assertTrue(dt == DateTime(1.0)) # testing __eq__
|
|
||||||
self.assertFalse(dt != DateTime(1.0)) # testing __ne__
|
|
||||||
self.assertFalse(dt.greaterThan(1.0))
|
|
||||||
self.assertTrue(dt.greaterThanEqualTo(1.0))
|
|
||||||
self.assertFalse(dt.lessThan(1.0))
|
|
||||||
self.assertTrue(dt.lessThanEqualTo(1.0))
|
|
||||||
self.assertFalse(dt.notEqualTo(1.0))
|
|
||||||
self.assertTrue(dt.equalTo(1.0))
|
|
||||||
# Compare a date to int
|
|
||||||
dt = DateTime(1)
|
|
||||||
self.assertEqual(dt, DateTime(1.0))
|
|
||||||
self.assertTrue(dt == DateTime(1)) # testing __eq__
|
|
||||||
self.assertFalse(dt != DateTime(1)) # testing __ne__
|
|
||||||
self.assertFalse(dt.greaterThan(1))
|
|
||||||
self.assertTrue(dt.greaterThanEqualTo(1))
|
|
||||||
self.assertFalse(dt.lessThan(1))
|
|
||||||
self.assertTrue(dt.lessThanEqualTo(1))
|
|
||||||
self.assertFalse(dt.notEqualTo(1))
|
|
||||||
self.assertTrue(dt.equalTo(1))
|
|
||||||
# Compare a date to string; there is no implicit type conversion
|
|
||||||
# but behavior is consistent with comparing, for example, an int
|
|
||||||
# and a string.
|
|
||||||
dt = DateTime("2023")
|
|
||||||
self.assertFalse(dt == "2023") # testing __eq__
|
|
||||||
self.assertTrue(dt != "2023") # testing __ne__
|
|
||||||
self.assertRaises(TypeError, dt.greaterThan, "2023")
|
|
||||||
self.assertRaises(TypeError, dt.greaterThanEqualTo, "2023")
|
|
||||||
self.assertRaises(TypeError, dt.lessThan, "2023")
|
|
||||||
self.assertRaises(TypeError, dt.lessThanEqualTo, "2023")
|
|
||||||
self.assertTrue(dt.notEqualTo("2023"))
|
|
||||||
self.assertFalse(dt.equalTo("2023"))
|
|
||||||
|
|
||||||
def test_compare_methods_none(self):
|
|
||||||
# Compare a date to None
|
|
||||||
for dt in (DateTime('1997/1/1'), DateTime(0)):
|
|
||||||
self.assertTrue(dt.greaterThan(None))
|
|
||||||
self.assertTrue(dt.greaterThanEqualTo(None))
|
|
||||||
self.assertFalse(dt.lessThan(None))
|
|
||||||
self.assertFalse(dt.lessThanEqualTo(None))
|
|
||||||
self.assertTrue(dt.notEqualTo(None))
|
|
||||||
self.assertFalse(dt.equalTo(None))
|
|
||||||
|
|
||||||
def test_pickle(self):
|
|
||||||
dt = DateTime()
|
|
||||||
data = pickle.dumps(dt, 1)
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_with_tz(self):
|
|
||||||
dt = DateTime('2002/5/2 8:00am GMT+8')
|
|
||||||
data = pickle.dumps(dt, 1)
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_asdatetime_with_tz(self):
|
|
||||||
dt = DateTime('2002/5/2 8:00am GMT+8')
|
|
||||||
data = pickle.dumps(dt.asdatetime(), 1)
|
|
||||||
new = DateTime(pickle.loads(data))
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_with_numerical_tz(self):
|
|
||||||
for dt_str in ('2007/01/02 12:34:56.789 +0300',
|
|
||||||
'2007/01/02 12:34:56.789 +0430',
|
|
||||||
'2007/01/02 12:34:56.789 -1234'):
|
|
||||||
dt = DateTime(dt_str)
|
|
||||||
data = pickle.dumps(dt, 1)
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_with_micros(self):
|
|
||||||
dt = DateTime('2002/5/2 8:00:14.123 GMT+8')
|
|
||||||
data = pickle.dumps(dt, 1)
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_old(self):
|
|
||||||
dt = DateTime('2002/5/2 8:00am GMT+0')
|
|
||||||
data = (
|
|
||||||
'(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05'
|
|
||||||
'_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq'
|
|
||||||
'\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU'
|
|
||||||
'\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU\x02amq'
|
|
||||||
'\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq\x12K\x00U'
|
|
||||||
'\x07_microsq\x13L1020326400000000L\nU\x02_dq\x14G@\xe2\x12j\xaa'
|
|
||||||
'\xaa\xaa\xabU\x07_secondq\x15G\x00\x00\x00\x00\x00\x00\x00\x00U'
|
|
||||||
'\x03_tzq\x16U\x05GMT+0q\x17U\x06_monthq\x18K\x05U'
|
|
||||||
'\x0f_timezone_naiveq\x19I00\nU\x04_dayq\x1aK\x02U\x05_yearq'
|
|
||||||
'\x1bM\xd2\x07U\x08_nearsecq\x1cG\x00\x00\x00\x00\x00\x00\x00'
|
|
||||||
'\x00U\x07_pmhourq\x1dK\x08U\n_dayoffsetq\x1eK\x04U\x04timeq'
|
|
||||||
'\x1fG?\xd5UUUV\x00\x00ub.')
|
|
||||||
data = data.encode('latin-1')
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def test_pickle_old_without_micros(self):
|
|
||||||
dt = DateTime('2002/5/2 8:00am GMT+0')
|
|
||||||
data = (
|
|
||||||
'(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05'
|
|
||||||
'_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq'
|
|
||||||
'\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU'
|
|
||||||
'\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU'
|
|
||||||
'\x02amq\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq'
|
|
||||||
'\x12K\x00U\x02_dq\x13G@\xe2\x12j\xaa\xaa\xaa\xabU\x07_secondq'
|
|
||||||
'\x14G\x00\x00\x00\x00\x00\x00\x00\x00U\x03_tzq\x15U\x05GMT+0q'
|
|
||||||
'\x16U\x06_monthq\x17K\x05U\x0f_timezone_naiveq\x18I00\nU'
|
|
||||||
'\x04_dayq\x19K\x02U\x05_yearq\x1aM\xd2\x07U\x08_nearsecq'
|
|
||||||
'\x1bG\x00\x00\x00\x00\x00\x00\x00\x00U\x07_pmhourq\x1cK\x08U'
|
|
||||||
'\n_dayoffsetq\x1dK\x04U\x04timeq\x1eG?\xd5UUUV\x00\x00ub.')
|
|
||||||
data = data.encode('latin-1')
|
|
||||||
new = pickle.loads(data)
|
|
||||||
for key in DateTime.__slots__:
|
|
||||||
self.assertEqual(getattr(dt, key), getattr(new, key))
|
|
||||||
|
|
||||||
def testTZ2(self):
|
|
||||||
# Time zone manipulation test 2
|
|
||||||
dt = DateTime()
|
|
||||||
dt1 = dt.toZone('GMT')
|
|
||||||
s = dt.second()
|
|
||||||
s1 = dt1.second()
|
|
||||||
self.assertEqual(s, s1, (dt, dt1, s, s1))
|
|
||||||
|
|
||||||
def testTZDiffDaylight(self):
|
|
||||||
# Diff dates across daylight savings dates
|
|
||||||
dt = DateTime('2000/6/8 1:45am US/Eastern')
|
|
||||||
dt1 = DateTime('2000/12/8 12:45am US/Eastern')
|
|
||||||
self.assertEqual(dt1 - dt, 183, (dt, dt1, dt1 - dt))
|
|
||||||
|
|
||||||
def testY10KDate(self):
|
|
||||||
# Comparison of a Y10K date and a Y2K date
|
|
||||||
dt = DateTime('10213/09/21')
|
|
||||||
dt1 = DateTime(2000, 1, 1)
|
|
||||||
|
|
||||||
dsec = (dt.millis() - dt1.millis()) / 1000.0
|
|
||||||
ddays = math.floor((dsec / 86400.0) + 0.5)
|
|
||||||
|
|
||||||
self.assertEqual(ddays, 3000000, ddays)
|
|
||||||
|
|
||||||
def test_tzoffset(self):
|
|
||||||
# Test time-zone given as an offset
|
|
||||||
|
|
||||||
# GMT
|
|
||||||
dt = DateTime('Tue, 10 Sep 2001 09:41:03 GMT')
|
|
||||||
self.assertEqual(dt.tzoffset(), 0)
|
|
||||||
|
|
||||||
# Timezone by name, a timezone that has no daylight saving.
|
|
||||||
dt = DateTime('Tue, 2 Mar 2001 09:41:03 GMT+3')
|
|
||||||
self.assertEqual(dt.tzoffset(), 10800)
|
|
||||||
|
|
||||||
# Timezone by name, which has daylight saving but it is not in effect.
|
|
||||||
dt = DateTime('Tue, 21 Jan 2001 09:41:03 PST')
|
|
||||||
self.assertEqual(dt.tzoffset(), -28800)
|
|
||||||
|
|
||||||
# Timezone by name, with daylight saving in effect
|
|
||||||
dt = DateTime('Tue, 24 Aug 2001 09:41:03 PST')
|
|
||||||
self.assertEqual(dt.tzoffset(), -25200)
|
|
||||||
|
|
||||||
# A negative numerical timezone
|
|
||||||
dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0400')
|
|
||||||
self.assertEqual(dt.tzoffset(), -14400)
|
|
||||||
|
|
||||||
# A positive numerical timezone
|
|
||||||
dt = DateTime('Tue, 6 Dec 1966 01:41:03 +0200')
|
|
||||||
self.assertEqual(dt.tzoffset(), 7200)
|
|
||||||
|
|
||||||
# A negative numerical timezone with minutes.
|
|
||||||
dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0637')
|
|
||||||
self.assertEqual(dt.tzoffset(), -23820)
|
|
||||||
|
|
||||||
# A positive numerical timezone with minutes.
|
|
||||||
dt = DateTime('Tue, 24 Jul 2001 09:41:03 +0425')
|
|
||||||
self.assertEqual(dt.tzoffset(), 15900)
|
|
||||||
|
|
||||||
def testISO8601(self):
|
|
||||||
# ISO8601 reference dates
|
|
||||||
ref0 = DateTime('2002/5/2 8:00am GMT')
|
|
||||||
ref1 = DateTime('2002/5/2 8:00am US/Eastern')
|
|
||||||
ref2 = DateTime('2006/11/6 10:30 GMT')
|
|
||||||
ref3 = DateTime('2004/06/14 14:30:15 GMT-3')
|
|
||||||
ref4 = DateTime('2006/01/01 GMT')
|
|
||||||
|
|
||||||
# Basic tests
|
|
||||||
# Though this is timezone naive and according to the specification should
# be interpreted in the local timezone, it is parsed as GMT here to
# preserve backwards compatibility with previously expected behaviour.
|
|
||||||
isoDt = DateTime('2002-05-02T08:00:00')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02T08:00:00Z')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02T08:00:00+00:00')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02T08:00:00-04:00')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02 08:00:00-04:00')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 1386: the colon in the timezone offset is optional
|
|
||||||
isoDt = DateTime('2002-05-02T08:00:00-0400')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: date reduced formats
|
|
||||||
isoDt = DateTime('2006-01-01')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('200601-01')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20060101')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2006-01')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('200601')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2006')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: date/time separators are also optional
|
|
||||||
isoDt = DateTime('20020502T08:00:00')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02T080000')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20020502T080000')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: timezones with only one digit for hour
|
|
||||||
isoDt = DateTime('20020502T080000+0')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20020502 080000-4')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20020502T080000-400')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20020502T080000-4:00')
|
|
||||||
self.assertTrue(ref1.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: optional seconds/minutes
|
|
||||||
isoDt = DateTime('2002-05-02T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-05-02T08')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: week format
|
|
||||||
isoDt = DateTime('2002-W18-4T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002-W184T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002W18-4T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002W184T08')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2004-W25-1T14:30:15-03:00')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2004-W25T14:30:15-03:00')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: day of year format
|
|
||||||
isoDt = DateTime('2002-122T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2002122T0800')
|
|
||||||
self.assertTrue(ref0.equalTo(isoDt))
|
|
||||||
|
|
||||||
# Bug 2191: hours/minutes fractions
|
|
||||||
isoDt = DateTime('2006-11-06T10.5')
|
|
||||||
self.assertTrue(ref2.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2006-11-06T10,5')
|
|
||||||
self.assertTrue(ref2.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20040614T1430.25-3')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2004-06-14T1430,25-3')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('2004-06-14T14:30.25-3')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
isoDt = DateTime('20040614T14:30,25-3')
|
|
||||||
self.assertTrue(ref3.equalTo(isoDt))
|
|
||||||
|
|
||||||
# ISO8601 standard format
|
|
||||||
iso8601_string = '2002-05-02T08:00:00-04:00'
|
|
||||||
iso8601DT = DateTime(iso8601_string)
|
|
||||||
self.assertEqual(iso8601_string, iso8601DT.ISO8601())
|
|
||||||
|
|
||||||
# ISO format with no timezone
|
|
||||||
isoDt = DateTime('2006-01-01 00:00:00')
|
|
||||||
self.assertTrue(ref4.equalTo(isoDt))
|
|
||||||
|
|
||||||
def testJulianWeek(self):
|
|
||||||
# Check JulianDayWeek function
|
|
||||||
fn = os.path.join(DATADIR, 'julian_testdata.txt')
|
|
||||||
with open(fn) as fd:
|
|
||||||
lines = fd.readlines()
|
|
||||||
for line in lines:
|
|
||||||
d = DateTime(line[:10])
|
|
||||||
result_from_mx = tuple(map(int, line[12:-2].split(',')))
|
|
||||||
self.assertEqual(result_from_mx[1], d.week())
|
|
||||||
|
|
||||||
def testCopyConstructor(self):
|
|
||||||
d = DateTime('2004/04/04')
|
|
||||||
self.assertEqual(DateTime(d), d)
|
|
||||||
self.assertEqual(str(DateTime(d)), str(d))
|
|
||||||
d2 = DateTime('1999/04/12 01:00:00')
|
|
||||||
self.assertEqual(DateTime(d2), d2)
|
|
||||||
self.assertEqual(str(DateTime(d2)), str(d2))
|
|
||||||
|
|
||||||
def testCopyConstructorPreservesTimezone(self):
|
|
||||||
# test for https://bugs.launchpad.net/zope2/+bug/200007
|
|
||||||
# This always worked in the local timezone, so we need at least
|
|
||||||
# two tests with different zones to be sure at least one of them
|
|
||||||
# is not local.
|
|
||||||
d = DateTime('2004/04/04')
|
|
||||||
self.assertEqual(DateTime(d).timezone(), d.timezone())
|
|
||||||
d2 = DateTime('2008/04/25 12:00:00 EST')
|
|
||||||
self.assertEqual(DateTime(d2).timezone(), d2.timezone())
|
|
||||||
self.assertEqual(str(DateTime(d2)), str(d2))
|
|
||||||
d3 = DateTime('2008/04/25 12:00:00 PST')
|
|
||||||
self.assertEqual(DateTime(d3).timezone(), d3.timezone())
|
|
||||||
self.assertEqual(str(DateTime(d3)), str(d3))
|
|
||||||
|
|
||||||
def testRFC822(self):
|
|
||||||
# rfc822 conversion
|
|
||||||
dt = DateTime('2002-05-02T08:00:00+00:00')
|
|
||||||
self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0000')
|
|
||||||
|
|
||||||
dt = DateTime('2002-05-02T08:00:00+02:00')
|
|
||||||
self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0200')
|
|
||||||
|
|
||||||
dt = DateTime('2002-05-02T08:00:00-02:00')
|
|
||||||
self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 -0200')
|
|
||||||
|
|
||||||
# Checking that conversion from local time is working.
|
|
||||||
dt = DateTime()
|
|
||||||
dts = dt.rfc822().split(' ')
|
|
||||||
times = dts[4].split(':')
|
|
||||||
_isDST = time.localtime(time.time())[8]
|
|
||||||
if _isDST:
|
|
||||||
offset = time.altzone
|
|
||||||
else:
|
|
||||||
offset = time.timezone
|
|
||||||
self.assertEqual(dts[0], dt.aDay() + ',')
|
|
||||||
self.assertEqual(int(dts[1]), dt.day())
|
|
||||||
self.assertEqual(dts[2], dt.aMonth())
|
|
||||||
self.assertEqual(int(dts[3]), dt.year())
|
|
||||||
self.assertEqual(int(times[0]), dt.h_24())
|
|
||||||
self.assertEqual(int(times[1]), dt.minute())
|
|
||||||
self.assertEqual(int(times[2]), int(dt.second()))
|
|
||||||
self.assertEqual(dts[5], "%+03d%02d" % divmod((-offset / 60), 60))
|
|
||||||
|
|
||||||
def testInternationalDateformat(self):
|
|
||||||
for year in (1990, 2001, 2020):
|
|
||||||
for month in (1, 12):
|
|
||||||
for day in (1, 12, 28, 31):
|
|
||||||
try:
|
|
||||||
d_us = DateTime("%d/%d/%d" % (year, month, day))
|
|
||||||
except Exception:
|
|
||||||
continue
|
|
||||||
|
|
||||||
d_int = DateTime("%d.%d.%d" % (day, month, year),
|
|
||||||
datefmt="international")
|
|
||||||
self.assertEqual(d_us, d_int)
|
|
||||||
|
|
||||||
d_int = DateTime("%d/%d/%d" % (day, month, year),
|
|
||||||
datefmt="international")
|
|
||||||
self.assertEqual(d_us, d_int)
|
|
||||||
|
|
||||||
def test_intl_format_hyphen(self):
|
|
||||||
d_jan = DateTime('2011-01-11 GMT')
|
|
||||||
d_nov = DateTime('2011-11-01 GMT')
|
|
||||||
d_us = DateTime('11-01-2011 GMT')
|
|
||||||
d_int = DateTime('11-01-2011 GMT', datefmt="international")
|
|
||||||
self.assertNotEqual(d_us, d_int)
|
|
||||||
self.assertEqual(d_us, d_nov)
|
|
||||||
self.assertEqual(d_int, d_jan)
|
|
||||||
|
|
||||||
def test_calcTimezoneName(self):
|
|
||||||
from DateTime.interfaces import TimeError
|
|
||||||
timezone_dependent_epoch = 2177452800
|
|
||||||
try:
|
|
||||||
DateTime()._calcTimezoneName(timezone_dependent_epoch, 0)
|
|
||||||
except TimeError:
|
|
||||||
self.fail('Zope Collector issue #484 (negative time bug): '
|
|
||||||
'TimeError raised')
|
|
||||||
|
|
||||||
def testStrftimeTZhandling(self):
|
|
||||||
# strftime timezone testing
|
|
||||||
# This is a test for collector issue #1127
|
|
||||||
format = '%Y-%m-%d %H:%M %Z'
|
|
||||||
dt = DateTime('Wed, 19 Nov 2003 18:32:07 -0215')
|
|
||||||
dt_string = dt.strftime(format)
|
|
||||||
dt_local = dt.toZone(_findLocalTimeZoneName(0))
|
|
||||||
dt_localstring = dt_local.strftime(format)
|
|
||||||
self.assertEqual(dt_string, dt_localstring)
|
|
||||||
|
|
||||||
def testStrftimeFarDates(self):
|
|
||||||
# Checks strftime in dates <= 1900 or >= 2038
|
|
||||||
dt = DateTime('1900/01/30')
|
|
||||||
self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/1900')
|
|
||||||
dt = DateTime('2040/01/30')
|
|
||||||
self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/2040')
|
|
||||||
|
|
||||||
def testZoneInFarDates(self):
|
|
||||||
# Checks time zone in dates <= 1900 or >= 2038
|
|
||||||
dt1 = DateTime('2040/01/30 14:33 GMT+1')
|
|
||||||
dt2 = DateTime('2040/01/30 11:33 GMT-2')
|
|
||||||
self.assertEqual(dt1.strftime('%d/%m/%Y %H:%M'),
|
|
||||||
dt2.strftime('%d/%m/%Y %H:%M'))
|
|
||||||
|
|
||||||
@unittest.skipIf(
|
|
||||||
IS_PYPY,
|
|
||||||
"Using Non-Ascii characters for strftime doesn't work in PyPy"
|
|
||||||
"https://bitbucket.org/pypy/pypy/issues/2161/pypy3-strftime-does-not-accept-unicode" # noqa: E501 line too long
|
|
||||||
)
|
|
||||||
def testStrftimeStr(self):
|
|
||||||
dt = DateTime('2002-05-02T08:00:00+00:00')
|
|
||||||
uchar = b'\xc3\xa0'.decode('utf-8')
|
|
||||||
ok = dt.strftime('Le %d/%m/%Y a %Hh%M').replace('a', uchar)
|
|
||||||
ustr = b'Le %d/%m/%Y \xc3\xa0 %Hh%M'.decode('utf-8')
|
|
||||||
self.assertEqual(dt.strftime(ustr), ok)
|
|
||||||
|
|
||||||
def testTimezoneNaiveHandling(self):
|
|
||||||
# checks that we assign timezone naivety correctly
|
|
||||||
dt = DateTime('2007-10-04T08:00:00+00:00')
|
|
||||||
self.assertFalse(dt.timezoneNaive(),
|
|
||||||
'error with naivity handling in __parse_iso8601')
|
|
||||||
dt = DateTime('2007-10-04T08:00:00Z')
|
|
||||||
self.assertFalse(dt.timezoneNaive(),
|
|
||||||
'error with naivity handling in __parse_iso8601')
|
|
||||||
dt = DateTime('2007-10-04T08:00:00')
|
|
||||||
self.assertTrue(dt.timezoneNaive(),
|
|
||||||
'error with naivity handling in __parse_iso8601')
|
|
||||||
dt = DateTime('2007/10/04 15:12:33.487618 GMT+1')
|
|
||||||
self.assertFalse(dt.timezoneNaive(),
|
|
||||||
'error with naivity handling in _parse')
|
|
||||||
dt = DateTime('2007/10/04 15:12:33.487618')
|
|
||||||
self.assertTrue(dt.timezoneNaive(),
|
|
||||||
'error with naivity handling in _parse')
|
|
||||||
dt = DateTime()
|
|
||||||
self.assertFalse(dt.timezoneNaive(),
|
|
||||||
'error with naivity for current time')
|
|
||||||
s = '2007-10-04T08:00:00'
|
|
||||||
dt = DateTime(s)
|
|
||||||
self.assertEqual(s, dt.ISO8601())
|
|
||||||
s = '2007-10-04T08:00:00+00:00'
|
|
||||||
dt = DateTime(s)
|
|
||||||
self.assertEqual(s, dt.ISO8601())
|
|
||||||
|
|
||||||
def testConversions(self):
|
|
||||||
sdt0 = datetime.now() # this is a timezone naive datetime
|
|
||||||
dt0 = DateTime(sdt0)
|
|
||||||
self.assertTrue(dt0.timezoneNaive(), (sdt0, dt0))
|
|
||||||
sdt1 = datetime(2007, 10, 4, 18, 14, 42, 580, pytz.utc)
|
|
||||||
dt1 = DateTime(sdt1)
|
|
||||||
self.assertFalse(dt1.timezoneNaive(), (sdt1, dt1))
|
|
||||||
|
|
||||||
# convert back
|
|
||||||
sdt2 = dt0.asdatetime()
|
|
||||||
self.assertEqual(sdt0, sdt2)
|
|
||||||
sdt3 = dt1.utcdatetime() # this returns a timezone naive datetime
|
|
||||||
self.assertEqual(sdt1.hour, sdt3.hour)
|
|
||||||
|
|
||||||
dt4 = DateTime('2007-10-04T10:00:00+05:00')
|
|
||||||
sdt4 = datetime(2007, 10, 4, 5, 0)
|
|
||||||
self.assertEqual(dt4.utcdatetime(), sdt4)
|
|
||||||
self.assertEqual(dt4.asdatetime(), sdt4.replace(tzinfo=pytz.utc))
|
|
||||||
|
|
||||||
dt5 = DateTime('2007-10-23 10:00:00 US/Eastern')
|
|
||||||
tz = pytz.timezone('US/Eastern')
|
|
||||||
sdt5 = datetime(2007, 10, 23, 10, 0, tzinfo=tz)
|
|
||||||
dt6 = DateTime(sdt5)
|
|
||||||
self.assertEqual(dt5.asdatetime(), sdt5)
|
|
||||||
self.assertEqual(dt6.asdatetime(), sdt5)
|
|
||||||
self.assertEqual(dt5, dt6)
|
|
||||||
self.assertEqual(dt5.asdatetime().tzinfo, tz)
|
|
||||||
self.assertEqual(dt6.asdatetime().tzinfo, tz)
|
|
||||||
|
|
||||||
def testBasicTZ(self):
|
|
||||||
# psycopg2 supplies its own tzinfo instances, with no `zone` attribute
|
|
||||||
tz = FixedOffset(60, 'GMT+1')
|
|
||||||
dt1 = datetime(2008, 8, 5, 12, 0, tzinfo=tz)
|
|
||||||
DT = DateTime(dt1)
|
|
||||||
dt2 = DT.asdatetime()
|
|
||||||
offset1 = dt1.tzinfo.utcoffset(dt1)
|
|
||||||
offset2 = dt2.tzinfo.utcoffset(dt2)
|
|
||||||
self.assertEqual(offset1, offset2)
|
|
||||||
|
|
||||||
def testEDTTimezone(self):
|
|
||||||
# should be able to parse EDT timezones: see lp:599856.
|
|
||||||
dt = DateTime("Mon, 28 Jun 2010 10:12:25 EDT")
|
|
||||||
self.assertEqual(dt.Day(), 'Monday')
|
|
||||||
self.assertEqual(dt.day(), 28)
|
|
||||||
self.assertEqual(dt.Month(), 'June')
|
|
||||||
self.assertEqual(dt.timezone(), 'GMT-4')
|
|
||||||
|
|
||||||
def testParseISO8601(self):
|
|
||||||
parsed = DateTime()._parse_iso8601('2010-10-10')
|
|
||||||
self.assertEqual(parsed, (2010, 10, 10, 0, 0, 0, 'GMT+0000'))
|
|
||||||
|
|
||||||
def test_interface(self):
|
|
||||||
from DateTime.interfaces import IDateTime
|
|
||||||
self.assertTrue(IDateTime.providedBy(DateTime()))
|
|
||||||
|
|
||||||
def test_security(self):
|
|
||||||
dt = DateTime()
|
|
||||||
self.assertEqual(dt.__roles__, None)
|
|
||||||
self.assertEqual(dt.__allow_access_to_unprotected_subobjects__, 1)
|
|
||||||
|
|
||||||
def test_format(self):
|
|
||||||
dt = DateTime(1968, 3, 10, 23, 45, 0, 'Europe/Vienna')
|
|
||||||
fmt = '%d.%m.%Y %H:%M'
|
|
||||||
result = dt.strftime(fmt)
|
|
||||||
unformatted_result = '1968/03/10 23:45:00 Europe/Vienna'
|
|
||||||
self.assertEqual(result, f'{dt:%d.%m.%Y %H:%M}')
|
|
||||||
self.assertEqual(unformatted_result, f'{dt}')
|
|
||||||
self.assertEqual(unformatted_result, f'{dt}')
|
|
||||||
self.assertEqual(result, f'{dt:{fmt}}')
|
|
||||||
self.assertEqual(unformatted_result, f'{dt:}')
|
|
||||||
self.assertEqual(unformatted_result, f'{dt}')
|
|
||||||
|
|
||||||
|
|
||||||
def test_suite():
|
|
||||||
import doctest
|
|
||||||
return unittest.TestSuite([
|
|
||||||
unittest.defaultTestLoader.loadTestsFromTestCase(DateTimeTests),
|
|
||||||
doctest.DocFileSuite('DateTime.txt', package='DateTime'),
|
|
||||||
doctest.DocFileSuite('pytz.txt', package='DateTime'),
|
|
||||||
])
|
|
||||||
@@ -1,133 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# bitmap distribution font (bdf) file parser
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1996-05-16 fl created (as bdf2pil)
|
|
||||||
# 1997-08-25 fl converted to FontFile driver
|
|
||||||
# 2001-05-25 fl removed bogus __init__ call
|
|
||||||
# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev)
|
|
||||||
# 2003-04-22 fl more robustification (from Graham Dumpleton)
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1997-2003 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
"""
|
|
||||||
Parse X Bitmap Distribution Format (BDF)
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from typing import BinaryIO
|
|
||||||
|
|
||||||
from . import FontFile, Image
|
|
||||||
|
|
||||||
bdf_slant = {
|
|
||||||
"R": "Roman",
|
|
||||||
"I": "Italic",
|
|
||||||
"O": "Oblique",
|
|
||||||
"RI": "Reverse Italic",
|
|
||||||
"RO": "Reverse Oblique",
|
|
||||||
"OT": "Other",
|
|
||||||
}
|
|
||||||
|
|
||||||
bdf_spacing = {"P": "Proportional", "M": "Monospaced", "C": "Cell"}
|
|
||||||
|
|
||||||
|
|
||||||
def bdf_char(
|
|
||||||
f: BinaryIO,
|
|
||||||
) -> (
|
|
||||||
tuple[
|
|
||||||
str,
|
|
||||||
int,
|
|
||||||
tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]],
|
|
||||||
Image.Image,
|
|
||||||
]
|
|
||||||
| None
|
|
||||||
):
|
|
||||||
# skip to STARTCHAR
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s:
|
|
||||||
return None
|
|
||||||
if s[:9] == b"STARTCHAR":
|
|
||||||
break
|
|
||||||
id = s[9:].strip().decode("ascii")
|
|
||||||
|
|
||||||
# load symbol properties
|
|
||||||
props = {}
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s or s[:6] == b"BITMAP":
|
|
||||||
break
|
|
||||||
i = s.find(b" ")
|
|
||||||
props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
|
|
||||||
|
|
||||||
# load bitmap
|
|
||||||
bitmap = bytearray()
|
|
||||||
while True:
|
|
||||||
s = f.readline()
|
|
||||||
if not s or s[:7] == b"ENDCHAR":
|
|
||||||
break
|
|
||||||
bitmap += s[:-1]
|
|
||||||
|
|
||||||
# The word BBX
|
|
||||||
# followed by the width in x (BBw), height in y (BBh),
|
|
||||||
# and x and y displacement (BBxoff0, BByoff0)
|
|
||||||
# of the lower left corner from the origin of the character.
|
|
||||||
width, height, x_disp, y_disp = (int(p) for p in props["BBX"].split())
|
|
||||||
|
|
||||||
# The word DWIDTH
|
|
||||||
# followed by the width in x and y of the character in device pixels.
|
|
||||||
dwx, dwy = (int(p) for p in props["DWIDTH"].split())
|
|
||||||
|
|
||||||
bbox = (
|
|
||||||
(dwx, dwy),
|
|
||||||
(x_disp, -y_disp - height, width + x_disp, -y_disp),
|
|
||||||
(0, 0, width, height),
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
im = Image.frombytes("1", (width, height), bitmap, "hex", "1")
|
|
||||||
except ValueError:
|
|
||||||
# deal with zero-width characters
|
|
||||||
im = Image.new("1", (width, height))
|
|
||||||
|
|
||||||
return id, int(props["ENCODING"]), bbox, im
|
|
||||||
|
|
||||||
|
|
||||||
class BdfFontFile(FontFile.FontFile):
|
|
||||||
"""Font file plugin for the X11 BDF format."""
|
|
||||||
|
|
||||||
def __init__(self, fp: BinaryIO):
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
s = fp.readline()
|
|
||||||
if s[:13] != b"STARTFONT 2.1":
|
|
||||||
msg = "not a valid BDF file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
props = {}
|
|
||||||
comments = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
s = fp.readline()
|
|
||||||
if not s or s[:13] == b"ENDPROPERTIES":
|
|
||||||
break
|
|
||||||
i = s.find(b" ")
|
|
||||||
props[s[:i].decode("ascii")] = s[i + 1 : -1].decode("ascii")
|
|
||||||
if s[:i] in [b"COMMENT", b"COPYRIGHT"]:
|
|
||||||
if s.find(b"LogicalFontDescription") < 0:
|
|
||||||
comments.append(s[i + 1 : -1].decode("ascii"))
|
|
||||||
|
|
||||||
while True:
|
|
||||||
c = bdf_char(fp)
|
|
||||||
if not c:
|
|
||||||
break
|
|
||||||
id, ch, (xy, dst, src), im = c
|
|
||||||
if 0 <= ch < len(self.glyph):
|
|
||||||
self.glyph[ch] = xy, dst, src, im
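
# --- Usage sketch (not part of the original module) --------------------------
# Hedged example of the typical use of BdfFontFile: compile a BDF font into the
# .pil/.pbm pair that PIL.ImageFont.load() understands.  File names are
# hypothetical.
def _example_compile_bdf():
    with open("courR12.bdf", "rb") as fp:
        font = BdfFontFile(fp)
    font.save("courR12.pil")  # FontFile.save() also writes the matching .pbm bitmap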
|
|
||||||
@@ -1,475 +0,0 @@
|
"""
|
|
||||||
Blizzard Mipmap Format (.blp)
|
|
||||||
Jerome Leclanche <jerome@leclan.ch>
|
|
||||||
|
|
||||||
The contents of this file are hereby released in the public domain (CC0)
|
|
||||||
Full text of the CC0 license:
|
|
||||||
https://creativecommons.org/publicdomain/zero/1.0/
|
|
||||||
|
|
||||||
BLP1 files, used mostly in Warcraft III, are not fully supported.
|
|
||||||
All types of BLP2 files used in World of Warcraft are supported.
|
|
||||||
|
|
||||||
The BLP file structure consists of a header, up to 16 mipmaps of the
|
|
||||||
texture
|
|
||||||
|
|
||||||
Texture sizes must be powers of two, though the two dimensions do
|
|
||||||
not have to be equal; 512x256 is valid, but 512x200 is not.
|
|
||||||
The first mipmap (mipmap #0) is the full size image; each subsequent
|
|
||||||
mipmap halves both dimensions. The final mipmap should be 1x1.
|
|
||||||
|
|
||||||
BLP files come in many different flavours:
|
|
||||||
* JPEG-compressed (type == 0) - only supported for BLP1.
|
|
||||||
* RAW images (type == 1, encoding == 1). Each mipmap is stored as an
|
|
||||||
array of 8-bit values, one per pixel, left to right, top to bottom.
|
|
||||||
Each value is an index to the palette.
|
|
||||||
* DXT-compressed (type == 1, encoding == 2):
|
|
||||||
- DXT1 compression is used if alpha_encoding == 0.
|
|
||||||
- An additional alpha bit is used if alpha_depth == 1.
|
|
||||||
- DXT3 compression is used if alpha_encoding == 1.
|
|
||||||
- DXT5 compression is used if alpha_encoding == 7.
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
from enum import IntEnum
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
|
|
||||||
class Format(IntEnum):
|
|
||||||
JPEG = 0
|
|
||||||
|
|
||||||
|
|
||||||
class Encoding(IntEnum):
|
|
||||||
UNCOMPRESSED = 1
|
|
||||||
DXT = 2
|
|
||||||
UNCOMPRESSED_RAW_BGRA = 3
|
|
||||||
|
|
||||||
|
|
||||||
class AlphaEncoding(IntEnum):
|
|
||||||
DXT1 = 0
|
|
||||||
DXT3 = 1
|
|
||||||
DXT5 = 7
|
|
||||||
|
|
||||||
|
|
||||||
def unpack_565(i):
|
|
||||||
return ((i >> 11) & 0x1F) << 3, ((i >> 5) & 0x3F) << 2, (i & 0x1F) << 3
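
# Hedged illustration (not part of the original module): unpack_565() expands a
# packed 5-6-5 RGB value into 8-bit channels by left-shifting each field.
def _example_unpack_565():
    assert unpack_565(0xFFFF) == (248, 252, 248)  # every field saturated
    assert unpack_565(0xF800) == (248, 0, 0)  # only the 5 red bits set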
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt1(data, alpha=False):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4*width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 8 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
# Decode next 8-byte block.
|
|
||||||
idx = block * 8
|
|
||||||
color0, color1, bits = struct.unpack_from("<HHI", data, idx)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
# Decode this block into 4x4 pixels
|
|
||||||
# Accumulate the results onto our 4 row accumulators
|
|
||||||
for j in range(4):
|
|
||||||
for i in range(4):
|
|
||||||
# get next control op and generate a pixel
|
|
||||||
|
|
||||||
control = bits & 3
|
|
||||||
bits = bits >> 2
|
|
||||||
|
|
||||||
a = 0xFF
|
|
||||||
if control == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif control == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif control == 2:
|
|
||||||
if color0 > color1:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
else:
|
|
||||||
r = (r0 + r1) // 2
|
|
||||||
g = (g0 + g1) // 2
|
|
||||||
b = (b0 + b1) // 2
|
|
||||||
elif control == 3:
|
|
||||||
if color0 > color1:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
else:
|
|
||||||
r, g, b, a = 0, 0, 0, 0
|
|
||||||
|
|
||||||
if alpha:
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
else:
|
|
||||||
ret[j].extend([r, g, b])
|
|
||||||
|
|
||||||
return ret
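
# --- Illustration (not part of the original module) --------------------------
# Hedged sketch: decode_dxt1() handles one row of 4x4 blocks and returns four
# scanlines, so a whole mipmap is rebuilt by concatenating those scanlines top
# to bottom.  Assumes width and height are multiples of four and that fp is a
# hypothetical file object positioned at the start of the mipmap data.
def _example_assemble_dxt1(fp, width, height):
    linesize = (width + 3) // 4 * 8  # 8 bytes per DXT1 block
    data = bytearray()
    for _ in range((height + 3) // 4):
        for scanline in decode_dxt1(fp.read(linesize)):
            data += scanline
    return Image.frombytes("RGB", (width, height), bytes(data))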
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt3(data):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4*width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 16 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
idx = block * 16
|
|
||||||
block = data[idx : idx + 16]
|
|
||||||
# Decode next 16-byte block.
|
|
||||||
bits = struct.unpack_from("<8B", block)
|
|
||||||
color0, color1 = struct.unpack_from("<HH", block, 8)
|
|
||||||
|
|
||||||
(code,) = struct.unpack_from("<I", block, 12)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
for j in range(4):
|
|
||||||
high = False # Do we want the higher bits?
|
|
||||||
for i in range(4):
|
|
||||||
alphacode_index = (4 * j + i) // 2
|
|
||||||
a = bits[alphacode_index]
|
|
||||||
if high:
|
|
||||||
high = False
|
|
||||||
a >>= 4
|
|
||||||
else:
|
|
||||||
high = True
|
|
||||||
a &= 0xF
|
|
||||||
a *= 17 # We get a value between 0 and 15
|
|
||||||
|
|
||||||
color_code = (code >> 2 * (4 * j + i)) & 0x03
|
|
||||||
|
|
||||||
if color_code == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif color_code == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif color_code == 2:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
elif color_code == 3:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dxt5(data):
|
|
||||||
"""
|
|
||||||
input: one "row" of data (i.e. will produce 4 * width pixels)
|
|
||||||
"""
|
|
||||||
|
|
||||||
blocks = len(data) // 16 # number of blocks in row
|
|
||||||
ret = (bytearray(), bytearray(), bytearray(), bytearray())
|
|
||||||
|
|
||||||
for block in range(blocks):
|
|
||||||
idx = block * 16
|
|
||||||
block = data[idx : idx + 16]
|
|
||||||
# Decode next 16-byte block.
|
|
||||||
a0, a1 = struct.unpack_from("<BB", block)
|
|
||||||
|
|
||||||
bits = struct.unpack_from("<6B", block, 2)
|
|
||||||
alphacode1 = bits[2] | (bits[3] << 8) | (bits[4] << 16) | (bits[5] << 24)
|
|
||||||
alphacode2 = bits[0] | (bits[1] << 8)
|
|
||||||
|
|
||||||
color0, color1 = struct.unpack_from("<HH", block, 8)
|
|
||||||
|
|
||||||
(code,) = struct.unpack_from("<I", block, 12)
|
|
||||||
|
|
||||||
r0, g0, b0 = unpack_565(color0)
|
|
||||||
r1, g1, b1 = unpack_565(color1)
|
|
||||||
|
|
||||||
for j in range(4):
|
|
||||||
for i in range(4):
|
|
||||||
# get next control op and generate a pixel
|
|
||||||
alphacode_index = 3 * (4 * j + i)
|
|
||||||
|
|
||||||
if alphacode_index <= 12:
|
|
||||||
alphacode = (alphacode2 >> alphacode_index) & 0x07
|
|
||||||
elif alphacode_index == 15:
|
|
||||||
alphacode = (alphacode2 >> 15) | ((alphacode1 << 1) & 0x06)
|
|
||||||
else: # alphacode_index >= 18 and alphacode_index <= 45
|
|
||||||
alphacode = (alphacode1 >> (alphacode_index - 16)) & 0x07
|
|
||||||
|
|
||||||
if alphacode == 0:
|
|
||||||
a = a0
|
|
||||||
elif alphacode == 1:
|
|
||||||
a = a1
|
|
||||||
elif a0 > a1:
|
|
||||||
a = ((8 - alphacode) * a0 + (alphacode - 1) * a1) // 7
|
|
||||||
elif alphacode == 6:
|
|
||||||
a = 0
|
|
||||||
elif alphacode == 7:
|
|
||||||
a = 255
|
|
||||||
else:
|
|
||||||
a = ((6 - alphacode) * a0 + (alphacode - 1) * a1) // 5
|
|
||||||
|
|
||||||
color_code = (code >> 2 * (4 * j + i)) & 0x03
|
|
||||||
|
|
||||||
if color_code == 0:
|
|
||||||
r, g, b = r0, g0, b0
|
|
||||||
elif color_code == 1:
|
|
||||||
r, g, b = r1, g1, b1
|
|
||||||
elif color_code == 2:
|
|
||||||
r = (2 * r0 + r1) // 3
|
|
||||||
g = (2 * g0 + g1) // 3
|
|
||||||
b = (2 * b0 + b1) // 3
|
|
||||||
elif color_code == 3:
|
|
||||||
r = (2 * r1 + r0) // 3
|
|
||||||
g = (2 * g1 + g0) // 3
|
|
||||||
b = (2 * b1 + b0) // 3
|
|
||||||
|
|
||||||
ret[j].extend([r, g, b, a])
|
|
||||||
|
|
||||||
return ret
|
|
||||||
|
|
||||||
|
|
||||||
class BLPFormatError(NotImplementedError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] in (b"BLP1", b"BLP2")
|
|
||||||
|
|
||||||
|
|
||||||
class BlpImageFile(ImageFile.ImageFile):
|
|
||||||
"""
|
|
||||||
Blizzard Mipmap Format
|
|
||||||
"""
|
|
||||||
|
|
||||||
format = "BLP"
|
|
||||||
format_description = "Blizzard Mipmap Format"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
self.magic = self.fp.read(4)
|
|
||||||
|
|
||||||
self.fp.seek(5, os.SEEK_CUR)
|
|
||||||
(self._blp_alpha_depth,) = struct.unpack("<b", self.fp.read(1))
|
|
||||||
|
|
||||||
self.fp.seek(2, os.SEEK_CUR)
|
|
||||||
self._size = struct.unpack("<II", self.fp.read(8))
|
|
||||||
|
|
||||||
if self.magic in (b"BLP1", b"BLP2"):
|
|
||||||
decoder = self.magic.decode()
|
|
||||||
else:
|
|
||||||
msg = f"Bad BLP magic {repr(self.magic)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
self._mode = "RGBA" if self._blp_alpha_depth else "RGB"
|
|
||||||
self.tile = [(decoder, (0, 0) + self.size, 0, (self.mode, 0, 1))]
|
|
||||||
|
|
||||||
|
|
||||||
class _BLPBaseDecoder(ImageFile.PyDecoder):
|
|
||||||
_pulls_fd = True
|
|
||||||
|
|
||||||
def decode(self, buffer):
|
|
||||||
try:
|
|
||||||
self._read_blp_header()
|
|
||||||
self._load()
|
|
||||||
except struct.error as e:
|
|
||||||
msg = "Truncated BLP file"
|
|
||||||
raise OSError(msg) from e
|
|
||||||
return -1, 0
|
|
||||||
|
|
||||||
def _read_blp_header(self):
|
|
||||||
self.fd.seek(4)
|
|
||||||
(self._blp_compression,) = struct.unpack("<i", self._safe_read(4))
|
|
||||||
|
|
||||||
(self._blp_encoding,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
(self._blp_alpha_depth,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
(self._blp_alpha_encoding,) = struct.unpack("<b", self._safe_read(1))
|
|
||||||
self.fd.seek(1, os.SEEK_CUR) # mips
|
|
||||||
|
|
||||||
self.size = struct.unpack("<II", self._safe_read(8))
|
|
||||||
|
|
||||||
if isinstance(self, BLP1Decoder):
|
|
||||||
# Only present for BLP1
|
|
||||||
(self._blp_encoding,) = struct.unpack("<i", self._safe_read(4))
|
|
||||||
self.fd.seek(4, os.SEEK_CUR) # subtype
|
|
||||||
|
|
||||||
self._blp_offsets = struct.unpack("<16I", self._safe_read(16 * 4))
|
|
||||||
self._blp_lengths = struct.unpack("<16I", self._safe_read(16 * 4))
|
|
||||||
|
|
||||||
def _safe_read(self, length):
|
|
||||||
return ImageFile._safe_read(self.fd, length)
|
|
||||||
|
|
||||||
def _read_palette(self):
|
|
||||||
ret = []
|
|
||||||
for i in range(256):
|
|
||||||
try:
|
|
||||||
b, g, r, a = struct.unpack("<4B", self._safe_read(4))
|
|
||||||
except struct.error:
|
|
||||||
break
|
|
||||||
ret.append((b, g, r, a))
|
|
||||||
return ret
|
|
||||||
|
|
||||||
def _read_bgra(self, palette):
|
|
||||||
data = bytearray()
|
|
||||||
_data = BytesIO(self._safe_read(self._blp_lengths[0]))
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
(offset,) = struct.unpack("<B", _data.read(1))
|
|
||||||
except struct.error:
|
|
||||||
break
|
|
||||||
b, g, r, a = palette[offset]
|
|
||||||
d = (r, g, b)
|
|
||||||
if self._blp_alpha_depth:
|
|
||||||
d += (a,)
|
|
||||||
data.extend(d)
|
|
||||||
return data
|
|
||||||
|
|
||||||
|
|
||||||
class BLP1Decoder(_BLPBaseDecoder):
|
|
||||||
def _load(self):
|
|
||||||
if self._blp_compression == Format.JPEG:
|
|
||||||
self._decode_jpeg_stream()
|
|
||||||
|
|
||||||
elif self._blp_compression == 1:
|
|
||||||
if self._blp_encoding in (4, 5):
|
|
||||||
palette = self._read_palette()
|
|
||||||
data = self._read_bgra(palette)
|
|
||||||
self.set_as_raw(bytes(data))
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BLP encoding {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BLP compression {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
def _decode_jpeg_stream(self):
|
|
||||||
from .JpegImagePlugin import JpegImageFile
|
|
||||||
|
|
||||||
(jpeg_header_size,) = struct.unpack("<I", self._safe_read(4))
|
|
||||||
jpeg_header = self._safe_read(jpeg_header_size)
|
|
||||||
self._safe_read(self._blp_offsets[0] - self.fd.tell()) # What IS this?
|
|
||||||
data = self._safe_read(self._blp_lengths[0])
|
|
||||||
data = jpeg_header + data
|
|
||||||
data = BytesIO(data)
|
|
||||||
image = JpegImageFile(data)
|
|
||||||
Image._decompression_bomb_check(image.size)
|
|
||||||
if image.mode == "CMYK":
|
|
||||||
decoder_name, extents, offset, args = image.tile[0]
|
|
||||||
image.tile = [(decoder_name, extents, offset, (args[0], "CMYK"))]
|
|
||||||
r, g, b = image.convert("RGB").split()
|
|
||||||
image = Image.merge("RGB", (b, g, r))
|
|
||||||
self.set_as_raw(image.tobytes())
|
|
||||||
|
|
||||||
|
|
||||||
class BLP2Decoder(_BLPBaseDecoder):
|
|
||||||
def _load(self):
|
|
||||||
palette = self._read_palette()
|
|
||||||
|
|
||||||
self.fd.seek(self._blp_offsets[0])
|
|
||||||
|
|
||||||
if self._blp_compression == 1:
|
|
||||||
# Uncompressed or DirectX compression
|
|
||||||
|
|
||||||
if self._blp_encoding == Encoding.UNCOMPRESSED:
|
|
||||||
data = self._read_bgra(palette)
|
|
||||||
|
|
||||||
elif self._blp_encoding == Encoding.DXT:
|
|
||||||
data = bytearray()
|
|
||||||
if self._blp_alpha_encoding == AlphaEncoding.DXT1:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 8
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt1(
|
|
||||||
self._safe_read(linesize), alpha=bool(self._blp_alpha_depth)
|
|
||||||
):
|
|
||||||
data += d
|
|
||||||
|
|
||||||
elif self._blp_alpha_encoding == AlphaEncoding.DXT3:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 16
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt3(self._safe_read(linesize)):
|
|
||||||
data += d
|
|
||||||
|
|
||||||
elif self._blp_alpha_encoding == AlphaEncoding.DXT5:
|
|
||||||
linesize = (self.size[0] + 3) // 4 * 16
|
|
||||||
for yb in range((self.size[1] + 3) // 4):
|
|
||||||
for d in decode_dxt5(self._safe_read(linesize)):
|
|
||||||
data += d
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported alpha encoding {repr(self._blp_alpha_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
else:
|
|
||||||
msg = f"Unknown BLP encoding {repr(self._blp_encoding)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
else:
|
|
||||||
msg = f"Unknown BLP compression {repr(self._blp_compression)}"
|
|
||||||
raise BLPFormatError(msg)
|
|
||||||
|
|
||||||
self.set_as_raw(bytes(data))
|
|
||||||
|
|
||||||
|
|
||||||
class BLPEncoder(ImageFile.PyEncoder):
|
|
||||||
_pushes_fd = True
|
|
||||||
|
|
||||||
def _write_palette(self):
|
|
||||||
data = b""
|
|
||||||
palette = self.im.getpalette("RGBA", "RGBA")
|
|
||||||
for i in range(len(palette) // 4):
|
|
||||||
r, g, b, a = palette[i * 4 : (i + 1) * 4]
|
|
||||||
data += struct.pack("<4B", b, g, r, a)
|
|
||||||
while len(data) < 256 * 4:
|
|
||||||
data += b"\x00" * 4
|
|
||||||
return data
|
|
||||||
|
|
||||||
def encode(self, bufsize):
|
|
||||||
palette_data = self._write_palette()
|
|
||||||
|
|
||||||
offset = 20 + 16 * 4 * 2 + len(palette_data)
|
|
||||||
data = struct.pack("<16I", offset, *((0,) * 15))
|
|
||||||
|
|
||||||
w, h = self.im.size
|
|
||||||
data += struct.pack("<16I", w * h, *((0,) * 15))
|
|
||||||
|
|
||||||
data += palette_data
|
|
||||||
|
|
||||||
for y in range(h):
|
|
||||||
for x in range(w):
|
|
||||||
data += struct.pack("<B", self.im.getpixel((x, y)))
|
|
||||||
|
|
||||||
return len(data), 0, data
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
|
|
||||||
if im.mode != "P":
|
|
||||||
msg = "Unsupported BLP image mode"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
magic = b"BLP1" if im.encoderinfo.get("blp_version") == "BLP1" else b"BLP2"
|
|
||||||
fp.write(magic)
|
|
||||||
|
|
||||||
fp.write(struct.pack("<i", 1)) # Uncompressed or DirectX compression
|
|
||||||
fp.write(struct.pack("<b", Encoding.UNCOMPRESSED))
|
|
||||||
fp.write(struct.pack("<b", 1 if im.palette.mode == "RGBA" else 0))
|
|
||||||
fp.write(struct.pack("<b", 0)) # alpha encoding
|
|
||||||
fp.write(struct.pack("<b", 0)) # mips
|
|
||||||
fp.write(struct.pack("<II", *im.size))
|
|
||||||
if magic == b"BLP1":
|
|
||||||
fp.write(struct.pack("<i", 5))
|
|
||||||
fp.write(struct.pack("<i", 0))
|
|
||||||
|
|
||||||
ImageFile._save(im, fp, [("BLP", (0, 0) + im.size, 0, im.mode)])
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(BlpImageFile.format, BlpImageFile, _accept)
|
|
||||||
Image.register_extension(BlpImageFile.format, ".blp")
|
|
||||||
Image.register_decoder("BLP1", BLP1Decoder)
|
|
||||||
Image.register_decoder("BLP2", BLP2Decoder)
|
|
||||||
|
|
||||||
Image.register_save(BlpImageFile.format, _save)
|
|
||||||
Image.register_encoder("BLP", BLPEncoder)
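
# --- Usage sketch (not part of the original module) --------------------------
# With the open/save hooks registered above, BLP files go through the normal
# Image API.  File names are hypothetical.
def _example_blp_roundtrip():
    im = Image.open("minimap.blp")  # decoded by BLP1Decoder or BLP2Decoder
    im.convert("P").save("copy.blp")  # _save() only accepts palette ("P") images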
|
|
||||||
@@ -1,471 +0,0 @@
|
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# BMP file handler
|
|
||||||
#
|
|
||||||
# Windows (and OS/2) native bitmap storage format.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1995-09-01 fl Created
|
|
||||||
# 1996-04-30 fl Added save
|
|
||||||
# 1997-08-27 fl Fixed save of 1-bit images
|
|
||||||
# 1998-03-06 fl Load P images as L where possible
|
|
||||||
# 1998-07-03 fl Load P images as 1 where possible
|
|
||||||
# 1998-12-29 fl Handle small palettes
|
|
||||||
# 2002-12-30 fl Fixed load of 1-bit palette images
|
|
||||||
# 2003-04-21 fl Fixed load of 1-bit monochrome images
|
|
||||||
# 2003-04-23 fl Added limited support for BI_BITFIELDS compression
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB
|
|
||||||
# Copyright (c) 1995-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from . import Image, ImageFile, ImagePalette
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._binary import o8
|
|
||||||
from ._binary import o16le as o16
|
|
||||||
from ._binary import o32le as o32
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Read BMP file
|
|
||||||
|
|
||||||
BIT2MODE = {
|
|
||||||
# bits => mode, rawmode
|
|
||||||
1: ("P", "P;1"),
|
|
||||||
4: ("P", "P;4"),
|
|
||||||
8: ("P", "P"),
|
|
||||||
16: ("RGB", "BGR;15"),
|
|
||||||
24: ("RGB", "BGR"),
|
|
||||||
32: ("RGB", "BGRX"),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:2] == b"BM"
|
|
||||||
|
|
||||||
|
|
||||||
def _dib_accept(prefix):
|
|
||||||
return i32(prefix) in [12, 40, 64, 108, 124]
|
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Image plugin for the Windows BMP format.
|
|
||||||
# =============================================================================
|
|
||||||
class BmpImageFile(ImageFile.ImageFile):
|
|
||||||
"""Image plugin for the Windows Bitmap format (BMP)"""
|
|
||||||
|
|
||||||
# ------------------------------------------------------------- Description
|
|
||||||
format_description = "Windows Bitmap"
|
|
||||||
format = "BMP"
|
|
||||||
|
|
||||||
# -------------------------------------------------- BMP Compression values
|
|
||||||
COMPRESSIONS = {"RAW": 0, "RLE8": 1, "RLE4": 2, "BITFIELDS": 3, "JPEG": 4, "PNG": 5}
|
|
||||||
for k, v in COMPRESSIONS.items():
|
|
||||||
vars()[k] = v
|
|
||||||
|
|
||||||
def _bitmap(self, header=0, offset=0):
|
|
||||||
"""Read relevant info about the BMP"""
|
|
||||||
read, seek = self.fp.read, self.fp.seek
|
|
||||||
if header:
|
|
||||||
seek(header)
|
|
||||||
# read bmp header size @offset 14 (this is part of the header size)
|
|
||||||
file_info = {"header_size": i32(read(4)), "direction": -1}
|
|
||||||
|
|
||||||
# -------------------- If requested, read header at a specific position
|
|
||||||
# read the rest of the bmp header, without its size
|
|
||||||
header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)
|
|
||||||
|
|
||||||
# -------------------------------------------------- IBM OS/2 Bitmap v1
|
|
||||||
# ----- This format has different offsets because of width/height types
|
|
||||||
if file_info["header_size"] == 12:
|
|
||||||
file_info["width"] = i16(header_data, 0)
|
|
||||||
file_info["height"] = i16(header_data, 2)
|
|
||||||
file_info["planes"] = i16(header_data, 4)
|
|
||||||
file_info["bits"] = i16(header_data, 6)
|
|
||||||
file_info["compression"] = self.RAW
|
|
||||||
file_info["palette_padding"] = 3
|
|
||||||
|
|
||||||
# --------------------------------------------- Windows Bitmap v2 to v5
|
|
||||||
# v3, OS/2 v2, v4, v5
|
|
||||||
elif file_info["header_size"] in (40, 64, 108, 124):
|
|
||||||
file_info["y_flip"] = header_data[7] == 0xFF
|
|
||||||
file_info["direction"] = 1 if file_info["y_flip"] else -1
|
|
||||||
file_info["width"] = i32(header_data, 0)
|
|
||||||
file_info["height"] = (
|
|
||||||
i32(header_data, 4)
|
|
||||||
if not file_info["y_flip"]
|
|
||||||
else 2**32 - i32(header_data, 4)
|
|
||||||
)
|
|
||||||
file_info["planes"] = i16(header_data, 8)
|
|
||||||
file_info["bits"] = i16(header_data, 10)
|
|
||||||
file_info["compression"] = i32(header_data, 12)
|
|
||||||
# byte size of pixel data
|
|
||||||
file_info["data_size"] = i32(header_data, 16)
|
|
||||||
file_info["pixels_per_meter"] = (
|
|
||||||
i32(header_data, 20),
|
|
||||||
i32(header_data, 24),
|
|
||||||
)
|
|
||||||
file_info["colors"] = i32(header_data, 28)
|
|
||||||
file_info["palette_padding"] = 4
|
|
||||||
self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"])
|
|
||||||
if file_info["compression"] == self.BITFIELDS:
|
|
||||||
if len(header_data) >= 52:
|
|
||||||
for idx, mask in enumerate(
|
|
||||||
["r_mask", "g_mask", "b_mask", "a_mask"]
|
|
||||||
):
|
|
||||||
file_info[mask] = i32(header_data, 36 + idx * 4)
|
|
||||||
else:
|
|
||||||
# 40 byte headers only have the three components in the
|
|
||||||
# bitfields masks, ref:
|
|
||||||
# https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
|
|
||||||
# See also
|
|
||||||
# https://github.com/python-pillow/Pillow/issues/1293
|
|
||||||
# There is a 4th component in the RGBQuad, in the alpha
|
|
||||||
# location, but it is listed as a reserved component,
|
|
||||||
# and it is not generally an alpha channel
|
|
||||||
file_info["a_mask"] = 0x0
|
|
||||||
for mask in ["r_mask", "g_mask", "b_mask"]:
|
|
||||||
file_info[mask] = i32(read(4))
|
|
||||||
file_info["rgb_mask"] = (
|
|
||||||
file_info["r_mask"],
|
|
||||||
file_info["g_mask"],
|
|
||||||
file_info["b_mask"],
|
|
||||||
)
|
|
||||||
file_info["rgba_mask"] = (
|
|
||||||
file_info["r_mask"],
|
|
||||||
file_info["g_mask"],
|
|
||||||
file_info["b_mask"],
|
|
||||||
file_info["a_mask"],
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BMP header type ({file_info['header_size']})"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
# ------------------ Special case : header is reported 40, which
|
|
||||||
# ---------------------- is shorter than real size for bpp >= 16
|
|
||||||
self._size = file_info["width"], file_info["height"]
|
|
||||||
|
|
||||||
# ------- If color count was not found in the header, compute from bits
|
|
||||||
file_info["colors"] = (
|
|
||||||
file_info["colors"]
|
|
||||||
if file_info.get("colors", 0)
|
|
||||||
else (1 << file_info["bits"])
|
|
||||||
)
|
|
||||||
if offset == 14 + file_info["header_size"] and file_info["bits"] <= 8:
|
|
||||||
offset += 4 * file_info["colors"]
|
|
||||||
|
|
||||||
# ---------------------- Check bit depth for unusual unsupported values
|
|
||||||
self._mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None))
|
|
||||||
if self.mode is None:
|
|
||||||
msg = f"Unsupported BMP pixel depth ({file_info['bits']})"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
# ---------------- Process BMP with Bitfields compression (not palette)
|
|
||||||
decoder_name = "raw"
|
|
||||||
if file_info["compression"] == self.BITFIELDS:
|
|
||||||
SUPPORTED = {
|
|
||||||
32: [
|
|
||||||
(0xFF0000, 0xFF00, 0xFF, 0x0),
|
|
||||||
(0xFF000000, 0xFF0000, 0xFF00, 0x0),
|
|
||||||
(0xFF000000, 0xFF0000, 0xFF00, 0xFF),
|
|
||||||
(0xFF, 0xFF00, 0xFF0000, 0xFF000000),
|
|
||||||
(0xFF0000, 0xFF00, 0xFF, 0xFF000000),
|
|
||||||
(0x0, 0x0, 0x0, 0x0),
|
|
||||||
],
|
|
||||||
24: [(0xFF0000, 0xFF00, 0xFF)],
|
|
||||||
16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
|
|
||||||
}
|
|
||||||
MASK_MODES = {
|
|
||||||
(32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
|
|
||||||
(32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
|
|
||||||
(32, (0xFF000000, 0xFF0000, 0xFF00, 0xFF)): "ABGR",
|
|
||||||
(32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
|
|
||||||
(32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
|
|
||||||
(32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
|
|
||||||
(24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
|
|
||||||
(16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
|
|
||||||
(16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
|
|
||||||
}
|
|
||||||
if file_info["bits"] in SUPPORTED:
|
|
||||||
if (
|
|
||||||
file_info["bits"] == 32
|
|
||||||
and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
|
|
||||||
):
|
|
||||||
raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
|
|
||||||
self._mode = "RGBA" if "A" in raw_mode else self.mode
|
|
||||||
elif (
|
|
||||||
file_info["bits"] in (24, 16)
|
|
||||||
and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
|
|
||||||
):
|
|
||||||
raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
|
|
||||||
else:
|
|
||||||
msg = "Unsupported BMP bitfields layout"
|
|
||||||
raise OSError(msg)
|
|
||||||
else:
|
|
||||||
msg = "Unsupported BMP bitfields layout"
|
|
||||||
raise OSError(msg)
|
|
||||||
elif file_info["compression"] == self.RAW:
|
|
||||||
if file_info["bits"] == 32 and header == 22: # 32-bit .cur offset
|
|
||||||
raw_mode, self._mode = "BGRA", "RGBA"
|
|
||||||
elif file_info["compression"] in (self.RLE8, self.RLE4):
|
|
||||||
decoder_name = "bmp_rle"
|
|
||||||
else:
|
|
||||||
msg = f"Unsupported BMP compression ({file_info['compression']})"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
# --------------- Once the header is processed, process the palette/LUT
|
|
||||||
if self.mode == "P": # Paletted for 1, 4 and 8 bit images
|
|
||||||
# ---------------------------------------------------- 1-bit images
|
|
||||||
if not (0 < file_info["colors"] <= 65536):
|
|
||||||
msg = f"Unsupported BMP Palette size ({file_info['colors']})"
|
|
||||||
raise OSError(msg)
|
|
||||||
else:
|
|
||||||
padding = file_info["palette_padding"]
|
|
||||||
palette = read(padding * file_info["colors"])
|
|
||||||
grayscale = True
|
|
||||||
indices = (
|
|
||||||
(0, 255)
|
|
||||||
if file_info["colors"] == 2
|
|
||||||
else list(range(file_info["colors"]))
|
|
||||||
)
|
|
||||||
|
|
||||||
# ----------------- Check if grayscale and ignore palette if so
|
|
||||||
for ind, val in enumerate(indices):
|
|
||||||
rgb = palette[ind * padding : ind * padding + 3]
|
|
||||||
if rgb != o8(val) * 3:
|
|
||||||
grayscale = False
|
|
||||||
|
|
||||||
# ------- If all colors are gray, white or black, ditch palette
|
|
||||||
if grayscale:
|
|
||||||
self._mode = "1" if file_info["colors"] == 2 else "L"
|
|
||||||
raw_mode = self.mode
|
|
||||||
else:
|
|
||||||
self._mode = "P"
|
|
||||||
self.palette = ImagePalette.raw(
|
|
||||||
"BGRX" if padding == 4 else "BGR", palette
|
|
||||||
)
|
|
||||||
|
|
||||||
# ---------------------------- Finally set the tile data for the plugin
|
|
||||||
self.info["compression"] = file_info["compression"]
|
|
||||||
args = [raw_mode]
|
|
||||||
if decoder_name == "bmp_rle":
|
|
||||||
args.append(file_info["compression"] == self.RLE4)
|
|
||||||
else:
|
|
||||||
args.append(((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3))
|
|
||||||
args.append(file_info["direction"])
|
|
||||||
self.tile = [
|
|
||||||
(
|
|
||||||
decoder_name,
|
|
||||||
(0, 0, file_info["width"], file_info["height"]),
|
|
||||||
offset or self.fp.tell(),
|
|
||||||
tuple(args),
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
"""Open file, check magic number and read header"""
|
|
||||||
# read 14 bytes: magic number, filesize, reserved, header final offset
|
|
||||||
head_data = self.fp.read(14)
|
|
||||||
# choke if the file does not have the required magic bytes
|
|
||||||
if not _accept(head_data):
|
|
||||||
msg = "Not a BMP file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
# read the start position of the BMP image data (u32)
|
|
||||||
offset = i32(head_data, 10)
|
|
||||||
# load bitmap information (offset=raster info)
|
|
||||||
self._bitmap(offset=offset)
|
|
||||||
|
|
||||||
|
|
||||||
class BmpRleDecoder(ImageFile.PyDecoder):
|
|
||||||
_pulls_fd = True
|
|
||||||
|
|
||||||
def decode(self, buffer):
|
|
||||||
rle4 = self.args[1]
|
|
||||||
data = bytearray()
|
|
||||||
x = 0
|
|
||||||
while len(data) < self.state.xsize * self.state.ysize:
|
|
||||||
pixels = self.fd.read(1)
|
|
||||||
byte = self.fd.read(1)
|
|
||||||
if not pixels or not byte:
|
|
||||||
break
|
|
||||||
num_pixels = pixels[0]
|
|
||||||
if num_pixels:
|
|
||||||
# encoded mode
|
|
||||||
if x + num_pixels > self.state.xsize:
|
|
||||||
# Too much data for row
|
|
||||||
num_pixels = max(0, self.state.xsize - x)
|
|
||||||
if rle4:
|
|
||||||
first_pixel = o8(byte[0] >> 4)
|
|
||||||
second_pixel = o8(byte[0] & 0x0F)
|
|
||||||
for index in range(num_pixels):
|
|
||||||
if index % 2 == 0:
|
|
||||||
data += first_pixel
|
|
||||||
else:
|
|
||||||
data += second_pixel
|
|
||||||
else:
|
|
||||||
data += byte * num_pixels
|
|
||||||
x += num_pixels
|
|
||||||
else:
|
|
||||||
if byte[0] == 0:
|
|
||||||
# end of line
|
|
||||||
while len(data) % self.state.xsize != 0:
|
|
||||||
data += b"\x00"
|
|
||||||
x = 0
|
|
||||||
elif byte[0] == 1:
|
|
||||||
# end of bitmap
|
|
||||||
break
|
|
||||||
elif byte[0] == 2:
|
|
||||||
# delta
|
|
||||||
bytes_read = self.fd.read(2)
|
|
||||||
if len(bytes_read) < 2:
|
|
||||||
break
|
|
||||||
right, up = self.fd.read(2)
|
|
||||||
data += b"\x00" * (right + up * self.state.xsize)
|
|
||||||
x = len(data) % self.state.xsize
|
|
||||||
else:
|
|
||||||
# absolute mode
|
|
||||||
if rle4:
|
|
||||||
# 2 pixels per byte
|
|
||||||
byte_count = byte[0] // 2
|
|
||||||
bytes_read = self.fd.read(byte_count)
|
|
||||||
for byte_read in bytes_read:
|
|
||||||
data += o8(byte_read >> 4)
|
|
||||||
data += o8(byte_read & 0x0F)
|
|
||||||
else:
|
|
||||||
byte_count = byte[0]
|
|
||||||
bytes_read = self.fd.read(byte_count)
|
|
||||||
data += bytes_read
|
|
||||||
if len(bytes_read) < byte_count:
|
|
||||||
break
|
|
||||||
x += byte[0]
|
|
||||||
|
|
||||||
# align to 16-bit word boundary
|
|
||||||
if self.fd.tell() % 2 != 0:
|
|
||||||
self.fd.seek(1, os.SEEK_CUR)
|
|
||||||
rawmode = "L" if self.mode == "L" else "P"
|
|
||||||
self.set_as_raw(bytes(data), (rawmode, 0, self.args[-1]))
|
|
||||||
return -1, 0
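
# Hedged illustration (not part of the original module) of the three RLE8 cases
# handled by the decoder above, using hypothetical byte sequences:
#   b"\x03\x41"                  encoded mode: pixel 0x41 repeated 3 times
#   b"\x00\x00"                  escape, end of line: the row is padded with zeros
#   b"\x00\x03\x41\x42\x43\x00"  escape, absolute mode: 3 literal pixels plus a
#                                pad byte to stay on a 16-bit word boundary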
|
|
||||||
|
|
||||||
|
|
||||||
# =============================================================================
|
|
||||||
# Image plugin for the DIB format (BMP alias)
|
|
||||||
# =============================================================================
|
|
||||||
class DibImageFile(BmpImageFile):
|
|
||||||
format = "DIB"
|
|
||||||
format_description = "Windows Bitmap"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
self._bitmap()
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Write BMP file
|
|
||||||
|
|
||||||
|
|
||||||
SAVE = {
|
|
||||||
"1": ("1", 1, 2),
|
|
||||||
"L": ("L", 8, 256),
|
|
||||||
"P": ("P", 8, 256),
|
|
||||||
"RGB": ("BGR", 24, 0),
|
|
||||||
"RGBA": ("BGRA", 32, 0),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _dib_save(im, fp, filename):
|
|
||||||
_save(im, fp, filename, False)
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename, bitmap_header=True):
|
|
||||||
try:
|
|
||||||
rawmode, bits, colors = SAVE[im.mode]
|
|
||||||
except KeyError as e:
|
|
||||||
msg = f"cannot write mode {im.mode} as BMP"
|
|
||||||
raise OSError(msg) from e
|
|
||||||
|
|
||||||
info = im.encoderinfo
|
|
||||||
|
|
||||||
dpi = info.get("dpi", (96, 96))
|
|
||||||
|
|
||||||
# 1 meter == 39.3701 inches
|
|
||||||
ppm = tuple(int(x * 39.3701 + 0.5) for x in dpi)
|
|
||||||
|
|
||||||
stride = ((im.size[0] * bits + 7) // 8 + 3) & (~3)
|
|
||||||
header = 40 # or 64 for OS/2 version 2
|
|
||||||
image = stride * im.size[1]
|
|
||||||
|
|
||||||
if im.mode == "1":
|
|
||||||
palette = b"".join(o8(i) * 4 for i in (0, 255))
|
|
||||||
elif im.mode == "L":
|
|
||||||
palette = b"".join(o8(i) * 4 for i in range(256))
|
|
||||||
elif im.mode == "P":
|
|
||||||
palette = im.im.getpalette("RGB", "BGRX")
|
|
||||||
colors = len(palette) // 4
|
|
||||||
else:
|
|
||||||
palette = None
|
|
||||||
|
|
||||||
# bitmap header
|
|
||||||
if bitmap_header:
|
|
||||||
offset = 14 + header + colors * 4
|
|
||||||
file_size = offset + image
|
|
||||||
if file_size > 2**32 - 1:
|
|
||||||
msg = "File size is too large for the BMP format"
|
|
||||||
raise ValueError(msg)
|
|
||||||
fp.write(
|
|
||||||
b"BM" # file type (magic)
|
|
||||||
+ o32(file_size) # file size
|
|
||||||
+ o32(0) # reserved
|
|
||||||
+ o32(offset) # image data offset
|
|
||||||
)
|
|
||||||
|
|
||||||
# bitmap info header
|
|
||||||
fp.write(
|
|
||||||
o32(header) # info header size
|
|
||||||
+ o32(im.size[0]) # width
|
|
||||||
+ o32(im.size[1]) # height
|
|
||||||
+ o16(1) # planes
|
|
||||||
+ o16(bits) # depth
|
|
||||||
+ o32(0) # compression (0=uncompressed)
|
|
||||||
+ o32(image) # size of bitmap
|
|
||||||
+ o32(ppm[0]) # resolution
|
|
||||||
+ o32(ppm[1]) # resolution
|
|
||||||
+ o32(colors) # colors used
|
|
||||||
+ o32(colors) # colors important
|
|
||||||
)
|
|
||||||
|
|
||||||
fp.write(b"\0" * (header - 40)) # padding (for OS/2 format)
|
|
||||||
|
|
||||||
if palette:
|
|
||||||
fp.write(palette)
|
|
||||||
|
|
||||||
ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, stride, -1))])
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(BmpImageFile.format, BmpImageFile, _accept)
|
|
||||||
Image.register_save(BmpImageFile.format, _save)
|
|
||||||
|
|
||||||
Image.register_extension(BmpImageFile.format, ".bmp")
|
|
||||||
|
|
||||||
Image.register_mime(BmpImageFile.format, "image/bmp")
|
|
||||||
|
|
||||||
Image.register_decoder("bmp_rle", BmpRleDecoder)
|
|
||||||
|
|
||||||
Image.register_open(DibImageFile.format, DibImageFile, _dib_accept)
|
|
||||||
Image.register_save(DibImageFile.format, _dib_save)
|
|
||||||
|
|
||||||
Image.register_extension(DibImageFile.format, ".dib")
|
|
||||||
|
|
||||||
Image.register_mime(DibImageFile.format, "image/bmp")
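
# --- Usage sketch (not part of the original module) --------------------------
# Hedged example of the dpi handling in _save(): a "dpi" value passed to save()
# is converted to pixels per metre (1 metre == 39.3701 inches) and stored in the
# info header.  The file name and dpi value are hypothetical.
def _example_bmp_dpi():
    im = Image.new("RGB", (4, 4))
    im.save("out.bmp", dpi=(300, 300))  # written as int(300 * 39.3701 + 0.5) ppm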
|
|
||||||
@@ -1,74 +0,0 @@
|
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# BUFR stub adapter
|
|
||||||
#
|
|
||||||
# Copyright (c) 1996-2003 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from . import Image, ImageFile
|
|
||||||
|
|
||||||
_handler = None
|
|
||||||
|
|
||||||
|
|
||||||
def register_handler(handler):
|
|
||||||
"""
|
|
||||||
Install application-specific BUFR image handler.
|
|
||||||
|
|
||||||
:param handler: Handler object.
|
|
||||||
"""
|
|
||||||
global _handler
|
|
||||||
_handler = handler
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Image adapter
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC"
|
|
||||||
|
|
||||||
|
|
||||||
class BufrStubImageFile(ImageFile.StubImageFile):
|
|
||||||
format = "BUFR"
|
|
||||||
format_description = "BUFR"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
offset = self.fp.tell()
|
|
||||||
|
|
||||||
if not _accept(self.fp.read(4)):
|
|
||||||
msg = "Not a BUFR file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
self.fp.seek(offset)
|
|
||||||
|
|
||||||
# make something up
|
|
||||||
self._mode = "F"
|
|
||||||
self._size = 1, 1
|
|
||||||
|
|
||||||
loader = self._load()
|
|
||||||
if loader:
|
|
||||||
loader.open(self)
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
return _handler
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
|
|
||||||
if _handler is None or not hasattr(_handler, "save"):
|
|
||||||
msg = "BUFR save handler not installed"
|
|
||||||
raise OSError(msg)
|
|
||||||
_handler.save(im, fp, filename)
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Registry
|
|
||||||
|
|
||||||
Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept)
|
|
||||||
Image.register_save(BufrStubImageFile.format, _save)
|
|
||||||
|
|
||||||
Image.register_extension(BufrStubImageFile.format, ".bufr")
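
# --- Usage sketch (not part of the original module) --------------------------
# BUFR decoding is delegated entirely to an application-supplied handler.  The
# class below is a hypothetical, hedged outline of the shape such a handler
# might take; the real protocol is whatever register_handler() callers provide.
class _ExampleBufrHandler:
    def open(self, im):
        pass  # inspect im.fp and set up real size/mode/tile information

    def save(self, im, fp, filename):
        raise NotImplementedError  # write im back out as BUFR


# register_handler(_ExampleBufrHandler())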
|
|
||||||
@@ -1,121 +0,0 @@
|
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# a class to read from a container file
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1995-06-18 fl Created
|
|
||||||
# 1995-09-07 fl Added readline(), readlines()
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2001 by Secret Labs AB
|
|
||||||
# Copyright (c) 1995 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
from typing import IO, AnyStr, Generic, Literal
|
|
||||||
|
|
||||||
|
|
||||||
class ContainerIO(Generic[AnyStr]):
|
|
||||||
"""
|
|
||||||
A file object that provides read access to a part of an existing
|
|
||||||
file (for example a TAR file).
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, file: IO[AnyStr], offset: int, length: int) -> None:
|
|
||||||
"""
|
|
||||||
Create file object.
|
|
||||||
|
|
||||||
:param file: Existing file.
|
|
||||||
:param offset: Start of region, in bytes.
|
|
||||||
:param length: Size of region, in bytes.
|
|
||||||
"""
|
|
||||||
self.fh: IO[AnyStr] = file
|
|
||||||
self.pos = 0
|
|
||||||
self.offset = offset
|
|
||||||
self.length = length
|
|
||||||
self.fh.seek(offset)
|
|
||||||
|
|
||||||
##
|
|
||||||
# Always false.
|
|
||||||
|
|
||||||
def isatty(self) -> bool:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def seek(self, offset: int, mode: Literal[0, 1, 2] = io.SEEK_SET) -> None:
|
|
||||||
"""
|
|
||||||
Move file pointer.
|
|
||||||
|
|
||||||
:param offset: Offset in bytes.
|
|
||||||
:param mode: Starting position. Use 0 for beginning of region, 1
|
|
||||||
for current offset, and 2 for end of region. You cannot move
|
|
||||||
the pointer outside the defined region.
|
|
||||||
"""
|
|
||||||
if mode == 1:
|
|
||||||
self.pos = self.pos + offset
|
|
||||||
elif mode == 2:
|
|
||||||
self.pos = self.length + offset
|
|
||||||
else:
|
|
||||||
self.pos = offset
|
|
||||||
# clamp
|
|
||||||
self.pos = max(0, min(self.pos, self.length))
|
|
||||||
self.fh.seek(self.offset + self.pos)
|
|
||||||
|
|
||||||
def tell(self) -> int:
|
|
||||||
"""
|
|
||||||
Get current file pointer.
|
|
||||||
|
|
||||||
:returns: Offset from start of region, in bytes.
|
|
||||||
"""
|
|
||||||
return self.pos
|
|
||||||
|
|
||||||
def read(self, n: int = 0) -> AnyStr:
|
|
||||||
"""
|
|
||||||
Read data.
|
|
||||||
|
|
||||||
:param n: Number of bytes to read. If omitted or zero,
|
|
||||||
read until end of region.
|
|
||||||
:returns: An 8-bit string.
|
|
||||||
"""
|
|
||||||
if n:
|
|
||||||
n = min(n, self.length - self.pos)
|
|
||||||
else:
|
|
||||||
n = self.length - self.pos
|
|
||||||
if not n: # EOF
|
|
||||||
return b"" if "b" in self.fh.mode else "" # type: ignore[return-value]
|
|
||||||
self.pos = self.pos + n
|
|
||||||
return self.fh.read(n)
|
|
||||||
|
|
||||||
def readline(self) -> AnyStr:
|
|
||||||
"""
|
|
||||||
Read a line of text.
|
|
||||||
|
|
||||||
:returns: An 8-bit string.
|
|
||||||
"""
|
|
||||||
s: AnyStr = b"" if "b" in self.fh.mode else "" # type: ignore[assignment]
|
|
||||||
newline_character = b"\n" if "b" in self.fh.mode else "\n"
|
|
||||||
while True:
|
|
||||||
c = self.read(1)
|
|
||||||
if not c:
|
|
||||||
break
|
|
||||||
s = s + c
|
|
||||||
if c == newline_character:
|
|
||||||
break
|
|
||||||
return s
|
|
||||||
|
|
||||||
def readlines(self) -> list[AnyStr]:
|
|
||||||
"""
|
|
||||||
Read multiple lines of text.
|
|
||||||
|
|
||||||
:returns: A list of 8-bit strings.
|
|
||||||
"""
|
|
||||||
lines = []
|
|
||||||
while True:
|
|
||||||
s = self.readline()
|
|
||||||
if not s:
|
|
||||||
break
|
|
||||||
lines.append(s)
|
|
||||||
return lines
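
# --- Usage sketch (not part of the original module) --------------------------
# Hedged example: expose a 512-byte region starting at offset 128 of a larger
# file as its own read-only, file-like object.  The file name and offsets are
# hypothetical.
def _example_container_read():
    with open("archive.bin", "rb") as fh:
        region = ContainerIO(fh, 128, 512)
        header = region.read(16)  # reads never run past the 512-byte region
        region.seek(0)  # seeks are relative to the start of the region
        return header, region.read()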
|
|
||||||
@@ -1,75 +0,0 @@
|
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# Windows Cursor support for PIL
|
|
||||||
#
|
|
||||||
# notes:
|
|
||||||
# uses BmpImagePlugin.py to read the bitmap data.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 96-05-27 fl Created
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1996.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from . import BmpImagePlugin, Image
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == b"\0\0\2\0"
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Windows Cursor files.
|
|
||||||
|
|
||||||
|
|
||||||
class CurImageFile(BmpImagePlugin.BmpImageFile):
|
|
||||||
format = "CUR"
|
|
||||||
format_description = "Windows Cursor"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
offset = self.fp.tell()
|
|
||||||
|
|
||||||
# check magic
|
|
||||||
s = self.fp.read(6)
|
|
||||||
if not _accept(s):
|
|
||||||
msg = "not a CUR file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
# pick the largest cursor in the file
|
|
||||||
m = b""
|
|
||||||
for i in range(i16(s, 4)):
|
|
||||||
s = self.fp.read(16)
|
|
||||||
if not m:
|
|
||||||
m = s
|
|
||||||
elif s[0] > m[0] and s[1] > m[1]:
|
|
||||||
m = s
|
|
||||||
if not m:
|
|
||||||
msg = "No cursors were found"
|
|
||||||
raise TypeError(msg)
|
|
||||||
|
|
||||||
# load as bitmap
|
|
||||||
self._bitmap(i32(m, 12) + offset)
|
|
||||||
|
|
||||||
# patch up the bitmap height
|
|
||||||
self._size = self.size[0], self.size[1] // 2
|
|
||||||
d, e, o, a = self.tile[0]
|
|
||||||
self.tile[0] = d, (0, 0) + self.size, o, a
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
Image.register_open(CurImageFile.format, CurImageFile, _accept)
|
|
||||||
|
|
||||||
Image.register_extension(CurImageFile.format, ".cur")
|
|
||||||
@@ -1,80 +0,0 @@
|
#
# The Python Imaging Library.
# $Id$
#
# DCX file handling
#
# DCX is a container file format defined by Intel, commonly used
# for fax applications. Each DCX file consists of a directory
# (a list of file offsets) followed by a set of (usually 1-bit)
# PCX files.
#
# History:
# 1995-09-09 fl Created
# 1996-03-20 fl Properly derived from PcxImageFile.
# 1998-07-15 fl Renamed offset attribute to avoid name clash
# 2002-07-30 fl Fixed file handling
#
# Copyright (c) 1997-98 by Secret Labs AB.
# Copyright (c) 1995-96 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations
|
|
||||||
|
|
||||||
from . import Image
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from .PcxImagePlugin import PcxImageFile
|
|
||||||
|
|
||||||
MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then?
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return len(prefix) >= 4 and i32(prefix) == MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the Intel DCX format.
|
|
||||||
|
|
||||||
|
|
||||||
class DcxImageFile(PcxImageFile):
|
|
||||||
format = "DCX"
|
|
||||||
format_description = "Intel DCX"
|
|
||||||
_close_exclusive_fp_after_loading = False
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
# Header
|
|
||||||
s = self.fp.read(4)
|
|
||||||
if not _accept(s):
|
|
||||||
msg = "not a DCX file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
# Component directory
|
|
||||||
self._offset = []
|
|
||||||
for i in range(1024):
|
|
||||||
offset = i32(self.fp.read(4))
|
|
||||||
if not offset:
|
|
||||||
break
|
|
||||||
self._offset.append(offset)
|
|
||||||
|
|
||||||
self._fp = self.fp
|
|
||||||
self.frame = None
|
|
||||||
self.n_frames = len(self._offset)
|
|
||||||
self.is_animated = self.n_frames > 1
|
|
||||||
self.seek(0)
|
|
||||||
|
|
||||||
def seek(self, frame):
|
|
||||||
if not self._seek_check(frame):
|
|
||||||
return
|
|
||||||
self.frame = frame
|
|
||||||
self.fp = self._fp
|
|
||||||
self.fp.seek(self._offset[frame])
|
|
||||||
PcxImageFile._open(self)
|
|
||||||
|
|
||||||
def tell(self):
|
|
||||||
return self.frame
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(DcxImageFile.format, DcxImageFile, _accept)
|
|
||||||
|
|
||||||
Image.register_extension(DcxImageFile.format, ".dcx")
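
# --- Usage sketch (not part of the original module) --------------------------
# Hedged example of walking the PCX frames stored in a DCX container; the file
# name is hypothetical.
def _example_dcx_frames():
    im = Image.open("fax.dcx")
    for frame in range(im.n_frames):
        im.seek(frame)
        im.load()  # each frame is decoded as an ordinary PCX page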
|
|
||||||
@@ -1,566 +0,0 @@
|
"""
|
|
||||||
A Pillow loader for .dds files (S3TC-compressed aka DXTC)
|
|
||||||
Jerome Leclanche <jerome@leclan.ch>
|
|
||||||
|
|
||||||
Documentation:
|
|
||||||
https://web.archive.org/web/20170802060935/http://oss.sgi.com/projects/ogl-sample/registry/EXT/texture_compression_s3tc.txt
|
|
||||||
|
|
||||||
The contents of this file are hereby released in the public domain (CC0)
|
|
||||||
Full text of the CC0 license:
|
|
||||||
https://creativecommons.org/publicdomain/zero/1.0/
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
import struct
|
|
||||||
import sys
|
|
||||||
from enum import IntEnum, IntFlag
|
|
||||||
|
|
||||||
from . import Image, ImageFile, ImagePalette
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._binary import o8
|
|
||||||
from ._binary import o32le as o32
|
|
||||||
|
|
||||||
# Magic ("DDS ")
|
|
||||||
DDS_MAGIC = 0x20534444
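
# Hedged illustration (not part of the original module): the constant above is
# simply the four bytes b"DDS " interpreted as a little-endian 32-bit integer,
# i.e. struct.unpack("<I", b"DDS ")[0] == 0x20534444.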
|
|
||||||
|
|
||||||
|
|
||||||
# DDS flags
|
|
||||||
class DDSD(IntFlag):
|
|
||||||
CAPS = 0x1
|
|
||||||
HEIGHT = 0x2
|
|
||||||
WIDTH = 0x4
|
|
||||||
PITCH = 0x8
|
|
||||||
PIXELFORMAT = 0x1000
|
|
||||||
MIPMAPCOUNT = 0x20000
|
|
||||||
LINEARSIZE = 0x80000
|
|
||||||
DEPTH = 0x800000
|
|
||||||
|
|
||||||
|
|
||||||
# DDS caps
|
|
||||||
class DDSCAPS(IntFlag):
|
|
||||||
COMPLEX = 0x8
|
|
||||||
TEXTURE = 0x1000
|
|
||||||
MIPMAP = 0x400000
|
|
||||||
|
|
||||||
|
|
||||||
class DDSCAPS2(IntFlag):
|
|
||||||
CUBEMAP = 0x200
|
|
||||||
CUBEMAP_POSITIVEX = 0x400
|
|
||||||
CUBEMAP_NEGATIVEX = 0x800
|
|
||||||
CUBEMAP_POSITIVEY = 0x1000
|
|
||||||
CUBEMAP_NEGATIVEY = 0x2000
|
|
||||||
CUBEMAP_POSITIVEZ = 0x4000
|
|
||||||
CUBEMAP_NEGATIVEZ = 0x8000
|
|
||||||
VOLUME = 0x200000
|
|
||||||
|
|
||||||
|
|
||||||
# Pixel Format
|
|
||||||
class DDPF(IntFlag):
|
|
||||||
ALPHAPIXELS = 0x1
|
|
||||||
ALPHA = 0x2
|
|
||||||
FOURCC = 0x4
|
|
||||||
PALETTEINDEXED8 = 0x20
|
|
||||||
RGB = 0x40
|
|
||||||
LUMINANCE = 0x20000
|
|
||||||
|
|
||||||
|
|
||||||
# dxgiformat.h
|
|
||||||
class DXGI_FORMAT(IntEnum):
|
|
||||||
UNKNOWN = 0
|
|
||||||
R32G32B32A32_TYPELESS = 1
|
|
||||||
R32G32B32A32_FLOAT = 2
|
|
||||||
R32G32B32A32_UINT = 3
|
|
||||||
R32G32B32A32_SINT = 4
|
|
||||||
R32G32B32_TYPELESS = 5
|
|
||||||
R32G32B32_FLOAT = 6
|
|
||||||
R32G32B32_UINT = 7
|
|
||||||
R32G32B32_SINT = 8
|
|
||||||
R16G16B16A16_TYPELESS = 9
|
|
||||||
R16G16B16A16_FLOAT = 10
|
|
||||||
R16G16B16A16_UNORM = 11
|
|
||||||
R16G16B16A16_UINT = 12
|
|
||||||
R16G16B16A16_SNORM = 13
|
|
||||||
R16G16B16A16_SINT = 14
|
|
||||||
R32G32_TYPELESS = 15
|
|
||||||
R32G32_FLOAT = 16
|
|
||||||
R32G32_UINT = 17
|
|
||||||
R32G32_SINT = 18
|
|
||||||
R32G8X24_TYPELESS = 19
|
|
||||||
D32_FLOAT_S8X24_UINT = 20
|
|
||||||
R32_FLOAT_X8X24_TYPELESS = 21
|
|
||||||
X32_TYPELESS_G8X24_UINT = 22
|
|
||||||
R10G10B10A2_TYPELESS = 23
|
|
||||||
R10G10B10A2_UNORM = 24
|
|
||||||
R10G10B10A2_UINT = 25
|
|
||||||
R11G11B10_FLOAT = 26
|
|
||||||
R8G8B8A8_TYPELESS = 27
|
|
||||||
R8G8B8A8_UNORM = 28
|
|
||||||
R8G8B8A8_UNORM_SRGB = 29
|
|
||||||
R8G8B8A8_UINT = 30
|
|
||||||
R8G8B8A8_SNORM = 31
|
|
||||||
R8G8B8A8_SINT = 32
|
|
||||||
R16G16_TYPELESS = 33
|
|
||||||
R16G16_FLOAT = 34
|
|
||||||
R16G16_UNORM = 35
|
|
||||||
R16G16_UINT = 36
|
|
||||||
R16G16_SNORM = 37
|
|
||||||
R16G16_SINT = 38
|
|
||||||
R32_TYPELESS = 39
|
|
||||||
D32_FLOAT = 40
|
|
||||||
R32_FLOAT = 41
|
|
||||||
R32_UINT = 42
|
|
||||||
R32_SINT = 43
|
|
||||||
R24G8_TYPELESS = 44
|
|
||||||
D24_UNORM_S8_UINT = 45
|
|
||||||
R24_UNORM_X8_TYPELESS = 46
|
|
||||||
X24_TYPELESS_G8_UINT = 47
|
|
||||||
R8G8_TYPELESS = 48
|
|
||||||
R8G8_UNORM = 49
|
|
||||||
R8G8_UINT = 50
|
|
||||||
R8G8_SNORM = 51
|
|
||||||
R8G8_SINT = 52
|
|
||||||
R16_TYPELESS = 53
|
|
||||||
R16_FLOAT = 54
|
|
||||||
D16_UNORM = 55
|
|
||||||
R16_UNORM = 56
|
|
||||||
R16_UINT = 57
|
|
||||||
R16_SNORM = 58
|
|
||||||
R16_SINT = 59
|
|
||||||
R8_TYPELESS = 60
|
|
||||||
R8_UNORM = 61
|
|
||||||
R8_UINT = 62
|
|
||||||
R8_SNORM = 63
|
|
||||||
R8_SINT = 64
|
|
||||||
A8_UNORM = 65
|
|
||||||
R1_UNORM = 66
|
|
||||||
R9G9B9E5_SHAREDEXP = 67
|
|
||||||
R8G8_B8G8_UNORM = 68
|
|
||||||
G8R8_G8B8_UNORM = 69
|
|
||||||
BC1_TYPELESS = 70
|
|
||||||
BC1_UNORM = 71
|
|
||||||
BC1_UNORM_SRGB = 72
|
|
||||||
BC2_TYPELESS = 73
|
|
||||||
BC2_UNORM = 74
|
|
||||||
BC2_UNORM_SRGB = 75
|
|
||||||
BC3_TYPELESS = 76
|
|
||||||
BC3_UNORM = 77
|
|
||||||
BC3_UNORM_SRGB = 78
|
|
||||||
BC4_TYPELESS = 79
|
|
||||||
BC4_UNORM = 80
|
|
||||||
BC4_SNORM = 81
|
|
||||||
BC5_TYPELESS = 82
|
|
||||||
BC5_UNORM = 83
|
|
||||||
BC5_SNORM = 84
|
|
||||||
B5G6R5_UNORM = 85
|
|
||||||
B5G5R5A1_UNORM = 86
|
|
||||||
B8G8R8A8_UNORM = 87
|
|
||||||
B8G8R8X8_UNORM = 88
|
|
||||||
R10G10B10_XR_BIAS_A2_UNORM = 89
|
|
||||||
B8G8R8A8_TYPELESS = 90
|
|
||||||
B8G8R8A8_UNORM_SRGB = 91
|
|
||||||
B8G8R8X8_TYPELESS = 92
|
|
||||||
B8G8R8X8_UNORM_SRGB = 93
|
|
||||||
BC6H_TYPELESS = 94
|
|
||||||
BC6H_UF16 = 95
|
|
||||||
BC6H_SF16 = 96
|
|
||||||
BC7_TYPELESS = 97
|
|
||||||
BC7_UNORM = 98
|
|
||||||
BC7_UNORM_SRGB = 99
|
|
||||||
AYUV = 100
|
|
||||||
Y410 = 101
|
|
||||||
Y416 = 102
|
|
||||||
NV12 = 103
|
|
||||||
P010 = 104
|
|
||||||
P016 = 105
|
|
||||||
OPAQUE_420 = 106
|
|
||||||
YUY2 = 107
|
|
||||||
Y210 = 108
|
|
||||||
Y216 = 109
|
|
||||||
NV11 = 110
|
|
||||||
AI44 = 111
|
|
||||||
IA44 = 112
|
|
||||||
P8 = 113
|
|
||||||
A8P8 = 114
|
|
||||||
B4G4R4A4_UNORM = 115
|
|
||||||
P208 = 130
|
|
||||||
V208 = 131
|
|
||||||
V408 = 132
|
|
||||||
SAMPLER_FEEDBACK_MIN_MIP_OPAQUE = 189
|
|
||||||
SAMPLER_FEEDBACK_MIP_REGION_USED_OPAQUE = 190
|
|
||||||
|
|
||||||
|
|
||||||
class D3DFMT(IntEnum):
|
|
||||||
UNKNOWN = 0
|
|
||||||
R8G8B8 = 20
|
|
||||||
A8R8G8B8 = 21
|
|
||||||
X8R8G8B8 = 22
|
|
||||||
R5G6B5 = 23
|
|
||||||
X1R5G5B5 = 24
|
|
||||||
A1R5G5B5 = 25
|
|
||||||
A4R4G4B4 = 26
|
|
||||||
R3G3B2 = 27
|
|
||||||
A8 = 28
|
|
||||||
A8R3G3B2 = 29
|
|
||||||
X4R4G4B4 = 30
|
|
||||||
A2B10G10R10 = 31
|
|
||||||
A8B8G8R8 = 32
|
|
||||||
X8B8G8R8 = 33
|
|
||||||
G16R16 = 34
|
|
||||||
A2R10G10B10 = 35
|
|
||||||
A16B16G16R16 = 36
|
|
||||||
A8P8 = 40
|
|
||||||
P8 = 41
|
|
||||||
L8 = 50
|
|
||||||
A8L8 = 51
|
|
||||||
A4L4 = 52
|
|
||||||
V8U8 = 60
|
|
||||||
L6V5U5 = 61
|
|
||||||
X8L8V8U8 = 62
|
|
||||||
Q8W8V8U8 = 63
|
|
||||||
V16U16 = 64
|
|
||||||
A2W10V10U10 = 67
|
|
||||||
D16_LOCKABLE = 70
|
|
||||||
D32 = 71
|
|
||||||
D15S1 = 73
|
|
||||||
D24S8 = 75
|
|
||||||
D24X8 = 77
|
|
||||||
D24X4S4 = 79
|
|
||||||
D16 = 80
|
|
||||||
D32F_LOCKABLE = 82
|
|
||||||
D24FS8 = 83
|
|
||||||
D32_LOCKABLE = 84
|
|
||||||
S8_LOCKABLE = 85
|
|
||||||
L16 = 81
|
|
||||||
VERTEXDATA = 100
|
|
||||||
INDEX16 = 101
|
|
||||||
INDEX32 = 102
|
|
||||||
Q16W16V16U16 = 110
|
|
||||||
R16F = 111
|
|
||||||
G16R16F = 112
|
|
||||||
A16B16G16R16F = 113
|
|
||||||
R32F = 114
|
|
||||||
G32R32F = 115
|
|
||||||
A32B32G32R32F = 116
|
|
||||||
CxV8U8 = 117
|
|
||||||
A1 = 118
|
|
||||||
    A2B10G10R10_XR_BIAS = 119
    BINARYBUFFER = 199

    UYVY = i32(b"UYVY")
    R8G8_B8G8 = i32(b"RGBG")
    YUY2 = i32(b"YUY2")
    G8R8_G8B8 = i32(b"GRGB")
    DXT1 = i32(b"DXT1")
    DXT2 = i32(b"DXT2")
    DXT3 = i32(b"DXT3")
    DXT4 = i32(b"DXT4")
    DXT5 = i32(b"DXT5")
    DX10 = i32(b"DX10")
    BC4S = i32(b"BC4S")
    BC4U = i32(b"BC4U")
    BC5S = i32(b"BC5S")
    BC5U = i32(b"BC5U")
    ATI1 = i32(b"ATI1")
    ATI2 = i32(b"ATI2")
    MULTI2_ARGB8 = i32(b"MET1")


# Backward compatibility layer
module = sys.modules[__name__]
for item in DDSD:
    setattr(module, "DDSD_" + item.name, item.value)
for item in DDSCAPS:
    setattr(module, "DDSCAPS_" + item.name, item.value)
for item in DDSCAPS2:
    setattr(module, "DDSCAPS2_" + item.name, item.value)
for item in DDPF:
    setattr(module, "DDPF_" + item.name, item.value)

DDS_FOURCC = DDPF.FOURCC
DDS_RGB = DDPF.RGB
DDS_RGBA = DDPF.RGB | DDPF.ALPHAPIXELS
DDS_LUMINANCE = DDPF.LUMINANCE
DDS_LUMINANCEA = DDPF.LUMINANCE | DDPF.ALPHAPIXELS
DDS_ALPHA = DDPF.ALPHA
DDS_PAL8 = DDPF.PALETTEINDEXED8

DDS_HEADER_FLAGS_TEXTURE = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PIXELFORMAT
DDS_HEADER_FLAGS_MIPMAP = DDSD.MIPMAPCOUNT
DDS_HEADER_FLAGS_VOLUME = DDSD.DEPTH
DDS_HEADER_FLAGS_PITCH = DDSD.PITCH
DDS_HEADER_FLAGS_LINEARSIZE = DDSD.LINEARSIZE

DDS_HEIGHT = DDSD.HEIGHT
DDS_WIDTH = DDSD.WIDTH

DDS_SURFACE_FLAGS_TEXTURE = DDSCAPS.TEXTURE
DDS_SURFACE_FLAGS_MIPMAP = DDSCAPS.COMPLEX | DDSCAPS.MIPMAP
DDS_SURFACE_FLAGS_CUBEMAP = DDSCAPS.COMPLEX

DDS_CUBEMAP_POSITIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEX
DDS_CUBEMAP_NEGATIVEX = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEX
DDS_CUBEMAP_POSITIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEY
DDS_CUBEMAP_NEGATIVEY = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEY
DDS_CUBEMAP_POSITIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_POSITIVEZ
DDS_CUBEMAP_NEGATIVEZ = DDSCAPS2.CUBEMAP | DDSCAPS2.CUBEMAP_NEGATIVEZ

DXT1_FOURCC = D3DFMT.DXT1
DXT3_FOURCC = D3DFMT.DXT3
DXT5_FOURCC = D3DFMT.DXT5

DXGI_FORMAT_R8G8B8A8_TYPELESS = DXGI_FORMAT.R8G8B8A8_TYPELESS
DXGI_FORMAT_R8G8B8A8_UNORM = DXGI_FORMAT.R8G8B8A8_UNORM
DXGI_FORMAT_R8G8B8A8_UNORM_SRGB = DXGI_FORMAT.R8G8B8A8_UNORM_SRGB
DXGI_FORMAT_BC5_TYPELESS = DXGI_FORMAT.BC5_TYPELESS
DXGI_FORMAT_BC5_UNORM = DXGI_FORMAT.BC5_UNORM
DXGI_FORMAT_BC5_SNORM = DXGI_FORMAT.BC5_SNORM
DXGI_FORMAT_BC6H_UF16 = DXGI_FORMAT.BC6H_UF16
DXGI_FORMAT_BC6H_SF16 = DXGI_FORMAT.BC6H_SF16
DXGI_FORMAT_BC7_TYPELESS = DXGI_FORMAT.BC7_TYPELESS
DXGI_FORMAT_BC7_UNORM = DXGI_FORMAT.BC7_UNORM
DXGI_FORMAT_BC7_UNORM_SRGB = DXGI_FORMAT.BC7_UNORM_SRGB


class DdsImageFile(ImageFile.ImageFile):
    format = "DDS"
    format_description = "DirectDraw Surface"

    def _open(self):
        if not _accept(self.fp.read(4)):
            msg = "not a DDS file"
            raise SyntaxError(msg)
        (header_size,) = struct.unpack("<I", self.fp.read(4))
        if header_size != 124:
            msg = f"Unsupported header size {repr(header_size)}"
            raise OSError(msg)
        header_bytes = self.fp.read(header_size - 4)
        if len(header_bytes) != 120:
            msg = f"Incomplete header: {len(header_bytes)} bytes"
            raise OSError(msg)
        header = io.BytesIO(header_bytes)

        flags, height, width = struct.unpack("<3I", header.read(12))
        self._size = (width, height)
        extents = (0, 0) + self.size

        pitch, depth, mipmaps = struct.unpack("<3I", header.read(12))
        struct.unpack("<11I", header.read(44))  # reserved

        # pixel format
        pfsize, pfflags, fourcc, bitcount = struct.unpack("<4I", header.read(16))
        n = 0
        rawmode = None
        if pfflags & DDPF.RGB:
            # Texture contains uncompressed RGB data
            if pfflags & DDPF.ALPHAPIXELS:
                self._mode = "RGBA"
                mask_count = 4
            else:
                self._mode = "RGB"
                mask_count = 3

            masks = struct.unpack(f"<{mask_count}I", header.read(mask_count * 4))
            self.tile = [("dds_rgb", extents, 0, (bitcount, masks))]
            return
        elif pfflags & DDPF.LUMINANCE:
            if bitcount == 8:
                self._mode = "L"
            elif bitcount == 16 and pfflags & DDPF.ALPHAPIXELS:
                self._mode = "LA"
            else:
                msg = f"Unsupported bitcount {bitcount} for {pfflags}"
                raise OSError(msg)
        elif pfflags & DDPF.PALETTEINDEXED8:
            self._mode = "P"
            self.palette = ImagePalette.raw("RGBA", self.fp.read(1024))
        elif pfflags & DDPF.FOURCC:
            offset = header_size + 4
            if fourcc == D3DFMT.DXT1:
                self._mode = "RGBA"
                self.pixel_format = "DXT1"
                n = 1
            elif fourcc == D3DFMT.DXT3:
                self._mode = "RGBA"
                self.pixel_format = "DXT3"
                n = 2
            elif fourcc == D3DFMT.DXT5:
                self._mode = "RGBA"
                self.pixel_format = "DXT5"
                n = 3
            elif fourcc in (D3DFMT.BC4U, D3DFMT.ATI1):
                self._mode = "L"
                self.pixel_format = "BC4"
                n = 4
            elif fourcc == D3DFMT.BC5S:
                self._mode = "RGB"
                self.pixel_format = "BC5S"
                n = 5
            elif fourcc in (D3DFMT.BC5U, D3DFMT.ATI2):
                self._mode = "RGB"
                self.pixel_format = "BC5"
                n = 5
            elif fourcc == D3DFMT.DX10:
                offset += 20
                # ignoring flags which pertain to volume textures and cubemaps
                (dxgi_format,) = struct.unpack("<I", self.fp.read(4))
                self.fp.read(16)
                if dxgi_format in (
                    DXGI_FORMAT.BC1_UNORM,
                    DXGI_FORMAT.BC1_TYPELESS,
                ):
                    self._mode = "RGBA"
                    self.pixel_format = "BC1"
                    n = 1
                elif dxgi_format in (DXGI_FORMAT.BC4_TYPELESS, DXGI_FORMAT.BC4_UNORM):
                    self._mode = "L"
                    self.pixel_format = "BC4"
                    n = 4
                elif dxgi_format in (DXGI_FORMAT.BC5_TYPELESS, DXGI_FORMAT.BC5_UNORM):
                    self._mode = "RGB"
                    self.pixel_format = "BC5"
                    n = 5
                elif dxgi_format == DXGI_FORMAT.BC5_SNORM:
                    self._mode = "RGB"
                    self.pixel_format = "BC5S"
                    n = 5
                elif dxgi_format == DXGI_FORMAT.BC6H_UF16:
                    self._mode = "RGB"
                    self.pixel_format = "BC6H"
                    n = 6
                elif dxgi_format == DXGI_FORMAT.BC6H_SF16:
                    self._mode = "RGB"
                    self.pixel_format = "BC6HS"
                    n = 6
                elif dxgi_format in (
                    DXGI_FORMAT.BC7_TYPELESS,
                    DXGI_FORMAT.BC7_UNORM,
                    DXGI_FORMAT.BC7_UNORM_SRGB,
                ):
                    self._mode = "RGBA"
                    self.pixel_format = "BC7"
                    n = 7
                    if dxgi_format == DXGI_FORMAT.BC7_UNORM_SRGB:
                        self.info["gamma"] = 1 / 2.2
                elif dxgi_format in (
                    DXGI_FORMAT.R8G8B8A8_TYPELESS,
                    DXGI_FORMAT.R8G8B8A8_UNORM,
                    DXGI_FORMAT.R8G8B8A8_UNORM_SRGB,
                ):
                    self._mode = "RGBA"
                    if dxgi_format == DXGI_FORMAT.R8G8B8A8_UNORM_SRGB:
                        self.info["gamma"] = 1 / 2.2
                else:
                    msg = f"Unimplemented DXGI format {dxgi_format}"
                    raise NotImplementedError(msg)
            else:
                msg = f"Unimplemented pixel format {repr(fourcc)}"
                raise NotImplementedError(msg)
        else:
            msg = f"Unknown pixel format flags {pfflags}"
            raise NotImplementedError(msg)

        if n:
            self.tile = [
                ImageFile._Tile("bcn", extents, offset, (n, self.pixel_format))
            ]
        else:
            self.tile = [ImageFile._Tile("raw", extents, 0, rawmode or self.mode)]

    def load_seek(self, pos):
        pass


class DdsRgbDecoder(ImageFile.PyDecoder):
    _pulls_fd = True

    def decode(self, buffer):
        bitcount, masks = self.args

        # Some masks will be padded with zeros, e.g. R 0b11 G 0b1100
        # Calculate how many zeros each mask is padded with
        mask_offsets = []
        # And the maximum value of each channel without the padding
        mask_totals = []
        for mask in masks:
            offset = 0
            if mask != 0:
                while mask >> (offset + 1) << (offset + 1) == mask:
                    offset += 1
            mask_offsets.append(offset)
            mask_totals.append(mask >> offset)

        data = bytearray()
        bytecount = bitcount // 8
        while len(data) < self.state.xsize * self.state.ysize * len(masks):
            value = int.from_bytes(self.fd.read(bytecount), "little")
            for i, mask in enumerate(masks):
                masked_value = value & mask
                # Remove the zero padding, and scale it to 8 bits
                data += o8(
                    int(((masked_value >> mask_offsets[i]) / mask_totals[i]) * 255)
                )
        self.set_as_raw(bytes(data))
        return -1, 0


def _save(im, fp, filename):
    if im.mode not in ("RGB", "RGBA", "L", "LA"):
        msg = f"cannot write mode {im.mode} as DDS"
        raise OSError(msg)

    alpha = im.mode[-1] == "A"
    if im.mode[0] == "L":
        pixel_flags = DDPF.LUMINANCE
        rawmode = im.mode
        if alpha:
            rgba_mask = [0x000000FF, 0x000000FF, 0x000000FF]
        else:
            rgba_mask = [0xFF000000, 0xFF000000, 0xFF000000]
    else:
        pixel_flags = DDPF.RGB
        rawmode = im.mode[::-1]
        rgba_mask = [0x00FF0000, 0x0000FF00, 0x000000FF]

        if alpha:
            r, g, b, a = im.split()
            im = Image.merge("RGBA", (a, r, g, b))
    if alpha:
        pixel_flags |= DDPF.ALPHAPIXELS
    rgba_mask.append(0xFF000000 if alpha else 0)

    flags = DDSD.CAPS | DDSD.HEIGHT | DDSD.WIDTH | DDSD.PITCH | DDSD.PIXELFORMAT
    bitcount = len(im.getbands()) * 8
    pitch = (im.width * bitcount + 7) // 8

    fp.write(
        o32(DDS_MAGIC)
        + struct.pack(
            "<7I",
            124,  # header size
            flags,  # flags
            im.height,
            im.width,
            pitch,
            0,  # depth
            0,  # mipmaps
        )
        + struct.pack("11I", *((0,) * 11))  # reserved
        # pfsize, pfflags, fourcc, bitcount
        + struct.pack("<4I", 32, pixel_flags, 0, bitcount)
        + struct.pack("<4I", *rgba_mask)  # dwRGBABitMask
        + struct.pack("<5I", DDSCAPS.TEXTURE, 0, 0, 0, 0)
    )
    ImageFile._save(
        im, fp, [ImageFile._Tile("raw", (0, 0) + im.size, 0, (rawmode, 0, 1))]
    )


def _accept(prefix):
    return prefix[:4] == b"DDS "


Image.register_open(DdsImageFile.format, DdsImageFile, _accept)
Image.register_decoder("dds_rgb", DdsRgbDecoder)
Image.register_save(DdsImageFile.format, _save)
Image.register_extension(DdsImageFile.format, ".dds")
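
Note: the file above is the stock Pillow DDS plugin bundled in the removed venv, so it is normally exercised through the Image API rather than called directly. A minimal usage sketch (the file path is hypothetical):

    from PIL import Image

    with Image.open("texture.dds") as im:  # hypothetical input file
        # pixel_format is set by DdsImageFile._open for compressed textures
        print(im.size, im.mode, getattr(im, "pixel_format", None))
        im.convert("RGBA").save("texture.png")
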
@@ -1,478 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# EPS file handling
#
# History:
# 1995-09-01 fl   Created (0.1)
# 1996-05-18 fl   Don't choke on "atend" fields, Ghostscript interface (0.2)
# 1996-08-22 fl   Don't choke on floating point BoundingBox values
# 1996-08-23 fl   Handle files from Macintosh (0.3)
# 2001-02-17 fl   Use 're' instead of 'regex' (Python 2.1) (0.4)
# 2003-09-07 fl   Check gs.close status (from Federico Di Gregorio) (0.5)
# 2014-05-07 e    Handling of EPS with binary preview and fixed resolution
#                 resizing
#
# Copyright (c) 1997-2003 by Secret Labs AB.
# Copyright (c) 1995-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import io
import os
import re
import subprocess
import sys
import tempfile

from . import Image, ImageFile
from ._binary import i32le as i32
from ._deprecate import deprecate

# --------------------------------------------------------------------


split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$")
field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$")

gs_binary = None
gs_windows_binary = None


def has_ghostscript():
    global gs_binary, gs_windows_binary
    if gs_binary is None:
        if sys.platform.startswith("win"):
            if gs_windows_binary is None:
                import shutil

                for binary in ("gswin32c", "gswin64c", "gs"):
                    if shutil.which(binary) is not None:
                        gs_windows_binary = binary
                        break
                else:
                    gs_windows_binary = False
            gs_binary = gs_windows_binary
        else:
            try:
                subprocess.check_call(["gs", "--version"], stdout=subprocess.DEVNULL)
                gs_binary = "gs"
            except OSError:
                gs_binary = False
    return gs_binary is not False


def Ghostscript(tile, size, fp, scale=1, transparency=False):
    """Render an image using Ghostscript"""
    global gs_binary
    if not has_ghostscript():
        msg = "Unable to locate Ghostscript on paths"
        raise OSError(msg)

    # Unpack decoder tile
    decoder, tile, offset, data = tile[0]
    length, bbox = data

    # Hack to support hi-res rendering
    scale = int(scale) or 1
    width = size[0] * scale
    height = size[1] * scale
    # resolution is dependent on bbox and size
    res_x = 72.0 * width / (bbox[2] - bbox[0])
    res_y = 72.0 * height / (bbox[3] - bbox[1])

    out_fd, outfile = tempfile.mkstemp()
    os.close(out_fd)

    infile_temp = None
    if hasattr(fp, "name") and os.path.exists(fp.name):
        infile = fp.name
    else:
        in_fd, infile_temp = tempfile.mkstemp()
        os.close(in_fd)
        infile = infile_temp

        # Ignore length and offset!
        # Ghostscript can read it
        # Copy whole file to read in Ghostscript
        with open(infile_temp, "wb") as f:
            # fetch length of fp
            fp.seek(0, io.SEEK_END)
            fsize = fp.tell()
            # ensure start position
            # go back
            fp.seek(0)
            lengthfile = fsize
            while lengthfile > 0:
                s = fp.read(min(lengthfile, 100 * 1024))
                if not s:
                    break
                lengthfile -= len(s)
                f.write(s)

    device = "pngalpha" if transparency else "ppmraw"

    # Build Ghostscript command
    command = [
        gs_binary,
        "-q",  # quiet mode
        f"-g{width:d}x{height:d}",  # set output geometry (pixels)
        f"-r{res_x:f}x{res_y:f}",  # set input DPI (dots per inch)
        "-dBATCH",  # exit after processing
        "-dNOPAUSE",  # don't pause between pages
        "-dSAFER",  # safe mode
        f"-sDEVICE={device}",
        f"-sOutputFile={outfile}",  # output file
        # adjust for image origin
        "-c",
        f"{-bbox[0]} {-bbox[1]} translate",
        "-f",
        infile,  # input file
        # showpage (see https://bugs.ghostscript.com/show_bug.cgi?id=698272)
        "-c",
        "showpage",
    ]

    # push data through Ghostscript
    try:
        startupinfo = None
        if sys.platform.startswith("win"):
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        subprocess.check_call(command, startupinfo=startupinfo)
        out_im = Image.open(outfile)
        out_im.load()
    finally:
        try:
            os.unlink(outfile)
            if infile_temp:
                os.unlink(infile_temp)
        except OSError:
            pass

    im = out_im.im.copy()
    out_im.close()
    return im


class PSFile:
    """
    Wrapper for bytesio object that treats either CR or LF as end of line.
    This class is no longer used internally, but kept for backwards compatibility.
    """

    def __init__(self, fp):
        deprecate(
            "PSFile",
            11,
            action="If you need the functionality of this class "
            "you will need to implement it yourself.",
        )
        self.fp = fp
        self.char = None

    def seek(self, offset, whence=io.SEEK_SET):
        self.char = None
        self.fp.seek(offset, whence)

    def readline(self):
        s = [self.char or b""]
        self.char = None

        c = self.fp.read(1)
        while (c not in b"\r\n") and len(c):
            s.append(c)
            c = self.fp.read(1)

        self.char = self.fp.read(1)
        # line endings can be 1 or 2 of \r \n, in either order
        if self.char in b"\r\n":
            self.char = None

        return b"".join(s).decode("latin-1")


def _accept(prefix):
    return prefix[:4] == b"%!PS" or (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5)


##
# Image plugin for Encapsulated PostScript. This plugin supports only
# a few variants of this format.


class EpsImageFile(ImageFile.ImageFile):
    """EPS File Parser for the Python Imaging Library"""

    format = "EPS"
    format_description = "Encapsulated Postscript"

    mode_map = {1: "L", 2: "LAB", 3: "RGB", 4: "CMYK"}

    def _open(self):
        (length, offset) = self._find_offset(self.fp)

        # go to offset - start of "%!PS"
        self.fp.seek(offset)

        self._mode = "RGB"
        self._size = None

        byte_arr = bytearray(255)
        bytes_mv = memoryview(byte_arr)
        bytes_read = 0
        reading_header_comments = True
        reading_trailer_comments = False
        trailer_reached = False

        def check_required_header_comments():
            if "PS-Adobe" not in self.info:
                msg = 'EPS header missing "%!PS-Adobe" comment'
                raise SyntaxError(msg)
            if "BoundingBox" not in self.info:
                msg = 'EPS header missing "%%BoundingBox" comment'
                raise SyntaxError(msg)

        def _read_comment(s):
            nonlocal reading_trailer_comments
            try:
                m = split.match(s)
            except re.error as e:
                msg = "not an EPS file"
                raise SyntaxError(msg) from e

            if m:
                k, v = m.group(1, 2)
                self.info[k] = v
                if k == "BoundingBox":
                    if v == "(atend)":
                        reading_trailer_comments = True
                    elif not self._size or (
                        trailer_reached and reading_trailer_comments
                    ):
                        try:
                            # Note: The DSC spec says that BoundingBox
                            # fields should be integers, but some drivers
                            # put floating point values there anyway.
                            box = [int(float(i)) for i in v.split()]
                            self._size = box[2] - box[0], box[3] - box[1]
                            self.tile = [
                                ("eps", (0, 0) + self.size, offset, (length, box))
                            ]
                        except Exception:
                            pass
                return True

        while True:
            byte = self.fp.read(1)
            if byte == b"":
                # if we didn't read a byte we must be at the end of the file
                if bytes_read == 0:
                    break
            elif byte in b"\r\n":
                # if we read a line ending character, ignore it and parse what
                # we have already read. if we haven't read any other characters,
                # continue reading
                if bytes_read == 0:
                    continue
            else:
                # ASCII/hexadecimal lines in an EPS file must not exceed
                # 255 characters, not including line ending characters
                if bytes_read >= 255:
                    # only enforce this for lines starting with a "%",
                    # otherwise assume it's binary data
                    if byte_arr[0] == ord("%"):
                        msg = "not an EPS file"
                        raise SyntaxError(msg)
                    else:
                        if reading_header_comments:
                            check_required_header_comments()
                            reading_header_comments = False
                        # reset bytes_read so we can keep reading
                        # data until the end of the line
                        bytes_read = 0
                byte_arr[bytes_read] = byte[0]
                bytes_read += 1
                continue

            if reading_header_comments:
                # Load EPS header

                # if this line doesn't start with a "%",
                # or does start with "%%EndComments",
                # then we've reached the end of the header/comments
                if byte_arr[0] != ord("%") or bytes_mv[:13] == b"%%EndComments":
                    check_required_header_comments()
                    reading_header_comments = False
                    continue

                s = str(bytes_mv[:bytes_read], "latin-1")
                if not _read_comment(s):
                    m = field.match(s)
                    if m:
                        k = m.group(1)
                        if k[:8] == "PS-Adobe":
                            self.info["PS-Adobe"] = k[9:]
                        else:
                            self.info[k] = ""
                    elif s[0] == "%":
                        # handle non-DSC PostScript comments that some
                        # tools mistakenly put in the Comments section
                        pass
                    else:
                        msg = "bad EPS header"
                        raise OSError(msg)
            elif bytes_mv[:11] == b"%ImageData:":
                # Check for an "ImageData" descriptor
                # https://www.adobe.com/devnet-apps/photoshop/fileformatashtml/#50577413_pgfId-1035096

                # Values:
                # columns
                # rows
                # bit depth (1 or 8)
                # mode (1: L, 2: LAB, 3: RGB, 4: CMYK)
                # number of padding channels
                # block size (number of bytes per row per channel)
                # binary/ascii (1: binary, 2: ascii)
                # data start identifier (the image data follows after a single line
                #   consisting only of this quoted value)
                image_data_values = byte_arr[11:bytes_read].split(None, 7)
                columns, rows, bit_depth, mode_id = (
                    int(value) for value in image_data_values[:4]
                )

                if bit_depth == 1:
                    self._mode = "1"
                elif bit_depth == 8:
                    try:
                        self._mode = self.mode_map[mode_id]
                    except ValueError:
                        break
                else:
                    break

                self._size = columns, rows
                return
            elif trailer_reached and reading_trailer_comments:
                # Load EPS trailer

                # if this line starts with "%%EOF",
                # then we've reached the end of the file
                if bytes_mv[:5] == b"%%EOF":
                    break

                s = str(bytes_mv[:bytes_read], "latin-1")
                _read_comment(s)
            elif bytes_mv[:9] == b"%%Trailer":
                trailer_reached = True
            bytes_read = 0

        check_required_header_comments()

        if not self._size:
            msg = "cannot determine EPS bounding box"
            raise OSError(msg)

    def _find_offset(self, fp):
        s = fp.read(4)

        if s == b"%!PS":
            # for HEAD without binary preview
            fp.seek(0, io.SEEK_END)
            length = fp.tell()
            offset = 0
        elif i32(s) == 0xC6D3D0C5:
            # FIX for: Some EPS file not handled correctly / issue #302
            # EPS can contain binary data
            # or start directly with latin coding
            # more info see:
            # https://web.archive.org/web/20160528181353/http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf
            s = fp.read(8)
            offset = i32(s)
            length = i32(s, 4)
        else:
            msg = "not an EPS file"
            raise SyntaxError(msg)

        return length, offset

    def load(self, scale=1, transparency=False):
        # Load EPS via Ghostscript
        if self.tile:
            self.im = Ghostscript(self.tile, self.size, self.fp, scale, transparency)
            self._mode = self.im.mode
            self._size = self.im.size
            self.tile = []
        return Image.Image.load(self)

    def load_seek(self, *args, **kwargs):
        # we can't incrementally load, so force ImageFile.parser to
        # use our custom load method by defining this method.
        pass


# --------------------------------------------------------------------


def _save(im, fp, filename, eps=1):
    """EPS Writer for the Python Imaging Library."""

    # make sure image data is available
    im.load()

    # determine PostScript image mode
    if im.mode == "L":
        operator = (8, 1, b"image")
    elif im.mode == "RGB":
        operator = (8, 3, b"false 3 colorimage")
    elif im.mode == "CMYK":
        operator = (8, 4, b"false 4 colorimage")
    else:
        msg = "image mode is not supported"
        raise ValueError(msg)

    if eps:
        # write EPS header
        fp.write(b"%!PS-Adobe-3.0 EPSF-3.0\n")
        fp.write(b"%%Creator: PIL 0.1 EpsEncode\n")
        # fp.write("%%CreationDate: %s"...)
        fp.write(b"%%%%BoundingBox: 0 0 %d %d\n" % im.size)
        fp.write(b"%%Pages: 1\n")
        fp.write(b"%%EndComments\n")
        fp.write(b"%%Page: 1 1\n")
        fp.write(b"%%ImageData: %d %d " % im.size)
        fp.write(b'%d %d 0 1 1 "%s"\n' % operator)

    # image header
    fp.write(b"gsave\n")
    fp.write(b"10 dict begin\n")
    fp.write(b"/buf %d string def\n" % (im.size[0] * operator[1]))
    fp.write(b"%d %d scale\n" % im.size)
    fp.write(b"%d %d 8\n" % im.size)  # <= bits
    fp.write(b"[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1]))
    fp.write(b"{ currentfile buf readhexstring pop } bind\n")
    fp.write(operator[2] + b"\n")
    if hasattr(fp, "flush"):
        fp.flush()

    ImageFile._save(im, fp, [("eps", (0, 0) + im.size, 0, None)])

    fp.write(b"\n%%%%EndBinary\n")
    fp.write(b"grestore end\n")
    if hasattr(fp, "flush"):
        fp.flush()


# --------------------------------------------------------------------


Image.register_open(EpsImageFile.format, EpsImageFile, _accept)

Image.register_save(EpsImageFile.format, _save)

Image.register_extensions(EpsImageFile.format, [".ps", ".eps"])

Image.register_mime(EpsImageFile.format, "application/postscript")
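
Note: since EpsImageFile.load accepts a scale factor and renders through the Ghostscript pipeline above, a higher-resolution rasterisation can be requested before the first load. A minimal sketch, assuming Ghostscript is installed and the EPS file name is hypothetical:

    from PIL import Image

    with Image.open("drawing.eps") as im:  # hypothetical input file
        # render at 2x the BoundingBox size via Ghostscript
        im.load(scale=2)
        im.save("drawing.png")
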
@@ -1,381 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# EXIF tags
#
# Copyright (c) 2003 by Secret Labs AB
#
# See the README file for information on usage and redistribution.
#

"""
This module provides constants and clear-text names for various
well-known EXIF tags.
"""
from __future__ import annotations

from enum import IntEnum


class Base(IntEnum):
    # possibly incomplete
    InteropIndex = 0x0001
    ProcessingSoftware = 0x000B
    NewSubfileType = 0x00FE
    SubfileType = 0x00FF
    ImageWidth = 0x0100
    ImageLength = 0x0101
    BitsPerSample = 0x0102
    Compression = 0x0103
    PhotometricInterpretation = 0x0106
    Thresholding = 0x0107
    CellWidth = 0x0108
    CellLength = 0x0109
    FillOrder = 0x010A
    DocumentName = 0x010D
    ImageDescription = 0x010E
    Make = 0x010F
    Model = 0x0110
    StripOffsets = 0x0111
    Orientation = 0x0112
    SamplesPerPixel = 0x0115
    RowsPerStrip = 0x0116
    StripByteCounts = 0x0117
    MinSampleValue = 0x0118
    MaxSampleValue = 0x0119
    XResolution = 0x011A
    YResolution = 0x011B
    PlanarConfiguration = 0x011C
    PageName = 0x011D
    FreeOffsets = 0x0120
    FreeByteCounts = 0x0121
    GrayResponseUnit = 0x0122
    GrayResponseCurve = 0x0123
    T4Options = 0x0124
    T6Options = 0x0125
    ResolutionUnit = 0x0128
    PageNumber = 0x0129
    TransferFunction = 0x012D
    Software = 0x0131
    DateTime = 0x0132
    Artist = 0x013B
    HostComputer = 0x013C
    Predictor = 0x013D
    WhitePoint = 0x013E
    PrimaryChromaticities = 0x013F
    ColorMap = 0x0140
    HalftoneHints = 0x0141
    TileWidth = 0x0142
    TileLength = 0x0143
    TileOffsets = 0x0144
    TileByteCounts = 0x0145
    SubIFDs = 0x014A
    InkSet = 0x014C
    InkNames = 0x014D
    NumberOfInks = 0x014E
    DotRange = 0x0150
    TargetPrinter = 0x0151
    ExtraSamples = 0x0152
    SampleFormat = 0x0153
    SMinSampleValue = 0x0154
    SMaxSampleValue = 0x0155
    TransferRange = 0x0156
    ClipPath = 0x0157
    XClipPathUnits = 0x0158
    YClipPathUnits = 0x0159
    Indexed = 0x015A
    JPEGTables = 0x015B
    OPIProxy = 0x015F
    JPEGProc = 0x0200
    JpegIFOffset = 0x0201
    JpegIFByteCount = 0x0202
    JpegRestartInterval = 0x0203
    JpegLosslessPredictors = 0x0205
    JpegPointTransforms = 0x0206
    JpegQTables = 0x0207
    JpegDCTables = 0x0208
    JpegACTables = 0x0209
    YCbCrCoefficients = 0x0211
    YCbCrSubSampling = 0x0212
    YCbCrPositioning = 0x0213
    ReferenceBlackWhite = 0x0214
    XMLPacket = 0x02BC
    RelatedImageFileFormat = 0x1000
    RelatedImageWidth = 0x1001
    RelatedImageLength = 0x1002
    Rating = 0x4746
    RatingPercent = 0x4749
    ImageID = 0x800D
    CFARepeatPatternDim = 0x828D
    BatteryLevel = 0x828F
    Copyright = 0x8298
    ExposureTime = 0x829A
    FNumber = 0x829D
    IPTCNAA = 0x83BB
    ImageResources = 0x8649
    ExifOffset = 0x8769
    InterColorProfile = 0x8773
    ExposureProgram = 0x8822
    SpectralSensitivity = 0x8824
    GPSInfo = 0x8825
    ISOSpeedRatings = 0x8827
    OECF = 0x8828
    Interlace = 0x8829
    TimeZoneOffset = 0x882A
    SelfTimerMode = 0x882B
    SensitivityType = 0x8830
    StandardOutputSensitivity = 0x8831
    RecommendedExposureIndex = 0x8832
    ISOSpeed = 0x8833
    ISOSpeedLatitudeyyy = 0x8834
    ISOSpeedLatitudezzz = 0x8835
    ExifVersion = 0x9000
    DateTimeOriginal = 0x9003
    DateTimeDigitized = 0x9004
    OffsetTime = 0x9010
    OffsetTimeOriginal = 0x9011
    OffsetTimeDigitized = 0x9012
    ComponentsConfiguration = 0x9101
    CompressedBitsPerPixel = 0x9102
    ShutterSpeedValue = 0x9201
    ApertureValue = 0x9202
    BrightnessValue = 0x9203
    ExposureBiasValue = 0x9204
    MaxApertureValue = 0x9205
    SubjectDistance = 0x9206
    MeteringMode = 0x9207
    LightSource = 0x9208
    Flash = 0x9209
    FocalLength = 0x920A
    Noise = 0x920D
    ImageNumber = 0x9211
    SecurityClassification = 0x9212
    ImageHistory = 0x9213
    TIFFEPStandardID = 0x9216
    MakerNote = 0x927C
    UserComment = 0x9286
    SubsecTime = 0x9290
    SubsecTimeOriginal = 0x9291
    SubsecTimeDigitized = 0x9292
    AmbientTemperature = 0x9400
    Humidity = 0x9401
    Pressure = 0x9402
    WaterDepth = 0x9403
    Acceleration = 0x9404
    CameraElevationAngle = 0x9405
    XPTitle = 0x9C9B
    XPComment = 0x9C9C
    XPAuthor = 0x9C9D
    XPKeywords = 0x9C9E
    XPSubject = 0x9C9F
    FlashPixVersion = 0xA000
    ColorSpace = 0xA001
    ExifImageWidth = 0xA002
    ExifImageHeight = 0xA003
    RelatedSoundFile = 0xA004
    ExifInteroperabilityOffset = 0xA005
    FlashEnergy = 0xA20B
    SpatialFrequencyResponse = 0xA20C
    FocalPlaneXResolution = 0xA20E
    FocalPlaneYResolution = 0xA20F
    FocalPlaneResolutionUnit = 0xA210
    SubjectLocation = 0xA214
    ExposureIndex = 0xA215
    SensingMethod = 0xA217
    FileSource = 0xA300
    SceneType = 0xA301
    CFAPattern = 0xA302
    CustomRendered = 0xA401
    ExposureMode = 0xA402
    WhiteBalance = 0xA403
    DigitalZoomRatio = 0xA404
    FocalLengthIn35mmFilm = 0xA405
    SceneCaptureType = 0xA406
    GainControl = 0xA407
    Contrast = 0xA408
    Saturation = 0xA409
    Sharpness = 0xA40A
    DeviceSettingDescription = 0xA40B
    SubjectDistanceRange = 0xA40C
    ImageUniqueID = 0xA420
    CameraOwnerName = 0xA430
    BodySerialNumber = 0xA431
    LensSpecification = 0xA432
    LensMake = 0xA433
    LensModel = 0xA434
    LensSerialNumber = 0xA435
    CompositeImage = 0xA460
    CompositeImageCount = 0xA461
    CompositeImageExposureTimes = 0xA462
    Gamma = 0xA500
    PrintImageMatching = 0xC4A5
    DNGVersion = 0xC612
    DNGBackwardVersion = 0xC613
    UniqueCameraModel = 0xC614
    LocalizedCameraModel = 0xC615
    CFAPlaneColor = 0xC616
    CFALayout = 0xC617
    LinearizationTable = 0xC618
    BlackLevelRepeatDim = 0xC619
    BlackLevel = 0xC61A
    BlackLevelDeltaH = 0xC61B
    BlackLevelDeltaV = 0xC61C
    WhiteLevel = 0xC61D
    DefaultScale = 0xC61E
    DefaultCropOrigin = 0xC61F
    DefaultCropSize = 0xC620
    ColorMatrix1 = 0xC621
    ColorMatrix2 = 0xC622
    CameraCalibration1 = 0xC623
    CameraCalibration2 = 0xC624
    ReductionMatrix1 = 0xC625
    ReductionMatrix2 = 0xC626
    AnalogBalance = 0xC627
    AsShotNeutral = 0xC628
    AsShotWhiteXY = 0xC629
    BaselineExposure = 0xC62A
    BaselineNoise = 0xC62B
    BaselineSharpness = 0xC62C
    BayerGreenSplit = 0xC62D
    LinearResponseLimit = 0xC62E
    CameraSerialNumber = 0xC62F
    LensInfo = 0xC630
    ChromaBlurRadius = 0xC631
    AntiAliasStrength = 0xC632
    ShadowScale = 0xC633
    DNGPrivateData = 0xC634
    MakerNoteSafety = 0xC635
    CalibrationIlluminant1 = 0xC65A
    CalibrationIlluminant2 = 0xC65B
    BestQualityScale = 0xC65C
    RawDataUniqueID = 0xC65D
    OriginalRawFileName = 0xC68B
    OriginalRawFileData = 0xC68C
    ActiveArea = 0xC68D
    MaskedAreas = 0xC68E
    AsShotICCProfile = 0xC68F
    AsShotPreProfileMatrix = 0xC690
    CurrentICCProfile = 0xC691
    CurrentPreProfileMatrix = 0xC692
    ColorimetricReference = 0xC6BF
    CameraCalibrationSignature = 0xC6F3
    ProfileCalibrationSignature = 0xC6F4
    AsShotProfileName = 0xC6F6
    NoiseReductionApplied = 0xC6F7
    ProfileName = 0xC6F8
    ProfileHueSatMapDims = 0xC6F9
    ProfileHueSatMapData1 = 0xC6FA
    ProfileHueSatMapData2 = 0xC6FB
    ProfileToneCurve = 0xC6FC
    ProfileEmbedPolicy = 0xC6FD
    ProfileCopyright = 0xC6FE
    ForwardMatrix1 = 0xC714
    ForwardMatrix2 = 0xC715
    PreviewApplicationName = 0xC716
    PreviewApplicationVersion = 0xC717
    PreviewSettingsName = 0xC718
    PreviewSettingsDigest = 0xC719
    PreviewColorSpace = 0xC71A
    PreviewDateTime = 0xC71B
    RawImageDigest = 0xC71C
    OriginalRawFileDigest = 0xC71D
    SubTileBlockSize = 0xC71E
    RowInterleaveFactor = 0xC71F
    ProfileLookTableDims = 0xC725
    ProfileLookTableData = 0xC726
    OpcodeList1 = 0xC740
    OpcodeList2 = 0xC741
    OpcodeList3 = 0xC74E
    NoiseProfile = 0xC761


"""Maps EXIF tags to tag names."""
TAGS = {
    **{i.value: i.name for i in Base},
    0x920C: "SpatialFrequencyResponse",
    0x9214: "SubjectLocation",
    0x9215: "ExposureIndex",
    0x828E: "CFAPattern",
    0x920B: "FlashEnergy",
    0x9216: "TIFF/EPStandardID",
}


class GPS(IntEnum):
    GPSVersionID = 0
    GPSLatitudeRef = 1
    GPSLatitude = 2
    GPSLongitudeRef = 3
    GPSLongitude = 4
    GPSAltitudeRef = 5
    GPSAltitude = 6
    GPSTimeStamp = 7
    GPSSatellites = 8
    GPSStatus = 9
    GPSMeasureMode = 10
    GPSDOP = 11
    GPSSpeedRef = 12
    GPSSpeed = 13
    GPSTrackRef = 14
    GPSTrack = 15
    GPSImgDirectionRef = 16
    GPSImgDirection = 17
    GPSMapDatum = 18
    GPSDestLatitudeRef = 19
    GPSDestLatitude = 20
    GPSDestLongitudeRef = 21
    GPSDestLongitude = 22
    GPSDestBearingRef = 23
    GPSDestBearing = 24
    GPSDestDistanceRef = 25
    GPSDestDistance = 26
    GPSProcessingMethod = 27
    GPSAreaInformation = 28
    GPSDateStamp = 29
    GPSDifferential = 30
    GPSHPositioningError = 31


"""Maps EXIF GPS tags to tag names."""
GPSTAGS = {i.value: i.name for i in GPS}


class Interop(IntEnum):
    InteropIndex = 1
    InteropVersion = 2
    RelatedImageFileFormat = 4096
    RelatedImageWidth = 4097
    RleatedImageHeight = 4098


class IFD(IntEnum):
    Exif = 34665
    GPSInfo = 34853
    Makernote = 37500
    Interop = 40965
    IFD1 = -1


class LightSource(IntEnum):
    Unknown = 0
    Daylight = 1
    Fluorescent = 2
    Tungsten = 3
    Flash = 4
    Fine = 9
    Cloudy = 10
    Shade = 11
    DaylightFluorescent = 12
    DayWhiteFluorescent = 13
    CoolWhiteFluorescent = 14
    WhiteFluorescent = 15
    StandardLightA = 17
    StandardLightB = 18
    StandardLightC = 19
    D55 = 20
    D65 = 21
    D75 = 22
    D50 = 23
    ISO = 24
    Other = 255
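
Note: the TAGS and GPSTAGS dictionaries defined above map raw numeric EXIF tag IDs to readable names. A minimal usage sketch (the image path is hypothetical):

    from PIL import Image
    from PIL.ExifTags import TAGS

    with Image.open("photo.jpg") as im:  # hypothetical input file
        exif = im.getexif()
    for tag_id, value in exif.items():
        # fall back to the raw ID when the tag is not in the table
        print(TAGS.get(tag_id, tag_id), value)
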
@@ -1,72 +0,0 @@
#
# The Python Imaging Library
# $Id$
#
# FITS file handling
#
# Copyright (c) 1998-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import math

from . import Image, ImageFile


def _accept(prefix):
    return prefix[:6] == b"SIMPLE"


class FitsImageFile(ImageFile.ImageFile):
    format = "FITS"
    format_description = "FITS"

    def _open(self):
        headers = {}
        while True:
            header = self.fp.read(80)
            if not header:
                msg = "Truncated FITS file"
                raise OSError(msg)
            keyword = header[:8].strip()
            if keyword == b"END":
                break
            value = header[8:].split(b"/")[0].strip()
            if value.startswith(b"="):
                value = value[1:].strip()
            if not headers and (not _accept(keyword) or value != b"T"):
                msg = "Not a FITS file"
                raise SyntaxError(msg)
            headers[keyword] = value

        naxis = int(headers[b"NAXIS"])
        if naxis == 0:
            msg = "No image data"
            raise ValueError(msg)
        elif naxis == 1:
            self._size = 1, int(headers[b"NAXIS1"])
        else:
            self._size = int(headers[b"NAXIS1"]), int(headers[b"NAXIS2"])

        number_of_bits = int(headers[b"BITPIX"])
        if number_of_bits == 8:
            self._mode = "L"
        elif number_of_bits == 16:
            self._mode = "I"
        elif number_of_bits == 32:
            self._mode = "I"
        elif number_of_bits in (-32, -64):
            self._mode = "F"

        offset = math.ceil(self.fp.tell() / 2880) * 2880
        self.tile = [("raw", (0, 0) + self.size, offset, (self.mode, 0, -1))]


# --------------------------------------------------------------------
# Registry

Image.register_open(FitsImageFile.format, FitsImageFile, _accept)

Image.register_extensions(FitsImageFile.format, [".fit", ".fits"])
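
Note: the offset computation in FitsImageFile._open rounds the current file position up to the next 2880-byte block, since FITS headers are padded to whole blocks before the data starts. A worked example of the same arithmetic, with a hypothetical file position:

    import math

    header_end = 6000                     # hypothetical position after the header cards
    offset = math.ceil(header_end / 2880) * 2880
    assert offset == 8640                 # data begins at the next 2880-byte boundary
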
@@ -1,173 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# FLI/FLC file handling.
#
# History:
# 95-09-01 fl   Created
# 97-01-03 fl   Fixed parser, setup decoder tile
# 98-07-15 fl   Renamed offset attribute to avoid name clash
#
# Copyright (c) Secret Labs AB 1997-98.
# Copyright (c) Fredrik Lundh 1995-97.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import os

from . import Image, ImageFile, ImagePalette
from ._binary import i16le as i16
from ._binary import i32le as i32
from ._binary import o8

#
# decoder


def _accept(prefix):
    return (
        len(prefix) >= 6
        and i16(prefix, 4) in [0xAF11, 0xAF12]
        and i16(prefix, 14) in [0, 3]  # flags
    )


##
# Image plugin for the FLI/FLC animation format.  Use the <b>seek</b>
# method to load individual frames.


class FliImageFile(ImageFile.ImageFile):
    format = "FLI"
    format_description = "Autodesk FLI/FLC Animation"
    _close_exclusive_fp_after_loading = False

    def _open(self):
        # HEAD
        s = self.fp.read(128)
        if not (_accept(s) and s[20:22] == b"\x00\x00"):
            msg = "not an FLI/FLC file"
            raise SyntaxError(msg)

        # frames
        self.n_frames = i16(s, 6)
        self.is_animated = self.n_frames > 1

        # image characteristics
        self._mode = "P"
        self._size = i16(s, 8), i16(s, 10)

        # animation speed
        duration = i32(s, 16)
        magic = i16(s, 4)
        if magic == 0xAF11:
            duration = (duration * 1000) // 70
        self.info["duration"] = duration

        # look for palette
        palette = [(a, a, a) for a in range(256)]

        s = self.fp.read(16)

        self.__offset = 128

        if i16(s, 4) == 0xF100:
            # prefix chunk; ignore it
            self.__offset = self.__offset + i32(s)
            s = self.fp.read(16)

        if i16(s, 4) == 0xF1FA:
            # look for palette chunk
            number_of_subchunks = i16(s, 6)
            chunk_size = None
            for _ in range(number_of_subchunks):
                if chunk_size is not None:
                    self.fp.seek(chunk_size - 6, os.SEEK_CUR)
                s = self.fp.read(6)
                chunk_type = i16(s, 4)
                if chunk_type in (4, 11):
                    self._palette(palette, 2 if chunk_type == 11 else 0)
                    break
                chunk_size = i32(s)
                if not chunk_size:
                    break

        palette = [o8(r) + o8(g) + o8(b) for (r, g, b) in palette]
        self.palette = ImagePalette.raw("RGB", b"".join(palette))

        # set things up to decode first frame
        self.__frame = -1
        self._fp = self.fp
        self.__rewind = self.fp.tell()
        self.seek(0)

    def _palette(self, palette, shift):
        # load palette

        i = 0
        for e in range(i16(self.fp.read(2))):
            s = self.fp.read(2)
            i = i + s[0]
            n = s[1]
            if n == 0:
                n = 256
            s = self.fp.read(n * 3)
            for n in range(0, len(s), 3):
                r = s[n] << shift
                g = s[n + 1] << shift
                b = s[n + 2] << shift
                palette[i] = (r, g, b)
                i += 1

    def seek(self, frame):
        if not self._seek_check(frame):
            return
        if frame < self.__frame:
            self._seek(0)

        for f in range(self.__frame + 1, frame + 1):
            self._seek(f)

    def _seek(self, frame):
        if frame == 0:
            self.__frame = -1
            self._fp.seek(self.__rewind)
            self.__offset = 128
        else:
            # ensure that the previous frame was loaded
            self.load()

        if frame != self.__frame + 1:
            msg = f"cannot seek to frame {frame}"
            raise ValueError(msg)
        self.__frame = frame

        # move to next frame
        self.fp = self._fp
        self.fp.seek(self.__offset)

        s = self.fp.read(4)
        if not s:
            msg = "missing frame size"
            raise EOFError(msg)

        framesize = i32(s)

        self.decodermaxblock = framesize
        self.tile = [("fli", (0, 0) + self.size, self.__offset, None)]

        self.__offset += framesize

    def tell(self):
        return self.__frame


#
# registry

Image.register_open(FliImageFile.format, FliImageFile, _accept)

Image.register_extensions(FliImageFile.format, [".fli", ".flc"])
@@ -1,136 +0,0 @@
#
# The Python Imaging Library
# $Id$
#
# base class for raster font file parsers
#
# history:
# 1997-06-05 fl   created
# 1997-08-19 fl   restrict image width
#
# Copyright (c) 1997-1998 by Secret Labs AB
# Copyright (c) 1997-1998 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import os
from typing import BinaryIO

from . import Image, _binary

WIDTH = 800


def puti16(
    fp: BinaryIO, values: tuple[int, int, int, int, int, int, int, int, int, int]
) -> None:
    """Write network order (big-endian) 16-bit sequence"""
    for v in values:
        if v < 0:
            v += 65536
        fp.write(_binary.o16be(v))


class FontFile:
    """Base class for raster font file handlers."""

    bitmap: Image.Image | None = None

    def __init__(self) -> None:
        self.info: dict[bytes, bytes | int] = {}
        self.glyph: list[
            tuple[
                tuple[int, int],
                tuple[int, int, int, int],
                tuple[int, int, int, int],
                Image.Image,
            ]
            | None
        ] = [None] * 256

    def __getitem__(
        self, ix: int
    ) -> (
        tuple[
            tuple[int, int],
            tuple[int, int, int, int],
            tuple[int, int, int, int],
            Image.Image,
        ]
        | None
    ):
        return self.glyph[ix]

    def compile(self) -> None:
        """Create metrics and bitmap"""

        if self.bitmap:
            return

        # create bitmap large enough to hold all data
        h = w = maxwidth = 0
        lines = 1
        for glyph in self.glyph:
            if glyph:
                d, dst, src, im = glyph
                h = max(h, src[3] - src[1])
                w = w + (src[2] - src[0])
                if w > WIDTH:
                    lines += 1
                    w = src[2] - src[0]
                maxwidth = max(maxwidth, w)

        xsize = maxwidth
        ysize = lines * h

        if xsize == 0 and ysize == 0:
            return

        self.ysize = h

        # paste glyphs into bitmap
        self.bitmap = Image.new("1", (xsize, ysize))
        self.metrics: list[
            tuple[tuple[int, int], tuple[int, int, int, int], tuple[int, int, int, int]]
            | None
        ] = [None] * 256
        x = y = 0
        for i in range(256):
            glyph = self[i]
            if glyph:
                d, dst, src, im = glyph
                xx = src[2] - src[0]
                x0, y0 = x, y
                x = x + xx
                if x > WIDTH:
                    x, y = 0, y + h
                    x0, y0 = x, y
                    x = xx
                s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0
                self.bitmap.paste(im.crop(src), s)
                self.metrics[i] = d, dst, s

    def save(self, filename: str) -> None:
        """Save font"""

        self.compile()

        # font data
        if not self.bitmap:
            msg = "No bitmap created"
            raise ValueError(msg)
        self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG")

        # font metrics
        with open(os.path.splitext(filename)[0] + ".pil", "wb") as fp:
            fp.write(b"PILfont\n")
            fp.write(f";;;;;;{self.ysize};\n".encode("ascii"))  # HACK!!!
            fp.write(b"DATA\n")
            for id in range(256):
                m = self.metrics[id]
                if not m:
                    puti16(fp, (0,) * 10)
                else:
                    puti16(fp, m[0] + m[1] + m[2])
@@ -1,255 +0,0 @@
#
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library.
# $Id$
#
# FlashPix support for PIL
#
# History:
# 97-01-25 fl   Created (reads uncompressed RGB images only)
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import olefile

from . import Image, ImageFile
from ._binary import i32le as i32

# we map from colour field tuples to (mode, rawmode) descriptors
MODES = {
    # opacity
    (0x00007FFE,): ("A", "L"),
    # monochrome
    (0x00010000,): ("L", "L"),
    (0x00018000, 0x00017FFE): ("RGBA", "LA"),
    # photo YCC
    (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
    (0x00028000, 0x00028001, 0x00028002, 0x00027FFE): ("RGBA", "YCCA;P"),
    # standard RGB (NIFRGB)
    (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
    (0x00038000, 0x00038001, 0x00038002, 0x00037FFE): ("RGBA", "RGBA"),
}


#
# --------------------------------------------------------------------


def _accept(prefix):
    return prefix[:8] == olefile.MAGIC


##
# Image plugin for the FlashPix images.


class FpxImageFile(ImageFile.ImageFile):
    format = "FPX"
    format_description = "FlashPix"

    def _open(self):
        #
        # read the OLE directory and see if this is a likely
        # to be a FlashPix file

        try:
            self.ole = olefile.OleFileIO(self.fp)
        except OSError as e:
            msg = "not an FPX file; invalid OLE file"
            raise SyntaxError(msg) from e

        if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
            msg = "not an FPX file; bad root CLSID"
            raise SyntaxError(msg)

        self._open_index(1)

    def _open_index(self, index=1):
        #
        # get the Image Contents Property Set

        prop = self.ole.getproperties(
            [f"Data Object Store {index:06d}", "\005Image Contents"]
        )

        # size (highest resolution)

        self._size = prop[0x1000002], prop[0x1000003]

        size = max(self.size)
        i = 1
        while size > 64:
            size = size / 2
            i += 1
        self.maxid = i - 1

        # mode.  instead of using a single field for this, flashpix
        # requires you to specify the mode for each channel in each
        # resolution subimage, and leaves it to the decoder to make
        # sure that they all match.  for now, we'll cheat and assume
        # that this is always the case.

        id = self.maxid << 16

        s = prop[0x2000002 | id]

        bands = i32(s, 4)
        if bands > 4:
            msg = "Invalid number of bands"
            raise OSError(msg)

        # note: for now, we ignore the "uncalibrated" flag
        colors = tuple(i32(s, 8 + i * 4) & 0x7FFFFFFF for i in range(bands))

        self._mode, self.rawmode = MODES[colors]

        # load JPEG tables, if any
        self.jpeg = {}
        for i in range(256):
            id = 0x3000001 | (i << 16)
            if id in prop:
                self.jpeg[i] = prop[id]

        self._open_subimage(1, self.maxid)

    def _open_subimage(self, index=1, subimage=0):
        #
        # setup tile descriptors for a given subimage

        stream = [
            f"Data Object Store {index:06d}",
            f"Resolution {subimage:04d}",
            "Subimage 0000 Header",
        ]

        fp = self.ole.openstream(stream)

        # skip prefix
        fp.read(28)

        # header stream
        s = fp.read(36)

        size = i32(s, 4), i32(s, 8)
        # tilecount = i32(s, 12)
        tilesize = i32(s, 16), i32(s, 20)
        # channels = i32(s, 24)
        offset = i32(s, 28)
        length = i32(s, 32)

        if size != self.size:
            msg = "subimage mismatch"
            raise OSError(msg)

        # get tile descriptors
        fp.seek(28 + offset)
        s = fp.read(i32(s, 12) * length)

        x = y = 0
        xsize, ysize = size
        xtile, ytile = tilesize
        self.tile = []

        for i in range(0, len(s), length):
            x1 = min(xsize, x + xtile)
            y1 = min(ysize, y + ytile)

            compression = i32(s, i + 8)

            if compression == 0:
                self.tile.append(
                    (
                        "raw",
                        (x, y, x1, y1),
                        i32(s, i) + 28,
                        (self.rawmode,),
                    )
                )

            elif compression == 1:
                # FIXME: the fill decoder is not implemented
                self.tile.append(
                    (
                        "fill",
                        (x, y, x1, y1),
                        i32(s, i) + 28,
                        (self.rawmode, s[12:16]),
                    )
                )

            elif compression == 2:
                internal_color_conversion = s[14]
                jpeg_tables = s[15]
                rawmode = self.rawmode

                if internal_color_conversion:
                    # The image is stored as usual (usually YCbCr).
                    if rawmode == "RGBA":
                        # For "RGBA", data is stored as YCbCrA based on
                        # negative RGB. The following trick works around
                        # this problem :
                        jpegmode, rawmode = "YCbCrK", "CMYK"
                    else:
                        jpegmode = None  # let the decoder decide

                else:
                    # The image is stored as defined by rawmode
                    jpegmode = rawmode

                self.tile.append(
                    (
                        "jpeg",
                        (x, y, x1, y1),
                        i32(s, i) + 28,
                        (rawmode, jpegmode),
                    )
                )

                # FIXME: jpeg tables are tile dependent; the prefix
                # data must be placed in the tile descriptor itself!

                if jpeg_tables:
                    self.tile_prefix = self.jpeg[jpeg_tables]

            else:
                msg = "unknown/invalid compression"
                raise OSError(msg)

            x = x + xtile
            if x >= xsize:
                x, y = 0, y + ytile
                if y >= ysize:
                    break  # isn't really required

        self.stream = stream
        self._fp = self.fp
        self.fp = None

    def load(self):
        if not self.fp:
            self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"])

        return ImageFile.ImageFile.load(self)

    def close(self):
        self.ole.close()
        super().close()

    def __exit__(self, *args):
        self.ole.close()
        super().__exit__()


#
# --------------------------------------------------------------------


Image.register_open(FpxImageFile.format, FpxImageFile, _accept)

Image.register_extension(FpxImageFile.format, ".fpx")
@@ -1,114 +0,0 @@
"""
A Pillow loader for .ftc and .ftu files (FTEX)
Jerome Leclanche <jerome@leclan.ch>

The contents of this file are hereby released in the public domain (CC0)
Full text of the CC0 license:
https://creativecommons.org/publicdomain/zero/1.0/

Independence War 2: Edge Of Chaos - Texture File Format - 16 October 2001

The textures used for 3D objects in Independence War 2: Edge Of Chaos are in a
packed custom format called FTEX. This file format uses file extensions FTC
and FTU.
* FTC files are compressed textures (using standard texture compression).
* FTU files are not compressed.
Texture File Format
The FTC and FTU texture files both use the same format. This
has the following structure:
{header}
{format_directory}
{data}
Where:
{header} = {
    u32:magic,
    u32:version,
    u32:width,
    u32:height,
    u32:mipmap_count,
    u32:format_count
}

* The "magic" number is "FTEX".
* "width" and "height" are the dimensions of the texture.
* "mipmap_count" is the number of mipmaps in the texture.
* "format_count" is the number of texture formats (different versions of the
  same texture) in this file.

{format_directory} = format_count * { u32:format, u32:where }

The format value is 0 for DXT1 compressed textures and 1 for 24-bit RGB
uncompressed textures.
The texture data for a format starts at the position "where" in the file.

Each set of texture data in the file has the following structure:
{data} = format_count * { u32:mipmap_size, mipmap_size * { u8 } }
* "mipmap_size" is the number of bytes in that mip level. For compressed
  textures this is the size of the texture data compressed with DXT1. For 24 bit
  uncompressed textures, this is 3 * width * height. Following this are the image
  bytes for that mipmap level.

Note: All data is stored in little-Endian (Intel) byte order.
"""
from __future__ import annotations

import struct
from enum import IntEnum
from io import BytesIO

from . import Image, ImageFile

MAGIC = b"FTEX"


class Format(IntEnum):
    DXT1 = 0
    UNCOMPRESSED = 1


class FtexImageFile(ImageFile.ImageFile):
    format = "FTEX"
    format_description = "Texture File Format (IW2:EOC)"

    def _open(self):
        if not _accept(self.fp.read(4)):
            msg = "not an FTEX file"
            raise SyntaxError(msg)
        struct.unpack("<i", self.fp.read(4))  # version
        self._size = struct.unpack("<2i", self.fp.read(8))
        mipmap_count, format_count = struct.unpack("<2i", self.fp.read(8))

        self._mode = "RGB"

        # Only support single-format files.
        # I don't know of any multi-format file.
        assert format_count == 1

        format, where = struct.unpack("<2i", self.fp.read(8))
        self.fp.seek(where)
        (mipmap_size,) = struct.unpack("<i", self.fp.read(4))

        data = self.fp.read(mipmap_size)

        if format == Format.DXT1:
            self._mode = "RGBA"
            self.tile = [("bcn", (0, 0) + self.size, 0, 1)]
        elif format == Format.UNCOMPRESSED:
            self.tile = [("raw", (0, 0) + self.size, 0, ("RGB", 0, 1))]
        else:
            msg = f"Invalid texture compression format: {repr(format)}"
            raise ValueError(msg)

        self.fp.close()
        self.fp = BytesIO(data)

    def load_seek(self, pos):
        pass


def _accept(prefix):
    return prefix[:4] == MAGIC


Image.register_open(FtexImageFile.format, FtexImageFile, _accept)
Image.register_extensions(FtexImageFile.format, [".ftc", ".ftu"])
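For orientation, the header layout documented in the module docstring above can be read on its own with struct. The following is a minimal standalone sketch, not part of Pillow; read_ftex_header and the path argument are illustrative names:

import struct

def read_ftex_header(path):
    # {header} = u32 magic, version, width, height, mipmap_count, format_count
    with open(path, "rb") as f:
        if f.read(4) != b"FTEX":
            raise ValueError("not an FTEX file")
        version, width, height, mipmap_count, format_count = struct.unpack(
            "<5I", f.read(20)
        )
        # {format_directory} = format_count * (u32 format, u32 where)
        directory = [struct.unpack("<2I", f.read(8)) for _ in range(format_count)]
    return version, (width, height), mipmap_count, directory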
@@ -1,103 +0,0 @@
#
# The Python Imaging Library
#
# load a GIMP brush file
#
# History:
#       96-03-14 fl     Created
#       16-01-08 es     Version 2
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
# Copyright (c) Eric Soroos 2016.
#
# See the README file for information on usage and redistribution.
#
#
# See https://github.com/GNOME/gimp/blob/mainline/devel-docs/gbr.txt for
# format documentation.
#
# This code Interprets version 1 and 2 .gbr files.
# Version 1 files are obsolete, and should not be used for new
#   brushes.
# Version 2 files are saved by GIMP v2.8 (at least)
# Version 3 files have a format specifier of 18 for 16bit floats in
#   the color depth field. This is currently unsupported by Pillow.
from __future__ import annotations

from . import Image, ImageFile
from ._binary import i32be as i32


def _accept(prefix):
    return len(prefix) >= 8 and i32(prefix, 0) >= 20 and i32(prefix, 4) in (1, 2)


##
# Image plugin for the GIMP brush format.


class GbrImageFile(ImageFile.ImageFile):
    format = "GBR"
    format_description = "GIMP brush file"

    def _open(self):
        header_size = i32(self.fp.read(4))
        if header_size < 20:
            msg = "not a GIMP brush"
            raise SyntaxError(msg)
        version = i32(self.fp.read(4))
        if version not in (1, 2):
            msg = f"Unsupported GIMP brush version: {version}"
            raise SyntaxError(msg)

        width = i32(self.fp.read(4))
        height = i32(self.fp.read(4))
        color_depth = i32(self.fp.read(4))
        if width <= 0 or height <= 0:
            msg = "not a GIMP brush"
            raise SyntaxError(msg)
        if color_depth not in (1, 4):
            msg = f"Unsupported GIMP brush color depth: {color_depth}"
            raise SyntaxError(msg)

        if version == 1:
            comment_length = header_size - 20
        else:
            comment_length = header_size - 28
            magic_number = self.fp.read(4)
            if magic_number != b"GIMP":
                msg = "not a GIMP brush, bad magic number"
                raise SyntaxError(msg)
            self.info["spacing"] = i32(self.fp.read(4))

        comment = self.fp.read(comment_length)[:-1]

        if color_depth == 1:
            self._mode = "L"
        else:
            self._mode = "RGBA"

        self._size = width, height

        self.info["comment"] = comment

        # Image might not be small
        Image._decompression_bomb_check(self.size)

        # Data is an uncompressed block of w * h * bytes/pixel
        self._data_size = width * height * color_depth

    def load(self):
        if not self.im:
            self.im = Image.core.new(self.mode, self.size)
            self.frombytes(self.fp.read(self._data_size))
        return Image.Image.load(self)


#
# registry


Image.register_open(GbrImageFile.format, GbrImageFile, _accept)
Image.register_extension(GbrImageFile.format, ".gbr")
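Since the plugin registers the .gbr extension with Image.register_open and Image.register_extension, a brush file opens through the normal Pillow entry point. A small usage sketch with a placeholder filename:

from PIL import Image

with Image.open("brush.gbr") as im:  # placeholder path
    print(im.mode, im.size)  # "L" or "RGBA", depending on the brush color depth
    print(im.info["spacing"], im.info["comment"])  # populated by GbrImageFile._open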
@@ -1,97 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# GD file handling
#
# History:
# 1996-04-12 fl   Created
#
# Copyright (c) 1997 by Secret Labs AB.
# Copyright (c) 1996 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#


"""
.. note::
    This format cannot be automatically recognized, so the
    class is not registered for use with :py:func:`PIL.Image.open()`. To open a
    gd file, use the :py:func:`PIL.GdImageFile.open()` function instead.

.. warning::
    THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This
    implementation is provided for convenience and demonstrational
    purposes only.
"""
from __future__ import annotations

from . import ImageFile, ImagePalette, UnidentifiedImageError
from ._binary import i16be as i16
from ._binary import i32be as i32


class GdImageFile(ImageFile.ImageFile):
    """
    Image plugin for the GD uncompressed format. Note that this format
    is not supported by the standard :py:func:`PIL.Image.open()` function. To use
    this plugin, you have to import the :py:mod:`PIL.GdImageFile` module and
    use the :py:func:`PIL.GdImageFile.open()` function.
    """

    format = "GD"
    format_description = "GD uncompressed images"

    def _open(self):
        # Header
        s = self.fp.read(1037)

        if i16(s) not in [65534, 65535]:
            msg = "Not a valid GD 2.x .gd file"
            raise SyntaxError(msg)

        self._mode = "L"  # FIXME: "P"
        self._size = i16(s, 2), i16(s, 4)

        true_color = s[6]
        true_color_offset = 2 if true_color else 0

        # transparency index
        tindex = i32(s, 7 + true_color_offset)
        if tindex < 256:
            self.info["transparency"] = tindex

        self.palette = ImagePalette.raw(
            "XBGR", s[7 + true_color_offset + 4 : 7 + true_color_offset + 4 + 256 * 4]
        )

        self.tile = [
            (
                "raw",
                (0, 0) + self.size,
                7 + true_color_offset + 4 + 256 * 4,
                ("L", 0, 1),
            )
        ]


def open(fp, mode="r"):
    """
    Load texture from a GD image file.

    :param fp: GD file name, or an opened file handle.
    :param mode: Optional mode. In this version, if the mode argument
        is given, it must be "r".
    :returns: An image instance.
    :raises OSError: If the image could not be read.
    """
    if mode != "r":
        msg = "bad mode"
        raise ValueError(msg)

    try:
        return GdImageFile(fp)
    except SyntaxError as e:
        msg = "cannot identify this image file"
        raise UnidentifiedImageError(msg) from e
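As the module docstring notes, GD files are not recognized by PIL.Image.open(); a usage sketch of the dedicated entry point, with a placeholder filename:

from PIL import GdImageFile

with GdImageFile.open("chart.gd") as im:  # placeholder path
    print(im.size, im.mode)  # mode is "L"; the color table is kept in im.palette
    im.save("chart.png")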
File diff suppressed because it is too large
@@ -1,137 +0,0 @@
#
# Python Imaging Library
# $Id$
#
# stuff to read (and render) GIMP gradient files
#
# History:
#       97-08-23 fl     Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#

"""
Stuff to translate curve segments to palette values (derived from
the corresponding code in GIMP, written by Federico Mena Quintero.
See the GIMP distribution for more information.)
"""
from __future__ import annotations

from math import log, pi, sin, sqrt

from ._binary import o8

EPSILON = 1e-10
""""""  # Enable auto-doc for data member


def linear(middle, pos):
    if pos <= middle:
        if middle < EPSILON:
            return 0.0
        else:
            return 0.5 * pos / middle
    else:
        pos = pos - middle
        middle = 1.0 - middle
        if middle < EPSILON:
            return 1.0
        else:
            return 0.5 + 0.5 * pos / middle


def curved(middle, pos):
    return pos ** (log(0.5) / log(max(middle, EPSILON)))


def sine(middle, pos):
    return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0


def sphere_increasing(middle, pos):
    return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2)


def sphere_decreasing(middle, pos):
    return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2)


SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing]
""""""  # Enable auto-doc for data member


class GradientFile:
    gradient = None

    def getpalette(self, entries=256):
        palette = []

        ix = 0
        x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]

        for i in range(entries):
            x = i / (entries - 1)

            while x1 < x:
                ix += 1
                x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix]

            w = x1 - x0

            if w < EPSILON:
                scale = segment(0.5, 0.5)
            else:
                scale = segment((xm - x0) / w, (x - x0) / w)

            # expand to RGBA
            r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5))
            g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5))
            b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5))
            a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5))

            # add to palette
            palette.append(r + g + b + a)

        return b"".join(palette), "RGBA"


class GimpGradientFile(GradientFile):
    """File handler for GIMP's gradient format."""

    def __init__(self, fp):
        if fp.readline()[:13] != b"GIMP Gradient":
            msg = "not a GIMP gradient file"
            raise SyntaxError(msg)

        line = fp.readline()

        # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do
        if line.startswith(b"Name: "):
            line = fp.readline().strip()

        count = int(line)

        gradient = []

        for i in range(count):
            s = fp.readline().split()
            w = [float(x) for x in s[:11]]

            x0, x1 = w[0], w[2]
            xm = w[1]
            rgb0 = w[3:7]
            rgb1 = w[7:11]

            segment = SEGMENTS[int(s[11])]
            cspace = int(s[12])

            if cspace != 0:
                msg = "cannot handle HSV colour space"
                raise OSError(msg)

            gradient.append((x0, x1, xm, rgb0, rgb1, segment))

        self.gradient = gradient
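A short sketch of rendering a gradient with getpalette(), which returns 256 packed RGBA entries; the .ggr filename below is a placeholder:

from PIL import Image, GimpGradientFile

with open("sunrise.ggr", "rb") as fp:  # placeholder path
    data, rawmode = GimpGradientFile.GimpGradientFile(fp).getpalette()

# one pixel per palette entry, laid out as a 256x1 RGBA strip
strip = Image.frombytes(rawmode, (len(data) // 4, 1), data)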
@@ -1,57 +0,0 @@
#
# Python Imaging Library
# $Id$
#
# stuff to read GIMP palette files
#
# History:
# 1997-08-23 fl     Created
# 2004-09-07 fl     Support GIMP 2.0 palette files.
#
# Copyright (c) Secret Labs AB 1997-2004.  All rights reserved.
# Copyright (c) Fredrik Lundh 1997-2004.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import re

from ._binary import o8


class GimpPaletteFile:
    """File handler for GIMP's palette format."""

    rawmode = "RGB"

    def __init__(self, fp):
        self.palette = [o8(i) * 3 for i in range(256)]

        if fp.readline()[:12] != b"GIMP Palette":
            msg = "not a GIMP palette file"
            raise SyntaxError(msg)

        for i in range(256):
            s = fp.readline()
            if not s:
                break

            # skip fields and comment lines
            if re.match(rb"\w+:|#", s):
                continue
            if len(s) > 100:
                msg = "bad palette file"
                raise SyntaxError(msg)

            v = tuple(map(int, s.split()[:3]))
            if len(v) != 3:
                msg = "bad palette entry"
                raise ValueError(msg)

            self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2])

        self.palette = b"".join(self.palette)

    def getpalette(self):
        return self.palette, self.rawmode
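Likewise, GimpPaletteFile.getpalette() yields 768 bytes of packed RGB that can be attached to a "P" image; a sketch with a placeholder filename:

from PIL import Image, GimpPaletteFile

with open("colors.gpl", "rb") as fp:  # placeholder path
    palette, rawmode = GimpPaletteFile.GimpPaletteFile(fp).getpalette()

im = Image.new("P", (16, 16), 0)
im.putpalette(palette, rawmode)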
@@ -1,74 +0,0 @@
#
# The Python Imaging Library
# $Id$
#
# GRIB stub adapter
#
# Copyright (c) 1996-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

from . import Image, ImageFile

_handler = None


def register_handler(handler):
    """
    Install application-specific GRIB image handler.

    :param handler: Handler object.
    """
    global _handler
    _handler = handler


# --------------------------------------------------------------------
# Image adapter


def _accept(prefix):
    return prefix[:4] == b"GRIB" and prefix[7] == 1


class GribStubImageFile(ImageFile.StubImageFile):
    format = "GRIB"
    format_description = "GRIB"

    def _open(self):
        offset = self.fp.tell()

        if not _accept(self.fp.read(8)):
            msg = "Not a GRIB file"
            raise SyntaxError(msg)

        self.fp.seek(offset)

        # make something up
        self._mode = "F"
        self._size = 1, 1

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self):
        return _handler


def _save(im, fp, filename):
    if _handler is None or not hasattr(_handler, "save"):
        msg = "GRIB save handler not installed"
        raise OSError(msg)
    _handler.save(im, fp, filename)


# --------------------------------------------------------------------
# Registry

Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept)
Image.register_save(GribStubImageFile.format, _save)

Image.register_extension(GribStubImageFile.format, ".grib")
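The stub only identifies GRIB data; decoding is delegated to whatever object is passed to register_handler(). A sketch of the hook, where the handler class and its behaviour are assumptions rather than a documented interface:

from PIL import GribStubImagePlugin

class GribHandler:
    def open(self, im):  # invoked from GribStubImageFile._open via loader.open(self)
        # a real handler would parse the GRIB message here and set size/mode/tiles
        im._size = (1, 1)
        im._mode = "F"

    def save(self, im, fp, filename):  # used by the module-level _save()
        raise OSError("saving GRIB is not supported by this handler")

GribStubImagePlugin.register_handler(GribHandler())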
@@ -1,74 +0,0 @@
#
# The Python Imaging Library
# $Id$
#
# HDF5 stub adapter
#
# Copyright (c) 2000-2003 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

from . import Image, ImageFile

_handler = None


def register_handler(handler):
    """
    Install application-specific HDF5 image handler.

    :param handler: Handler object.
    """
    global _handler
    _handler = handler


# --------------------------------------------------------------------
# Image adapter


def _accept(prefix):
    return prefix[:8] == b"\x89HDF\r\n\x1a\n"


class HDF5StubImageFile(ImageFile.StubImageFile):
    format = "HDF5"
    format_description = "HDF5"

    def _open(self):
        offset = self.fp.tell()

        if not _accept(self.fp.read(8)):
            msg = "Not an HDF file"
            raise SyntaxError(msg)

        self.fp.seek(offset)

        # make something up
        self._mode = "F"
        self._size = 1, 1

        loader = self._load()
        if loader:
            loader.open(self)

    def _load(self):
        return _handler


def _save(im, fp, filename):
    if _handler is None or not hasattr(_handler, "save"):
        msg = "HDF5 save handler not installed"
        raise OSError(msg)
    _handler.save(im, fp, filename)


# --------------------------------------------------------------------
# Registry

Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept)
Image.register_save(HDF5StubImageFile.format, _save)

Image.register_extensions(HDF5StubImageFile.format, [".h5", ".hdf"])
@@ -1,400 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# macOS icns file decoder, based on icns.py by Bob Ippolito.
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies.
|
|
||||||
# 2020-04-04 Allow saving on all operating systems.
|
|
||||||
#
|
|
||||||
# Copyright (c) 2004 by Bob Ippolito.
|
|
||||||
# Copyright (c) 2004 by Secret Labs.
|
|
||||||
# Copyright (c) 2004 by Fredrik Lundh.
|
|
||||||
# Copyright (c) 2014 by Alastair Houghton.
|
|
||||||
# Copyright (c) 2020 by Pan Jing.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from . import Image, ImageFile, PngImagePlugin, features
|
|
||||||
|
|
||||||
enable_jpeg2k = features.check_codec("jpg_2000")
|
|
||||||
if enable_jpeg2k:
|
|
||||||
from . import Jpeg2KImagePlugin
|
|
||||||
|
|
||||||
MAGIC = b"icns"
|
|
||||||
HEADERSIZE = 8
|
|
||||||
|
|
||||||
|
|
||||||
def nextheader(fobj):
|
|
||||||
return struct.unpack(">4sI", fobj.read(HEADERSIZE))
|
|
||||||
|
|
||||||
|
|
||||||
def read_32t(fobj, start_length, size):
|
|
||||||
# The 128x128 icon seems to have an extra header for some reason.
|
|
||||||
(start, length) = start_length
|
|
||||||
fobj.seek(start)
|
|
||||||
sig = fobj.read(4)
|
|
||||||
if sig != b"\x00\x00\x00\x00":
|
|
||||||
msg = "Unknown signature, expecting 0x00000000"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
return read_32(fobj, (start + 4, length - 4), size)
|
|
||||||
|
|
||||||
|
|
||||||
def read_32(fobj, start_length, size):
|
|
||||||
"""
|
|
||||||
Read a 32bit RGB icon resource. Seems to be either uncompressed or
|
|
||||||
an RLE packbits-like scheme.
|
|
||||||
"""
|
|
||||||
(start, length) = start_length
|
|
||||||
fobj.seek(start)
|
|
||||||
pixel_size = (size[0] * size[2], size[1] * size[2])
|
|
||||||
sizesq = pixel_size[0] * pixel_size[1]
|
|
||||||
if length == sizesq * 3:
|
|
||||||
# uncompressed ("RGBRGBGB")
|
|
||||||
indata = fobj.read(length)
|
|
||||||
im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1)
|
|
||||||
else:
|
|
||||||
# decode image
|
|
||||||
im = Image.new("RGB", pixel_size, None)
|
|
||||||
for band_ix in range(3):
|
|
||||||
data = []
|
|
||||||
bytesleft = sizesq
|
|
||||||
while bytesleft > 0:
|
|
||||||
byte = fobj.read(1)
|
|
||||||
if not byte:
|
|
||||||
break
|
|
||||||
byte = byte[0]
|
|
||||||
if byte & 0x80:
|
|
||||||
blocksize = byte - 125
|
|
||||||
byte = fobj.read(1)
|
|
||||||
for i in range(blocksize):
|
|
||||||
data.append(byte)
|
|
||||||
else:
|
|
||||||
blocksize = byte + 1
|
|
||||||
data.append(fobj.read(blocksize))
|
|
||||||
bytesleft -= blocksize
|
|
||||||
if bytesleft <= 0:
|
|
||||||
break
|
|
||||||
if bytesleft != 0:
|
|
||||||
msg = f"Error reading channel [{repr(bytesleft)} left]"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
band = Image.frombuffer("L", pixel_size, b"".join(data), "raw", "L", 0, 1)
|
|
||||||
im.im.putband(band.im, band_ix)
|
|
||||||
return {"RGB": im}
|
|
||||||
|
|
||||||
|
|
||||||
def read_mk(fobj, start_length, size):
|
|
||||||
# Alpha masks seem to be uncompressed
|
|
||||||
start = start_length[0]
|
|
||||||
fobj.seek(start)
|
|
||||||
pixel_size = (size[0] * size[2], size[1] * size[2])
|
|
||||||
sizesq = pixel_size[0] * pixel_size[1]
|
|
||||||
band = Image.frombuffer("L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1)
|
|
||||||
return {"A": band}
|
|
||||||
|
|
||||||
|
|
||||||
def read_png_or_jpeg2000(fobj, start_length, size):
|
|
||||||
(start, length) = start_length
|
|
||||||
fobj.seek(start)
|
|
||||||
sig = fobj.read(12)
|
|
||||||
if sig[:8] == b"\x89PNG\x0d\x0a\x1a\x0a":
|
|
||||||
fobj.seek(start)
|
|
||||||
im = PngImagePlugin.PngImageFile(fobj)
|
|
||||||
Image._decompression_bomb_check(im.size)
|
|
||||||
return {"RGBA": im}
|
|
||||||
elif (
|
|
||||||
sig[:4] == b"\xff\x4f\xff\x51"
|
|
||||||
or sig[:4] == b"\x0d\x0a\x87\x0a"
|
|
||||||
or sig == b"\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a"
|
|
||||||
):
|
|
||||||
if not enable_jpeg2k:
|
|
||||||
msg = (
|
|
||||||
"Unsupported icon subimage format (rebuild PIL "
|
|
||||||
"with JPEG 2000 support to fix this)"
|
|
||||||
)
|
|
||||||
raise ValueError(msg)
|
|
||||||
# j2k, jpc or j2c
|
|
||||||
fobj.seek(start)
|
|
||||||
jp2kstream = fobj.read(length)
|
|
||||||
f = io.BytesIO(jp2kstream)
|
|
||||||
im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
|
|
||||||
Image._decompression_bomb_check(im.size)
|
|
||||||
if im.mode != "RGBA":
|
|
||||||
im = im.convert("RGBA")
|
|
||||||
return {"RGBA": im}
|
|
||||||
else:
|
|
||||||
msg = "Unsupported icon subimage format"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
class IcnsFile:
|
|
||||||
SIZES = {
|
|
||||||
(512, 512, 2): [(b"ic10", read_png_or_jpeg2000)],
|
|
||||||
(512, 512, 1): [(b"ic09", read_png_or_jpeg2000)],
|
|
||||||
(256, 256, 2): [(b"ic14", read_png_or_jpeg2000)],
|
|
||||||
(256, 256, 1): [(b"ic08", read_png_or_jpeg2000)],
|
|
||||||
(128, 128, 2): [(b"ic13", read_png_or_jpeg2000)],
|
|
||||||
(128, 128, 1): [
|
|
||||||
(b"ic07", read_png_or_jpeg2000),
|
|
||||||
(b"it32", read_32t),
|
|
||||||
(b"t8mk", read_mk),
|
|
||||||
],
|
|
||||||
(64, 64, 1): [(b"icp6", read_png_or_jpeg2000)],
|
|
||||||
(32, 32, 2): [(b"ic12", read_png_or_jpeg2000)],
|
|
||||||
(48, 48, 1): [(b"ih32", read_32), (b"h8mk", read_mk)],
|
|
||||||
(32, 32, 1): [
|
|
||||||
(b"icp5", read_png_or_jpeg2000),
|
|
||||||
(b"il32", read_32),
|
|
||||||
(b"l8mk", read_mk),
|
|
||||||
],
|
|
||||||
(16, 16, 2): [(b"ic11", read_png_or_jpeg2000)],
|
|
||||||
(16, 16, 1): [
|
|
||||||
(b"icp4", read_png_or_jpeg2000),
|
|
||||||
(b"is32", read_32),
|
|
||||||
(b"s8mk", read_mk),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, fobj):
|
|
||||||
"""
|
|
||||||
fobj is a file-like object as an icns resource
|
|
||||||
"""
|
|
||||||
# signature : (start, length)
|
|
||||||
self.dct = dct = {}
|
|
||||||
self.fobj = fobj
|
|
||||||
sig, filesize = nextheader(fobj)
|
|
||||||
if not _accept(sig):
|
|
||||||
msg = "not an icns file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
i = HEADERSIZE
|
|
||||||
while i < filesize:
|
|
||||||
sig, blocksize = nextheader(fobj)
|
|
||||||
if blocksize <= 0:
|
|
||||||
msg = "invalid block header"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
i += HEADERSIZE
|
|
||||||
blocksize -= HEADERSIZE
|
|
||||||
dct[sig] = (i, blocksize)
|
|
||||||
fobj.seek(blocksize, io.SEEK_CUR)
|
|
||||||
i += blocksize
|
|
||||||
|
|
||||||
def itersizes(self):
|
|
||||||
sizes = []
|
|
||||||
for size, fmts in self.SIZES.items():
|
|
||||||
for fmt, reader in fmts:
|
|
||||||
if fmt in self.dct:
|
|
||||||
sizes.append(size)
|
|
||||||
break
|
|
||||||
return sizes
|
|
||||||
|
|
||||||
def bestsize(self):
|
|
||||||
sizes = self.itersizes()
|
|
||||||
if not sizes:
|
|
||||||
msg = "No 32bit icon resources found"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
return max(sizes)
|
|
||||||
|
|
||||||
def dataforsize(self, size):
|
|
||||||
"""
|
|
||||||
Get an icon resource as {channel: array}. Note that
|
|
||||||
the arrays are bottom-up like windows bitmaps and will likely
|
|
||||||
need to be flipped or transposed in some way.
|
|
||||||
"""
|
|
||||||
dct = {}
|
|
||||||
for code, reader in self.SIZES[size]:
|
|
||||||
desc = self.dct.get(code)
|
|
||||||
if desc is not None:
|
|
||||||
dct.update(reader(self.fobj, desc, size))
|
|
||||||
return dct
|
|
||||||
|
|
||||||
def getimage(self, size=None):
|
|
||||||
if size is None:
|
|
||||||
size = self.bestsize()
|
|
||||||
if len(size) == 2:
|
|
||||||
size = (size[0], size[1], 1)
|
|
||||||
channels = self.dataforsize(size)
|
|
||||||
|
|
||||||
im = channels.get("RGBA", None)
|
|
||||||
if im:
|
|
||||||
return im
|
|
||||||
|
|
||||||
im = channels.get("RGB").copy()
|
|
||||||
try:
|
|
||||||
im.putalpha(channels["A"])
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
return im
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Mac OS icons.
|
|
||||||
|
|
||||||
|
|
||||||
class IcnsImageFile(ImageFile.ImageFile):
|
|
||||||
"""
|
|
||||||
PIL image support for Mac OS .icns files.
|
|
||||||
Chooses the best resolution, but will possibly load
|
|
||||||
a different size image if you mutate the size attribute
|
|
||||||
before calling 'load'.
|
|
||||||
|
|
||||||
The info dictionary has a key 'sizes' that is a list
|
|
||||||
of sizes that the icns file has.
|
|
||||||
"""
|
|
||||||
|
|
||||||
format = "ICNS"
|
|
||||||
format_description = "Mac OS icns resource"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
self.icns = IcnsFile(self.fp)
|
|
||||||
self._mode = "RGBA"
|
|
||||||
self.info["sizes"] = self.icns.itersizes()
|
|
||||||
self.best_size = self.icns.bestsize()
|
|
||||||
self.size = (
|
|
||||||
self.best_size[0] * self.best_size[2],
|
|
||||||
self.best_size[1] * self.best_size[2],
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def size(self):
|
|
||||||
return self._size
|
|
||||||
|
|
||||||
@size.setter
|
|
||||||
def size(self, value):
|
|
||||||
info_size = value
|
|
||||||
if info_size not in self.info["sizes"] and len(info_size) == 2:
|
|
||||||
info_size = (info_size[0], info_size[1], 1)
|
|
||||||
if (
|
|
||||||
info_size not in self.info["sizes"]
|
|
||||||
and len(info_size) == 3
|
|
||||||
and info_size[2] == 1
|
|
||||||
):
|
|
||||||
simple_sizes = [
|
|
||||||
(size[0] * size[2], size[1] * size[2]) for size in self.info["sizes"]
|
|
||||||
]
|
|
||||||
if value in simple_sizes:
|
|
||||||
info_size = self.info["sizes"][simple_sizes.index(value)]
|
|
||||||
if info_size not in self.info["sizes"]:
|
|
||||||
msg = "This is not one of the allowed sizes of this image"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self._size = value
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
if len(self.size) == 3:
|
|
||||||
self.best_size = self.size
|
|
||||||
self.size = (
|
|
||||||
self.best_size[0] * self.best_size[2],
|
|
||||||
self.best_size[1] * self.best_size[2],
|
|
||||||
)
|
|
||||||
|
|
||||||
px = Image.Image.load(self)
|
|
||||||
if self.im is not None and self.im.size == self.size:
|
|
||||||
# Already loaded
|
|
||||||
return px
|
|
||||||
self.load_prepare()
|
|
||||||
# This is likely NOT the best way to do it, but whatever.
|
|
||||||
im = self.icns.getimage(self.best_size)
|
|
||||||
|
|
||||||
# If this is a PNG or JPEG 2000, it won't be loaded yet
|
|
||||||
px = im.load()
|
|
||||||
|
|
||||||
self.im = im.im
|
|
||||||
self._mode = im.mode
|
|
||||||
self.size = im.size
|
|
||||||
|
|
||||||
return px
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
|
|
||||||
"""
|
|
||||||
Saves the image as a series of PNG files,
|
|
||||||
that are then combined into a .icns file.
|
|
||||||
"""
|
|
||||||
if hasattr(fp, "flush"):
|
|
||||||
fp.flush()
|
|
||||||
|
|
||||||
sizes = {
|
|
||||||
b"ic07": 128,
|
|
||||||
b"ic08": 256,
|
|
||||||
b"ic09": 512,
|
|
||||||
b"ic10": 1024,
|
|
||||||
b"ic11": 32,
|
|
||||||
b"ic12": 64,
|
|
||||||
b"ic13": 256,
|
|
||||||
b"ic14": 512,
|
|
||||||
}
|
|
||||||
provided_images = {im.width: im for im in im.encoderinfo.get("append_images", [])}
|
|
||||||
size_streams = {}
|
|
||||||
for size in set(sizes.values()):
|
|
||||||
image = (
|
|
||||||
provided_images[size]
|
|
||||||
if size in provided_images
|
|
||||||
else im.resize((size, size))
|
|
||||||
)
|
|
||||||
|
|
||||||
temp = io.BytesIO()
|
|
||||||
image.save(temp, "png")
|
|
||||||
size_streams[size] = temp.getvalue()
|
|
||||||
|
|
||||||
entries = []
|
|
||||||
for type, size in sizes.items():
|
|
||||||
stream = size_streams[size]
|
|
||||||
entries.append(
|
|
||||||
{"type": type, "size": HEADERSIZE + len(stream), "stream": stream}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Header
|
|
||||||
fp.write(MAGIC)
|
|
||||||
file_length = HEADERSIZE # Header
|
|
||||||
file_length += HEADERSIZE + 8 * len(entries) # TOC
|
|
||||||
file_length += sum(entry["size"] for entry in entries)
|
|
||||||
fp.write(struct.pack(">i", file_length))
|
|
||||||
|
|
||||||
# TOC
|
|
||||||
fp.write(b"TOC ")
|
|
||||||
fp.write(struct.pack(">i", HEADERSIZE + len(entries) * HEADERSIZE))
|
|
||||||
for entry in entries:
|
|
||||||
fp.write(entry["type"])
|
|
||||||
fp.write(struct.pack(">i", entry["size"]))
|
|
||||||
|
|
||||||
# Data
|
|
||||||
for entry in entries:
|
|
||||||
fp.write(entry["type"])
|
|
||||||
fp.write(struct.pack(">i", entry["size"]))
|
|
||||||
fp.write(entry["stream"])
|
|
||||||
|
|
||||||
if hasattr(fp, "flush"):
|
|
||||||
fp.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(IcnsImageFile.format, IcnsImageFile, _accept)
|
|
||||||
Image.register_extension(IcnsImageFile.format, ".icns")
|
|
||||||
|
|
||||||
Image.register_save(IcnsImageFile.format, _save)
|
|
||||||
Image.register_mime(IcnsImageFile.format, "image/icns")
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
if len(sys.argv) < 2:
|
|
||||||
print("Syntax: python3 IcnsImagePlugin.py [file]")
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
with open(sys.argv[1], "rb") as fp:
|
|
||||||
imf = IcnsImageFile(fp)
|
|
||||||
for size in imf.info["sizes"]:
|
|
||||||
width, height, scale = imf.size = size
|
|
||||||
imf.save(f"out-{width}-{height}-{scale}.png")
|
|
||||||
with Image.open(sys.argv[1]) as im:
|
|
||||||
im.save("out.png")
|
|
||||||
if sys.platform == "windows":
|
|
||||||
os.startfile("out.png")
|
|
||||||
@@ -1,356 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# Windows Icon support for PIL
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 96-05-27 fl Created
|
|
||||||
#
|
|
||||||
# Copyright (c) Secret Labs AB 1997.
|
|
||||||
# Copyright (c) Fredrik Lundh 1996.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
|
|
||||||
# <casadebender@gmail.com>.
|
|
||||||
# https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
|
|
||||||
#
|
|
||||||
# Icon format references:
|
|
||||||
# * https://en.wikipedia.org/wiki/ICO_(file_format)
|
|
||||||
# * https://msdn.microsoft.com/en-us/library/ms997538.aspx
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import warnings
|
|
||||||
from io import BytesIO
|
|
||||||
from math import ceil, log
|
|
||||||
|
|
||||||
from . import BmpImagePlugin, Image, ImageFile, PngImagePlugin
|
|
||||||
from ._binary import i16le as i16
|
|
||||||
from ._binary import i32le as i32
|
|
||||||
from ._binary import o8
|
|
||||||
from ._binary import o16le as o16
|
|
||||||
from ._binary import o32le as o32
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
_MAGIC = b"\0\0\1\0"
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, filename):
|
|
||||||
fp.write(_MAGIC) # (2+2)
|
|
||||||
bmp = im.encoderinfo.get("bitmap_format") == "bmp"
|
|
||||||
sizes = im.encoderinfo.get(
|
|
||||||
"sizes",
|
|
||||||
[(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)],
|
|
||||||
)
|
|
||||||
frames = []
|
|
||||||
provided_ims = [im] + im.encoderinfo.get("append_images", [])
|
|
||||||
width, height = im.size
|
|
||||||
for size in sorted(set(sizes)):
|
|
||||||
if size[0] > width or size[1] > height or size[0] > 256 or size[1] > 256:
|
|
||||||
continue
|
|
||||||
|
|
||||||
for provided_im in provided_ims:
|
|
||||||
if provided_im.size != size:
|
|
||||||
continue
|
|
||||||
frames.append(provided_im)
|
|
||||||
if bmp:
|
|
||||||
bits = BmpImagePlugin.SAVE[provided_im.mode][1]
|
|
||||||
bits_used = [bits]
|
|
||||||
for other_im in provided_ims:
|
|
||||||
if other_im.size != size:
|
|
||||||
continue
|
|
||||||
bits = BmpImagePlugin.SAVE[other_im.mode][1]
|
|
||||||
if bits not in bits_used:
|
|
||||||
# Another image has been supplied for this size
|
|
||||||
# with a different bit depth
|
|
||||||
frames.append(other_im)
|
|
||||||
bits_used.append(bits)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
# TODO: invent a more convenient method for proportional scalings
|
|
||||||
frame = provided_im.copy()
|
|
||||||
frame.thumbnail(size, Image.Resampling.LANCZOS, reducing_gap=None)
|
|
||||||
frames.append(frame)
|
|
||||||
fp.write(o16(len(frames))) # idCount(2)
|
|
||||||
offset = fp.tell() + len(frames) * 16
|
|
||||||
for frame in frames:
|
|
||||||
width, height = frame.size
|
|
||||||
# 0 means 256
|
|
||||||
fp.write(o8(width if width < 256 else 0)) # bWidth(1)
|
|
||||||
fp.write(o8(height if height < 256 else 0)) # bHeight(1)
|
|
||||||
|
|
||||||
bits, colors = BmpImagePlugin.SAVE[frame.mode][1:] if bmp else (32, 0)
|
|
||||||
fp.write(o8(colors)) # bColorCount(1)
|
|
||||||
fp.write(b"\0") # bReserved(1)
|
|
||||||
fp.write(b"\0\0") # wPlanes(2)
|
|
||||||
fp.write(o16(bits)) # wBitCount(2)
|
|
||||||
|
|
||||||
image_io = BytesIO()
|
|
||||||
if bmp:
|
|
||||||
frame.save(image_io, "dib")
|
|
||||||
|
|
||||||
if bits != 32:
|
|
||||||
and_mask = Image.new("1", size)
|
|
||||||
ImageFile._save(
|
|
||||||
and_mask, image_io, [("raw", (0, 0) + size, 0, ("1", 0, -1))]
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
frame.save(image_io, "png")
|
|
||||||
image_io.seek(0)
|
|
||||||
image_bytes = image_io.read()
|
|
||||||
if bmp:
|
|
||||||
image_bytes = image_bytes[:8] + o32(height * 2) + image_bytes[12:]
|
|
||||||
bytes_len = len(image_bytes)
|
|
||||||
fp.write(o32(bytes_len)) # dwBytesInRes(4)
|
|
||||||
fp.write(o32(offset)) # dwImageOffset(4)
|
|
||||||
current = fp.tell()
|
|
||||||
fp.seek(offset)
|
|
||||||
fp.write(image_bytes)
|
|
||||||
offset = offset + bytes_len
|
|
||||||
fp.seek(current)
|
|
||||||
|
|
||||||
|
|
||||||
def _accept(prefix):
|
|
||||||
return prefix[:4] == _MAGIC
|
|
||||||
|
|
||||||
|
|
||||||
class IcoFile:
|
|
||||||
def __init__(self, buf):
|
|
||||||
"""
|
|
||||||
Parse image from file-like object containing ico file data
|
|
||||||
"""
|
|
||||||
|
|
||||||
# check magic
|
|
||||||
s = buf.read(6)
|
|
||||||
if not _accept(s):
|
|
||||||
msg = "not an ICO file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
self.buf = buf
|
|
||||||
self.entry = []
|
|
||||||
|
|
||||||
# Number of items in file
|
|
||||||
self.nb_items = i16(s, 4)
|
|
||||||
|
|
||||||
# Get headers for each item
|
|
||||||
for i in range(self.nb_items):
|
|
||||||
s = buf.read(16)
|
|
||||||
|
|
||||||
icon_header = {
|
|
||||||
"width": s[0],
|
|
||||||
"height": s[1],
|
|
||||||
"nb_color": s[2], # No. of colors in image (0 if >=8bpp)
|
|
||||||
"reserved": s[3],
|
|
||||||
"planes": i16(s, 4),
|
|
||||||
"bpp": i16(s, 6),
|
|
||||||
"size": i32(s, 8),
|
|
||||||
"offset": i32(s, 12),
|
|
||||||
}
|
|
||||||
|
|
||||||
# See Wikipedia
|
|
||||||
for j in ("width", "height"):
|
|
||||||
if not icon_header[j]:
|
|
||||||
icon_header[j] = 256
|
|
||||||
|
|
||||||
# See Wikipedia notes about color depth.
|
|
||||||
# We need this just to differ images with equal sizes
|
|
||||||
icon_header["color_depth"] = (
|
|
||||||
icon_header["bpp"]
|
|
||||||
or (
|
|
||||||
icon_header["nb_color"] != 0
|
|
||||||
and ceil(log(icon_header["nb_color"], 2))
|
|
||||||
)
|
|
||||||
or 256
|
|
||||||
)
|
|
||||||
|
|
||||||
icon_header["dim"] = (icon_header["width"], icon_header["height"])
|
|
||||||
icon_header["square"] = icon_header["width"] * icon_header["height"]
|
|
||||||
|
|
||||||
self.entry.append(icon_header)
|
|
||||||
|
|
||||||
self.entry = sorted(self.entry, key=lambda x: x["color_depth"])
|
|
||||||
# ICO images are usually squares
|
|
||||||
self.entry = sorted(self.entry, key=lambda x: x["square"], reverse=True)
|
|
||||||
|
|
||||||
def sizes(self):
|
|
||||||
"""
|
|
||||||
Get a list of all available icon sizes and color depths.
|
|
||||||
"""
|
|
||||||
return {(h["width"], h["height"]) for h in self.entry}
|
|
||||||
|
|
||||||
def getentryindex(self, size, bpp=False):
|
|
||||||
for i, h in enumerate(self.entry):
|
|
||||||
if size == h["dim"] and (bpp is False or bpp == h["color_depth"]):
|
|
||||||
return i
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def getimage(self, size, bpp=False):
|
|
||||||
"""
|
|
||||||
Get an image from the icon
|
|
||||||
"""
|
|
||||||
return self.frame(self.getentryindex(size, bpp))
|
|
||||||
|
|
||||||
def frame(self, idx):
|
|
||||||
"""
|
|
||||||
Get an image from frame idx
|
|
||||||
"""
|
|
||||||
|
|
||||||
header = self.entry[idx]
|
|
||||||
|
|
||||||
self.buf.seek(header["offset"])
|
|
||||||
data = self.buf.read(8)
|
|
||||||
self.buf.seek(header["offset"])
|
|
||||||
|
|
||||||
if data[:8] == PngImagePlugin._MAGIC:
|
|
||||||
# png frame
|
|
||||||
im = PngImagePlugin.PngImageFile(self.buf)
|
|
||||||
Image._decompression_bomb_check(im.size)
|
|
||||||
else:
|
|
||||||
# XOR + AND mask bmp frame
|
|
||||||
im = BmpImagePlugin.DibImageFile(self.buf)
|
|
||||||
Image._decompression_bomb_check(im.size)
|
|
||||||
|
|
||||||
# change tile dimension to only encompass XOR image
|
|
||||||
im._size = (im.size[0], int(im.size[1] / 2))
|
|
||||||
d, e, o, a = im.tile[0]
|
|
||||||
im.tile[0] = d, (0, 0) + im.size, o, a
|
|
||||||
|
|
||||||
# figure out where AND mask image starts
|
|
||||||
bpp = header["bpp"]
|
|
||||||
if 32 == bpp:
|
|
||||||
# 32-bit color depth icon image allows semitransparent areas
|
|
||||||
# PIL's DIB format ignores transparency bits, recover them.
|
|
||||||
# The DIB is packed in BGRX byte order where X is the alpha
|
|
||||||
# channel.
|
|
||||||
|
|
||||||
# Back up to start of bmp data
|
|
||||||
self.buf.seek(o)
|
|
||||||
# extract every 4th byte (eg. 3,7,11,15,...)
|
|
||||||
alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4]
|
|
||||||
|
|
||||||
# convert to an 8bpp grayscale image
|
|
||||||
mask = Image.frombuffer(
|
|
||||||
"L", # 8bpp
|
|
||||||
im.size, # (w, h)
|
|
||||||
alpha_bytes, # source chars
|
|
||||||
"raw", # raw decoder
|
|
||||||
("L", 0, -1), # 8bpp inverted, unpadded, reversed
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# get AND image from end of bitmap
|
|
||||||
w = im.size[0]
|
|
||||||
if (w % 32) > 0:
|
|
||||||
# bitmap row data is aligned to word boundaries
|
|
||||||
w += 32 - (im.size[0] % 32)
|
|
||||||
|
|
||||||
# the total mask data is
|
|
||||||
# padded row size * height / bits per char
|
|
||||||
|
|
||||||
total_bytes = int((w * im.size[1]) / 8)
|
|
||||||
and_mask_offset = header["offset"] + header["size"] - total_bytes
|
|
||||||
|
|
||||||
self.buf.seek(and_mask_offset)
|
|
||||||
mask_data = self.buf.read(total_bytes)
|
|
||||||
|
|
||||||
# convert raw data to image
|
|
||||||
mask = Image.frombuffer(
|
|
||||||
"1", # 1 bpp
|
|
||||||
im.size, # (w, h)
|
|
||||||
mask_data, # source chars
|
|
||||||
"raw", # raw decoder
|
|
||||||
("1;I", int(w / 8), -1), # 1bpp inverted, padded, reversed
|
|
||||||
)
|
|
||||||
|
|
||||||
# now we have two images, im is XOR image and mask is AND image
|
|
||||||
|
|
||||||
# apply mask image as alpha channel
|
|
||||||
im = im.convert("RGBA")
|
|
||||||
im.putalpha(mask)
|
|
||||||
|
|
||||||
return im
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for Windows Icon files.
|
|
||||||
|
|
||||||
|
|
||||||
class IcoImageFile(ImageFile.ImageFile):
|
|
||||||
"""
|
|
||||||
PIL read-only image support for Microsoft Windows .ico files.
|
|
||||||
|
|
||||||
By default the largest resolution image in the file will be loaded. This
|
|
||||||
can be changed by altering the 'size' attribute before calling 'load'.
|
|
||||||
|
|
||||||
The info dictionary has a key 'sizes' that is a list of the sizes available
|
|
||||||
in the icon file.
|
|
||||||
|
|
||||||
Handles classic, XP and Vista icon formats.
|
|
||||||
|
|
||||||
When saving, PNG compression is used. Support for this was only added in
|
|
||||||
Windows Vista. If you are unable to view the icon in Windows, convert the
|
|
||||||
image to "RGBA" mode before saving.
|
|
||||||
|
|
||||||
This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis
|
|
||||||
<casadebender@gmail.com>.
|
|
||||||
https://code.google.com/archive/p/casadebender/wikis/Win32IconImagePlugin.wiki
|
|
||||||
"""
|
|
||||||
|
|
||||||
format = "ICO"
|
|
||||||
format_description = "Windows Icon"
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
self.ico = IcoFile(self.fp)
|
|
||||||
self.info["sizes"] = self.ico.sizes()
|
|
||||||
self.size = self.ico.entry[0]["dim"]
|
|
||||||
self.load()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def size(self):
|
|
||||||
return self._size
|
|
||||||
|
|
||||||
@size.setter
|
|
||||||
def size(self, value):
|
|
||||||
if value not in self.info["sizes"]:
|
|
||||||
msg = "This is not one of the allowed sizes of this image"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self._size = value
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
if self.im is not None and self.im.size == self.size:
|
|
||||||
# Already loaded
|
|
||||||
return Image.Image.load(self)
|
|
||||||
im = self.ico.getimage(self.size)
|
|
||||||
# if tile is PNG, it won't really be loaded yet
|
|
||||||
im.load()
|
|
||||||
self.im = im.im
|
|
||||||
self.pyaccess = None
|
|
||||||
self._mode = im.mode
|
|
||||||
if im.size != self.size:
|
|
||||||
warnings.warn("Image was not the expected size")
|
|
||||||
|
|
||||||
index = self.ico.getentryindex(self.size)
|
|
||||||
sizes = list(self.info["sizes"])
|
|
||||||
sizes[index] = im.size
|
|
||||||
self.info["sizes"] = set(sizes)
|
|
||||||
|
|
||||||
self.size = im.size
|
|
||||||
|
|
||||||
def load_seek(self):
|
|
||||||
# Flag the ImageFile.Parser so that it
|
|
||||||
# just does all the decode at the end.
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
Image.register_open(IcoImageFile.format, IcoImageFile, _accept)
|
|
||||||
Image.register_save(IcoImageFile.format, _save)
|
|
||||||
Image.register_extension(IcoImageFile.format, ".ico")
|
|
||||||
|
|
||||||
Image.register_mime(IcoImageFile.format, "image/x-icon")
|
|
||||||
@@ -1,371 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# IFUNC IM file handling for PIL
|
|
||||||
#
|
|
||||||
# history:
|
|
||||||
# 1995-09-01 fl Created.
|
|
||||||
# 1997-01-03 fl Save palette images
|
|
||||||
# 1997-01-08 fl Added sequence support
|
|
||||||
# 1997-01-23 fl Added P and RGB save support
|
|
||||||
# 1997-05-31 fl Read floating point images
|
|
||||||
# 1997-06-22 fl Save floating point images
|
|
||||||
# 1997-08-27 fl Read and save 1-bit images
|
|
||||||
# 1998-06-25 fl Added support for RGB+LUT images
|
|
||||||
# 1998-07-02 fl Added support for YCC images
|
|
||||||
# 1998-07-15 fl Renamed offset attribute to avoid name clash
|
|
||||||
# 1998-12-29 fl Added I;16 support
|
|
||||||
# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7)
|
|
||||||
# 2003-09-26 fl Added LA/PA support
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1995-2001 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
|
|
||||||
from . import Image, ImageFile, ImagePalette
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Standard tags
|
|
||||||
|
|
||||||
COMMENT = "Comment"
|
|
||||||
DATE = "Date"
|
|
||||||
EQUIPMENT = "Digitalization equipment"
|
|
||||||
FRAMES = "File size (no of images)"
|
|
||||||
LUT = "Lut"
|
|
||||||
NAME = "Name"
|
|
||||||
SCALE = "Scale (x,y)"
|
|
||||||
SIZE = "Image size (x*y)"
|
|
||||||
MODE = "Image type"
|
|
||||||
|
|
||||||
TAGS = {
|
|
||||||
COMMENT: 0,
|
|
||||||
DATE: 0,
|
|
||||||
EQUIPMENT: 0,
|
|
||||||
FRAMES: 0,
|
|
||||||
LUT: 0,
|
|
||||||
NAME: 0,
|
|
||||||
SCALE: 0,
|
|
||||||
SIZE: 0,
|
|
||||||
MODE: 0,
|
|
||||||
}
|
|
||||||
|
|
||||||
OPEN = {
|
|
||||||
# ifunc93/p3cfunc formats
|
|
||||||
"0 1 image": ("1", "1"),
|
|
||||||
"L 1 image": ("1", "1"),
|
|
||||||
"Greyscale image": ("L", "L"),
|
|
||||||
"Grayscale image": ("L", "L"),
|
|
||||||
"RGB image": ("RGB", "RGB;L"),
|
|
||||||
"RLB image": ("RGB", "RLB"),
|
|
||||||
"RYB image": ("RGB", "RLB"),
|
|
||||||
"B1 image": ("1", "1"),
|
|
||||||
"B2 image": ("P", "P;2"),
|
|
||||||
"B4 image": ("P", "P;4"),
|
|
||||||
"X 24 image": ("RGB", "RGB"),
|
|
||||||
"L 32 S image": ("I", "I;32"),
|
|
||||||
"L 32 F image": ("F", "F;32"),
|
|
||||||
# old p3cfunc formats
|
|
||||||
"RGB3 image": ("RGB", "RGB;T"),
|
|
||||||
"RYB3 image": ("RGB", "RYB;T"),
|
|
||||||
# extensions
|
|
||||||
"LA image": ("LA", "LA;L"),
|
|
||||||
"PA image": ("LA", "PA;L"),
|
|
||||||
"RGBA image": ("RGBA", "RGBA;L"),
|
|
||||||
"RGBX image": ("RGBX", "RGBX;L"),
|
|
||||||
"CMYK image": ("CMYK", "CMYK;L"),
|
|
||||||
"YCC image": ("YCbCr", "YCbCr;L"),
|
|
||||||
}
|
|
||||||
|
|
||||||
# ifunc95 extensions
|
|
||||||
for i in ["8", "8S", "16", "16S", "32", "32F"]:
|
|
||||||
OPEN[f"L {i} image"] = ("F", f"F;{i}")
|
|
||||||
OPEN[f"L*{i} image"] = ("F", f"F;{i}")
|
|
||||||
for i in ["16", "16L", "16B"]:
|
|
||||||
OPEN[f"L {i} image"] = (f"I;{i}", f"I;{i}")
|
|
||||||
OPEN[f"L*{i} image"] = (f"I;{i}", f"I;{i}")
|
|
||||||
for i in ["32S"]:
|
|
||||||
OPEN[f"L {i} image"] = ("I", f"I;{i}")
|
|
||||||
OPEN[f"L*{i} image"] = ("I", f"I;{i}")
|
|
||||||
for i in range(2, 33):
|
|
||||||
OPEN[f"L*{i} image"] = ("F", f"F;{i}")
|
|
||||||
|
|
||||||
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# Read IM directory
|
|
||||||
|
|
||||||
split = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")
|
|
||||||
|
|
||||||
|
|
||||||
def number(s):
|
|
||||||
try:
|
|
||||||
return int(s)
|
|
||||||
except ValueError:
|
|
||||||
return float(s)
|
|
||||||
|
|
||||||
|
|
||||||
##
|
|
||||||
# Image plugin for the IFUNC IM file format.
|
|
||||||
|
|
||||||
|
|
||||||
class ImImageFile(ImageFile.ImageFile):
|
|
||||||
format = "IM"
|
|
||||||
format_description = "IFUNC Image Memory"
|
|
||||||
_close_exclusive_fp_after_loading = False
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
# Quick rejection: if there's not an LF among the first
|
|
||||||
# 100 bytes, this is (probably) not a text header.
|
|
||||||
|
|
||||||
if b"\n" not in self.fp.read(100):
|
|
||||||
msg = "not an IM file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
self.fp.seek(0)
|
|
||||||
|
|
||||||
n = 0
|
|
||||||
|
|
||||||
# Default values
|
|
||||||
self.info[MODE] = "L"
|
|
||||||
self.info[SIZE] = (512, 512)
|
|
||||||
self.info[FRAMES] = 1
|
|
||||||
|
|
||||||
self.rawmode = "L"
|
|
||||||
|
|
||||||
while True:
|
|
||||||
s = self.fp.read(1)
|
|
||||||
|
|
||||||
# Some versions of IFUNC uses \n\r instead of \r\n...
|
|
||||||
if s == b"\r":
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not s or s == b"\0" or s == b"\x1A":
|
|
||||||
break
|
|
||||||
|
|
||||||
# FIXME: this may read whole file if not a text file
|
|
||||||
s = s + self.fp.readline()
|
|
||||||
|
|
||||||
if len(s) > 100:
|
|
||||||
msg = "not an IM file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
if s[-2:] == b"\r\n":
|
|
||||||
s = s[:-2]
|
|
||||||
elif s[-1:] == b"\n":
|
|
||||||
s = s[:-1]
|
|
||||||
|
|
||||||
try:
|
|
||||||
m = split.match(s)
|
|
||||||
except re.error as e:
|
|
||||||
msg = "not an IM file"
|
|
||||||
raise SyntaxError(msg) from e
|
|
||||||
|
|
||||||
if m:
|
|
||||||
k, v = m.group(1, 2)
|
|
||||||
|
|
||||||
# Don't know if this is the correct encoding,
|
|
||||||
# but a decent guess (I guess)
|
|
||||||
k = k.decode("latin-1", "replace")
|
|
||||||
v = v.decode("latin-1", "replace")
|
|
||||||
|
|
||||||
# Convert value as appropriate
|
|
||||||
if k in [FRAMES, SCALE, SIZE]:
|
|
||||||
v = v.replace("*", ",")
|
|
||||||
v = tuple(map(number, v.split(",")))
|
|
||||||
if len(v) == 1:
|
|
||||||
v = v[0]
|
|
||||||
elif k == MODE and v in OPEN:
|
|
||||||
v, self.rawmode = OPEN[v]
|
|
||||||
|
|
||||||
# Add to dictionary. Note that COMMENT tags are
|
|
||||||
# combined into a list of strings.
|
|
||||||
if k == COMMENT:
|
|
||||||
if k in self.info:
|
|
||||||
self.info[k].append(v)
|
|
||||||
else:
|
|
||||||
self.info[k] = [v]
|
|
||||||
else:
|
|
||||||
self.info[k] = v
|
|
||||||
|
|
||||||
if k in TAGS:
|
|
||||||
n += 1
|
|
||||||
|
|
||||||
else:
|
|
||||||
msg = "Syntax error in IM header: " + s.decode("ascii", "replace")
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
if not n:
|
|
||||||
msg = "Not an IM file"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
# Basic attributes
|
|
||||||
self._size = self.info[SIZE]
|
|
||||||
self._mode = self.info[MODE]
|
|
||||||
|
|
||||||
# Skip forward to start of image data
|
|
||||||
while s and s[:1] != b"\x1A":
|
|
||||||
s = self.fp.read(1)
|
|
||||||
if not s:
|
|
||||||
msg = "File truncated"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
|
|
||||||
if LUT in self.info:
|
|
||||||
# convert lookup table to palette or lut attribute
|
|
||||||
palette = self.fp.read(768)
|
|
||||||
greyscale = 1 # greyscale palette
|
|
||||||
linear = 1 # linear greyscale palette
|
|
||||||
for i in range(256):
|
|
||||||
if palette[i] == palette[i + 256] == palette[i + 512]:
|
|
||||||
if palette[i] != i:
|
|
||||||
linear = 0
|
|
||||||
else:
|
|
||||||
greyscale = 0
|
|
||||||
if self.mode in ["L", "LA", "P", "PA"]:
|
|
||||||
if greyscale:
|
|
||||||
if not linear:
|
|
||||||
self.lut = list(palette[:256])
|
|
||||||
else:
|
|
||||||
if self.mode in ["L", "P"]:
|
|
||||||
self._mode = self.rawmode = "P"
|
|
||||||
elif self.mode in ["LA", "PA"]:
|
|
||||||
self._mode = "PA"
|
|
||||||
self.rawmode = "PA;L"
|
|
||||||
self.palette = ImagePalette.raw("RGB;L", palette)
|
|
||||||
elif self.mode == "RGB":
|
|
||||||
if not greyscale or not linear:
|
|
||||||
self.lut = list(palette)
|
|
||||||
|
|
||||||
self.frame = 0
|
|
||||||
|
|
||||||
self.__offset = offs = self.fp.tell()
|
|
||||||
|
|
||||||
self._fp = self.fp # FIXME: hack
|
|
||||||
|
|
||||||
if self.rawmode[:2] == "F;":
|
|
||||||
# ifunc95 formats
|
|
||||||
try:
|
|
||||||
# use bit decoder (if necessary)
|
|
||||||
bits = int(self.rawmode[2:])
|
|
||||||
if bits not in [8, 16, 32]:
|
|
||||||
self.tile = [("bit", (0, 0) + self.size, offs, (bits, 8, 3, 0, -1))]
|
|
||||||
return
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if self.rawmode in ["RGB;T", "RYB;T"]:
|
|
||||||
# Old LabEye/3PC files. Would be very surprised if anyone
|
|
||||||
# ever stumbled upon such a file ;-)
|
|
||||||
size = self.size[0] * self.size[1]
|
|
||||||
self.tile = [
|
|
||||||
("raw", (0, 0) + self.size, offs, ("G", 0, -1)),
|
|
||||||
("raw", (0, 0) + self.size, offs + size, ("R", 0, -1)),
|
|
||||||
("raw", (0, 0) + self.size, offs + 2 * size, ("B", 0, -1)),
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
# LabEye/IFUNC files
|
|
||||||
self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]
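The loop above parses the textual IM header one "Key: value" line at a time using a `split` regex and a `number` helper defined earlier in the plugin (not shown in this hunk). A minimal standalone sketch of that parsing idea follows; the exact pattern is an assumption for illustration, not the plugin's own regex.

# Standalone sketch of the "Key: value" header parsing used by _open() above.
# The real pattern (the module-level `split` regex) is not shown in this hunk,
# so this regex is an assumption for illustration only.
import re

HEADER_LINE = re.compile(rb"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$")

def parse_header_line(line: bytes):
    m = HEADER_LINE.match(line.rstrip(b"\r\n"))
    if not m:
        return None
    # latin-1 is a safe single-byte decoding, mirroring the loop above
    return m.group(1).decode("latin-1"), m.group(2).decode("latin-1")

# parse_header_line(b"Image size (x*y): 512*512\r\n")
# -> ('Image size (x*y)', '512*512')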

    @property
    def n_frames(self):
        return self.info[FRAMES]

    @property
    def is_animated(self):
        return self.info[FRAMES] > 1

    def seek(self, frame):
        if not self._seek_check(frame):
            return

        self.frame = frame

        if self.mode == "1":
            bits = 1
        else:
            bits = 8 * len(self.mode)

        size = ((self.size[0] * bits + 7) // 8) * self.size[1]
        offs = self.__offset + frame * size

        self.fp = self._fp

        self.tile = [("raw", (0, 0) + self.size, offs, (self.rawmode, 0, -1))]

    def tell(self):
        return self.frame

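A worked example of the seek() arithmetic above, for a hypothetical 512x512 greyscale ("L") file whose pixel data starts right after the 512-byte header.

# Worked example of the frame-offset arithmetic used by seek() above.
def frame_offset(header_size, width, height, mode, frame):
    bits = 1 if mode == "1" else 8 * len(mode)
    row_bytes = (width * bits + 7) // 8
    return header_size + frame * row_bytes * height

# frame_offset(512, 512, 512, "L", 0) -> 512
# frame_offset(512, 512, 512, "L", 1) -> 262656  (512 header + 512*512 bytes)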

#
# --------------------------------------------------------------------
# Save IM files


SAVE = {
    # mode: (im type, raw mode)
    "1": ("0 1", "1"),
    "L": ("Greyscale", "L"),
    "LA": ("LA", "LA;L"),
    "P": ("Greyscale", "P"),
    "PA": ("LA", "PA;L"),
    "I": ("L 32S", "I;32S"),
    "I;16": ("L 16", "I;16"),
    "I;16L": ("L 16L", "I;16L"),
    "I;16B": ("L 16B", "I;16B"),
    "F": ("L 32F", "F;32F"),
    "RGB": ("RGB", "RGB;L"),
    "RGBA": ("RGBA", "RGBA;L"),
    "RGBX": ("RGBX", "RGBX;L"),
    "CMYK": ("CMYK", "CMYK;L"),
    "YCbCr": ("YCC", "YCbCr;L"),
}


def _save(im, fp, filename):
    try:
        image_type, rawmode = SAVE[im.mode]
    except KeyError as e:
        msg = f"Cannot save {im.mode} images as IM"
        raise ValueError(msg) from e

    frames = im.encoderinfo.get("frames", 1)

    fp.write(f"Image type: {image_type} image\r\n".encode("ascii"))
    if filename:
        # Each line must be 100 characters or less,
        # or: SyntaxError("not an IM file")
        # 8 characters are used for "Name: " and "\r\n"
        # Keep just the filename, ditch the potentially overlong path
        name, ext = os.path.splitext(os.path.basename(filename))
        name = "".join([name[: 92 - len(ext)], ext])

        fp.write(f"Name: {name}\r\n".encode("ascii"))
    fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode("ascii"))
    fp.write(f"File size (no of images): {frames}\r\n".encode("ascii"))
    if im.mode in ["P", "PA"]:
        fp.write(b"Lut: 1\r\n")
    fp.write(b"\000" * (511 - fp.tell()) + b"\032")
    if im.mode in ["P", "PA"]:
        im_palette = im.im.getpalette("RGB", "RGB;L")
        colors = len(im_palette) // 3
        palette = b""
        for i in range(3):
            palette += im_palette[colors * i : colors * (i + 1)]
            palette += b"\x00" * (256 - colors)
        fp.write(palette)  # 768 bytes
    ImageFile._save(im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, -1))])


#
# --------------------------------------------------------------------
# Registry


Image.register_open(ImImageFile.format, ImImageFile)
Image.register_save(ImImageFile.format, _save)

Image.register_extension(ImImageFile.format, ".im")
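A quick round trip through the registered IM plugin above, assuming Pillow is installed; "frames.im" is a hypothetical output path.

# Round trip through the IM plugin registered above.
from PIL import Image

im = Image.new("L", (64, 64), 128)
im.save("frames.im")                 # dispatched to _save() above

with Image.open("frames.im") as reloaded:
    print(reloaded.format, reloaded.mode, reloaded.size, reloaded.n_frames)
    for i in range(reloaded.n_frames):
        reloaded.seek(i)             # uses ImImageFile.seek() above
        reloaded.load()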
@@ -1,311 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# standard channel operations
#
# History:
# 1996-03-24 fl Created
# 1996-08-13 fl Added logical operations (for "1" images)
# 2000-10-12 fl Added offset method (from Image.py)
#
# Copyright (c) 1997-2000 by Secret Labs AB
# Copyright (c) 1996-2000 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#

from __future__ import annotations

from . import Image


def constant(image: Image.Image, value: int) -> Image.Image:
    """Fill a channel with a given gray level.

    :rtype: :py:class:`~PIL.Image.Image`
    """

    return Image.new("L", image.size, value)


def duplicate(image: Image.Image) -> Image.Image:
    """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`.

    :rtype: :py:class:`~PIL.Image.Image`
    """

    return image.copy()


def invert(image: Image.Image) -> Image.Image:
    """
    Invert an image (channel). ::

        out = MAX - image

    :rtype: :py:class:`~PIL.Image.Image`
    """

    image.load()
    return image._new(image.im.chop_invert())


def lighter(image1: Image.Image, image2: Image.Image) -> Image.Image:
    """
    Compares the two images, pixel by pixel, and returns a new image containing
    the lighter values. ::

        out = max(image1, image2)

    :rtype: :py:class:`~PIL.Image.Image`
    """

    image1.load()
    image2.load()
    return image1._new(image1.im.chop_lighter(image2.im))


def darker(image1: Image.Image, image2: Image.Image) -> Image.Image:
    """
    Compares the two images, pixel by pixel, and returns a new image containing
    the darker values. ::

        out = min(image1, image2)

    :rtype: :py:class:`~PIL.Image.Image`
    """

    image1.load()
    image2.load()
    return image1._new(image1.im.chop_darker(image2.im))

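A minimal usage sketch for the channel operations defined so far; the gradient inputs are synthetic so the example is self-contained.

# Usage sketch for constant/invert/lighter/darker defined above.
from PIL import Image, ImageChops

a = Image.linear_gradient("L")                  # 256x256 vertical ramp
b = a.transpose(Image.Transpose.ROTATE_90)

print(ImageChops.lighter(a, b).getpixel((0, 0)))   # max(a, b) per pixel
print(ImageChops.darker(a, b).getpixel((0, 0)))    # min(a, b) per pixel
print(ImageChops.invert(a).getpixel((0, 0)))       # 255 - a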
def difference(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Returns the absolute value of the pixel-by-pixel difference between the two
|
|
||||||
images. ::
|
|
||||||
|
|
||||||
out = abs(image1 - image2)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_difference(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def multiply(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Superimposes two images on top of each other.
|
|
||||||
|
|
||||||
If you multiply an image with a solid black image, the result is black. If
|
|
||||||
you multiply with a solid white image, the image is unaffected. ::
|
|
||||||
|
|
||||||
out = image1 * image2 / MAX
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_multiply(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def screen(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Superimposes two inverted images on top of each other. ::
|
|
||||||
|
|
||||||
out = MAX - ((MAX - image1) * (MAX - image2) / MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_screen(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def soft_light(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Superimposes two images on top of each other using the Soft Light algorithm
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_soft_light(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def hard_light(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Superimposes two images on top of each other using the Hard Light algorithm
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_hard_light(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def overlay(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Superimposes two images on top of each other using the Overlay algorithm
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_overlay(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def add(
|
|
||||||
image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0
|
|
||||||
) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Adds two images, dividing the result by scale and adding the
|
|
||||||
offset. If omitted, scale defaults to 1.0, and offset to 0.0. ::
|
|
||||||
|
|
||||||
out = ((image1 + image2) / scale + offset)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_add(image2.im, scale, offset))
|
|
||||||
|
|
||||||
|
|
||||||
def subtract(
|
|
||||||
image1: Image.Image, image2: Image.Image, scale: float = 1.0, offset: float = 0
|
|
||||||
) -> Image.Image:
|
|
||||||
"""
|
|
||||||
Subtracts two images, dividing the result by scale and adding the offset.
|
|
||||||
If omitted, scale defaults to 1.0, and offset to 0.0. ::
|
|
||||||
|
|
||||||
out = ((image1 - image2) / scale + offset)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_subtract(image2.im, scale, offset))
|
|
||||||
|
|
||||||
|
|
||||||
def add_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""Add two images, without clipping the result. ::
|
|
||||||
|
|
||||||
out = ((image1 + image2) % MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_add_modulo(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def subtract_modulo(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""Subtract two images, without clipping the result. ::
|
|
||||||
|
|
||||||
out = ((image1 - image2) % MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_subtract_modulo(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def logical_and(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""Logical AND between two images.
|
|
||||||
|
|
||||||
Both of the images must have mode "1". If you would like to perform a
|
|
||||||
logical AND on an image with a mode other than "1", try
|
|
||||||
:py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask
|
|
||||||
as the second image. ::
|
|
||||||
|
|
||||||
out = ((image1 and image2) % MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_and(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def logical_or(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""Logical OR between two images.
|
|
||||||
|
|
||||||
Both of the images must have mode "1". ::
|
|
||||||
|
|
||||||
out = ((image1 or image2) % MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_or(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def logical_xor(image1: Image.Image, image2: Image.Image) -> Image.Image:
|
|
||||||
"""Logical XOR between two images.
|
|
||||||
|
|
||||||
Both of the images must have mode "1". ::
|
|
||||||
|
|
||||||
out = ((bool(image1) != bool(image2)) % MAX)
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
image1.load()
|
|
||||||
image2.load()
|
|
||||||
return image1._new(image1.im.chop_xor(image2.im))
|
|
||||||
|
|
||||||
|
|
||||||
def blend(image1: Image.Image, image2: Image.Image, alpha: float) -> Image.Image:
|
|
||||||
"""Blend images using constant transparency weight. Alias for
|
|
||||||
:py:func:`PIL.Image.blend`.
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
return Image.blend(image1, image2, alpha)
|
|
||||||
|
|
||||||
|
|
||||||
def composite(
|
|
||||||
image1: Image.Image, image2: Image.Image, mask: Image.Image
|
|
||||||
) -> Image.Image:
|
|
||||||
"""Create composite using transparency mask. Alias for
|
|
||||||
:py:func:`PIL.Image.composite`.
|
|
||||||
|
|
||||||
:rtype: :py:class:`~PIL.Image.Image`
|
|
||||||
"""
|
|
||||||
|
|
||||||
return Image.composite(image1, image2, mask)


def offset(image: Image.Image, xoffset: int, yoffset: int | None = None) -> Image.Image:
    """Returns a copy of the image where data has been offset by the given
    distances. Data wraps around the edges. If ``yoffset`` is omitted, it
    is assumed to be equal to ``xoffset``.

    :param image: Input image.
    :param xoffset: The horizontal distance.
    :param yoffset: The vertical distance. If omitted, both
        distances are set to the same value.
    :rtype: :py:class:`~PIL.Image.Image`
    """

    if yoffset is None:
        yoffset = xoffset
    image.load()
    return image._new(image.im.offset(xoffset, yoffset))
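offset() wraps pixel data around the edges, which makes it handy for building seamlessly tiling textures; a synthetic gradient keeps the sketch self-contained.

# Shifting an image with wrap-around using ImageChops.offset() above.
from PIL import Image, ImageChops

tile = Image.linear_gradient("L")
wrapped = ImageChops.offset(tile, 128, 64)   # right 128, down 64, wrapping
print(wrapped.size)                          # (256, 256), same as the input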
@@ -1,317 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# map CSS3-style colour description strings to RGB
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 2002-10-24 fl Added support for CSS-style color strings
|
|
||||||
# 2002-12-15 fl Added RGBA support
|
|
||||||
# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2
|
|
||||||
# 2004-07-19 fl Fixed gray/grey spelling issues
|
|
||||||
# 2009-03-05 fl Fixed rounding error in grayscale calculation
|
|
||||||
#
|
|
||||||
# Copyright (c) 2002-2004 by Secret Labs AB
|
|
||||||
# Copyright (c) 2002-2004 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
from functools import lru_cache
|
|
||||||
|
|
||||||
from . import Image
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
|
||||||
def getrgb(color):
|
|
||||||
"""
|
|
||||||
Convert a color string to an RGB or RGBA tuple. If the string cannot be
|
|
||||||
parsed, this function raises a :py:exc:`ValueError` exception.
|
|
||||||
|
|
||||||
.. versionadded:: 1.1.4
|
|
||||||
|
|
||||||
:param color: A color string
|
|
||||||
:return: ``(red, green, blue[, alpha])``
|
|
||||||
"""
|
|
||||||
if len(color) > 100:
|
|
||||||
msg = "color specifier is too long"
|
|
||||||
raise ValueError(msg)
|
|
||||||
color = color.lower()
|
|
||||||
|
|
||||||
rgb = colormap.get(color, None)
|
|
||||||
if rgb:
|
|
||||||
if isinstance(rgb, tuple):
|
|
||||||
return rgb
|
|
||||||
colormap[color] = rgb = getrgb(rgb)
|
|
||||||
return rgb
|
|
||||||
|
|
||||||
# check for known string formats
|
|
||||||
if re.match("#[a-f0-9]{3}$", color):
|
|
||||||
return int(color[1] * 2, 16), int(color[2] * 2, 16), int(color[3] * 2, 16)
|
|
||||||
|
|
||||||
if re.match("#[a-f0-9]{4}$", color):
|
|
||||||
return (
|
|
||||||
int(color[1] * 2, 16),
|
|
||||||
int(color[2] * 2, 16),
|
|
||||||
int(color[3] * 2, 16),
|
|
||||||
int(color[4] * 2, 16),
|
|
||||||
)
|
|
||||||
|
|
||||||
if re.match("#[a-f0-9]{6}$", color):
|
|
||||||
return int(color[1:3], 16), int(color[3:5], 16), int(color[5:7], 16)
|
|
||||||
|
|
||||||
if re.match("#[a-f0-9]{8}$", color):
|
|
||||||
return (
|
|
||||||
int(color[1:3], 16),
|
|
||||||
int(color[3:5], 16),
|
|
||||||
int(color[5:7], 16),
|
|
||||||
int(color[7:9], 16),
|
|
||||||
)
|
|
||||||
|
|
||||||
m = re.match(r"rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
|
|
||||||
if m:
|
|
||||||
return int(m.group(1)), int(m.group(2)), int(m.group(3))
|
|
||||||
|
|
||||||
m = re.match(r"rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color)
|
|
||||||
if m:
|
|
||||||
return (
|
|
||||||
int((int(m.group(1)) * 255) / 100.0 + 0.5),
|
|
||||||
int((int(m.group(2)) * 255) / 100.0 + 0.5),
|
|
||||||
int((int(m.group(3)) * 255) / 100.0 + 0.5),
|
|
||||||
)
|
|
||||||
|
|
||||||
m = re.match(
|
|
||||||
r"hsl\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
|
|
||||||
)
|
|
||||||
if m:
|
|
||||||
from colorsys import hls_to_rgb
|
|
||||||
|
|
||||||
rgb = hls_to_rgb(
|
|
||||||
float(m.group(1)) / 360.0,
|
|
||||||
float(m.group(3)) / 100.0,
|
|
||||||
float(m.group(2)) / 100.0,
|
|
||||||
)
|
|
||||||
return (
|
|
||||||
int(rgb[0] * 255 + 0.5),
|
|
||||||
int(rgb[1] * 255 + 0.5),
|
|
||||||
int(rgb[2] * 255 + 0.5),
|
|
||||||
)
|
|
||||||
|
|
||||||
m = re.match(
|
|
||||||
r"hs[bv]\(\s*(\d+\.?\d*)\s*,\s*(\d+\.?\d*)%\s*,\s*(\d+\.?\d*)%\s*\)$", color
|
|
||||||
)
|
|
||||||
if m:
|
|
||||||
from colorsys import hsv_to_rgb
|
|
||||||
|
|
||||||
rgb = hsv_to_rgb(
|
|
||||||
float(m.group(1)) / 360.0,
|
|
||||||
float(m.group(2)) / 100.0,
|
|
||||||
float(m.group(3)) / 100.0,
|
|
||||||
)
|
|
||||||
return (
|
|
||||||
int(rgb[0] * 255 + 0.5),
|
|
||||||
int(rgb[1] * 255 + 0.5),
|
|
||||||
int(rgb[2] * 255 + 0.5),
|
|
||||||
)
|
|
||||||
|
|
||||||
m = re.match(r"rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color)
|
|
||||||
if m:
|
|
||||||
return int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4))
|
|
||||||
msg = f"unknown color specifier: {repr(color)}"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
|
||||||
def getcolor(color, mode):
|
|
||||||
"""
|
|
||||||
Same as :py:func:`~PIL.ImageColor.getrgb` for most modes. However, if
|
|
||||||
``mode`` is HSV, converts the RGB value to a HSV value, or if ``mode`` is
|
|
||||||
not color or a palette image, converts the RGB value to a grayscale value.
|
|
||||||
If the string cannot be parsed, this function raises a :py:exc:`ValueError`
|
|
||||||
exception.
|
|
||||||
|
|
||||||
.. versionadded:: 1.1.4
|
|
||||||
|
|
||||||
:param color: A color string
|
|
||||||
:param mode: Convert result to this mode
|
|
||||||
:return: ``(graylevel[, alpha]) or (red, green, blue[, alpha])``
|
|
||||||
"""
|
|
||||||
# same as getrgb, but converts the result to the given mode
|
|
||||||
color, alpha = getrgb(color), 255
|
|
||||||
if len(color) == 4:
|
|
||||||
color, alpha = color[:3], color[3]
|
|
||||||
|
|
||||||
if mode == "HSV":
|
|
||||||
from colorsys import rgb_to_hsv
|
|
||||||
|
|
||||||
r, g, b = color
|
|
||||||
h, s, v = rgb_to_hsv(r / 255, g / 255, b / 255)
|
|
||||||
return int(h * 255), int(s * 255), int(v * 255)
|
|
||||||
elif Image.getmodebase(mode) == "L":
|
|
||||||
r, g, b = color
|
|
||||||
# ITU-R Recommendation 601-2 for nonlinear RGB
|
|
||||||
# scaled to 24 bits to match the convert's implementation.
|
|
||||||
color = (r * 19595 + g * 38470 + b * 7471 + 0x8000) >> 16
|
|
||||||
if mode[-1] == "A":
|
|
||||||
return color, alpha
|
|
||||||
else:
|
|
||||||
if mode[-1] == "A":
|
|
||||||
return color + (alpha,)
|
|
||||||
return color
|
|
||||||
|
|
||||||
|
|
||||||
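Quick sanity checks for the two parsers above; the expected values follow from the hex/percentage/HSL branches of getrgb() and the grayscale conversion in getcolor().

# Usage sketch for getrgb()/getcolor() defined above.
from PIL import ImageColor

print(ImageColor.getrgb("#ff8000"))              # (255, 128, 0)
print(ImageColor.getrgb("rgb(100%, 0%, 0%)"))    # (255, 0, 0)
print(ImageColor.getrgb("hsl(120, 100%, 50%)"))  # (0, 255, 0)
print(ImageColor.getcolor("black", "LA"))        # (0, 255): grey level plus alpha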
colormap = {
|
|
||||||
# X11 colour table from https://drafts.csswg.org/css-color-4/, with
|
|
||||||
# gray/grey spelling issues fixed. This is a superset of HTML 4.0
|
|
||||||
# colour names used in CSS 1.
|
|
||||||
"aliceblue": "#f0f8ff",
|
|
||||||
"antiquewhite": "#faebd7",
|
|
||||||
"aqua": "#00ffff",
|
|
||||||
"aquamarine": "#7fffd4",
|
|
||||||
"azure": "#f0ffff",
|
|
||||||
"beige": "#f5f5dc",
|
|
||||||
"bisque": "#ffe4c4",
|
|
||||||
"black": "#000000",
|
|
||||||
"blanchedalmond": "#ffebcd",
|
|
||||||
"blue": "#0000ff",
|
|
||||||
"blueviolet": "#8a2be2",
|
|
||||||
"brown": "#a52a2a",
|
|
||||||
"burlywood": "#deb887",
|
|
||||||
"cadetblue": "#5f9ea0",
|
|
||||||
"chartreuse": "#7fff00",
|
|
||||||
"chocolate": "#d2691e",
|
|
||||||
"coral": "#ff7f50",
|
|
||||||
"cornflowerblue": "#6495ed",
|
|
||||||
"cornsilk": "#fff8dc",
|
|
||||||
"crimson": "#dc143c",
|
|
||||||
"cyan": "#00ffff",
|
|
||||||
"darkblue": "#00008b",
|
|
||||||
"darkcyan": "#008b8b",
|
|
||||||
"darkgoldenrod": "#b8860b",
|
|
||||||
"darkgray": "#a9a9a9",
|
|
||||||
"darkgrey": "#a9a9a9",
|
|
||||||
"darkgreen": "#006400",
|
|
||||||
"darkkhaki": "#bdb76b",
|
|
||||||
"darkmagenta": "#8b008b",
|
|
||||||
"darkolivegreen": "#556b2f",
|
|
||||||
"darkorange": "#ff8c00",
|
|
||||||
"darkorchid": "#9932cc",
|
|
||||||
"darkred": "#8b0000",
|
|
||||||
"darksalmon": "#e9967a",
|
|
||||||
"darkseagreen": "#8fbc8f",
|
|
||||||
"darkslateblue": "#483d8b",
|
|
||||||
"darkslategray": "#2f4f4f",
|
|
||||||
"darkslategrey": "#2f4f4f",
|
|
||||||
"darkturquoise": "#00ced1",
|
|
||||||
"darkviolet": "#9400d3",
|
|
||||||
"deeppink": "#ff1493",
|
|
||||||
"deepskyblue": "#00bfff",
|
|
||||||
"dimgray": "#696969",
|
|
||||||
"dimgrey": "#696969",
|
|
||||||
"dodgerblue": "#1e90ff",
|
|
||||||
"firebrick": "#b22222",
|
|
||||||
"floralwhite": "#fffaf0",
|
|
||||||
"forestgreen": "#228b22",
|
|
||||||
"fuchsia": "#ff00ff",
|
|
||||||
"gainsboro": "#dcdcdc",
|
|
||||||
"ghostwhite": "#f8f8ff",
|
|
||||||
"gold": "#ffd700",
|
|
||||||
"goldenrod": "#daa520",
|
|
||||||
"gray": "#808080",
|
|
||||||
"grey": "#808080",
|
|
||||||
"green": "#008000",
|
|
||||||
"greenyellow": "#adff2f",
|
|
||||||
"honeydew": "#f0fff0",
|
|
||||||
"hotpink": "#ff69b4",
|
|
||||||
"indianred": "#cd5c5c",
|
|
||||||
"indigo": "#4b0082",
|
|
||||||
"ivory": "#fffff0",
|
|
||||||
"khaki": "#f0e68c",
|
|
||||||
"lavender": "#e6e6fa",
|
|
||||||
"lavenderblush": "#fff0f5",
|
|
||||||
"lawngreen": "#7cfc00",
|
|
||||||
"lemonchiffon": "#fffacd",
|
|
||||||
"lightblue": "#add8e6",
|
|
||||||
"lightcoral": "#f08080",
|
|
||||||
"lightcyan": "#e0ffff",
|
|
||||||
"lightgoldenrodyellow": "#fafad2",
|
|
||||||
"lightgreen": "#90ee90",
|
|
||||||
"lightgray": "#d3d3d3",
|
|
||||||
"lightgrey": "#d3d3d3",
|
|
||||||
"lightpink": "#ffb6c1",
|
|
||||||
"lightsalmon": "#ffa07a",
|
|
||||||
"lightseagreen": "#20b2aa",
|
|
||||||
"lightskyblue": "#87cefa",
|
|
||||||
"lightslategray": "#778899",
|
|
||||||
"lightslategrey": "#778899",
|
|
||||||
"lightsteelblue": "#b0c4de",
|
|
||||||
"lightyellow": "#ffffe0",
|
|
||||||
"lime": "#00ff00",
|
|
||||||
"limegreen": "#32cd32",
|
|
||||||
"linen": "#faf0e6",
|
|
||||||
"magenta": "#ff00ff",
|
|
||||||
"maroon": "#800000",
|
|
||||||
"mediumaquamarine": "#66cdaa",
|
|
||||||
"mediumblue": "#0000cd",
|
|
||||||
"mediumorchid": "#ba55d3",
|
|
||||||
"mediumpurple": "#9370db",
|
|
||||||
"mediumseagreen": "#3cb371",
|
|
||||||
"mediumslateblue": "#7b68ee",
|
|
||||||
"mediumspringgreen": "#00fa9a",
|
|
||||||
"mediumturquoise": "#48d1cc",
|
|
||||||
"mediumvioletred": "#c71585",
|
|
||||||
"midnightblue": "#191970",
|
|
||||||
"mintcream": "#f5fffa",
|
|
||||||
"mistyrose": "#ffe4e1",
|
|
||||||
"moccasin": "#ffe4b5",
|
|
||||||
"navajowhite": "#ffdead",
|
|
||||||
"navy": "#000080",
|
|
||||||
"oldlace": "#fdf5e6",
|
|
||||||
"olive": "#808000",
|
|
||||||
"olivedrab": "#6b8e23",
|
|
||||||
"orange": "#ffa500",
|
|
||||||
"orangered": "#ff4500",
|
|
||||||
"orchid": "#da70d6",
|
|
||||||
"palegoldenrod": "#eee8aa",
|
|
||||||
"palegreen": "#98fb98",
|
|
||||||
"paleturquoise": "#afeeee",
|
|
||||||
"palevioletred": "#db7093",
|
|
||||||
"papayawhip": "#ffefd5",
|
|
||||||
"peachpuff": "#ffdab9",
|
|
||||||
"peru": "#cd853f",
|
|
||||||
"pink": "#ffc0cb",
|
|
||||||
"plum": "#dda0dd",
|
|
||||||
"powderblue": "#b0e0e6",
|
|
||||||
"purple": "#800080",
|
|
||||||
"rebeccapurple": "#663399",
|
|
||||||
"red": "#ff0000",
|
|
||||||
"rosybrown": "#bc8f8f",
|
|
||||||
"royalblue": "#4169e1",
|
|
||||||
"saddlebrown": "#8b4513",
|
|
||||||
"salmon": "#fa8072",
|
|
||||||
"sandybrown": "#f4a460",
|
|
||||||
"seagreen": "#2e8b57",
|
|
||||||
"seashell": "#fff5ee",
|
|
||||||
"sienna": "#a0522d",
|
|
||||||
"silver": "#c0c0c0",
|
|
||||||
"skyblue": "#87ceeb",
|
|
||||||
"slateblue": "#6a5acd",
|
|
||||||
"slategray": "#708090",
|
|
||||||
"slategrey": "#708090",
|
|
||||||
"snow": "#fffafa",
|
|
||||||
"springgreen": "#00ff7f",
|
|
||||||
"steelblue": "#4682b4",
|
|
||||||
"tan": "#d2b48c",
|
|
||||||
"teal": "#008080",
|
|
||||||
"thistle": "#d8bfd8",
|
|
||||||
"tomato": "#ff6347",
|
|
||||||
"turquoise": "#40e0d0",
|
|
||||||
"violet": "#ee82ee",
|
|
||||||
"wheat": "#f5deb3",
|
|
||||||
"white": "#ffffff",
|
|
||||||
"whitesmoke": "#f5f5f5",
|
|
||||||
"yellow": "#ffff00",
|
|
||||||
"yellowgreen": "#9acd32",
|
|
||||||
}
|
|
||||||
@@ -1,193 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# WCK-style drawing interface operations
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 2003-12-07 fl created
|
|
||||||
# 2005-05-15 fl updated; added to PIL as ImageDraw2
|
|
||||||
# 2005-05-15 fl added text support
|
|
||||||
# 2005-05-20 fl added arc/chord/pieslice support
|
|
||||||
#
|
|
||||||
# Copyright (c) 2003-2005 by Secret Labs AB
|
|
||||||
# Copyright (c) 2003-2005 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
(Experimental) WCK-style drawing interface operations
|
|
||||||
|
|
||||||
.. seealso:: :py:mod:`PIL.ImageDraw`
|
|
||||||
"""
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
from . import Image, ImageColor, ImageDraw, ImageFont, ImagePath
|
|
||||||
|
|
||||||
|
|
||||||
class Pen:
|
|
||||||
"""Stores an outline color and width."""
|
|
||||||
|
|
||||||
def __init__(self, color, width=1, opacity=255):
|
|
||||||
self.color = ImageColor.getrgb(color)
|
|
||||||
self.width = width
|
|
||||||
|
|
||||||
|
|
||||||
class Brush:
|
|
||||||
"""Stores a fill color"""
|
|
||||||
|
|
||||||
def __init__(self, color, opacity=255):
|
|
||||||
self.color = ImageColor.getrgb(color)
|
|
||||||
|
|
||||||
|
|
||||||
class Font:
|
|
||||||
"""Stores a TrueType font and color"""
|
|
||||||
|
|
||||||
def __init__(self, color, file, size=12):
|
|
||||||
# FIXME: add support for bitmap fonts
|
|
||||||
self.color = ImageColor.getrgb(color)
|
|
||||||
self.font = ImageFont.truetype(file, size)
|
|
||||||
|
|
||||||
|
|
||||||
class Draw:
|
|
||||||
"""
|
|
||||||
(Experimental) WCK-style drawing interface
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, image, size=None, color=None):
|
|
||||||
if not hasattr(image, "im"):
|
|
||||||
image = Image.new(image, size, color)
|
|
||||||
self.draw = ImageDraw.Draw(image)
|
|
||||||
self.image = image
|
|
||||||
self.transform = None
|
|
||||||
|
|
||||||
def flush(self):
|
|
||||||
return self.image
|
|
||||||
|
|
||||||
def render(self, op, xy, pen, brush=None):
|
|
||||||
# handle color arguments
|
|
||||||
outline = fill = None
|
|
||||||
width = 1
|
|
||||||
if isinstance(pen, Pen):
|
|
||||||
outline = pen.color
|
|
||||||
width = pen.width
|
|
||||||
elif isinstance(brush, Pen):
|
|
||||||
outline = brush.color
|
|
||||||
width = brush.width
|
|
||||||
if isinstance(brush, Brush):
|
|
||||||
fill = brush.color
|
|
||||||
elif isinstance(pen, Brush):
|
|
||||||
fill = pen.color
|
|
||||||
# handle transformation
|
|
||||||
if self.transform:
|
|
||||||
xy = ImagePath.Path(xy)
|
|
||||||
xy.transform(self.transform)
|
|
||||||
# render the item
|
|
||||||
if op == "line":
|
|
||||||
self.draw.line(xy, fill=outline, width=width)
|
|
||||||
else:
|
|
||||||
getattr(self.draw, op)(xy, fill=fill, outline=outline)
|
|
||||||
|
|
||||||
def settransform(self, offset):
|
|
||||||
"""Sets a transformation offset."""
|
|
||||||
(xoffset, yoffset) = offset
|
|
||||||
self.transform = (1, 0, xoffset, 0, 1, yoffset)
|
|
||||||
|
|
||||||
def arc(self, xy, start, end, *options):
|
|
||||||
"""
|
|
||||||
Draws an arc (a portion of a circle outline) between the start and end
|
|
||||||
angles, inside the given bounding box.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.arc`
|
|
||||||
"""
|
|
||||||
self.render("arc", xy, start, end, *options)
|
|
||||||
|
|
||||||
def chord(self, xy, start, end, *options):
|
|
||||||
"""
|
|
||||||
Same as :py:meth:`~PIL.ImageDraw2.Draw.arc`, but connects the end points
|
|
||||||
with a straight line.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.chord`
|
|
||||||
"""
|
|
||||||
self.render("chord", xy, start, end, *options)
|
|
||||||
|
|
||||||
def ellipse(self, xy, *options):
|
|
||||||
"""
|
|
||||||
Draws an ellipse inside the given bounding box.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.ellipse`
|
|
||||||
"""
|
|
||||||
self.render("ellipse", xy, *options)
|
|
||||||
|
|
||||||
def line(self, xy, *options):
|
|
||||||
"""
|
|
||||||
Draws a line between the coordinates in the ``xy`` list.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.line`
|
|
||||||
"""
|
|
||||||
self.render("line", xy, *options)
|
|
||||||
|
|
||||||
def pieslice(self, xy, start, end, *options):
|
|
||||||
"""
|
|
||||||
Same as arc, but also draws straight lines between the end points and the
|
|
||||||
center of the bounding box.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.pieslice`
|
|
||||||
"""
|
|
||||||
self.render("pieslice", xy, start, end, *options)
|
|
||||||
|
|
||||||
def polygon(self, xy, *options):
|
|
||||||
"""
|
|
||||||
Draws a polygon.
|
|
||||||
|
|
||||||
The polygon outline consists of straight lines between the given
|
|
||||||
coordinates, plus a straight line between the last and the first
|
|
||||||
coordinate.
|
|
||||||
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.polygon`
|
|
||||||
"""
|
|
||||||
self.render("polygon", xy, *options)
|
|
||||||
|
|
||||||
def rectangle(self, xy, *options):
|
|
||||||
"""
|
|
||||||
Draws a rectangle.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.rectangle`
|
|
||||||
"""
|
|
||||||
self.render("rectangle", xy, *options)
|
|
||||||
|
|
||||||
def text(self, xy, text, font):
|
|
||||||
"""
|
|
||||||
Draws the string at the given position.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.text`
|
|
||||||
"""
|
|
||||||
if self.transform:
|
|
||||||
xy = ImagePath.Path(xy)
|
|
||||||
xy.transform(self.transform)
|
|
||||||
self.draw.text(xy, text, font=font.font, fill=font.color)
|
|
||||||
|
|
||||||
def textbbox(self, xy, text, font):
|
|
||||||
"""
|
|
||||||
Returns bounding box (in pixels) of given text.
|
|
||||||
|
|
||||||
:return: ``(left, top, right, bottom)`` bounding box
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textbbox`
|
|
||||||
"""
|
|
||||||
if self.transform:
|
|
||||||
xy = ImagePath.Path(xy)
|
|
||||||
xy.transform(self.transform)
|
|
||||||
return self.draw.textbbox(xy, text, font=font.font)
|
|
||||||
|
|
||||||
def textlength(self, text, font):
|
|
||||||
"""
|
|
||||||
Returns length (in pixels) of given text.
|
|
||||||
This is the amount by which following text should be offset.
|
|
||||||
|
|
||||||
.. seealso:: :py:meth:`PIL.ImageDraw.ImageDraw.textlength`
|
|
||||||
"""
|
|
||||||
return self.draw.textlength(text, font=font.font)
|
|
||||||
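A small sketch of the experimental Pen/Brush/Draw interface defined above; the colour names resolve through ImageColor, and pens/brushes are routed through render() as shown in the class.

# Drawing with the WCK-style interface above.
from PIL import Image, ImageDraw2

canvas = Image.new("RGB", (200, 200), "white")
draw = ImageDraw2.Draw(canvas)

pen = ImageDraw2.Pen("navy", width=3)      # outline colour and width
brush = ImageDraw2.Brush("gold")           # fill colour

draw.rectangle((20, 20, 180, 180), pen, brush)
draw.ellipse((60, 60, 140, 140), pen)
draw.line((20, 20, 180, 180), pen)
canvas = draw.flush()                      # returns the underlying Image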
@@ -1,104 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# image enhancement classes
#
# For a background, see "Image Processing By Interpolation and
# Extrapolation", Paul Haeberli and Douglas Voorhies. Available
# at http://www.graficaobscura.com/interp/index.html
#
# History:
# 1996-03-23 fl Created
# 2009-06-16 fl Fixed mean calculation
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

from . import Image, ImageFilter, ImageStat


class _Enhance:
    def enhance(self, factor):
        """
        Returns an enhanced image.

        :param factor: A floating point value controlling the enhancement.
            Factor 1.0 always returns a copy of the original image,
            lower factors mean less color (brightness, contrast,
            etc), and higher values more. There are no restrictions
            on this value.
        :rtype: :py:class:`~PIL.Image.Image`
        """
        return Image.blend(self.degenerate, self.image, factor)


class Color(_Enhance):
    """Adjust image color balance.

    This class can be used to adjust the colour balance of an image, in
    a manner similar to the controls on a colour TV set. An enhancement
    factor of 0.0 gives a black and white image. A factor of 1.0 gives
    the original image.
    """

    def __init__(self, image):
        self.image = image
        self.intermediate_mode = "L"
        if "A" in image.getbands():
            self.intermediate_mode = "LA"

        self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)


class Contrast(_Enhance):
    """Adjust image contrast.

    This class can be used to control the contrast of an image, similar
    to the contrast control on a TV set. An enhancement factor of 0.0
    gives a solid gray image. A factor of 1.0 gives the original image.
    """

    def __init__(self, image):
        self.image = image
        mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
        self.degenerate = Image.new("L", image.size, mean).convert(image.mode)

        if "A" in image.getbands():
            self.degenerate.putalpha(image.getchannel("A"))


class Brightness(_Enhance):
    """Adjust image brightness.

    This class can be used to control the brightness of an image. An
    enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
    original image.
    """

    def __init__(self, image):
        self.image = image
        self.degenerate = Image.new(image.mode, image.size, 0)

        if "A" in image.getbands():
            self.degenerate.putalpha(image.getchannel("A"))


class Sharpness(_Enhance):
    """Adjust image sharpness.

    This class can be used to adjust the sharpness of an image. An
    enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
    original image, and a factor of 2.0 gives a sharpened image.
    """

    def __init__(self, image):
        self.image = image
        self.degenerate = image.filter(ImageFilter.SMOOTH)

        if "A" in image.getbands():
            self.degenerate.putalpha(image.getchannel("A"))
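Typical chained use of the four enhancers above; the factors are illustrative and each call blends the "degenerate" image against the original as enhance() shows.

# Chaining the enhancers defined above.
from PIL import Image, ImageEnhance

im = Image.linear_gradient("L").convert("RGB")

im = ImageEnhance.Color(im).enhance(0.8)       # slightly desaturate
im = ImageEnhance.Contrast(im).enhance(1.2)    # boost contrast
im = ImageEnhance.Brightness(im).enhance(1.1)  # brighten
im = ImageEnhance.Sharpness(im).enhance(2.0)   # sharpen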
@@ -1,795 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# base class for image file handlers
#
# history:
# 1995-09-09 fl Created
# 1996-03-11 fl Fixed load mechanism.
# 1996-04-15 fl Added pcx/xbm decoders.
# 1996-04-30 fl Added encoders.
# 1996-12-14 fl Added load helpers
# 1997-01-11 fl Use encode_to_file where possible
# 1997-08-27 fl Flush output in _save
# 1998-03-05 fl Use memory mapping for some modes
# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B"
# 1999-05-31 fl Added image parser
# 2000-10-12 fl Set readonly flag on memory-mapped images
# 2002-03-20 fl Use better messages for common decoder errors
# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available
# 2003-10-30 fl Added StubImageFile class
# 2004-02-25 fl Made incremental parser more robust
#
# Copyright (c) 1997-2004 by Secret Labs AB
# Copyright (c) 1995-2004 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import io
import itertools
import struct
import sys
from typing import Any, NamedTuple

from . import Image
from ._deprecate import deprecate
from ._util import is_path

MAXBLOCK = 65536

SAFEBLOCK = 1024 * 1024

LOAD_TRUNCATED_IMAGES = False
"""Whether or not to load truncated image files. User code may change this."""

ERRORS = {
    -1: "image buffer overrun error",
    -2: "decoding error",
    -3: "unknown error",
    -8: "bad configuration",
    -9: "out of memory error",
}
"""
Dict of known error codes returned from :meth:`.PyDecoder.decode`,
:meth:`.PyEncoder.encode` :meth:`.PyEncoder.encode_to_pyfd` and
:meth:`.PyEncoder.encode_to_file`.
"""


#
# --------------------------------------------------------------------
# Helpers


def _get_oserror(error, *, encoder):
    try:
        msg = Image.core.getcodecstatus(error)
    except AttributeError:
        msg = ERRORS.get(error)
    if not msg:
        msg = f"{'encoder' if encoder else 'decoder'} error {error}"
    msg += f" when {'writing' if encoder else 'reading'} image file"
    return OSError(msg)


def raise_oserror(error):
    deprecate(
        "raise_oserror",
        12,
        action="It is only useful for translating error codes returned by a codec's "
        "decode() method, which ImageFile already does automatically.",
    )
    raise _get_oserror(error, encoder=False)


def _tilesort(t):
    # sort on offset
    return t[2]


class _Tile(NamedTuple):
    encoder_name: str
    extents: tuple[int, int, int, int]
    offset: int
    args: tuple[Any, ...] | str | None

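The module-level switches above are plain globals; flipping LOAD_TRUNCATED_IMAGES lets load() return partially decoded data instead of raising "image file is truncated". "photo.jpg" is a hypothetical input file.

# Tolerating a truncated file via the module flag defined above.
from PIL import Image, ImageFile

ImageFile.LOAD_TRUNCATED_IMAGES = True
try:
    with Image.open("photo.jpg") as im:
        im.load()   # decodes what is available instead of raising
finally:
    ImageFile.LOAD_TRUNCATED_IMAGES = False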
#
|
|
||||||
# --------------------------------------------------------------------
|
|
||||||
# ImageFile base class
|
|
||||||
|
|
||||||
|
|
||||||
class ImageFile(Image.Image):
|
|
||||||
"""Base class for image file format handlers."""
|
|
||||||
|
|
||||||
def __init__(self, fp=None, filename=None):
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
self._min_frame = 0
|
|
||||||
|
|
||||||
self.custom_mimetype = None
|
|
||||||
|
|
||||||
self.tile = None
|
|
||||||
""" A list of tile descriptors, or ``None`` """
|
|
||||||
|
|
||||||
self.readonly = 1 # until we know better
|
|
||||||
|
|
||||||
self.decoderconfig = ()
|
|
||||||
self.decodermaxblock = MAXBLOCK
|
|
||||||
|
|
||||||
if is_path(fp):
|
|
||||||
# filename
|
|
||||||
self.fp = open(fp, "rb")
|
|
||||||
self.filename = fp
|
|
||||||
self._exclusive_fp = True
|
|
||||||
else:
|
|
||||||
# stream
|
|
||||||
self.fp = fp
|
|
||||||
self.filename = filename
|
|
||||||
# can be overridden
|
|
||||||
self._exclusive_fp = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
self._open()
|
|
||||||
except (
|
|
||||||
IndexError, # end of data
|
|
||||||
TypeError, # end of data (ord)
|
|
||||||
KeyError, # unsupported mode
|
|
||||||
EOFError, # got header but not the first frame
|
|
||||||
struct.error,
|
|
||||||
) as v:
|
|
||||||
raise SyntaxError(v) from v
|
|
||||||
|
|
||||||
if not self.mode or self.size[0] <= 0 or self.size[1] <= 0:
|
|
||||||
msg = "not identified by this driver"
|
|
||||||
raise SyntaxError(msg)
|
|
||||||
except BaseException:
|
|
||||||
# close the file only if we have opened it in this constructor
|
|
||||||
if self._exclusive_fp:
|
|
||||||
self.fp.close()
|
|
||||||
raise
|
|
||||||
|
|
||||||
def get_format_mimetype(self):
|
|
||||||
if self.custom_mimetype:
|
|
||||||
return self.custom_mimetype
|
|
||||||
if self.format is not None:
|
|
||||||
return Image.MIME.get(self.format.upper())
|
|
||||||
|
|
||||||
def __setstate__(self, state):
|
|
||||||
self.tile = []
|
|
||||||
super().__setstate__(state)
|
|
||||||
|
|
||||||
def verify(self):
|
|
||||||
"""Check file integrity"""
|
|
||||||
|
|
||||||
# raise exception if something's wrong. must be called
|
|
||||||
# directly after open, and closes file when finished.
|
|
||||||
if self._exclusive_fp:
|
|
||||||
self.fp.close()
|
|
||||||
self.fp = None
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
"""Load image data based on tile list"""
|
|
||||||
|
|
||||||
if self.tile is None:
|
|
||||||
msg = "cannot load this image"
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
pixel = Image.Image.load(self)
|
|
||||||
if not self.tile:
|
|
||||||
return pixel
|
|
||||||
|
|
||||||
self.map = None
|
|
||||||
use_mmap = self.filename and len(self.tile) == 1
|
|
||||||
# As of pypy 2.1.0, memory mapping was failing here.
|
|
||||||
use_mmap = use_mmap and not hasattr(sys, "pypy_version_info")
|
|
||||||
|
|
||||||
readonly = 0
|
|
||||||
|
|
||||||
# look for read/seek overrides
|
|
||||||
try:
|
|
||||||
read = self.load_read
|
|
||||||
# don't use mmap if there are custom read/seek functions
|
|
||||||
use_mmap = False
|
|
||||||
except AttributeError:
|
|
||||||
read = self.fp.read
|
|
||||||
|
|
||||||
try:
|
|
||||||
seek = self.load_seek
|
|
||||||
use_mmap = False
|
|
||||||
except AttributeError:
|
|
||||||
seek = self.fp.seek
|
|
||||||
|
|
||||||
if use_mmap:
|
|
||||||
# try memory mapping
|
|
||||||
decoder_name, extents, offset, args = self.tile[0]
|
|
||||||
if isinstance(args, str):
|
|
||||||
args = (args, 0, 1)
|
|
||||||
if (
|
|
||||||
decoder_name == "raw"
|
|
||||||
and len(args) >= 3
|
|
||||||
and args[0] == self.mode
|
|
||||||
and args[0] in Image._MAPMODES
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
# use mmap, if possible
|
|
||||||
import mmap
|
|
||||||
|
|
||||||
with open(self.filename) as fp:
|
|
||||||
self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
|
|
||||||
if offset + self.size[1] * args[1] > self.map.size():
|
|
||||||
msg = "buffer is not large enough"
|
|
||||||
raise OSError(msg)
|
|
||||||
self.im = Image.core.map_buffer(
|
|
||||||
self.map, self.size, decoder_name, offset, args
|
|
||||||
)
|
|
||||||
readonly = 1
|
|
||||||
# After trashing self.im,
|
|
||||||
# we might need to reload the palette data.
|
|
||||||
if self.palette:
|
|
||||||
self.palette.dirty = 1
|
|
||||||
except (AttributeError, OSError, ImportError):
|
|
||||||
self.map = None
|
|
||||||
|
|
||||||
self.load_prepare()
|
|
||||||
err_code = -3 # initialize to unknown error
|
|
||||||
if not self.map:
|
|
||||||
# sort tiles in file order
|
|
||||||
self.tile.sort(key=_tilesort)
|
|
||||||
|
|
||||||
try:
|
|
||||||
# FIXME: This is a hack to handle TIFF's JpegTables tag.
|
|
||||||
prefix = self.tile_prefix
|
|
||||||
except AttributeError:
|
|
||||||
prefix = b""
|
|
||||||
|
|
||||||
# Remove consecutive duplicates that only differ by their offset
|
|
||||||
self.tile = [
|
|
||||||
list(tiles)[-1]
|
|
||||||
for _, tiles in itertools.groupby(
|
|
||||||
self.tile, lambda tile: (tile[0], tile[1], tile[3])
|
|
||||||
)
|
|
||||||
]
|
|
||||||
for decoder_name, extents, offset, args in self.tile:
|
|
||||||
seek(offset)
|
|
||||||
decoder = Image._getdecoder(
|
|
||||||
self.mode, decoder_name, args, self.decoderconfig
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
decoder.setimage(self.im, extents)
|
|
||||||
if decoder.pulls_fd:
|
|
||||||
decoder.setfd(self.fp)
|
|
||||||
err_code = decoder.decode(b"")[1]
|
|
||||||
else:
|
|
||||||
b = prefix
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
s = read(self.decodermaxblock)
|
|
||||||
except (IndexError, struct.error) as e:
|
|
||||||
# truncated png/gif
|
|
||||||
if LOAD_TRUNCATED_IMAGES:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
msg = "image file is truncated"
|
|
||||||
raise OSError(msg) from e
|
|
||||||
|
|
||||||
if not s: # truncated jpeg
|
|
||||||
if LOAD_TRUNCATED_IMAGES:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
msg = (
|
|
||||||
"image file is truncated "
|
|
||||||
f"({len(b)} bytes not processed)"
|
|
||||||
)
|
|
||||||
raise OSError(msg)
|
|
||||||
|
|
||||||
b = b + s
|
|
||||||
n, err_code = decoder.decode(b)
|
|
||||||
if n < 0:
|
|
||||||
break
|
|
||||||
b = b[n:]
|
|
||||||
finally:
|
|
||||||
# Need to cleanup here to prevent leaks
|
|
||||||
decoder.cleanup()
|
|
||||||
|
|
||||||
self.tile = []
|
|
||||||
self.readonly = readonly
|
|
||||||
|
|
||||||
self.load_end()
|
|
||||||
|
|
||||||
if self._exclusive_fp and self._close_exclusive_fp_after_loading:
|
|
||||||
self.fp.close()
|
|
||||||
self.fp = None
|
|
||||||
|
|
||||||
if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0:
|
|
||||||
# still raised if decoder fails to return anything
|
|
||||||
raise _get_oserror(err_code, encoder=False)
|
|
||||||
|
|
||||||
return Image.Image.load(self)
|
|
||||||
|
|
||||||
def load_prepare(self):
|
|
||||||
# create image memory if necessary
|
|
||||||
if not self.im or self.im.mode != self.mode or self.im.size != self.size:
|
|
||||||
self.im = Image.core.new(self.mode, self.size)
|
|
||||||
# create palette (optional)
|
|
||||||
if self.mode == "P":
|
|
||||||
Image.Image.load(self)
|
|
||||||
|
|
||||||
def load_end(self):
|
|
||||||
# may be overridden
|
|
||||||
pass
|
|
||||||
|
|
||||||
# may be defined for contained formats
|
|
||||||
# def load_seek(self, pos):
|
|
||||||
# pass
|
|
||||||
|
|
||||||
# may be defined for blocked formats (e.g. PNG)
|
|
||||||
# def load_read(self, bytes):
|
|
||||||
# pass
|
|
||||||
|
|
||||||
def _seek_check(self, frame):
|
|
||||||
if (
|
|
||||||
frame < self._min_frame
|
|
||||||
# Only check upper limit on frames if additional seek operations
|
|
||||||
# are not required to do so
|
|
||||||
or (
|
|
||||||
not (hasattr(self, "_n_frames") and self._n_frames is None)
|
|
||||||
and frame >= self.n_frames + self._min_frame
|
|
||||||
)
|
|
||||||
):
|
|
||||||
msg = "attempt to seek outside sequence"
|
|
||||||
raise EOFError(msg)
|
|
||||||
|
|
||||||
return self.tell() != frame
|
|
||||||
|
|
||||||
|
|
||||||
class StubImageFile(ImageFile):
|
|
||||||
"""
|
|
||||||
Base class for stub image loaders.
|
|
||||||
|
|
||||||
A stub loader is an image loader that can identify files of a
|
|
||||||
certain format, but relies on external code to load the file.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _open(self):
|
|
||||||
msg = "StubImageFile subclass must implement _open"
|
|
||||||
raise NotImplementedError(msg)
|
|
||||||
|
|
||||||
def load(self):
|
|
||||||
loader = self._load()
|
|
||||||
if loader is None:
|
|
||||||
msg = f"cannot find loader for this {self.format} file"
|
|
||||||
raise OSError(msg)
|
|
||||||
image = loader.load(self)
|
|
||||||
assert image is not None
|
|
||||||
# become the other object (!)
|
|
||||||
self.__class__ = image.__class__
|
|
||||||
self.__dict__ = image.__dict__
|
|
||||||
return image.load()
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
"""(Hook) Find actual image loader."""
|
|
||||||
msg = "StubImageFile subclass must implement _load"
|
|
||||||
raise NotImplementedError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
class Parser:
|
|
||||||
"""
|
|
||||||
Incremental image parser. This class implements the standard
|
|
||||||
feed/close consumer interface.
|
|
||||||
"""
|
|
||||||
|
|
||||||
incremental = None
|
|
||||||
image = None
|
|
||||||
data = None
|
|
||||||
decoder = None
|
|
||||||
offset = 0
|
|
||||||
finished = 0
|
|
||||||
|
|
||||||
def reset(self):
|
|
||||||
"""
|
|
||||||
(Consumer) Reset the parser. Note that you can only call this
|
|
||||||
method immediately after you've created a parser; parser
|
|
||||||
instances cannot be reused.
|
|
||||||
"""
|
|
||||||
assert self.data is None, "cannot reuse parsers"
|
|
||||||
|
|
||||||
def feed(self, data):
|
|
||||||
"""
|
|
||||||
(Consumer) Feed data to the parser.
|
|
||||||
|
|
||||||
:param data: A string buffer.
|
|
||||||
:exception OSError: If the parser failed to parse the image file.
|
|
||||||
"""
|
|
||||||
# collect data
|
|
||||||
|
|
||||||
if self.finished:
|
|
||||||
return
|
|
||||||
|
|
||||||
if self.data is None:
|
|
||||||
self.data = data
|
|
||||||
else:
|
|
||||||
self.data = self.data + data
|
|
||||||
|
|
||||||
# parse what we have
|
|
||||||
if self.decoder:
|
|
||||||
if self.offset > 0:
|
|
||||||
# skip header
|
|
||||||
skip = min(len(self.data), self.offset)
|
|
||||||
self.data = self.data[skip:]
|
|
||||||
self.offset = self.offset - skip
|
|
||||||
if self.offset > 0 or not self.data:
|
|
||||||
return
|
|
||||||
|
|
||||||
n, e = self.decoder.decode(self.data)
|
|
||||||
|
|
||||||
if n < 0:
|
|
||||||
# end of stream
|
|
||||||
self.data = None
|
|
||||||
self.finished = 1
|
|
||||||
if e < 0:
|
|
||||||
# decoding error
|
|
||||||
self.image = None
|
|
||||||
raise _get_oserror(e, encoder=False)
|
|
||||||
else:
|
|
||||||
# end of image
|
|
||||||
return
|
|
||||||
self.data = self.data[n:]
|
|
||||||
|
|
||||||
elif self.image:
|
|
||||||
# if we end up here with no decoder, this file cannot
|
|
||||||
# be incrementally parsed. wait until we've gotten all
|
|
||||||
# available data
|
|
||||||
pass
|
|
||||||
|
|
||||||
else:
|
|
||||||
# attempt to open this file
|
|
||||||
try:
|
|
||||||
with io.BytesIO(self.data) as fp:
|
|
||||||
im = Image.open(fp)
|
|
||||||
except OSError:
|
|
||||||
pass # not enough data
|
|
||||||
else:
|
|
||||||
flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
|
|
||||||
if flag or len(im.tile) != 1:
|
|
||||||
# custom load code, or multiple tiles
|
|
||||||
self.decode = None
|
|
||||||
else:
|
|
||||||
# initialize decoder
|
|
||||||
im.load_prepare()
|
|
||||||
d, e, o, a = im.tile[0]
|
|
||||||
im.tile = []
|
|
||||||
self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig)
|
|
||||||
self.decoder.setimage(im.im, e)
|
|
||||||
|
|
||||||
# calculate decoder offset
|
|
||||||
self.offset = o
|
|
||||||
if self.offset <= len(self.data):
|
|
||||||
self.data = self.data[self.offset :]
|
|
||||||
self.offset = 0
|
|
||||||
|
|
||||||
self.image = im
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, *args):
|
|
||||||
self.close()
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""
|
|
||||||
(Consumer) Close the stream.
|
|
||||||
|
|
||||||
:returns: An image object.
|
|
||||||
:exception OSError: If the parser failed to parse the image file either
|
|
||||||
because it cannot be identified or cannot be
|
|
||||||
decoded.
|
|
||||||
"""
|
|
||||||
# finish decoding
|
|
||||||
if self.decoder:
|
|
||||||
# get rid of what's left in the buffers
|
|
||||||
self.feed(b"")
|
|
||||||
self.data = self.decoder = None
|
|
||||||
if not self.finished:
|
|
||||||
msg = "image was incomplete"
|
|
||||||
raise OSError(msg)
|
|
||||||
if not self.image:
|
|
||||||
msg = "cannot parse this image"
|
|
||||||
raise OSError(msg)
|
|
||||||
if self.data:
|
|
||||||
# incremental parsing not possible; reopen the file
|
|
||||||
# now that we have all data
|
|
||||||
with io.BytesIO(self.data) as fp:
|
|
||||||
try:
|
|
||||||
self.image = Image.open(fp)
|
|
||||||
finally:
|
|
||||||
self.image.load()
|
|
||||||
return self.image
|
|
||||||
|
|
||||||
|
|
||||||
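Feeding a file to the incremental Parser defined above, one small block at a time; "photo.jpg" is a hypothetical input file.

# Incremental decoding with ImageFile.Parser above.
from PIL import ImageFile

parser = ImageFile.Parser()
with open("photo.jpg", "rb") as f:
    while True:
        chunk = f.read(8192)
        if not chunk:
            break
        parser.feed(chunk)
im = parser.close()   # returns a PIL.Image.Image or raises OSError
print(im.size)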
# --------------------------------------------------------------------
|
|
||||||
|
|
||||||
|
|
||||||
def _save(im, fp, tile, bufsize=0):
|
|
||||||
"""Helper to save image based on tile list
|
|
||||||
|
|
||||||
:param im: Image object.
|
|
||||||
:param fp: File object.
|
|
||||||
:param tile: Tile list.
|
|
||||||
:param bufsize: Optional buffer size
|
|
||||||
"""
|
|
||||||
|
|
||||||
im.load()
|
|
||||||
if not hasattr(im, "encoderconfig"):
|
|
||||||
im.encoderconfig = ()
|
|
||||||
tile.sort(key=_tilesort)
|
|
||||||
# FIXME: make MAXBLOCK a configuration parameter
|
|
||||||
# It would be great if we could have the encoder specify what it needs
|
|
||||||
# But, it would need at least the image size in most cases. RawEncode is
|
|
||||||
# a tricky case.
|
|
||||||
bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c
|
|
||||||
try:
|
|
||||||
fh = fp.fileno()
|
|
||||||
fp.flush()
|
|
||||||
_encode_tile(im, fp, tile, bufsize, fh)
|
|
||||||
except (AttributeError, io.UnsupportedOperation) as exc:
|
|
||||||
_encode_tile(im, fp, tile, bufsize, None, exc)
|
|
||||||
if hasattr(fp, "flush"):
|
|
||||||
fp.flush()
|
|
||||||
|
|
||||||
|
|
||||||
def _encode_tile(im, fp, tile: list[_Tile], bufsize, fh, exc=None):
|
|
||||||
for encoder_name, extents, offset, args in tile:
|
|
||||||
if offset > 0:
|
|
||||||
fp.seek(offset)
|
|
||||||
encoder = Image._getencoder(im.mode, encoder_name, args, im.encoderconfig)
|
|
||||||
try:
|
|
||||||
encoder.setimage(im.im, extents)
|
|
||||||
if encoder.pushes_fd:
|
|
||||||
encoder.setfd(fp)
|
|
||||||
errcode = encoder.encode_to_pyfd()[1]
|
|
||||||
else:
|
|
||||||
if exc:
|
|
||||||
# compress to Python file-compatible object
|
|
||||||
while True:
|
|
||||||
errcode, data = encoder.encode(bufsize)[1:]
|
|
||||||
fp.write(data)
|
|
||||||
if errcode:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
# slight speedup: compress to real file object
|
|
||||||
errcode = encoder.encode_to_file(fh, bufsize)
|
|
||||||
if errcode < 0:
|
|
||||||
raise _get_oserror(errcode, encoder=True) from exc
|
|
||||||
finally:
|
|
||||||
encoder.cleanup()
|
|
||||||
|
|
||||||
|
|
||||||
def _safe_read(fp, size):
|
|
||||||
"""
|
|
||||||
Reads large blocks in a safe way. Unlike fp.read(n), this function
|
|
||||||
doesn't trust the user. If the requested size is larger than
|
|
||||||
SAFEBLOCK, the file is read block by block.
|
|
||||||
|
|
||||||
:param fp: File handle. Must implement a <b>read</b> method.
|
|
||||||
:param size: Number of bytes to read.
|
|
||||||
:returns: A string containing <i>size</i> bytes of data.
|
|
||||||
|
|
||||||
Raises an OSError if the file is truncated and the read cannot be completed
|
|
||||||
|
|
||||||
"""
|
|
||||||
if size <= 0:
|
|
||||||
return b""
|
|
||||||
if size <= SAFEBLOCK:
|
|
||||||
data = fp.read(size)
|
|
||||||
if len(data) < size:
|
|
||||||
msg = "Truncated File Read"
|
|
||||||
raise OSError(msg)
|
|
||||||
return data
|
|
||||||
data = []
|
|
||||||
remaining_size = size
|
|
||||||
while remaining_size > 0:
|
|
||||||
block = fp.read(min(remaining_size, SAFEBLOCK))
|
|
||||||
if not block:
|
|
||||||
break
|
|
||||||
data.append(block)
|
|
||||||
remaining_size -= len(block)
|
|
||||||
if sum(len(d) for d in data) < size:
|
|
||||||
msg = "Truncated File Read"
|
|
||||||
raise OSError(msg)
|
|
||||||
return b"".join(data)
|
|
||||||
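# Illustrative sketch (not part of the original module): a hypothetical plugin
# helper that reads a 4-byte big-endian length prefix and then the payload via
# _safe_read, so the data is read in SAFEBLOCK-sized pieces and a truncated
# file raises OSError instead of silently returning short data.
def _read_prefixed_block(fp):
    import struct

    length = struct.unpack(">I", _safe_read(fp, 4))[0]
    return _safe_read(fp, length)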
|
|
||||||
|
|
||||||
class PyCodecState:
|
|
||||||
def __init__(self):
|
|
||||||
self.xsize = 0
|
|
||||||
self.ysize = 0
|
|
||||||
self.xoff = 0
|
|
||||||
self.yoff = 0
|
|
||||||
|
|
||||||
def extents(self):
|
|
||||||
return self.xoff, self.yoff, self.xoff + self.xsize, self.yoff + self.ysize
|
|
||||||
|
|
||||||
|
|
||||||
class PyCodec:
|
|
||||||
def __init__(self, mode, *args):
|
|
||||||
self.im = None
|
|
||||||
self.state = PyCodecState()
|
|
||||||
self.fd = None
|
|
||||||
self.mode = mode
|
|
||||||
self.init(args)
|
|
||||||
|
|
||||||
def init(self, args):
|
|
||||||
"""
|
|
||||||
Override to perform codec specific initialization
|
|
||||||
|
|
||||||
:param args: Array of args items from the tile entry
|
|
||||||
:returns: None
|
|
||||||
"""
|
|
||||||
self.args = args
|
|
||||||
|
|
||||||
def cleanup(self):
|
|
||||||
"""
|
|
||||||
Override to perform codec specific cleanup
|
|
||||||
|
|
||||||
:returns: None
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setfd(self, fd):
|
|
||||||
"""
|
|
||||||
Called from ImageFile to set the Python file-like object
|
|
||||||
|
|
||||||
:param fd: A Python file-like object
|
|
||||||
:returns: None
|
|
||||||
"""
|
|
||||||
self.fd = fd
|
|
||||||
|
|
||||||
def setimage(self, im, extents=None):
|
|
||||||
"""
|
|
||||||
Called from ImageFile to set the core output image for the codec
|
|
||||||
|
|
||||||
:param im: A core image object
|
|
||||||
:param extents: a 4 tuple of (x0, y0, x1, y1) defining the rectangle
|
|
||||||
for this tile
|
|
||||||
:returns: None
|
|
||||||
"""
|
|
||||||
|
|
||||||
# following c code
|
|
||||||
self.im = im
|
|
||||||
|
|
||||||
if extents:
|
|
||||||
(x0, y0, x1, y1) = extents
|
|
||||||
else:
|
|
||||||
(x0, y0, x1, y1) = (0, 0, 0, 0)
|
|
||||||
|
|
||||||
if x0 == 0 and x1 == 0:
|
|
||||||
self.state.xsize, self.state.ysize = self.im.size
|
|
||||||
else:
|
|
||||||
self.state.xoff = x0
|
|
||||||
self.state.yoff = y0
|
|
||||||
self.state.xsize = x1 - x0
|
|
||||||
self.state.ysize = y1 - y0
|
|
||||||
|
|
||||||
if self.state.xsize <= 0 or self.state.ysize <= 0:
|
|
||||||
msg = "Size cannot be negative"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
if (
|
|
||||||
self.state.xsize + self.state.xoff > self.im.size[0]
|
|
||||||
or self.state.ysize + self.state.yoff > self.im.size[1]
|
|
||||||
):
|
|
||||||
msg = "Tile cannot extend outside image"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
class PyDecoder(PyCodec):
|
|
||||||
"""
|
|
||||||
Python implementation of a format decoder. Override this class and
|
|
||||||
add the decoding logic in the :meth:`decode` method.
|
|
||||||
|
|
||||||
See :ref:`Writing Your Own File Codec in Python<file-codecs-py>`
|
|
||||||
"""
|
|
||||||
|
|
||||||
_pulls_fd = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def pulls_fd(self):
|
|
||||||
return self._pulls_fd
|
|
||||||
|
|
||||||
def decode(self, buffer):
|
|
||||||
"""
|
|
||||||
Override to perform the decoding process.
|
|
||||||
|
|
||||||
:param buffer: A bytes object with the data to be decoded.
|
|
||||||
:returns: A tuple of ``(bytes consumed, errcode)``.
|
|
||||||
If finished with decoding return -1 for the bytes consumed.
|
|
||||||
Err codes are from :data:`.ImageFile.ERRORS`.
|
|
||||||
"""
|
|
||||||
msg = "unavailable in base decoder"
|
|
||||||
raise NotImplementedError(msg)
|
|
||||||
|
|
||||||
def set_as_raw(self, data, rawmode=None):
|
|
||||||
"""
|
|
||||||
Convenience method to set the internal image from a stream of raw data
|
|
||||||
|
|
||||||
:param data: Bytes to be set
|
|
||||||
:param rawmode: The rawmode to be used for the decoder.
|
|
||||||
If not specified, it will default to the mode of the image
|
|
||||||
:returns: None
|
|
||||||
"""
|
|
||||||
|
|
||||||
if not rawmode:
|
|
||||||
rawmode = self.mode
|
|
||||||
d = Image._getdecoder(self.mode, "raw", rawmode)
|
|
||||||
d.setimage(self.im, self.state.extents())
|
|
||||||
s = d.decode(data)
|
|
||||||
|
|
||||||
if s[0] >= 0:
|
|
||||||
msg = "not enough image data"
|
|
||||||
raise ValueError(msg)
|
|
||||||
if s[1] != 0:
|
|
||||||
msg = "cannot decode image data"
|
|
||||||
raise ValueError(msg)
|
|
||||||
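# Illustrative sketch (not part of the original module): a minimal PyDecoder
# subclass that hands its whole buffer to the built-in "raw" decoder and then
# reports completion. The codec name "demo_raw" is a hypothetical registration
# name, and this assumes the module's existing ``Image`` import.
class _DemoRawDecoder(PyDecoder):
    _pulls_fd = False

    def decode(self, buffer):
        # treat the buffer as raw pixel data in the image's own mode
        self.set_as_raw(bytes(buffer))
        return -1, 0  # -1: finished consuming, 0: no error


Image.register_decoder("demo_raw", _DemoRawDecoder)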
|
|
||||||
|
|
||||||
class PyEncoder(PyCodec):
|
|
||||||
"""
|
|
||||||
Python implementation of a format encoder. Override this class and
|
|
||||||
add the encoding logic in the :meth:`encode` method.
|
|
||||||
|
|
||||||
See :ref:`Writing Your Own File Codec in Python<file-codecs-py>`
|
|
||||||
"""
|
|
||||||
|
|
||||||
_pushes_fd = False
|
|
||||||
|
|
||||||
@property
|
|
||||||
def pushes_fd(self):
|
|
||||||
return self._pushes_fd
|
|
||||||
|
|
||||||
def encode(self, bufsize):
|
|
||||||
"""
|
|
||||||
Override to perform the encoding process.
|
|
||||||
|
|
||||||
:param bufsize: Buffer size.
|
|
||||||
:returns: A tuple of ``(bytes encoded, errcode, bytes)``.
|
|
||||||
If finished with encoding return 1 for the error code.
|
|
||||||
Err codes are from :data:`.ImageFile.ERRORS`.
|
|
||||||
"""
|
|
||||||
msg = "unavailable in base encoder"
|
|
||||||
raise NotImplementedError(msg)
|
|
||||||
|
|
||||||
def encode_to_pyfd(self):
|
|
||||||
"""
|
|
||||||
If ``pushes_fd`` is ``True``, then this method will be used,
|
|
||||||
and ``encode()`` will only be called once.
|
|
||||||
|
|
||||||
:returns: A tuple of ``(bytes consumed, errcode)``.
|
|
||||||
Err codes are from :data:`.ImageFile.ERRORS`.
|
|
||||||
"""
|
|
||||||
if not self.pushes_fd:
|
|
||||||
return 0, -8 # bad configuration
|
|
||||||
bytes_consumed, errcode, data = self.encode(0)
|
|
||||||
if data:
|
|
||||||
self.fd.write(data)
|
|
||||||
return bytes_consumed, errcode
|
|
||||||
|
|
||||||
def encode_to_file(self, fh, bufsize):
|
|
||||||
"""
|
|
||||||
:param fh: File handle.
|
|
||||||
:param bufsize: Buffer size.
|
|
||||||
|
|
||||||
:returns: If finished successfully, return 0.
|
|
||||||
Otherwise, return an error code. Err codes are from
|
|
||||||
:data:`.ImageFile.ERRORS`.
|
|
||||||
"""
|
|
||||||
errcode = 0
|
|
||||||
while errcode == 0:
|
|
||||||
status, errcode, buf = self.encode(bufsize)
|
|
||||||
if status > 0:
|
|
||||||
fh.write(buf[status:])
|
|
||||||
return errcode
|
|
||||||
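# Illustrative sketch (not part of the original module): a minimal PyEncoder
# subclass using the ``pushes_fd`` path, so encode() is called exactly once and
# its data is written out by encode_to_pyfd(). It assumes an "L" mode source
# image and the module's existing ``Image`` import; the codec name "demo_dump"
# is hypothetical.
class _DemoDumpEncoder(PyEncoder):
    _pushes_fd = True

    def encode(self, bufsize):
        # emit every pixel of the tile as a single byte
        data = bytearray()
        for y in range(self.state.ysize):
            for x in range(self.state.xsize):
                data.append(
                    self.im.getpixel((self.state.xoff + x, self.state.yoff + y))
                )
        return len(data), 0, bytes(data)  # (bytes encoded, errcode, data)


Image.register_encoder("demo_dump", _DemoDumpEncoder)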
@@ -1,568 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# standard filters
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1995-11-27 fl Created
|
|
||||||
# 2002-06-08 fl Added rank and mode filters
|
|
||||||
# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call
|
|
||||||
#
|
|
||||||
# Copyright (c) 1997-2003 by Secret Labs AB.
|
|
||||||
# Copyright (c) 1995-2002 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import functools
|
|
||||||
|
|
||||||
|
|
||||||
class Filter:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class MultibandFilter(Filter):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class BuiltinFilter(MultibandFilter):
|
|
||||||
def filter(self, image):
|
|
||||||
if image.mode == "P":
|
|
||||||
msg = "cannot filter palette images"
|
|
||||||
raise ValueError(msg)
|
|
||||||
return image.filter(*self.filterargs)
|
|
||||||
|
|
||||||
|
|
||||||
class Kernel(BuiltinFilter):
|
|
||||||
"""
|
|
||||||
Create a convolution kernel. The current version only
|
|
||||||
supports 3x3 and 5x5 integer and floating point kernels.
|
|
||||||
|
|
||||||
In the current version, kernels can only be applied to
|
|
||||||
"L" and "RGB" images.
|
|
||||||
|
|
||||||
:param size: Kernel size, given as (width, height). In the current
|
|
||||||
version, this must be (3,3) or (5,5).
|
|
||||||
:param kernel: A sequence containing kernel weights. The kernel will
|
|
||||||
be flipped vertically before being applied to the image.
|
|
||||||
:param scale: Scale factor. If given, the result for each pixel is
|
|
||||||
divided by this value. The default is the sum of the
|
|
||||||
kernel weights.
|
|
||||||
:param offset: Offset. If given, this value is added to the result,
|
|
||||||
after it has been divided by the scale factor.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Kernel"
|
|
||||||
|
|
||||||
def __init__(self, size, kernel, scale=None, offset=0):
|
|
||||||
if scale is None:
|
|
||||||
# default scale is sum of kernel
|
|
||||||
scale = functools.reduce(lambda a, b: a + b, kernel)
|
|
||||||
if size[0] * size[1] != len(kernel):
|
|
||||||
msg = "not enough coefficients in kernel"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self.filterargs = size, scale, offset, kernel
|
|
||||||
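# Illustrative usage sketch (not part of the original module): applying a 3x3
# sharpening kernel with the Kernel filter defined above. "input.png" is a
# placeholder path; the weights and scale are arbitrary example values.
def _kernel_demo():
    from PIL import Image

    im = Image.open("input.png").convert("RGB")
    sharpen = Kernel((3, 3), [0, -1, 0, -1, 5, -1, 0, -1, 0], scale=1)
    return im.filter(sharpen)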
|
|
||||||
|
|
||||||
class RankFilter(Filter):
|
|
||||||
"""
|
|
||||||
Create a rank filter. The rank filter sorts all pixels in
|
|
||||||
a window of the given size, and returns the ``rank``'th value.
|
|
||||||
|
|
||||||
:param size: The kernel size, in pixels.
|
|
||||||
:param rank: What pixel value to pick. Use 0 for a min filter,
|
|
||||||
``size * size / 2`` for a median filter, ``size * size - 1``
|
|
||||||
for a max filter, etc.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Rank"
|
|
||||||
|
|
||||||
def __init__(self, size, rank):
|
|
||||||
self.size = size
|
|
||||||
self.rank = rank
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
if image.mode == "P":
|
|
||||||
msg = "cannot filter palette images"
|
|
||||||
raise ValueError(msg)
|
|
||||||
image = image.expand(self.size // 2, self.size // 2)
|
|
||||||
return image.rankfilter(self.size, self.rank)
|
|
||||||
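# Illustrative usage sketch (not part of the original module): a 5x5 rank
# filter picking rank size * size // 2, i.e. the median of the 25 window
# values. "noisy.png" is a placeholder path.
def _rank_demo():
    from PIL import Image

    im = Image.open("noisy.png").convert("L")
    return im.filter(RankFilter(5, 5 * 5 // 2))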
|
|
||||||
|
|
||||||
class MedianFilter(RankFilter):
|
|
||||||
"""
|
|
||||||
Create a median filter. Picks the median pixel value in a window with the
|
|
||||||
given size.
|
|
||||||
|
|
||||||
:param size: The kernel size, in pixels.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Median"
|
|
||||||
|
|
||||||
def __init__(self, size=3):
|
|
||||||
self.size = size
|
|
||||||
self.rank = size * size // 2
|
|
||||||
|
|
||||||
|
|
||||||
class MinFilter(RankFilter):
|
|
||||||
"""
|
|
||||||
Create a min filter. Picks the lowest pixel value in a window with the
|
|
||||||
given size.
|
|
||||||
|
|
||||||
:param size: The kernel size, in pixels.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Min"
|
|
||||||
|
|
||||||
def __init__(self, size=3):
|
|
||||||
self.size = size
|
|
||||||
self.rank = 0
|
|
||||||
|
|
||||||
|
|
||||||
class MaxFilter(RankFilter):
|
|
||||||
"""
|
|
||||||
Create a max filter. Picks the largest pixel value in a window with the
|
|
||||||
given size.
|
|
||||||
|
|
||||||
:param size: The kernel size, in pixels.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Max"
|
|
||||||
|
|
||||||
def __init__(self, size=3):
|
|
||||||
self.size = size
|
|
||||||
self.rank = size * size - 1
|
|
||||||
|
|
||||||
|
|
||||||
class ModeFilter(Filter):
|
|
||||||
"""
|
|
||||||
Create a mode filter. Picks the most frequent pixel value in a box with the
|
|
||||||
given size. Pixel values that occur only once or twice are ignored; if no
|
|
||||||
pixel value occurs more than twice, the original pixel value is preserved.
|
|
||||||
|
|
||||||
:param size: The kernel size, in pixels.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Mode"
|
|
||||||
|
|
||||||
def __init__(self, size=3):
|
|
||||||
self.size = size
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
return image.modefilter(self.size)
|
|
||||||
|
|
||||||
|
|
||||||
class GaussianBlur(MultibandFilter):
|
|
||||||
"""Blurs the image with a sequence of extended box filters, which
|
|
||||||
approximates a Gaussian kernel. For details on accuracy see
|
|
||||||
<https://www.mia.uni-saarland.de/Publications/gwosdek-ssvm11.pdf>
|
|
||||||
|
|
||||||
:param radius: Standard deviation of the Gaussian kernel. Either a sequence of two
|
|
||||||
numbers for x and y, or a single number for both.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "GaussianBlur"
|
|
||||||
|
|
||||||
def __init__(self, radius=2):
|
|
||||||
self.radius = radius
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
xy = self.radius
|
|
||||||
if not isinstance(xy, (tuple, list)):
|
|
||||||
xy = (xy, xy)
|
|
||||||
if xy == (0, 0):
|
|
||||||
return image.copy()
|
|
||||||
return image.gaussian_blur(xy)
|
|
||||||
|
|
||||||
|
|
||||||
class BoxBlur(MultibandFilter):
|
|
||||||
"""Blurs the image by setting each pixel to the average value of the pixels
|
|
||||||
in a square box extending radius pixels in each direction.
|
|
||||||
Supports float radius of arbitrary size. Uses an optimized implementation
|
|
||||||
which runs in linear time relative to the size of the image
|
|
||||||
for any radius value.
|
|
||||||
|
|
||||||
:param radius: Size of the box in a direction. Either a sequence of two numbers for
|
|
||||||
x and y, or a single number for both.
|
|
||||||
|
|
||||||
Radius 0 does not blur, returns an identical image.
|
|
||||||
Radius 1 takes 1 pixel in each direction, i.e. 9 pixels in total.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "BoxBlur"
|
|
||||||
|
|
||||||
def __init__(self, radius):
|
|
||||||
xy = radius
|
|
||||||
if not isinstance(xy, (tuple, list)):
|
|
||||||
xy = (xy, xy)
|
|
||||||
if xy[0] < 0 or xy[1] < 0:
|
|
||||||
msg = "radius must be >= 0"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self.radius = radius
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
xy = self.radius
|
|
||||||
if not isinstance(xy, (tuple, list)):
|
|
||||||
xy = (xy, xy)
|
|
||||||
if xy == (0, 0):
|
|
||||||
return image.copy()
|
|
||||||
return image.box_blur(xy)
|
|
||||||
|
|
||||||
|
|
||||||
class UnsharpMask(MultibandFilter):
|
|
||||||
"""Unsharp mask filter.
|
|
||||||
|
|
||||||
See Wikipedia's entry on `digital unsharp masking`_ for an explanation of
|
|
||||||
the parameters.
|
|
||||||
|
|
||||||
:param radius: Blur Radius
|
|
||||||
:param percent: Unsharp strength, in percent
|
|
||||||
:param threshold: Threshold controls the minimum brightness change that
|
|
||||||
will be sharpened
|
|
||||||
|
|
||||||
.. _digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "UnsharpMask"
|
|
||||||
|
|
||||||
def __init__(self, radius=2, percent=150, threshold=3):
|
|
||||||
self.radius = radius
|
|
||||||
self.percent = percent
|
|
||||||
self.threshold = threshold
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
return image.unsharp_mask(self.radius, self.percent, self.threshold)
|
|
||||||
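# Illustrative usage sketch (not part of the original module): the three
# multiband blur/sharpen filters defined above. "photo.jpg" and the radius
# values are placeholder assumptions.
def _blur_demo():
    from PIL import Image

    im = Image.open("photo.jpg").convert("RGB")
    soft = im.filter(GaussianBlur(radius=4))
    boxy = im.filter(BoxBlur(2))
    crisp = im.filter(UnsharpMask(radius=2, percent=150, threshold=3))
    return soft, boxy, crisp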
|
|
||||||
|
|
||||||
class BLUR(BuiltinFilter):
|
|
||||||
name = "Blur"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (5, 5), 16, 0, (
|
|
||||||
1, 1, 1, 1, 1,
|
|
||||||
1, 0, 0, 0, 1,
|
|
||||||
1, 0, 0, 0, 1,
|
|
||||||
1, 0, 0, 0, 1,
|
|
||||||
1, 1, 1, 1, 1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class CONTOUR(BuiltinFilter):
|
|
||||||
name = "Contour"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 1, 255, (
|
|
||||||
-1, -1, -1,
|
|
||||||
-1, 8, -1,
|
|
||||||
-1, -1, -1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class DETAIL(BuiltinFilter):
|
|
||||||
name = "Detail"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 6, 0, (
|
|
||||||
0, -1, 0,
|
|
||||||
-1, 10, -1,
|
|
||||||
0, -1, 0,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class EDGE_ENHANCE(BuiltinFilter):
|
|
||||||
name = "Edge-enhance"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 2, 0, (
|
|
||||||
-1, -1, -1,
|
|
||||||
-1, 10, -1,
|
|
||||||
-1, -1, -1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class EDGE_ENHANCE_MORE(BuiltinFilter):
|
|
||||||
name = "Edge-enhance More"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 1, 0, (
|
|
||||||
-1, -1, -1,
|
|
||||||
-1, 9, -1,
|
|
||||||
-1, -1, -1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class EMBOSS(BuiltinFilter):
|
|
||||||
name = "Emboss"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 1, 128, (
|
|
||||||
-1, 0, 0,
|
|
||||||
0, 1, 0,
|
|
||||||
0, 0, 0,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class FIND_EDGES(BuiltinFilter):
|
|
||||||
name = "Find Edges"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 1, 0, (
|
|
||||||
-1, -1, -1,
|
|
||||||
-1, 8, -1,
|
|
||||||
-1, -1, -1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class SHARPEN(BuiltinFilter):
|
|
||||||
name = "Sharpen"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 16, 0, (
|
|
||||||
-2, -2, -2,
|
|
||||||
-2, 32, -2,
|
|
||||||
-2, -2, -2,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class SMOOTH(BuiltinFilter):
|
|
||||||
name = "Smooth"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (3, 3), 13, 0, (
|
|
||||||
1, 1, 1,
|
|
||||||
1, 5, 1,
|
|
||||||
1, 1, 1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class SMOOTH_MORE(BuiltinFilter):
|
|
||||||
name = "Smooth More"
|
|
||||||
# fmt: off
|
|
||||||
filterargs = (5, 5), 100, 0, (
|
|
||||||
1, 1, 1, 1, 1,
|
|
||||||
1, 5, 5, 5, 1,
|
|
||||||
1, 5, 44, 5, 1,
|
|
||||||
1, 5, 5, 5, 1,
|
|
||||||
1, 1, 1, 1, 1,
|
|
||||||
)
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class Color3DLUT(MultibandFilter):
|
|
||||||
"""Three-dimensional color lookup table.
|
|
||||||
|
|
||||||
Transforms 3-channel pixels using the values of the channels as coordinates
|
|
||||||
in the 3D lookup table and interpolating the nearest elements.
|
|
||||||
|
|
||||||
This method allows you to apply almost any color transformation
|
|
||||||
in constant time by using pre-calculated decimated tables.
|
|
||||||
|
|
||||||
.. versionadded:: 5.2.0
|
|
||||||
|
|
||||||
:param size: Size of the table. One int or tuple of (int, int, int).
|
|
||||||
Minimal size in any dimension is 2, maximum is 65.
|
|
||||||
:param table: Flat lookup table. A list of ``channels * size**3``
|
|
||||||
float elements or a list of ``size**3`` channels-sized
|
|
||||||
tuples with floats. Channels are changed first,
|
|
||||||
then first dimension, then second, then third.
|
|
||||||
Value 0.0 corresponds to the lowest output value, 1.0 to the highest.
|
|
||||||
:param channels: Number of channels in the table. Could be 3 or 4.
|
|
||||||
Default is 3.
|
|
||||||
:param target_mode: A mode for the result image. Should have at least
``channels`` channels. Default is ``None``,
which means that the mode will not be changed.
|
|
||||||
"""
|
|
||||||
|
|
||||||
name = "Color 3D LUT"
|
|
||||||
|
|
||||||
def __init__(self, size, table, channels=3, target_mode=None, **kwargs):
|
|
||||||
if channels not in (3, 4):
|
|
||||||
msg = "Only 3 or 4 output channels are supported"
|
|
||||||
raise ValueError(msg)
|
|
||||||
self.size = size = self._check_size(size)
|
|
||||||
self.channels = channels
|
|
||||||
self.mode = target_mode
|
|
||||||
|
|
||||||
# Hidden flag `_copy_table=False` could be used to avoid extra copying
|
|
||||||
# of the table if the table is specially made for the constructor.
|
|
||||||
copy_table = kwargs.get("_copy_table", True)
|
|
||||||
items = size[0] * size[1] * size[2]
|
|
||||||
wrong_size = False
|
|
||||||
|
|
||||||
numpy = None
|
|
||||||
if hasattr(table, "shape"):
|
|
||||||
try:
|
|
||||||
import numpy
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if numpy and isinstance(table, numpy.ndarray):
|
|
||||||
if copy_table:
|
|
||||||
table = table.copy()
|
|
||||||
|
|
||||||
if table.shape in [
|
|
||||||
(items * channels,),
|
|
||||||
(items, channels),
|
|
||||||
(size[2], size[1], size[0], channels),
|
|
||||||
]:
|
|
||||||
table = table.reshape(items * channels)
|
|
||||||
else:
|
|
||||||
wrong_size = True
|
|
||||||
|
|
||||||
else:
|
|
||||||
if copy_table:
|
|
||||||
table = list(table)
|
|
||||||
|
|
||||||
# Convert to a flat list
|
|
||||||
if table and isinstance(table[0], (list, tuple)):
|
|
||||||
table, raw_table = [], table
|
|
||||||
for pixel in raw_table:
|
|
||||||
if len(pixel) != channels:
|
|
||||||
msg = (
|
|
||||||
"The elements of the table should "
|
|
||||||
f"have a length of {channels}."
|
|
||||||
)
|
|
||||||
raise ValueError(msg)
|
|
||||||
table.extend(pixel)
|
|
||||||
|
|
||||||
if wrong_size or len(table) != items * channels:
|
|
||||||
msg = (
|
|
||||||
"The table should have either channels * size**3 float items "
|
|
||||||
"or size**3 items of channels-sized tuples with floats. "
|
|
||||||
f"Table should be: {channels}x{size[0]}x{size[1]}x{size[2]}. "
|
|
||||||
f"Actual length: {len(table)}"
|
|
||||||
)
|
|
||||||
raise ValueError(msg)
|
|
||||||
self.table = table
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _check_size(size):
|
|
||||||
try:
|
|
||||||
_, _, _ = size
|
|
||||||
except ValueError as e:
|
|
||||||
msg = "Size should be either an integer or a tuple of three integers."
|
|
||||||
raise ValueError(msg) from e
|
|
||||||
except TypeError:
|
|
||||||
size = (size, size, size)
|
|
||||||
size = [int(x) for x in size]
|
|
||||||
for size_1d in size:
|
|
||||||
if not 2 <= size_1d <= 65:
|
|
||||||
msg = "Size should be in [2, 65] range."
|
|
||||||
raise ValueError(msg)
|
|
||||||
return size
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def generate(cls, size, callback, channels=3, target_mode=None):
|
|
||||||
"""Generates new LUT using provided callback.
|
|
||||||
|
|
||||||
:param size: Size of the table. Passed to the constructor.
|
|
||||||
:param callback: Function with three parameters which correspond to
the three color channels. Will be called ``size**3``
times with values from 0.0 to 1.0 and should return
a tuple with ``channels`` elements.
:param channels: The number of channels the callback should return.
|
|
||||||
:param target_mode: Passed to the constructor of the resulting
|
|
||||||
lookup table.
|
|
||||||
"""
|
|
||||||
size_1d, size_2d, size_3d = cls._check_size(size)
|
|
||||||
if channels not in (3, 4):
|
|
||||||
msg = "Only 3 or 4 output channels are supported"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
table = [0] * (size_1d * size_2d * size_3d * channels)
|
|
||||||
idx_out = 0
|
|
||||||
for b in range(size_3d):
|
|
||||||
for g in range(size_2d):
|
|
||||||
for r in range(size_1d):
|
|
||||||
table[idx_out : idx_out + channels] = callback(
|
|
||||||
r / (size_1d - 1), g / (size_2d - 1), b / (size_3d - 1)
|
|
||||||
)
|
|
||||||
idx_out += channels
|
|
||||||
|
|
||||||
return cls(
|
|
||||||
(size_1d, size_2d, size_3d),
|
|
||||||
table,
|
|
||||||
channels=channels,
|
|
||||||
target_mode=target_mode,
|
|
||||||
_copy_table=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
def transform(self, callback, with_normals=False, channels=None, target_mode=None):
|
|
||||||
"""Transforms the table values using provided callback and returns
|
|
||||||
a new LUT with altered values.
|
|
||||||
|
|
||||||
:param callback: A function which takes old lookup table values
and returns a new set of values. The number of arguments
the function should take is ``self.channels``, or
``3 + self.channels`` if the ``with_normals`` flag is set.
It should return a tuple of ``self.channels`` elements,
or ``channels`` elements if that argument is set.
|
|
||||||
:param with_normals: If true, ``callback`` will be called with
|
|
||||||
coordinates in the color cube as the first
|
|
||||||
three arguments. Otherwise, ``callback``
|
|
||||||
will be called only with actual color values.
|
|
||||||
:param channels: The number of channels in the resulting lookup table.
|
|
||||||
:param target_mode: Passed to the constructor of the resulting
|
|
||||||
lookup table.
|
|
||||||
"""
|
|
||||||
if channels not in (None, 3, 4):
|
|
||||||
msg = "Only 3 or 4 output channels are supported"
|
|
||||||
raise ValueError(msg)
|
|
||||||
ch_in = self.channels
|
|
||||||
ch_out = channels or ch_in
|
|
||||||
size_1d, size_2d, size_3d = self.size
|
|
||||||
|
|
||||||
table = [0] * (size_1d * size_2d * size_3d * ch_out)
|
|
||||||
idx_in = 0
|
|
||||||
idx_out = 0
|
|
||||||
for b in range(size_3d):
|
|
||||||
for g in range(size_2d):
|
|
||||||
for r in range(size_1d):
|
|
||||||
values = self.table[idx_in : idx_in + ch_in]
|
|
||||||
if with_normals:
|
|
||||||
values = callback(
|
|
||||||
r / (size_1d - 1),
|
|
||||||
g / (size_2d - 1),
|
|
||||||
b / (size_3d - 1),
|
|
||||||
*values,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
values = callback(*values)
|
|
||||||
table[idx_out : idx_out + ch_out] = values
|
|
||||||
idx_in += ch_in
|
|
||||||
idx_out += ch_out
|
|
||||||
|
|
||||||
return type(self)(
|
|
||||||
self.size,
|
|
||||||
table,
|
|
||||||
channels=ch_out,
|
|
||||||
target_mode=target_mode or self.mode,
|
|
||||||
_copy_table=False,
|
|
||||||
)
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
r = [
|
|
||||||
f"{self.__class__.__name__} from {self.table.__class__.__name__}",
|
|
||||||
"size={:d}x{:d}x{:d}".format(*self.size),
|
|
||||||
f"channels={self.channels:d}",
|
|
||||||
]
|
|
||||||
if self.mode:
|
|
||||||
r.append(f"target_mode={self.mode}")
|
|
||||||
return "<{}>".format(" ".join(r))
|
|
||||||
|
|
||||||
def filter(self, image):
|
|
||||||
from . import Image
|
|
||||||
|
|
||||||
return image.color_lut_3d(
|
|
||||||
self.mode or image.mode,
|
|
||||||
Image.Resampling.BILINEAR,
|
|
||||||
self.channels,
|
|
||||||
self.size[0],
|
|
||||||
self.size[1],
|
|
||||||
self.size[2],
|
|
||||||
self.table,
|
|
||||||
)
|
|
||||||
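# Illustrative usage sketch (not part of the original module): generating a
# small "warming" Color3DLUT and applying it. The 17-point size, the channel
# gains and "photo.jpg" are placeholder assumptions.
def _lut_demo():
    from PIL import Image

    def warm(r, g, b):
        # inputs and outputs are fractions in [0.0, 1.0]
        return min(r * 1.1, 1.0), g, b * 0.9

    lut = Color3DLUT.generate(17, warm)
    return Image.open("photo.jpg").convert("RGB").filter(lut)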
File diff suppressed because it is too large
@@ -1,178 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# screen grabber
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 2001-04-26 fl created
|
|
||||||
# 2001-09-17 fl use builtin driver, if present
|
|
||||||
# 2002-11-19 fl added grabclipboard support
|
|
||||||
#
|
|
||||||
# Copyright (c) 2001-2002 by Secret Labs AB
|
|
||||||
# Copyright (c) 2001-2002 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import io
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from . import Image
|
|
||||||
|
|
||||||
|
|
||||||
def grab(bbox=None, include_layered_windows=False, all_screens=False, xdisplay=None):
|
|
||||||
if xdisplay is None:
|
|
||||||
if sys.platform == "darwin":
|
|
||||||
fh, filepath = tempfile.mkstemp(".png")
|
|
||||||
os.close(fh)
|
|
||||||
args = ["screencapture"]
|
|
||||||
if bbox:
|
|
||||||
left, top, right, bottom = bbox
|
|
||||||
args += ["-R", f"{left},{top},{right-left},{bottom-top}"]
|
|
||||||
subprocess.call(args + ["-x", filepath])
|
|
||||||
im = Image.open(filepath)
|
|
||||||
im.load()
|
|
||||||
os.unlink(filepath)
|
|
||||||
if bbox:
|
|
||||||
im_resized = im.resize((right - left, bottom - top))
|
|
||||||
im.close()
|
|
||||||
return im_resized
|
|
||||||
return im
|
|
||||||
elif sys.platform == "win32":
|
|
||||||
offset, size, data = Image.core.grabscreen_win32(
|
|
||||||
include_layered_windows, all_screens
|
|
||||||
)
|
|
||||||
im = Image.frombytes(
|
|
||||||
"RGB",
|
|
||||||
size,
|
|
||||||
data,
|
|
||||||
# RGB, 32-bit line padding, origin lower left corner
|
|
||||||
"raw",
|
|
||||||
"BGR",
|
|
||||||
(size[0] * 3 + 3) & -4,
|
|
||||||
-1,
|
|
||||||
)
|
|
||||||
if bbox:
|
|
||||||
x0, y0 = offset
|
|
||||||
left, top, right, bottom = bbox
|
|
||||||
im = im.crop((left - x0, top - y0, right - x0, bottom - y0))
|
|
||||||
return im
|
|
||||||
try:
|
|
||||||
if not Image.core.HAVE_XCB:
|
|
||||||
msg = "Pillow was built without XCB support"
|
|
||||||
raise OSError(msg)
|
|
||||||
size, data = Image.core.grabscreen_x11(xdisplay)
|
|
||||||
except OSError:
|
|
||||||
if (
|
|
||||||
xdisplay is None
|
|
||||||
and sys.platform not in ("darwin", "win32")
|
|
||||||
and shutil.which("gnome-screenshot")
|
|
||||||
):
|
|
||||||
fh, filepath = tempfile.mkstemp(".png")
|
|
||||||
os.close(fh)
|
|
||||||
subprocess.call(["gnome-screenshot", "-f", filepath])
|
|
||||||
im = Image.open(filepath)
|
|
||||||
im.load()
|
|
||||||
os.unlink(filepath)
|
|
||||||
if bbox:
|
|
||||||
im_cropped = im.crop(bbox)
|
|
||||||
im.close()
|
|
||||||
return im_cropped
|
|
||||||
return im
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
im = Image.frombytes("RGB", size, data, "raw", "BGRX", size[0] * 4, 1)
|
|
||||||
if bbox:
|
|
||||||
im = im.crop(bbox)
|
|
||||||
return im
|
|
||||||
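# Illustrative usage sketch (not part of the original module): capture the
# whole screen and a fixed region with grab(). The bounding box values and
# "region.png" are placeholder assumptions.
def _grab_demo():
    full = grab()
    region = grab(bbox=(0, 0, 400, 300))
    region.save("region.png")
    return full, region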
|
|
||||||
|
|
||||||
def grabclipboard():
|
|
||||||
if sys.platform == "darwin":
|
|
||||||
fh, filepath = tempfile.mkstemp(".png")
|
|
||||||
os.close(fh)
|
|
||||||
commands = [
|
|
||||||
'set theFile to (open for access POSIX file "'
|
|
||||||
+ filepath
|
|
||||||
+ '" with write permission)',
|
|
||||||
"try",
|
|
||||||
" write (the clipboard as «class PNGf») to theFile",
|
|
||||||
"end try",
|
|
||||||
"close access theFile",
|
|
||||||
]
|
|
||||||
script = ["osascript"]
|
|
||||||
for command in commands:
|
|
||||||
script += ["-e", command]
|
|
||||||
subprocess.call(script)
|
|
||||||
|
|
||||||
im = None
|
|
||||||
if os.stat(filepath).st_size != 0:
|
|
||||||
im = Image.open(filepath)
|
|
||||||
im.load()
|
|
||||||
os.unlink(filepath)
|
|
||||||
return im
|
|
||||||
elif sys.platform == "win32":
|
|
||||||
fmt, data = Image.core.grabclipboard_win32()
|
|
||||||
if fmt == "file": # CF_HDROP
|
|
||||||
import struct
|
|
||||||
|
|
||||||
o = struct.unpack_from("I", data)[0]
|
|
||||||
if data[16] != 0:
|
|
||||||
files = data[o:].decode("utf-16le").split("\0")
|
|
||||||
else:
|
|
||||||
files = data[o:].decode("mbcs").split("\0")
|
|
||||||
return files[: files.index("")]
|
|
||||||
if isinstance(data, bytes):
|
|
||||||
data = io.BytesIO(data)
|
|
||||||
if fmt == "png":
|
|
||||||
from . import PngImagePlugin
|
|
||||||
|
|
||||||
return PngImagePlugin.PngImageFile(data)
|
|
||||||
elif fmt == "DIB":
|
|
||||||
from . import BmpImagePlugin
|
|
||||||
|
|
||||||
return BmpImagePlugin.DibImageFile(data)
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
if os.getenv("WAYLAND_DISPLAY"):
|
|
||||||
session_type = "wayland"
|
|
||||||
elif os.getenv("DISPLAY"):
|
|
||||||
session_type = "x11"
|
|
||||||
else: # Session type check failed
|
|
||||||
session_type = None
|
|
||||||
|
|
||||||
if shutil.which("wl-paste") and session_type in ("wayland", None):
|
|
||||||
output = subprocess.check_output(["wl-paste", "-l"]).decode()
|
|
||||||
mimetypes = output.splitlines()
|
|
||||||
if "image/png" in mimetypes:
|
|
||||||
mimetype = "image/png"
|
|
||||||
elif mimetypes:
|
|
||||||
mimetype = mimetypes[0]
|
|
||||||
else:
|
|
||||||
mimetype = None
|
|
||||||
|
|
||||||
args = ["wl-paste"]
|
|
||||||
if mimetype:
|
|
||||||
args.extend(["-t", mimetype])
|
|
||||||
elif shutil.which("xclip") and session_type in ("x11", None):
|
|
||||||
args = ["xclip", "-selection", "clipboard", "-t", "image/png", "-o"]
|
|
||||||
else:
|
|
||||||
msg = "wl-paste or xclip is required for ImageGrab.grabclipboard() on Linux"
|
|
||||||
raise NotImplementedError(msg)
|
|
||||||
|
|
||||||
p = subprocess.run(args, capture_output=True)
|
|
||||||
err = p.stderr
|
|
||||||
if err:
|
|
||||||
msg = f"{args[0]} error: {err.strip().decode()}"
|
|
||||||
raise ChildProcessError(msg)
|
|
||||||
data = io.BytesIO(p.stdout)
|
|
||||||
im = Image.open(data)
|
|
||||||
im.load()
|
|
||||||
return im
|
|
||||||
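# Illustrative usage sketch (not part of the original module): grabclipboard()
# returns an Image, a list of file names (Windows CF_HDROP), or None.
# "clipboard.png" is a placeholder path.
def _clipboard_demo():
    clip = grabclipboard()
    if isinstance(clip, list):
        return clip  # file names copied to the clipboard
    if clip is not None:
        clip.save("clipboard.png")
    return clip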
@@ -1,265 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# a simple math add-on for the Python Imaging Library
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 1999-02-15 fl Original PIL Plus release
|
|
||||||
# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6
|
|
||||||
# 2005-09-12 fl Fixed int() and float() for Python 2.4.1
|
|
||||||
#
|
|
||||||
# Copyright (c) 1999-2005 by Secret Labs AB
|
|
||||||
# Copyright (c) 2005 by Fredrik Lundh
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import builtins
|
|
||||||
|
|
||||||
from . import Image, _imagingmath
|
|
||||||
|
|
||||||
|
|
||||||
class _Operand:
|
|
||||||
"""Wraps an image operand, providing standard operators"""
|
|
||||||
|
|
||||||
def __init__(self, im):
|
|
||||||
self.im = im
|
|
||||||
|
|
||||||
def __fixup(self, im1):
|
|
||||||
# convert image to suitable mode
|
|
||||||
if isinstance(im1, _Operand):
|
|
||||||
# argument was an image.
|
|
||||||
if im1.im.mode in ("1", "L"):
|
|
||||||
return im1.im.convert("I")
|
|
||||||
elif im1.im.mode in ("I", "F"):
|
|
||||||
return im1.im
|
|
||||||
else:
|
|
||||||
msg = f"unsupported mode: {im1.im.mode}"
|
|
||||||
raise ValueError(msg)
|
|
||||||
else:
|
|
||||||
# argument was a constant
|
|
||||||
if isinstance(im1, (int, float)) and self.im.mode in ("1", "L", "I"):
|
|
||||||
return Image.new("I", self.im.size, im1)
|
|
||||||
else:
|
|
||||||
return Image.new("F", self.im.size, im1)
|
|
||||||
|
|
||||||
def apply(self, op, im1, im2=None, mode=None):
|
|
||||||
im1 = self.__fixup(im1)
|
|
||||||
if im2 is None:
|
|
||||||
# unary operation
|
|
||||||
out = Image.new(mode or im1.mode, im1.size, None)
|
|
||||||
im1.load()
|
|
||||||
try:
|
|
||||||
op = getattr(_imagingmath, op + "_" + im1.mode)
|
|
||||||
except AttributeError as e:
|
|
||||||
msg = f"bad operand type for '{op}'"
|
|
||||||
raise TypeError(msg) from e
|
|
||||||
_imagingmath.unop(op, out.im.id, im1.im.id)
|
|
||||||
else:
|
|
||||||
# binary operation
|
|
||||||
im2 = self.__fixup(im2)
|
|
||||||
if im1.mode != im2.mode:
|
|
||||||
# convert both arguments to floating point
|
|
||||||
if im1.mode != "F":
|
|
||||||
im1 = im1.convert("F")
|
|
||||||
if im2.mode != "F":
|
|
||||||
im2 = im2.convert("F")
|
|
||||||
if im1.size != im2.size:
|
|
||||||
# crop both arguments to a common size
|
|
||||||
size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1]))
|
|
||||||
if im1.size != size:
|
|
||||||
im1 = im1.crop((0, 0) + size)
|
|
||||||
if im2.size != size:
|
|
||||||
im2 = im2.crop((0, 0) + size)
|
|
||||||
out = Image.new(mode or im1.mode, im1.size, None)
|
|
||||||
im1.load()
|
|
||||||
im2.load()
|
|
||||||
try:
|
|
||||||
op = getattr(_imagingmath, op + "_" + im1.mode)
|
|
||||||
except AttributeError as e:
|
|
||||||
msg = f"bad operand type for '{op}'"
|
|
||||||
raise TypeError(msg) from e
|
|
||||||
_imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
|
|
||||||
return _Operand(out)
|
|
||||||
|
|
||||||
# unary operators
|
|
||||||
def __bool__(self):
|
|
||||||
# an image is "true" if it contains at least one non-zero pixel
|
|
||||||
return self.im.getbbox() is not None
|
|
||||||
|
|
||||||
def __abs__(self):
|
|
||||||
return self.apply("abs", self)
|
|
||||||
|
|
||||||
def __pos__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __neg__(self):
|
|
||||||
return self.apply("neg", self)
|
|
||||||
|
|
||||||
# binary operators
|
|
||||||
def __add__(self, other):
|
|
||||||
return self.apply("add", self, other)
|
|
||||||
|
|
||||||
def __radd__(self, other):
|
|
||||||
return self.apply("add", other, self)
|
|
||||||
|
|
||||||
def __sub__(self, other):
|
|
||||||
return self.apply("sub", self, other)
|
|
||||||
|
|
||||||
def __rsub__(self, other):
|
|
||||||
return self.apply("sub", other, self)
|
|
||||||
|
|
||||||
def __mul__(self, other):
|
|
||||||
return self.apply("mul", self, other)
|
|
||||||
|
|
||||||
def __rmul__(self, other):
|
|
||||||
return self.apply("mul", other, self)
|
|
||||||
|
|
||||||
def __truediv__(self, other):
|
|
||||||
return self.apply("div", self, other)
|
|
||||||
|
|
||||||
def __rtruediv__(self, other):
|
|
||||||
return self.apply("div", other, self)
|
|
||||||
|
|
||||||
def __mod__(self, other):
|
|
||||||
return self.apply("mod", self, other)
|
|
||||||
|
|
||||||
def __rmod__(self, other):
|
|
||||||
return self.apply("mod", other, self)
|
|
||||||
|
|
||||||
def __pow__(self, other):
|
|
||||||
return self.apply("pow", self, other)
|
|
||||||
|
|
||||||
def __rpow__(self, other):
|
|
||||||
return self.apply("pow", other, self)
|
|
||||||
|
|
||||||
# bitwise
|
|
||||||
def __invert__(self):
|
|
||||||
return self.apply("invert", self)
|
|
||||||
|
|
||||||
def __and__(self, other):
|
|
||||||
return self.apply("and", self, other)
|
|
||||||
|
|
||||||
def __rand__(self, other):
|
|
||||||
return self.apply("and", other, self)
|
|
||||||
|
|
||||||
def __or__(self, other):
|
|
||||||
return self.apply("or", self, other)
|
|
||||||
|
|
||||||
def __ror__(self, other):
|
|
||||||
return self.apply("or", other, self)
|
|
||||||
|
|
||||||
def __xor__(self, other):
|
|
||||||
return self.apply("xor", self, other)
|
|
||||||
|
|
||||||
def __rxor__(self, other):
|
|
||||||
return self.apply("xor", other, self)
|
|
||||||
|
|
||||||
def __lshift__(self, other):
|
|
||||||
return self.apply("lshift", self, other)
|
|
||||||
|
|
||||||
def __rshift__(self, other):
|
|
||||||
return self.apply("rshift", self, other)
|
|
||||||
|
|
||||||
# logical
|
|
||||||
def __eq__(self, other):
|
|
||||||
return self.apply("eq", self, other)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return self.apply("ne", self, other)
|
|
||||||
|
|
||||||
def __lt__(self, other):
|
|
||||||
return self.apply("lt", self, other)
|
|
||||||
|
|
||||||
def __le__(self, other):
|
|
||||||
return self.apply("le", self, other)
|
|
||||||
|
|
||||||
def __gt__(self, other):
|
|
||||||
return self.apply("gt", self, other)
|
|
||||||
|
|
||||||
def __ge__(self, other):
|
|
||||||
return self.apply("ge", self, other)
|
|
||||||
|
|
||||||
|
|
||||||
# conversions
|
|
||||||
def imagemath_int(self):
|
|
||||||
return _Operand(self.im.convert("I"))
|
|
||||||
|
|
||||||
|
|
||||||
def imagemath_float(self):
|
|
||||||
return _Operand(self.im.convert("F"))
|
|
||||||
|
|
||||||
|
|
||||||
# logical
|
|
||||||
def imagemath_equal(self, other):
|
|
||||||
return self.apply("eq", self, other, mode="I")
|
|
||||||
|
|
||||||
|
|
||||||
def imagemath_notequal(self, other):
|
|
||||||
return self.apply("ne", self, other, mode="I")
|
|
||||||
|
|
||||||
|
|
||||||
def imagemath_min(self, other):
|
|
||||||
return self.apply("min", self, other)
|
|
||||||
|
|
||||||
|
|
||||||
def imagemath_max(self, other):
|
|
||||||
return self.apply("max", self, other)
|
|
||||||
|
|
||||||
|
|
||||||
def imagemath_convert(self, mode):
|
|
||||||
return _Operand(self.im.convert(mode))
|
|
||||||
|
|
||||||
|
|
||||||
ops = {}
|
|
||||||
for k, v in list(globals().items()):
|
|
||||||
if k[:10] == "imagemath_":
|
|
||||||
ops[k[10:]] = v
|
|
||||||
|
|
||||||
|
|
||||||
def eval(expression, _dict={}, **kw):
|
|
||||||
"""
|
|
||||||
Evaluates an image expression.
|
|
||||||
|
|
||||||
:param expression: A string containing a Python-style expression.
|
|
||||||
:param options: Values to add to the evaluation context. You
|
|
||||||
can either use a dictionary, or one or more keyword
|
|
||||||
arguments.
|
|
||||||
:return: The evaluated expression. This is usually an image object, but can
|
|
||||||
also be an integer, a floating point value, or a pixel tuple,
|
|
||||||
depending on the expression.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# build execution namespace
|
|
||||||
args = ops.copy()
|
|
||||||
for k in list(_dict.keys()) + list(kw.keys()):
|
|
||||||
if "__" in k or hasattr(builtins, k):
|
|
||||||
msg = f"'{k}' not allowed"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
args.update(_dict)
|
|
||||||
args.update(kw)
|
|
||||||
for k, v in args.items():
|
|
||||||
if hasattr(v, "im"):
|
|
||||||
args[k] = _Operand(v)
|
|
||||||
|
|
||||||
compiled_code = compile(expression, "<string>", "eval")
|
|
||||||
|
|
||||||
def scan(code):
|
|
||||||
for const in code.co_consts:
|
|
||||||
if type(const) is type(compiled_code):
|
|
||||||
scan(const)
|
|
||||||
|
|
||||||
for name in code.co_names:
|
|
||||||
if name not in args and name != "abs":
|
|
||||||
msg = f"'{name}' not allowed"
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
scan(compiled_code)
|
|
||||||
out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args)
|
|
||||||
try:
|
|
||||||
return out.im
|
|
||||||
except AttributeError:
|
|
||||||
return out
|
|
||||||
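# Illustrative usage sketch (not part of the original module): combining two
# grayscale images with eval(). "a.png" and "b.png" are placeholder paths; the
# expression uses the ``min`` and ``convert`` operations registered above.
def _eval_demo():
    im1 = Image.open("a.png").convert("L")
    im2 = Image.open("b.png").convert("L")
    return eval("convert(min(a, b), 'L')", a=im1, b=im2)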
@@ -1,96 +0,0 @@
|
|||||||
#
|
|
||||||
# The Python Imaging Library.
|
|
||||||
# $Id$
|
|
||||||
#
|
|
||||||
# standard mode descriptors
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 2006-03-20 fl Added
|
|
||||||
#
|
|
||||||
# Copyright (c) 2006 by Secret Labs AB.
|
|
||||||
# Copyright (c) 2006 by Fredrik Lundh.
|
|
||||||
#
|
|
||||||
# See the README file for information on usage and redistribution.
|
|
||||||
#
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import sys
|
|
||||||
from functools import lru_cache
|
|
||||||
|
|
||||||
|
|
||||||
class ModeDescriptor:
|
|
||||||
"""Wrapper for mode strings."""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
mode: str,
|
|
||||||
bands: tuple[str, ...],
|
|
||||||
basemode: str,
|
|
||||||
basetype: str,
|
|
||||||
typestr: str,
|
|
||||||
) -> None:
|
|
||||||
self.mode = mode
|
|
||||||
self.bands = bands
|
|
||||||
self.basemode = basemode
|
|
||||||
self.basetype = basetype
|
|
||||||
self.typestr = typestr
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return self.mode
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache
|
|
||||||
def getmode(mode: str) -> ModeDescriptor:
|
|
||||||
"""Gets a mode descriptor for the given mode."""
|
|
||||||
# initialize mode cache
|
|
||||||
endian = "<" if sys.byteorder == "little" else ">"
|
|
||||||
|
|
||||||
modes = {
|
|
||||||
# core modes
|
|
||||||
# Bits need to be extended to bytes
|
|
||||||
"1": ("L", "L", ("1",), "|b1"),
|
|
||||||
"L": ("L", "L", ("L",), "|u1"),
|
|
||||||
"I": ("L", "I", ("I",), endian + "i4"),
|
|
||||||
"F": ("L", "F", ("F",), endian + "f4"),
|
|
||||||
"P": ("P", "L", ("P",), "|u1"),
|
|
||||||
"RGB": ("RGB", "L", ("R", "G", "B"), "|u1"),
|
|
||||||
"RGBX": ("RGB", "L", ("R", "G", "B", "X"), "|u1"),
|
|
||||||
"RGBA": ("RGB", "L", ("R", "G", "B", "A"), "|u1"),
|
|
||||||
"CMYK": ("RGB", "L", ("C", "M", "Y", "K"), "|u1"),
|
|
||||||
"YCbCr": ("RGB", "L", ("Y", "Cb", "Cr"), "|u1"),
|
|
||||||
# UNDONE - unsigned |u1i1i1
|
|
||||||
"LAB": ("RGB", "L", ("L", "A", "B"), "|u1"),
|
|
||||||
"HSV": ("RGB", "L", ("H", "S", "V"), "|u1"),
|
|
||||||
# extra experimental modes
|
|
||||||
"RGBa": ("RGB", "L", ("R", "G", "B", "a"), "|u1"),
|
|
||||||
"BGR;15": ("RGB", "L", ("B", "G", "R"), "|u1"),
|
|
||||||
"BGR;16": ("RGB", "L", ("B", "G", "R"), "|u1"),
|
|
||||||
"BGR;24": ("RGB", "L", ("B", "G", "R"), "|u1"),
|
|
||||||
"LA": ("L", "L", ("L", "A"), "|u1"),
|
|
||||||
"La": ("L", "L", ("L", "a"), "|u1"),
|
|
||||||
"PA": ("RGB", "L", ("P", "A"), "|u1"),
|
|
||||||
}
|
|
||||||
if mode in modes:
|
|
||||||
base_mode, base_type, bands, type_str = modes[mode]
|
|
||||||
return ModeDescriptor(mode, bands, base_mode, base_type, type_str)
|
|
||||||
|
|
||||||
mapping_modes = {
|
|
||||||
# I;16 == I;16L, and I;32 == I;32L
|
|
||||||
"I;16": "<u2",
|
|
||||||
"I;16S": "<i2",
|
|
||||||
"I;16L": "<u2",
|
|
||||||
"I;16LS": "<i2",
|
|
||||||
"I;16B": ">u2",
|
|
||||||
"I;16BS": ">i2",
|
|
||||||
"I;16N": endian + "u2",
|
|
||||||
"I;16NS": endian + "i2",
|
|
||||||
"I;32": "<u4",
|
|
||||||
"I;32B": ">u4",
|
|
||||||
"I;32L": "<u4",
|
|
||||||
"I;32S": "<i4",
|
|
||||||
"I;32BS": ">i4",
|
|
||||||
"I;32LS": "<i4",
|
|
||||||
}
|
|
||||||
|
|
||||||
type_str = mapping_modes[mode]
|
|
||||||
return ModeDescriptor(mode, ("I",), "L", "L", type_str)
|
|
||||||
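# Illustrative usage sketch (not part of the original module): inspecting a
# mode descriptor returned by getmode().
def _mode_demo():
    descr = getmode("RGB")
    # descr.bands == ("R", "G", "B"); descr.basetype == "L"; descr.typestr == "|u1"
    return descr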
@@ -1,255 +0,0 @@
|
|||||||
# A binary morphology add-on for the Python Imaging Library
|
|
||||||
#
|
|
||||||
# History:
|
|
||||||
# 2014-06-04 Initial version.
|
|
||||||
#
|
|
||||||
# Copyright (c) 2014 Dov Grobgeld <dov.grobgeld@gmail.com>
|
|
||||||
from __future__ import annotations
|
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
from . import Image, _imagingmorph
|
|
||||||
|
|
||||||
LUT_SIZE = 1 << 9
|
|
||||||
|
|
||||||
# fmt: off
|
|
||||||
ROTATION_MATRIX = [
|
|
||||||
6, 3, 0,
|
|
||||||
7, 4, 1,
|
|
||||||
8, 5, 2,
|
|
||||||
]
|
|
||||||
MIRROR_MATRIX = [
|
|
||||||
2, 1, 0,
|
|
||||||
5, 4, 3,
|
|
||||||
8, 7, 6,
|
|
||||||
]
|
|
||||||
# fmt: on
|
|
||||||
|
|
||||||
|
|
||||||
class LutBuilder:
|
|
||||||
"""A class for building a MorphLut from a descriptive language
|
|
||||||
|
|
||||||
The input patterns are a list of string sequences like these::
|
|
||||||
|
|
||||||
4:(...
|
|
||||||
.1.
|
|
||||||
111)->1
|
|
||||||
|
|
||||||
(whitespace, including line breaks, is ignored). The option 4
describes a series of symmetry operations (in this case a
4-rotation); the pattern is described by:
|
|
||||||
|
|
||||||
- . or X - Ignore
|
|
||||||
- 1 - Pixel is on
|
|
||||||
- 0 - Pixel is off
|
|
||||||
|
|
||||||
The result of the operation is described after "->" string.
|
|
||||||
|
|
||||||
The default is to return the current pixel value, which is
|
|
||||||
returned if no other match is found.
|
|
||||||
|
|
||||||
Operations:
|
|
||||||
|
|
||||||
- 4 - 4 way rotation
|
|
||||||
- N - Negate
|
|
||||||
- 1 - Dummy op for no other operation (an op must always be given)
|
|
||||||
- M - Mirroring
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
lb = LutBuilder(patterns = ["4:(... .1. 111)->1"])
|
|
||||||
lut = lb.build_lut()
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, patterns=None, op_name=None):
|
|
||||||
if patterns is not None:
|
|
||||||
self.patterns = patterns
|
|
||||||
else:
|
|
||||||
self.patterns = []
|
|
||||||
self.lut = None
|
|
||||||
if op_name is not None:
|
|
||||||
known_patterns = {
|
|
||||||
"corner": ["1:(... ... ...)->0", "4:(00. 01. ...)->1"],
|
|
||||||
"dilation4": ["4:(... .0. .1.)->1"],
|
|
||||||
"dilation8": ["4:(... .0. .1.)->1", "4:(... .0. ..1)->1"],
|
|
||||||
"erosion4": ["4:(... .1. .0.)->0"],
|
|
||||||
"erosion8": ["4:(... .1. .0.)->0", "4:(... .1. ..0)->0"],
|
|
||||||
"edge": [
|
|
||||||
"1:(... ... ...)->0",
|
|
||||||
"4:(.0. .1. ...)->1",
|
|
||||||
"4:(01. .1. ...)->1",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
if op_name not in known_patterns:
|
|
||||||
msg = "Unknown pattern " + op_name + "!"
|
|
||||||
raise Exception(msg)
|
|
||||||
|
|
||||||
self.patterns = known_patterns[op_name]
|
|
||||||
|
|
||||||
def add_patterns(self, patterns):
|
|
||||||
self.patterns += patterns
|
|
||||||
|
|
||||||
def build_default_lut(self):
|
|
||||||
symbols = [0, 1]
|
|
||||||
m = 1 << 4 # pos of current pixel
|
|
||||||
self.lut = bytearray(symbols[(i & m) > 0] for i in range(LUT_SIZE))
|
|
||||||
|
|
||||||
def get_lut(self):
|
|
||||||
return self.lut
|
|
||||||
|
|
||||||
def _string_permute(self, pattern, permutation):
|
|
||||||
"""string_permute takes a pattern and a permutation and returns the
|
|
||||||
string permuted according to the permutation list.
|
|
||||||
"""
|
|
||||||
assert len(permutation) == 9
|
|
||||||
return "".join(pattern[p] for p in permutation)
|
|
||||||
|
|
||||||
def _pattern_permute(self, basic_pattern, options, basic_result):
|
|
||||||
"""pattern_permute takes a basic pattern and its result and clones
|
|
||||||
the pattern according to the modifications described in the ``options``
|
|
||||||
parameter. It returns a list of all cloned patterns."""
|
|
||||||
patterns = [(basic_pattern, basic_result)]
|
|
||||||
|
|
||||||
# rotations
|
|
||||||
if "4" in options:
|
|
||||||
res = patterns[-1][1]
|
|
||||||
for i in range(4):
|
|
||||||
patterns.append(
|
|
||||||
(self._string_permute(patterns[-1][0], ROTATION_MATRIX), res)
|
|
||||||
)
|
|
||||||
# mirror
|
|
||||||
if "M" in options:
|
|
||||||
n = len(patterns)
|
|
||||||
for pattern, res in patterns[:n]:
|
|
||||||
patterns.append((self._string_permute(pattern, MIRROR_MATRIX), res))
|
|
||||||
|
|
||||||
# negate
|
|
||||||
if "N" in options:
|
|
||||||
n = len(patterns)
|
|
||||||
for pattern, res in patterns[:n]:
|
|
||||||
# Swap 0 and 1
|
|
||||||
pattern = pattern.replace("0", "Z").replace("1", "0").replace("Z", "1")
|
|
||||||
res = 1 - int(res)
|
|
||||||
patterns.append((pattern, res))
|
|
||||||
|
|
||||||
return patterns
|
|
||||||
|
|
||||||
def build_lut(self):
|
|
||||||
"""Compile all patterns into a morphology lut.
|
|
||||||
|
|
||||||
TBD :Build based on (file) morphlut:modify_lut
|
|
||||||
"""
|
|
||||||
self.build_default_lut()
|
|
||||||
patterns = []
|
|
||||||
|
|
||||||
# Parse and create symmetries of the patterns strings
|
|
||||||
for p in self.patterns:
|
|
||||||
m = re.search(r"(\w*):?\s*\((.+?)\)\s*->\s*(\d)", p.replace("\n", ""))
|
|
||||||
if not m:
|
|
||||||
msg = 'Syntax error in pattern "' + p + '"'
|
|
||||||
raise Exception(msg)
|
|
||||||
options = m.group(1)
|
|
||||||
pattern = m.group(2)
|
|
||||||
result = int(m.group(3))
|
|
||||||
|
|
||||||
# Get rid of spaces
|
|
||||||
pattern = pattern.replace(" ", "").replace("\n", "")
|
|
||||||
|
|
||||||
patterns += self._pattern_permute(pattern, options, result)
|
|
||||||
|
|
||||||
# compile the patterns into regular expressions for speed
|
|
||||||
for i, pattern in enumerate(patterns):
|
|
||||||
p = pattern[0].replace(".", "X").replace("X", "[01]")
|
|
||||||
p = re.compile(p)
|
|
||||||
patterns[i] = (p, pattern[1])
|
|
||||||
|
|
||||||
# Step through table and find patterns that match.
|
|
||||||
# Note that all the patterns are searched. The last one
# that matches overrides the earlier ones.
|
|
||||||
for i in range(LUT_SIZE):
|
|
||||||
# Build the bit pattern
|
|
||||||
bitpattern = bin(i)[2:]
|
|
||||||
bitpattern = ("0" * (9 - len(bitpattern)) + bitpattern)[::-1]
|
|
||||||
|
|
||||||
for p, r in patterns:
|
|
||||||
if p.match(bitpattern):
|
|
||||||
self.lut[i] = [0, 1][r]
|
|
||||||
|
|
||||||
return self.lut
|
|
||||||
|
|
||||||
|
|
||||||
class MorphOp:
|
|
||||||
"""A class for binary morphological operators"""
|
|
||||||
|
|
||||||
def __init__(self, lut=None, op_name=None, patterns=None):
|
|
||||||
"""Create a binary morphological operator"""
|
|
||||||
self.lut = lut
|
|
||||||
if op_name is not None:
|
|
||||||
self.lut = LutBuilder(op_name=op_name).build_lut()
|
|
||||||
elif patterns is not None:
|
|
||||||
self.lut = LutBuilder(patterns=patterns).build_lut()
|
|
||||||
|
|
||||||
def apply(self, image):
|
|
||||||
"""Run a single morphological operation on an image
|
|
||||||
|
|
||||||
Returns a tuple of the number of changed pixels and the
|
|
||||||
morphed image"""
|
|
||||||
if self.lut is None:
|
|
||||||
msg = "No operator loaded"
|
|
||||||
raise Exception(msg)
|
|
||||||
|
|
||||||
if image.mode != "L":
|
|
||||||
msg = "Image mode must be L"
|
|
||||||
raise ValueError(msg)
|
|
||||||
outimage = Image.new(image.mode, image.size, None)
|
|
||||||
count = _imagingmorph.apply(bytes(self.lut), image.im.id, outimage.im.id)
|
|
||||||
return count, outimage
|
|
||||||
|
|
||||||
def match(self, image):
|
|
||||||
"""Get a list of coordinates matching the morphological operation on
|
|
||||||
an image.
|
|
||||||
|
|
||||||
Returns a list of tuples of (x,y) coordinates
|
|
||||||
of all matching pixels. See :ref:`coordinate-system`."""
|
|
||||||
if self.lut is None:
|
|
||||||
msg = "No operator loaded"
|
|
||||||
raise Exception(msg)
|
|
||||||
|
|
||||||
if image.mode != "L":
|
|
||||||
msg = "Image mode must be L"
|
|
||||||
raise ValueError(msg)
|
|
||||||
return _imagingmorph.match(bytes(self.lut), image.im.id)
|
|
||||||
|
|
||||||
def get_on_pixels(self, image):
|
|
||||||
"""Get a list of all turned on pixels in a binary image
|
|
||||||
|
|
||||||
Returns a list of tuples of (x,y) coordinates
|
|
||||||
of all matching pixels. See :ref:`coordinate-system`."""
|
|
||||||
|
|
||||||
if image.mode != "L":
|
|
||||||
msg = "Image mode must be L"
|
|
||||||
raise ValueError(msg)
|
|
||||||
return _imagingmorph.get_on_pixels(image.im.id)
|
|
||||||
|
|
||||||
def load_lut(self, filename):
|
|
||||||
"""Load an operator from an mrl file"""
|
|
||||||
with open(filename, "rb") as f:
|
|
||||||
self.lut = bytearray(f.read())
|
|
||||||
|
|
||||||
if len(self.lut) != LUT_SIZE:
|
|
||||||
self.lut = None
|
|
||||||
msg = "Wrong size operator file!"
|
|
||||||
raise Exception(msg)
|
|
||||||
|
|
||||||
def save_lut(self, filename):
|
|
||||||
"""Save an operator to an mrl file"""
|
|
||||||
if self.lut is None:
|
|
||||||
msg = "No operator loaded"
|
|
||||||
raise Exception(msg)
|
|
||||||
with open(filename, "wb") as f:
|
|
||||||
f.write(self.lut)
|
|
||||||
|
|
||||||
def set_lut(self, lut):
|
|
||||||
"""Set the lut from an external source"""
|
|
||||||
self.lut = lut
|
|
||||||
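# Illustrative usage sketch (not part of the original module): eroding a
# binary "L" image with a built-in operator. "mask.png" and the 128 threshold
# are placeholder assumptions.
def _morph_demo():
    im = Image.open("mask.png").convert("L").point(lambda p: 255 if p > 128 else 0)
    op = MorphOp(op_name="erosion8")
    changed_pixels, eroded = op.apply(im)
    return changed_pixels, eroded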
@@ -1,655 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# standard image operations
#
# History:
# 2001-10-20 fl Created
# 2001-10-23 fl Added autocontrast operator
# 2001-12-18 fl Added Kevin's fit operator
# 2004-03-14 fl Fixed potential division by zero in equalize
# 2005-05-05 fl Fixed equalize for low number of values
#
# Copyright (c) 2001-2004 by Secret Labs AB
# Copyright (c) 2001-2004 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import functools
import operator
import re

from . import ExifTags, Image, ImagePalette

#
# helpers


def _border(border):
    if isinstance(border, tuple):
        if len(border) == 2:
            left, top = right, bottom = border
        elif len(border) == 4:
            left, top, right, bottom = border
    else:
        left = top = right = bottom = border
    return left, top, right, bottom


def _color(color, mode):
    if isinstance(color, str):
        from . import ImageColor

        color = ImageColor.getcolor(color, mode)
    return color


def _lut(image, lut):
    if image.mode == "P":
        # FIXME: apply to lookup table, not image data
        msg = "mode P support coming soon"
        raise NotImplementedError(msg)
    elif image.mode in ("L", "RGB"):
        if image.mode == "RGB" and len(lut) == 256:
            lut = lut + lut + lut
        return image.point(lut)
    else:
        msg = f"not supported for mode {image.mode}"
        raise OSError(msg)


#
# actions


def autocontrast(image, cutoff=0, ignore=None, mask=None, preserve_tone=False):
    """
    Maximize (normalize) image contrast. This function calculates a
    histogram of the input image (or mask region), removes ``cutoff`` percent of the
    lightest and darkest pixels from the histogram, and remaps the image
    so that the darkest pixel becomes black (0), and the lightest
    becomes white (255).

    :param image: The image to process.
    :param cutoff: The percent to cut off from the histogram on the low and
                   high ends. Either a tuple of (low, high), or a single
                   number for both.
    :param ignore: The background pixel value (use None for no background).
    :param mask: Histogram used in contrast operation is computed using pixels
                 within the mask. If no mask is given the entire image is used
                 for histogram computation.
    :param preserve_tone: Preserve image tone in Photoshop-like style autocontrast.

                          .. versionadded:: 8.2.0

    :return: An image.
    """
    if preserve_tone:
        histogram = image.convert("L").histogram(mask)
    else:
        histogram = image.histogram(mask)

    lut = []
    for layer in range(0, len(histogram), 256):
        h = histogram[layer : layer + 256]
        if ignore is not None:
            # get rid of outliers
            try:
                h[ignore] = 0
            except TypeError:
                # assume sequence
                for ix in ignore:
                    h[ix] = 0
        if cutoff:
            # cut off pixels from both ends of the histogram
            if not isinstance(cutoff, tuple):
                cutoff = (cutoff, cutoff)
            # get number of pixels
            n = 0
            for ix in range(256):
                n = n + h[ix]
            # remove cutoff% pixels from the low end
            cut = n * cutoff[0] // 100
            for lo in range(256):
                if cut > h[lo]:
                    cut = cut - h[lo]
                    h[lo] = 0
                else:
                    h[lo] -= cut
                    cut = 0
                if cut <= 0:
                    break
            # remove cutoff% samples from the high end
            cut = n * cutoff[1] // 100
            for hi in range(255, -1, -1):
                if cut > h[hi]:
                    cut = cut - h[hi]
                    h[hi] = 0
                else:
                    h[hi] -= cut
                    cut = 0
                if cut <= 0:
                    break
        # find lowest/highest samples after preprocessing
        for lo in range(256):
            if h[lo]:
                break
        for hi in range(255, -1, -1):
            if h[hi]:
                break
        if hi <= lo:
            # don't bother
            lut.extend(list(range(256)))
        else:
            scale = 255.0 / (hi - lo)
            offset = -lo * scale
            for ix in range(256):
                ix = int(ix * scale + offset)
                if ix < 0:
                    ix = 0
                elif ix > 255:
                    ix = 255
                lut.append(ix)
    return _lut(image, lut)
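The docstring above describes autocontrast in terms of histogram clipping; a minimal usage sketch (not part of the deleted module, assuming a Pillow install and a placeholder "photo.jpg" path) would be:

from PIL import Image, ImageOps

with Image.open("photo.jpg") as im:  # placeholder input path
    # clip 2% of the darkest and lightest pixels, then restretch to 0..255
    normalized = ImageOps.autocontrast(im, cutoff=2, preserve_tone=True)
    normalized.save("photo_autocontrast.jpg")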


def colorize(image, black, white, mid=None, blackpoint=0, whitepoint=255, midpoint=127):
    """
    Colorize grayscale image.
    This function calculates a color wedge which maps all black pixels in
    the source image to the first color and all white pixels to the
    second color. If ``mid`` is specified, it uses three-color mapping.
    The ``black`` and ``white`` arguments should be RGB tuples or color names;
    optionally you can use three-color mapping by also specifying ``mid``.
    Mapping positions for any of the colors can be specified
    (e.g. ``blackpoint``), where these parameters are the integer
    value corresponding to where the corresponding color should be mapped.
    These parameters must have logical order, such that
    ``blackpoint <= midpoint <= whitepoint`` (if ``mid`` is specified).

    :param image: The image to colorize.
    :param black: The color to use for black input pixels.
    :param white: The color to use for white input pixels.
    :param mid: The color to use for midtone input pixels.
    :param blackpoint: an int value [0, 255] for the black mapping.
    :param whitepoint: an int value [0, 255] for the white mapping.
    :param midpoint: an int value [0, 255] for the midtone mapping.
    :return: An image.
    """

    # Initial asserts
    assert image.mode == "L"
    if mid is None:
        assert 0 <= blackpoint <= whitepoint <= 255
    else:
        assert 0 <= blackpoint <= midpoint <= whitepoint <= 255

    # Define colors from arguments
    black = _color(black, "RGB")
    white = _color(white, "RGB")
    if mid is not None:
        mid = _color(mid, "RGB")

    # Empty lists for the mapping
    red = []
    green = []
    blue = []

    # Create the low-end values
    for i in range(0, blackpoint):
        red.append(black[0])
        green.append(black[1])
        blue.append(black[2])

    # Create the mapping (2-color)
    if mid is None:
        range_map = range(0, whitepoint - blackpoint)

        for i in range_map:
            red.append(black[0] + i * (white[0] - black[0]) // len(range_map))
            green.append(black[1] + i * (white[1] - black[1]) // len(range_map))
            blue.append(black[2] + i * (white[2] - black[2]) // len(range_map))

    # Create the mapping (3-color)
    else:
        range_map1 = range(0, midpoint - blackpoint)
        range_map2 = range(0, whitepoint - midpoint)

        for i in range_map1:
            red.append(black[0] + i * (mid[0] - black[0]) // len(range_map1))
            green.append(black[1] + i * (mid[1] - black[1]) // len(range_map1))
            blue.append(black[2] + i * (mid[2] - black[2]) // len(range_map1))
        for i in range_map2:
            red.append(mid[0] + i * (white[0] - mid[0]) // len(range_map2))
            green.append(mid[1] + i * (white[1] - mid[1]) // len(range_map2))
            blue.append(mid[2] + i * (white[2] - mid[2]) // len(range_map2))

    # Create the high-end values
    for i in range(0, 256 - whitepoint):
        red.append(white[0])
        green.append(white[1])
        blue.append(white[2])

    # Return converted image
    image = image.convert("RGB")
    return _lut(image, red + green + blue)
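A sketch of the two- and three-color mapping described above (again assuming Pillow and a placeholder input; colorize() asserts mode "L", so the input is converted first):

from PIL import Image, ImageOps

with Image.open("photo.jpg") as im:  # placeholder input path
    gray = im.convert("L")
    duotone = ImageOps.colorize(gray, black="navy", white="ivory")
    tritone = ImageOps.colorize(
        gray, black="black", white="white", mid="#8a6d3b",
        blackpoint=20, midpoint=128, whitepoint=235,
    )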


def contain(image, size, method=Image.Resampling.BICUBIC):
    """
    Returns a resized version of the image, set to the maximum width and height
    within the requested size, while maintaining the original aspect ratio.

    :param image: The image to resize.
    :param size: The requested output size in pixels, given as a
                 (width, height) tuple.
    :param method: Resampling method to use. Default is
                   :py:attr:`~PIL.Image.Resampling.BICUBIC`.
                   See :ref:`concept-filters`.
    :return: An image.
    """

    im_ratio = image.width / image.height
    dest_ratio = size[0] / size[1]

    if im_ratio != dest_ratio:
        if im_ratio > dest_ratio:
            new_height = round(image.height / image.width * size[0])
            if new_height != size[1]:
                size = (size[0], new_height)
        else:
            new_width = round(image.width / image.height * size[1])
            if new_width != size[0]:
                size = (new_width, size[1])
    return image.resize(size, resample=method)


def cover(image, size, method=Image.Resampling.BICUBIC):
    """
    Returns a resized version of the image, so that the requested size is
    covered, while maintaining the original aspect ratio.

    :param image: The image to resize.
    :param size: The requested output size in pixels, given as a
                 (width, height) tuple.
    :param method: Resampling method to use. Default is
                   :py:attr:`~PIL.Image.Resampling.BICUBIC`.
                   See :ref:`concept-filters`.
    :return: An image.
    """

    im_ratio = image.width / image.height
    dest_ratio = size[0] / size[1]

    if im_ratio != dest_ratio:
        if im_ratio < dest_ratio:
            new_height = round(image.height / image.width * size[0])
            if new_height != size[1]:
                size = (size[0], new_height)
        else:
            new_width = round(image.width / image.height * size[1])
            if new_width != size[0]:
                size = (new_width, size[1])
    return image.resize(size, resample=method)


def pad(image, size, method=Image.Resampling.BICUBIC, color=None, centering=(0.5, 0.5)):
    """
    Returns a resized and padded version of the image, expanded to fill the
    requested aspect ratio and size.

    :param image: The image to resize and crop.
    :param size: The requested output size in pixels, given as a
                 (width, height) tuple.
    :param method: Resampling method to use. Default is
                   :py:attr:`~PIL.Image.Resampling.BICUBIC`.
                   See :ref:`concept-filters`.
    :param color: The background color of the padded image.
    :param centering: Control the position of the original image within the
                      padded version.

                          (0.5, 0.5) will keep the image centered
                          (0, 0) will keep the image aligned to the top left
                          (1, 1) will keep the image aligned to the bottom
                          right
    :return: An image.
    """

    resized = contain(image, size, method)
    if resized.size == size:
        out = resized
    else:
        out = Image.new(image.mode, size, color)
        if resized.palette:
            out.putpalette(resized.getpalette())
        if resized.width != size[0]:
            x = round((size[0] - resized.width) * max(0, min(centering[0], 1)))
            out.paste(resized, (x, 0))
        else:
            y = round((size[1] - resized.height) * max(0, min(centering[1], 1)))
            out.paste(resized, (0, y))
    return out
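contain, cover and pad differ only in how they reconcile the source and target aspect ratios; a side-by-side sketch (Pillow assumed, placeholder path):

from PIL import Image, ImageOps

with Image.open("photo.jpg") as im:  # placeholder input path
    fits_inside = ImageOps.contain(im, (400, 300))              # at most 400x300, ratio kept
    fills_box = ImageOps.cover(im, (400, 300))                   # at least 400x300, ratio kept
    letterboxed = ImageOps.pad(im, (400, 300), color="black")    # exactly 400x300, padded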


def crop(image, border=0):
    """
    Remove border from image. The same amount of pixels are removed
    from all four sides. This function works on all image modes.

    .. seealso:: :py:meth:`~PIL.Image.Image.crop`

    :param image: The image to crop.
    :param border: The number of pixels to remove.
    :return: An image.
    """
    left, top, right, bottom = _border(border)
    return image.crop((left, top, image.size[0] - right, image.size[1] - bottom))


def scale(image, factor, resample=Image.Resampling.BICUBIC):
    """
    Returns a rescaled image by a specific factor given in parameter.
    A factor greater than 1 expands the image, between 0 and 1 contracts the
    image.

    :param image: The image to rescale.
    :param factor: The expansion factor, as a float.
    :param resample: Resampling method to use. Default is
                     :py:attr:`~PIL.Image.Resampling.BICUBIC`.
                     See :ref:`concept-filters`.
    :returns: An :py:class:`~PIL.Image.Image` object.
    """
    if factor == 1:
        return image.copy()
    elif factor <= 0:
        msg = "the factor must be greater than 0"
        raise ValueError(msg)
    else:
        size = (round(factor * image.width), round(factor * image.height))
        return image.resize(size, resample)


def deform(image, deformer, resample=Image.Resampling.BILINEAR):
    """
    Deform the image.

    :param image: The image to deform.
    :param deformer: A deformer object. Any object that implements a
                     ``getmesh`` method can be used.
    :param resample: An optional resampling filter. Same values possible as
                     in the PIL.Image.transform function.
    :return: An image.
    """
    return image.transform(
        image.size, Image.Transform.MESH, deformer.getmesh(image), resample
    )
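The deformer argument only needs a getmesh method that returns (target box, source quad) pairs for the MESH transform; a toy mirror deformer, written here purely as an illustrative sketch and not taken from the deleted file, could look like:

from PIL import Image, ImageOps


class MirrorMesh:
    """Single-quad mesh that mirrors the whole image left to right."""

    def getmesh(self, image):
        w, h = image.size
        # one target rectangle covering the image; the source quad corners are
        # listed NW, SW, SE, NE, here swapped right-to-left to flip horizontally
        return [((0, 0, w, h), (w, 0, w, h, 0, h, 0, 0))]


with Image.open("photo.jpg") as im:  # placeholder input path
    mirrored = ImageOps.deform(im, MirrorMesh())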


def equalize(image, mask=None):
    """
    Equalize the image histogram. This function applies a non-linear
    mapping to the input image, in order to create a uniform
    distribution of grayscale values in the output image.

    :param image: The image to equalize.
    :param mask: An optional mask. If given, only the pixels selected by
                 the mask are included in the analysis.
    :return: An image.
    """
    if image.mode == "P":
        image = image.convert("RGB")
    h = image.histogram(mask)
    lut = []
    for b in range(0, len(h), 256):
        histo = [_f for _f in h[b : b + 256] if _f]
        if len(histo) <= 1:
            lut.extend(list(range(256)))
        else:
            step = (functools.reduce(operator.add, histo) - histo[-1]) // 255
            if not step:
                lut.extend(list(range(256)))
            else:
                n = step // 2
                for i in range(256):
                    lut.append(n // step)
                    n = n + h[i + b]
    return _lut(image, lut)


def expand(image, border=0, fill=0):
    """
    Add border to the image

    :param image: The image to expand.
    :param border: Border width, in pixels.
    :param fill: Pixel fill value (a color value). Default is 0 (black).
    :return: An image.
    """
    left, top, right, bottom = _border(border)
    width = left + image.size[0] + right
    height = top + image.size[1] + bottom
    color = _color(fill, image.mode)
    if image.palette:
        palette = ImagePalette.ImagePalette(palette=image.getpalette())
        if isinstance(color, tuple):
            color = palette.getcolor(color)
    else:
        palette = None
    out = Image.new(image.mode, (width, height), color)
    if palette:
        out.putpalette(palette.palette)
    out.paste(image, (left, top))
    return out


def fit(image, size, method=Image.Resampling.BICUBIC, bleed=0.0, centering=(0.5, 0.5)):
    """
    Returns a resized and cropped version of the image, cropped to the
    requested aspect ratio and size.

    This function was contributed by Kevin Cazabon.

    :param image: The image to resize and crop.
    :param size: The requested output size in pixels, given as a
                 (width, height) tuple.
    :param method: Resampling method to use. Default is
                   :py:attr:`~PIL.Image.Resampling.BICUBIC`.
                   See :ref:`concept-filters`.
    :param bleed: Remove a border around the outside of the image from all
                  four edges. The value is a decimal percentage (use 0.01 for
                  one percent). The default value is 0 (no border).
                  Cannot be greater than or equal to 0.5.
    :param centering: Control the cropping position. Use (0.5, 0.5) for
                      center cropping (e.g. if cropping the width, take 50% off
                      of the left side, and therefore 50% off the right side).
                      (0.0, 0.0) will crop from the top left corner (i.e. if
                      cropping the width, take all of the crop off of the right
                      side, and if cropping the height, take all of it off the
                      bottom). (1.0, 0.0) will crop from the bottom left
                      corner, etc. (i.e. if cropping the width, take all of the
                      crop off the left side, and if cropping the height take
                      none from the top, and therefore all off the bottom).
    :return: An image.
    """

    # by Kevin Cazabon, Feb 17/2000
    # kevin@cazabon.com
    # https://www.cazabon.com

    # ensure centering is mutable
    centering = list(centering)

    if not 0.0 <= centering[0] <= 1.0:
        centering[0] = 0.5
    if not 0.0 <= centering[1] <= 1.0:
        centering[1] = 0.5

    if not 0.0 <= bleed < 0.5:
        bleed = 0.0

    # calculate the area to use for resizing and cropping, subtracting
    # the 'bleed' around the edges

    # number of pixels to trim off on Top and Bottom, Left and Right
    bleed_pixels = (bleed * image.size[0], bleed * image.size[1])

    live_size = (
        image.size[0] - bleed_pixels[0] * 2,
        image.size[1] - bleed_pixels[1] * 2,
    )

    # calculate the aspect ratio of the live_size
    live_size_ratio = live_size[0] / live_size[1]

    # calculate the aspect ratio of the output image
    output_ratio = size[0] / size[1]

    # figure out if the sides or top/bottom will be cropped off
    if live_size_ratio == output_ratio:
        # live_size is already the needed ratio
        crop_width = live_size[0]
        crop_height = live_size[1]
    elif live_size_ratio >= output_ratio:
        # live_size is wider than what's needed, crop the sides
        crop_width = output_ratio * live_size[1]
        crop_height = live_size[1]
    else:
        # live_size is taller than what's needed, crop the top and bottom
        crop_width = live_size[0]
        crop_height = live_size[0] / output_ratio

    # make the crop
    crop_left = bleed_pixels[0] + (live_size[0] - crop_width) * centering[0]
    crop_top = bleed_pixels[1] + (live_size[1] - crop_height) * centering[1]

    crop = (crop_left, crop_top, crop_left + crop_width, crop_top + crop_height)

    # resize the image and return it
    return image.resize(size, method, box=crop)
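fit() is the usual building block for fixed-size thumbnails; a minimal sketch (Pillow assumed, placeholder path):

from PIL import Image, ImageOps

with Image.open("photo.jpg") as im:  # placeholder input path
    # 256x256 center crop, trimming 1% from every edge before cropping
    thumb = ImageOps.fit(im, (256, 256), bleed=0.01, centering=(0.5, 0.5))
    thumb.save("thumb.jpg")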


def flip(image):
    """
    Flip the image vertically (top to bottom).

    :param image: The image to flip.
    :return: An image.
    """
    return image.transpose(Image.Transpose.FLIP_TOP_BOTTOM)


def grayscale(image):
    """
    Convert the image to grayscale.

    :param image: The image to convert.
    :return: An image.
    """
    return image.convert("L")


def invert(image):
    """
    Invert (negate) the image.

    :param image: The image to invert.
    :return: An image.
    """
    lut = list(range(255, -1, -1))
    return image.point(lut) if image.mode == "1" else _lut(image, lut)


def mirror(image):
    """
    Flip image horizontally (left to right).

    :param image: The image to mirror.
    :return: An image.
    """
    return image.transpose(Image.Transpose.FLIP_LEFT_RIGHT)


def posterize(image, bits):
    """
    Reduce the number of bits for each color channel.

    :param image: The image to posterize.
    :param bits: The number of bits to keep for each channel (1-8).
    :return: An image.
    """
    mask = ~(2 ** (8 - bits) - 1)
    lut = [i & mask for i in range(256)]
    return _lut(image, lut)


def solarize(image, threshold=128):
    """
    Invert all pixel values above a threshold.

    :param image: The image to solarize.
    :param threshold: All pixels above this grayscale level are inverted.
    :return: An image.
    """
    lut = []
    for i in range(256):
        if i < threshold:
            lut.append(i)
        else:
            lut.append(255 - i)
    return _lut(image, lut)


def exif_transpose(image, *, in_place=False):
    """
    If an image has an EXIF Orientation tag, other than 1, transpose the image
    accordingly, and remove the orientation data.

    :param image: The image to transpose.
    :param in_place: Boolean. Keyword-only argument.
        If ``True``, the original image is modified in-place, and ``None`` is returned.
        If ``False`` (default), a new :py:class:`~PIL.Image.Image` object is returned
        with the transposition applied. If there is no transposition, a copy of the
        image will be returned.
    """
    image.load()
    image_exif = image.getexif()
    orientation = image_exif.get(ExifTags.Base.Orientation)
    method = {
        2: Image.Transpose.FLIP_LEFT_RIGHT,
        3: Image.Transpose.ROTATE_180,
        4: Image.Transpose.FLIP_TOP_BOTTOM,
        5: Image.Transpose.TRANSPOSE,
        6: Image.Transpose.ROTATE_270,
        7: Image.Transpose.TRANSVERSE,
        8: Image.Transpose.ROTATE_90,
    }.get(orientation)
    if method is not None:
        transposed_image = image.transpose(method)
        if in_place:
            image.im = transposed_image.im
            image.pyaccess = None
            image._size = transposed_image._size
        exif_image = image if in_place else transposed_image

        exif = exif_image.getexif()
        if ExifTags.Base.Orientation in exif:
            del exif[ExifTags.Base.Orientation]
            if "exif" in exif_image.info:
                exif_image.info["exif"] = exif.tobytes()
            elif "Raw profile type exif" in exif_image.info:
                exif_image.info["Raw profile type exif"] = exif.tobytes().hex()
            elif "XML:com.adobe.xmp" in exif_image.info:
                for pattern in (
                    r'tiff:Orientation="([0-9])"',
                    r"<tiff:Orientation>([0-9])</tiff:Orientation>",
                ):
                    exif_image.info["XML:com.adobe.xmp"] = re.sub(
                        pattern, "", exif_image.info["XML:com.adobe.xmp"]
                    )
        if not in_place:
            return transposed_image
    elif not in_place:
        return image.copy()
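exif_transpose is typically applied right after opening camera JPEGs so the pixels match the EXIF orientation; a short sketch (Pillow assumed, placeholder path):

from PIL import Image, ImageOps

with Image.open("camera.jpg") as im:  # placeholder input path
    upright = ImageOps.exif_transpose(im)  # new image, Orientation tag removed
    # or, to modify the opened image itself (returns None):
    # ImageOps.exif_transpose(im, in_place=True)
    upright.save("camera_upright.jpg")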
@@ -1,262 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# image palette object
#
# History:
# 1996-03-11 fl Rewritten.
# 1997-01-03 fl Up and running.
# 1997-08-23 fl Added load hack
# 2001-04-16 fl Fixed randint shadow bug in random()
#
# Copyright (c) 1997-2001 by Secret Labs AB
# Copyright (c) 1996-1997 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import array

from . import GimpGradientFile, GimpPaletteFile, ImageColor, PaletteFile


class ImagePalette:
    """
    Color palette for palette mapped images

    :param mode: The mode to use for the palette. See:
        :ref:`concept-modes`. Defaults to "RGB"
    :param palette: An optional palette. If given, it must be a bytearray,
        an array or a list of ints between 0-255. The list must consist of
        all channels for one color followed by the next color (e.g. RGBRGBRGB).
        Defaults to an empty palette.
    """

    def __init__(self, mode="RGB", palette=None):
        self.mode = mode
        self.rawmode = None  # if set, palette contains raw data
        self.palette = palette or bytearray()
        self.dirty = None

    @property
    def palette(self):
        return self._palette

    @palette.setter
    def palette(self, palette):
        self._colors = None
        self._palette = palette

    @property
    def colors(self):
        if self._colors is None:
            mode_len = len(self.mode)
            self._colors = {}
            for i in range(0, len(self.palette), mode_len):
                color = tuple(self.palette[i : i + mode_len])
                if color in self._colors:
                    continue
                self._colors[color] = i // mode_len
        return self._colors

    @colors.setter
    def colors(self, colors):
        self._colors = colors

    def copy(self):
        new = ImagePalette()

        new.mode = self.mode
        new.rawmode = self.rawmode
        if self.palette is not None:
            new.palette = self.palette[:]
        new.dirty = self.dirty

        return new

    def getdata(self):
        """
        Get palette contents in format suitable for the low-level
        ``im.putpalette`` primitive.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            return self.rawmode, self.palette
        return self.mode, self.tobytes()

    def tobytes(self):
        """Convert palette to bytes.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(self.palette, bytes):
            return self.palette
        arr = array.array("B", self.palette)
        return arr.tobytes()

    # Declare tostring as an alias for tobytes
    tostring = tobytes

    def _new_color_index(self, image=None, e=None):
        if not isinstance(self.palette, bytearray):
            self._palette = bytearray(self.palette)
        index = len(self.palette) // 3
        special_colors = ()
        if image:
            special_colors = (
                image.info.get("background"),
                image.info.get("transparency"),
            )
        while index in special_colors:
            index += 1
        if index >= 256:
            if image:
                # Search for an unused index
                for i, count in reversed(list(enumerate(image.histogram()))):
                    if count == 0 and i not in special_colors:
                        index = i
                        break
            if index >= 256:
                msg = "cannot allocate more than 256 colors"
                raise ValueError(msg) from e
        return index

    def getcolor(self, color, image=None):
        """Given an rgb tuple, allocate palette entry.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(color, tuple):
            if self.mode == "RGB":
                if len(color) == 4:
                    if color[3] != 255:
                        msg = "cannot add non-opaque RGBA color to RGB palette"
                        raise ValueError(msg)
                    color = color[:3]
            elif self.mode == "RGBA":
                if len(color) == 3:
                    color += (255,)
            try:
                return self.colors[color]
            except KeyError as e:
                # allocate new color slot
                index = self._new_color_index(image, e)
                self.colors[color] = index
                if index * 3 < len(self.palette):
                    self._palette = (
                        self.palette[: index * 3]
                        + bytes(color)
                        + self.palette[index * 3 + 3 :]
                    )
                else:
                    self._palette += bytes(color)
                self.dirty = 1
                return index
        else:
            msg = f"unknown color specifier: {repr(color)}"
            raise ValueError(msg)

    def save(self, fp):
        """Save palette to text file.

        .. warning:: This method is experimental.
        """
        if self.rawmode:
            msg = "palette contains raw palette data"
            raise ValueError(msg)
        if isinstance(fp, str):
            fp = open(fp, "w")
        fp.write("# Palette\n")
        fp.write(f"# Mode: {self.mode}\n")
        for i in range(256):
            fp.write(f"{i}")
            for j in range(i * len(self.mode), (i + 1) * len(self.mode)):
                try:
                    fp.write(f" {self.palette[j]}")
                except IndexError:
                    fp.write(" 0")
            fp.write("\n")
        fp.close()


# --------------------------------------------------------------------
# Internal


def raw(rawmode, data):
    palette = ImagePalette()
    palette.rawmode = rawmode
    palette.palette = data
    palette.dirty = 1
    return palette


# --------------------------------------------------------------------
# Factories


def make_linear_lut(black, white):
    if black == 0:
        return [white * i // 255 for i in range(256)]

    msg = "unavailable when black is non-zero"
    raise NotImplementedError(msg)  # FIXME


def make_gamma_lut(exp):
    return [int(((i / 255.0) ** exp) * 255.0 + 0.5) for i in range(256)]


def negative(mode="RGB"):
    palette = list(range(256 * len(mode)))
    palette.reverse()
    return ImagePalette(mode, [i // len(mode) for i in palette])


def random(mode="RGB"):
    from random import randint

    palette = [randint(0, 255) for _ in range(256 * len(mode))]
    return ImagePalette(mode, palette)


def sepia(white="#fff0c0"):
    bands = [make_linear_lut(0, band) for band in ImageColor.getrgb(white)]
    return ImagePalette("RGB", [bands[i % 3][i // 3] for i in range(256 * 3)])


def wedge(mode="RGB"):
    palette = list(range(256 * len(mode)))
    return ImagePalette(mode, [i // len(mode) for i in palette])


def load(filename):
    # FIXME: supports GIMP gradients only

    with open(filename, "rb") as fp:
        for paletteHandler in [
            GimpPaletteFile.GimpPaletteFile,
            GimpGradientFile.GimpGradientFile,
            PaletteFile.PaletteFile,
        ]:
            try:
                fp.seek(0)
                lut = paletteHandler(fp).getpalette()
                if lut:
                    break
            except (SyntaxError, ValueError):
                pass
        else:
            msg = "cannot load palette"
            raise OSError(msg)

    return lut  # data, rawmode
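The factories above build ImagePalette objects that can be attached to a "P" image; a hedged sketch using the sepia factory and the (experimental) tobytes() shown earlier, assuming Pillow and a placeholder path:

from PIL import Image, ImagePalette

with Image.open("photo.jpg") as im:  # placeholder input path
    p = im.convert("L").convert("P")              # palette image with 256 gray slots
    p.putpalette(ImagePalette.sepia().tobytes())  # remap those slots to a sepia ramp
    p.convert("RGB").save("photo_sepia.jpg")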
@@ -1,20 +0,0 @@
#
# The Python Imaging Library
# $Id$
#
# path interface
#
# History:
# 1996-11-04 fl Created
# 2002-04-14 fl Added documentation stub class
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

from . import Image

Path = Image.core.path
@@ -1,197 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# a simple Qt image interface.
#
# history:
# 2006-06-03 fl: created
# 2006-06-04 fl: inherit from QImage instead of wrapping it
# 2006-06-05 fl: removed toimage helper; move string support to ImageQt
# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com)
#
# Copyright (c) 2006 by Secret Labs AB
# Copyright (c) 2006 by Fredrik Lundh
#
# See the README file for information on usage and redistribution.
#
from __future__ import annotations

import sys
from io import BytesIO

from . import Image
from ._util import is_path

qt_versions = [
    ["6", "PyQt6"],
    ["side6", "PySide6"],
]

# If a version has already been imported, attempt it first
qt_versions.sort(key=lambda qt_version: qt_version[1] in sys.modules, reverse=True)
for qt_version, qt_module in qt_versions:
    try:
        if qt_module == "PyQt6":
            from PyQt6.QtCore import QBuffer, QIODevice
            from PyQt6.QtGui import QImage, QPixmap, qRgba
        elif qt_module == "PySide6":
            from PySide6.QtCore import QBuffer, QIODevice
            from PySide6.QtGui import QImage, QPixmap, qRgba
    except (ImportError, RuntimeError):
        continue
    qt_is_installed = True
    break
else:
    qt_is_installed = False
    qt_version = None


def rgb(r, g, b, a=255):
    """(Internal) Turns an RGB color into a Qt compatible color integer."""
    # use qRgb to pack the colors, and then turn the resulting long
    # into a negative integer with the same bitpattern.
    return qRgba(r, g, b, a) & 0xFFFFFFFF


def fromqimage(im):
    """
    :param im: QImage or PIL ImageQt object
    """
    buffer = QBuffer()
    if qt_version == "6":
        try:
            qt_openmode = QIODevice.OpenModeFlag
        except AttributeError:
            qt_openmode = QIODevice.OpenMode
    else:
        qt_openmode = QIODevice
    buffer.open(qt_openmode.ReadWrite)
    # preserve alpha channel with png
    # otherwise ppm is more friendly with Image.open
    if im.hasAlphaChannel():
        im.save(buffer, "png")
    else:
        im.save(buffer, "ppm")

    b = BytesIO()
    b.write(buffer.data())
    buffer.close()
    b.seek(0)

    return Image.open(b)


def fromqpixmap(im):
    return fromqimage(im)
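When one of the Qt bindings above is importable, these converters round-trip between PIL and Qt objects; an illustrative sketch (assuming PyQt6 or PySide6 is installed, and a running QApplication for the QPixmap step):

from PIL import Image, ImageQt

pil_im = Image.new("RGBA", (64, 64), (255, 0, 0, 128))
qt_im = ImageQt.ImageQt(pil_im)     # QImage subclass wrapping the PIL pixel data
back = ImageQt.fromqimage(qt_im)    # back to a PIL Image via an in-memory buffer
pixmap = ImageQt.toqpixmap(pil_im)  # QPixmap; requires a QApplication instance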


def align8to32(bytes, width, mode):
    """
    converts each scanline of data from 8 bit to 32 bit aligned
    """

    bits_per_pixel = {"1": 1, "L": 8, "P": 8, "I;16": 16}[mode]

    # calculate bytes per line and the extra padding if needed
    bits_per_line = bits_per_pixel * width
    full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8)
    bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0)

    extra_padding = -bytes_per_line % 4

    # already 32 bit aligned by luck
    if not extra_padding:
        return bytes

    new_data = [
        bytes[i * bytes_per_line : (i + 1) * bytes_per_line] + b"\x00" * extra_padding
        for i in range(len(bytes) // bytes_per_line)
    ]

    return b"".join(new_data)


def _toqclass_helper(im):
    data = None
    colortable = None
    exclusive_fp = False

    # handle filename, if given instead of image name
    if hasattr(im, "toUtf8"):
        # FIXME - is this really the best way to do this?
        im = str(im.toUtf8(), "utf-8")
    if is_path(im):
        im = Image.open(im)
        exclusive_fp = True

    qt_format = QImage.Format if qt_version == "6" else QImage
    if im.mode == "1":
        format = qt_format.Format_Mono
    elif im.mode == "L":
        format = qt_format.Format_Indexed8
        colortable = [rgb(i, i, i) for i in range(256)]
    elif im.mode == "P":
        format = qt_format.Format_Indexed8
        palette = im.getpalette()
        colortable = [rgb(*palette[i : i + 3]) for i in range(0, len(palette), 3)]
    elif im.mode == "RGB":
        # Populate the 4th channel with 255
        im = im.convert("RGBA")

        data = im.tobytes("raw", "BGRA")
        format = qt_format.Format_RGB32
    elif im.mode == "RGBA":
        data = im.tobytes("raw", "BGRA")
        format = qt_format.Format_ARGB32
    elif im.mode == "I;16" and hasattr(qt_format, "Format_Grayscale16"):  # Qt 5.13+
        im = im.point(lambda i: i * 256)

        format = qt_format.Format_Grayscale16
    else:
        if exclusive_fp:
            im.close()
        msg = f"unsupported image mode {repr(im.mode)}"
        raise ValueError(msg)

    size = im.size
    __data = data or align8to32(im.tobytes(), size[0], im.mode)
    if exclusive_fp:
        im.close()
    return {"data": __data, "size": size, "format": format, "colortable": colortable}


if qt_is_installed:

    class ImageQt(QImage):
        def __init__(self, im):
            """
            An PIL image wrapper for Qt. This is a subclass of PyQt's QImage
            class.

            :param im: A PIL Image object, or a file name (given either as
                Python string or a PyQt string object).
            """
            im_data = _toqclass_helper(im)
            # must keep a reference, or Qt will crash!
            # All QImage constructors that take data operate on an existing
            # buffer, so this buffer has to hang on for the life of the image.
            # Fixes https://github.com/python-pillow/Pillow/issues/1370
            self.__data = im_data["data"]
            super().__init__(
                self.__data,
                im_data["size"][0],
                im_data["size"][1],
                im_data["format"],
            )
            if im_data["colortable"]:
                self.setColorTable(im_data["colortable"])


def toqimage(im):
    return ImageQt(im)


def toqpixmap(im):
    qimage = toqimage(im)
    return QPixmap.fromImage(qimage)
@@ -1,86 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# sequence support classes
#
# history:
# 1997-02-20 fl Created
#
# Copyright (c) 1997 by Secret Labs AB.
# Copyright (c) 1997 by Fredrik Lundh.
#
# See the README file for information on usage and redistribution.
#

##
from __future__ import annotations

from typing import Callable

from . import Image


class Iterator:
    """
    This class implements an iterator object that can be used to loop
    over an image sequence.

    You can use the ``[]`` operator to access elements by index. This operator
    will raise an :py:exc:`IndexError` if you try to access a nonexistent
    frame.

    :param im: An image object.
    """

    def __init__(self, im: Image.Image):
        if not hasattr(im, "seek"):
            msg = "im must have seek method"
            raise AttributeError(msg)
        self.im = im
        self.position = getattr(self.im, "_min_frame", 0)

    def __getitem__(self, ix: int) -> Image.Image:
        try:
            self.im.seek(ix)
            return self.im
        except EOFError as e:
            msg = "end of sequence"
            raise IndexError(msg) from e

    def __iter__(self) -> Iterator:
        return self

    def __next__(self) -> Image.Image:
        try:
            self.im.seek(self.position)
            self.position += 1
            return self.im
        except EOFError as e:
            msg = "end of sequence"
            raise StopIteration(msg) from e


def all_frames(
    im: Image.Image | list[Image.Image],
    func: Callable[[Image.Image], Image.Image] | None = None,
) -> list[Image.Image]:
    """
    Applies a given function to all frames in an image or a list of images.
    The frames are returned as a list of separate images.

    :param im: An image, or a list of images.
    :param func: The function to apply to all of the image frames.
    :returns: A list of images.
    """
    if not isinstance(im, list):
        im = [im]

    ims = []
    for imSequence in im:
        current = imSequence.tell()

        ims += [im_frame.copy() for im_frame in Iterator(imSequence)]

        imSequence.seek(current)
    return [func(im) for im in ims] if func else ims
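Iterator and all_frames are the usual way to walk multi-frame formats such as GIF or TIFF; a short sketch (Pillow assumed, placeholder path):

from PIL import Image, ImageOps, ImageSequence

with Image.open("animation.gif") as im:  # placeholder multi-frame input
    sizes = [frame.size for frame in ImageSequence.Iterator(im)]
    # apply an operation to every frame, getting independent copies back
    mirrored_frames = ImageSequence.all_frames(im, ImageOps.mirror)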
Some files were not shown because too many files have changed in this diff